HBASE-24627 Normalize one table at a time
Introduce an additional method to our Admin interface that allows an
operator to selectively run the normalizer. The IPC protocol supports
general table name selection via a compound filter.
ndimiduk committed Aug 11, 2020
1 parent d2afda3 commit 0335733
Showing 17 changed files with 389 additions and 66 deletions.
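
A rough usage sketch of the new client-side API follows; the configuration setup and the table name are illustrative placeholders, not part of this commit.

import java.util.Arrays;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;

public class NormalizeSingleTableExample {
  public static void main(String[] args) throws Exception {
    try (Connection connection =
           ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = connection.getAdmin()) {
      // Restrict this normalizer run to one (hypothetical) table.
      NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
        .tableNames(Arrays.asList(TableName.valueOf("my_table")))
        .build();
      boolean ran = admin.normalize(ntfp);
      // The existing no-arg overload still targets all eligible tables:
      // boolean ranAll = admin.normalize();
      System.out.println("normalizer ran: " + ran);
    }
  }
}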
@@ -856,11 +856,23 @@ void unassign(byte[] regionName, boolean force)
* the request was submitted successfully. We need to check logs for the details of which regions
* were split/merged.
*
* @return {@code true} if region normalizer ran, {@code false} otherwise.
* @throws IOException if a remote or network exception occurs
*/
boolean normalize() throws IOException;

/**
* Invoke region normalizer. Can NOT run for various reasons. Check logs.
* This is a non-blocking invocation to region normalizer. If return value is true, it means
* the request was submitted successfully. We need to check logs for the details of which regions
* were split/merged.
*
* @param ntfp limit to tables matching the specified filter.
* @return {@code true} if region normalizer ran, {@code false} otherwise.
* @throws IOException if a remote or network exception occurs
*/
boolean normalize(NormalizeTableFilterParams ntfp) throws IOException;

/**
* Query the current state of the region normalizer.
*
@@ -399,6 +399,11 @@ public boolean normalize() throws IOException {
return get(admin.normalize());
}

@Override
public boolean normalize(NormalizeTableFilterParams ntfp) throws IOException {
return get(admin.normalize(ntfp));
}

@Override
public boolean isNormalizerEnabled() throws IOException {
return get(admin.isNormalizerEnabled());
@@ -1,4 +1,4 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -1281,6 +1281,14 @@ default CompletableFuture<Boolean> balance() {
*/
CompletableFuture<Boolean> normalize();

/**
* Invoke region normalizer. Can NOT run for various reasons. Check logs.
* @param ntfp limit to tables matching the specified filter.
* @return true if region normalizer ran, false otherwise. The return value will be wrapped by a
* {@link CompletableFuture}
*/
CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp);

/**
* Turn the cleaner chore on/off.
* @param on
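
For the asynchronous client, the same filter object is accepted by the new AsyncAdmin overload added above. A minimal sketch, with the connection handling and namespace name assumed for illustration:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;

public class AsyncNormalizeExample {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection conn =
           ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      AsyncAdmin admin = conn.getAdmin();
      // Limit normalization to tables in one (hypothetical) namespace.
      NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
        .namespace("my_ns")
        .build();
      // Non-blocking; the future completes with the value of normalizerRan.
      boolean ran = admin.normalize(ntfp).get();
      System.out.println("normalizer ran: " + ran);
    }
  }
}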
@@ -1,4 +1,4 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -713,6 +713,11 @@ public CompletableFuture<Boolean> normalize() {
return wrap(rawAdmin.normalize());
}

@Override
public CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp) {
return wrap(rawAdmin.normalize(ntfp));
}

@Override
public CompletableFuture<Boolean> cleanerChoreSwitch(boolean enabled) {
return wrap(rawAdmin.cleanerChoreSwitch(enabled));
@@ -0,0 +1,91 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;

import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;

/**
* A collection of criteria used for table selection.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class NormalizeTableFilterParams {
private final List<TableName> tableNames;
private final String regex;
private final String namespace;

public NormalizeTableFilterParams(
final List<TableName> tableNames,
final String regex,
final String namespace
) {
this.tableNames = tableNames;
this.regex = regex;
this.namespace = namespace;
}

public List<TableName> getTableNames() {
return tableNames;
}

public String getRegex() {
return regex;
}

public String getNamespace() {
return namespace;
}

/**
* Used to instantiate an instance of {@link NormalizeTableFilterParams}.
*/
public static class Builder {
private List<TableName> tableNames;
private String regex;
private String namespace;

public Builder tableFilterParams(final NormalizeTableFilterParams ntfp) {
this.tableNames = ntfp.getTableNames();
this.regex = ntfp.getRegex();
this.namespace = ntfp.getNamespace();
return this;
}

public Builder tableNames(final List<TableName> tableNames) {
this.tableNames = tableNames;
return this;
}

public Builder regex(final String regex) {
this.regex = regex;
return this;
}

public Builder namespace(final String namespace) {
this.namespace = namespace;
return this;
}

public NormalizeTableFilterParams build() {
return new NormalizeTableFilterParams(tableNames, regex, namespace);
}
}
}
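
The builder accepts any combination of the three criteria, and tableFilterParams can seed a new builder from an existing instance. A short sketch with placeholder names:

import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;

public class FilterParamsExamples {
  public static void main(String[] args) {
    // Every table in one (hypothetical) namespace.
    NormalizeTableFilterParams byNamespace = new NormalizeTableFilterParams.Builder()
      .namespace("my_ns")
      .build();

    // Tables whose names match a regex.
    NormalizeTableFilterParams byRegex = new NormalizeTableFilterParams.Builder()
      .regex("audit_.*")
      .build();

    // Copy an existing filter and refine it further.
    NormalizeTableFilterParams combined = new NormalizeTableFilterParams.Builder()
      .tableFilterParams(byNamespace)
      .regex("events_.*")
      .build();

    System.out.println(byNamespace.getNamespace() + " / " + byRegex.getRegex()
      + " / " + combined.getNamespace() + ":" + combined.getRegex());
  }
}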
@@ -3287,13 +3287,22 @@ public CompletableFuture<Boolean> isNormalizerEnabled() {

@Override
public CompletableFuture<Boolean> normalize() {
return normalize(RequestConverter.buildNormalizeRequest());
}

@Override
public CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp) {
return normalize(RequestConverter.buildNormalizeRequest(ntfp));
}

private CompletableFuture<Boolean> normalize(NormalizeRequest request) {
return this
.<Boolean> newMasterCaller()
.action(
(controller, stub) -> this.<NormalizeRequest, NormalizeResponse, Boolean> call(
controller, stub, request, MasterService.Interface::normalize,
NormalizeResponse::getNormalizerRan))
.call();
}

@Override
@@ -2293,6 +2293,13 @@ public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
.setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())).build();
}

public static List<HBaseProtos.TableName> toProtoTableNameList(List<TableName> tableNameList) {
if (tableNameList == null) {
return new ArrayList<>();
}
return tableNameList.stream().map(ProtobufUtil::toProtoTableName).collect(Collectors.toList());
}

public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
if (tableNamesList == null) {
return new ArrayList<>();
@@ -1,4 +1,4 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -46,6 +46,7 @@
import org.apache.hadoop.hbase.client.LogQueryFilter;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -1480,6 +1481,25 @@ public static NormalizeRequest buildNormalizeRequest() {
return NormalizeRequest.newBuilder().build();
}

/**
* Creates a protocol buffer NormalizeRequest
*
* @return a NormalizeRequest
*/
public static NormalizeRequest buildNormalizeRequest(NormalizeTableFilterParams ntfp) {
final NormalizeRequest.Builder builder = NormalizeRequest.newBuilder();
if (ntfp.getTableNames() != null) {
builder.addAllTableNames(ProtobufUtil.toProtoTableNameList(ntfp.getTableNames()));
}
if (ntfp.getRegex() != null) {
builder.setRegex(ntfp.getRegex());
}
if (ntfp.getNamespace() != null) {
builder.setNamespace(ntfp.getNamespace());
}
return builder.build();
}

/**
* Creates a protocol buffer IsNormalizerEnabledRequest
*
@@ -354,6 +354,9 @@ message IsSplitOrMergeEnabledResponse {
}

message NormalizeRequest {
repeated TableName table_names = 1;
optional string regex = 2;
optional string namespace = 3;
}

message NormalizeResponse {
@@ -21,7 +21,6 @@
import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK;
import static org.apache.hadoop.hbase.util.DNS.MASTER_HOSTNAME_KEY;

import java.io.IOException;
import java.io.InterruptedIOException;
import java.lang.reflect.Constructor;
@@ -38,6 +37,7 @@
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -80,9 +80,9 @@
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.RegionStatesCount;
@@ -227,14 +227,13 @@
import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
import org.apache.hbase.thirdparty.com.google.protobuf.Descriptors;
import org.apache.hbase.thirdparty.com.google.protobuf.Service;
import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
@@ -1910,14 +1909,18 @@ public RegionNormalizer getRegionNormalizer() {
return this.normalizer;
}

public boolean normalizeRegions() throws IOException {
return normalizeRegions(new NormalizeTableFilterParams.Builder().build());
}

/**
* Perform normalization of cluster.
*
* @return true if an existing normalization was already in progress, or if a new normalization
* was performed successfully; false otherwise (specifically, if HMaster finished initializing
* or normalization is globally disabled).
*/
public boolean normalizeRegions(final NormalizeTableFilterParams ntfp) throws IOException {
final long startTime = EnvironmentEdgeManager.currentTime();
if (regionNormalizerTracker == null || !regionNormalizerTracker.isNormalizerOn()) {
LOG.debug("Region normalization is disabled, don't run region normalizer.");
@@ -1938,12 +1941,19 @@ public boolean normalizeRegions() throws IOException {

int affectedTables = 0;
try {
final Set<TableName> matchingTables = getTableDescriptors(new LinkedList<>(),
ntfp.getNamespace(), ntfp.getRegex(), ntfp.getTableNames(), false)
.stream()
.map(TableDescriptor::getTableName)
.collect(Collectors.toSet());
final Set<TableName> allEnabledTables =
tableStateManager.getTablesInStates(TableState.State.ENABLED);
final List<TableName> targetTables =
new ArrayList<>(Sets.intersection(matchingTables, allEnabledTables));
Collections.shuffle(targetTables);

final List<Long> submittedPlanProcIds = new ArrayList<>();
for (TableName table : targetTables) {
if (table.isSystemTable()) {
continue;
}
@@ -48,6 +48,7 @@
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
@@ -1920,7 +1921,14 @@ public NormalizeResponse normalize(RpcController controller,
NormalizeRequest request) throws ServiceException {
rpcPreCheck("normalize");
try {
final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
.tableNames(ProtobufUtil.toTableNameList(request.getTableNamesList()))
.regex(request.hasRegex() ? request.getRegex() : null)
.namespace(request.hasNamespace() ? request.getNamespace() : null)
.build();
return NormalizeResponse.newBuilder()
.setNormalizerRan(master.normalizeRegions(ntfp))
.build();
} catch (IOException ex) {
throw new ServiceException(ex);
}