Merge remote-tracking branch 'origin/master' into HDDS-11751
adoroszlai committed Dec 8, 2024
2 parents 3ec0229 + 9854591 · commit 1243d7a
Showing 359 changed files with 6,439 additions and 3,668 deletions.
.github/workflows/ci.yml: 36 additions & 16 deletions
@@ -222,7 +222,7 @@ jobs:
distribution: 'temurin'
java-version: ${{ matrix.java }}
- name: Compile Ozone using Java ${{ matrix.java }}
- run: hadoop-ozone/dev-support/checks/build.sh -Pdist -Dskip.npx -Dskip.installnpx -Dmaven.javadoc.failOnWarnings=${{ matrix.java != 8 }} -Djavac.version=${{ matrix.java }} ${{ inputs.ratis_args }}
+ run: hadoop-ozone/dev-support/checks/build.sh -Pdist -DskipRecon -Dmaven.javadoc.failOnWarnings=${{ matrix.java != 8 }} -Djavac.version=${{ matrix.java }} ${{ inputs.ratis_args }}
env:
OZONE_WITH_COVERAGE: false
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
@@ -257,7 +257,7 @@ jobs:
key: maven-repo-${{ hashFiles('**/pom.xml') }}
restore-keys: |
maven-repo-
- if: ${{ !contains('author,bats,docs', matrix.check) }}
+ if: ${{ !contains('author,bats', matrix.check) }}
- name: Download Ratis repo
if: ${{ inputs.ratis_args != '' }}
uses: actions/download-artifact@v4
@@ -272,12 +272,11 @@
java-version: 8
- name: Execute tests
run: hadoop-ozone/dev-support/checks/${{ matrix.check }}.sh ${{ inputs.ratis_args }}
- continue-on-error: true
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ matrix.check }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: ${{ !cancelled() }}
@@ -320,12 +319,11 @@ jobs:
java-version: ${{ env.TEST_JAVA_VERSION }}
- name: Execute tests
run: hadoop-ozone/dev-support/checks/${{ github.job }}.sh ${{ inputs.ratis_args }}
- continue-on-error: true
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: ${{ !cancelled() }}
@@ -345,6 +343,15 @@ jobs:
uses: actions/checkout@v4
with:
ref: ${{ needs.build-info.outputs.sha }}
+ - name: Cache for maven dependencies
+   uses: actions/cache/restore@v4
+   with:
+     path: |
+       ~/.m2/repository/*/*/*
+       !~/.m2/repository/org/apache/ozone
+     key: maven-repo-${{ hashFiles('**/pom.xml') }}
+     restore-keys: |
+       maven-repo-
- name: Download compiled Ozone binaries
uses: actions/download-artifact@v4
with:
@@ -400,10 +407,9 @@ jobs:
- name: Execute tests
run: |
hadoop-ozone/dev-support/checks/${{ github.job }}.sh
- continue-on-error: true
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: always()
@@ -451,10 +457,9 @@ jobs:
- name: Execute tests
run: |
hadoop-ozone/dev-support/checks/${{ github.job }}.sh -Pdist -Psrc -Dmaven.javadoc.skip=true ${{ inputs.ratis_args }}
- continue-on-error: true
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Install diffoscope
run: |
sudo apt update -q
@@ -490,6 +495,15 @@ jobs:
uses: actions/checkout@v4
with:
ref: ${{ needs.build-info.outputs.sha }}
+ - name: Cache for maven dependencies
+   uses: actions/cache/restore@v4
+   with:
+     path: |
+       ~/.m2/repository/*/*/*
+       !~/.m2/repository/org/apache/ozone
+     key: maven-repo-${{ hashFiles('**/pom.xml') }}
+     restore-keys: |
+       maven-repo-
- name: Download compiled Ozone binaries
uses: actions/download-artifact@v4
with:
@@ -509,10 +523,9 @@
KEEP_IMAGE: false
OZONE_ACCEPTANCE_SUITE: ${{ matrix.suite }}
OZONE_VOLUME_OWNER: 1000
- continue-on-error: true
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: always()
@@ -535,6 +548,15 @@ jobs:
uses: actions/checkout@v4
with:
ref: ${{ needs.build-info.outputs.sha }}
+ - name: Cache for maven dependencies
+   uses: actions/cache/restore@v4
+   with:
+     path: |
+       ~/.m2/repository/*/*/*
+       !~/.m2/repository/org/apache/ozone
+     key: maven-repo-${{ hashFiles('**/pom.xml') }}
+     restore-keys: |
+       maven-repo-
- name: Download compiled Ozone binaries
uses: actions/download-artifact@v4
with:
@@ -549,10 +571,9 @@
sudo mkdir .aws && sudo chmod 777 .aws && sudo chown 1000 .aws
popd
./hadoop-ozone/dev-support/checks/kubernetes.sh
- continue-on-error: true
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: always()
@@ -608,7 +629,6 @@ jobs:
distribution: 'temurin'
java-version: ${{ env.TEST_JAVA_VERSION }}
- name: Execute tests
- continue-on-error: true
run: |
args="${{ inputs.ratis_args }}"
if [[ "${{ matrix.profile }}" == "flaky" ]]; then
@@ -627,7 +647,7 @@
cat target/${{ github.job }}/summary.md >> $GITHUB_STEP_SUMMARY
fi
hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
- if: ${{ !cancelled() }}
+ if: ${{ failure() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: always()
.github/workflows/intermittent-test-check.yml: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ jobs:
java-version: 8
- name: Build (most) of Ozone
run: |
args="-Dskip.npx -Dskip.installnpx -DskipShade -Dmaven.javadoc.skip=true"
args="-DskipRecon -DskipShade -Dmaven.javadoc.skip=true"
if [[ "${{ github.event.inputs.ratis-ref }}" != "" ]]; then
args="$args -Dratis.version=${{ needs.ratis.outputs.ratis-version }}"
args="$args -Dratis.thirdparty.version=${{ needs.ratis.outputs.thirdparty-version }}"
OzoneClientConfig.java
@@ -156,7 +156,7 @@ public enum ChecksumCombineMode {
description =
"Indicates the time duration in seconds a client will wait "
+ "before retrying a read key request on encountering "
+ "a connectivity excepetion from Datanodes . "
+ "a connectivity exception from Datanodes. "
+ "By default the interval is 1 second",
tags = ConfigTag.CLIENT)
private int readRetryInterval = 1;
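The corrected description documents the client's read-retry backoff. For illustration, a client could raise it as below: a minimal sketch, assuming the field resolves to the key "ozone.client.read.retry.interval" via the ozone.client config group and that OzoneClientConfig lives in org.apache.hadoop.hdds.scm (both inferred, not confirmed by this diff).

```java
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.OzoneClientConfig;

public class ReadRetryExample {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();
    // Assumed key: wait 5 seconds (default is 1) between read-key retries
    // after a Datanode connectivity exception.
    conf.setInt("ozone.client.read.retry.interval", 5);
    // Materialize the typed config object from the raw configuration.
    OzoneClientConfig clientConfig = conf.getObject(OzoneClientConfig.class);
  }
}
```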
BlockOutputStream.java
@@ -233,8 +233,7 @@ public BlockOutputStream(
writtenDataLength = 0;
failedServers = new ArrayList<>(0);
ioException = new AtomicReference<>(null);
- checksum = new Checksum(config.getChecksumType(),
-     config.getBytesPerChecksum());
+ this.checksum = new Checksum(config.getChecksumType(), config.getBytesPerChecksum(), true);
this.clientMetrics = clientMetrics;
this.streamBufferArgs = streamBufferArgs;
this.allowPutBlockPiggybacking = canEnablePutblockPiggybacking();
@@ -587,6 +586,7 @@ CompletableFuture<PutBlockResult> executePutBlock(boolean close,
final CompletableFuture<ContainerCommandResponseProto> flushFuture;
final XceiverClientReply asyncReply;
try {
+ // Note: checksum was previously appended to containerBlockData by WriteChunk
BlockData blockData = containerBlockData.build();
LOG.debug("sending PutBlock {} flushPos {}", blockData, flushPos);

@@ -854,6 +854,8 @@ public synchronized void cleanup(boolean invalidateClient) {
if (lastChunkBuffer != null) {
DIRECT_BUFFER_POOL.returnBuffer(lastChunkBuffer);
lastChunkBuffer = null;
+ // Clear checksum cache
+ checksum.clearChecksumCache();
}
}

@@ -903,7 +905,10 @@ private CompletableFuture<PutBlockResult> writeChunkToContainer(
final long offset = chunkOffset.getAndAdd(effectiveChunkSize);
final ByteString data = chunk.toByteString(
bufferPool.byteStringConversion());
- ChecksumData checksumData = checksum.computeChecksum(chunk);
+ // chunk is incremental, don't cache its checksum
+ ChecksumData checksumData = checksum.computeChecksum(chunk, false);
+ // side note: checksum object is shared with PutBlock's (blockData) checksum calc,
+ // current impl does not support caching both
ChunkInfo chunkInfo = ChunkInfo.newBuilder()
.setChunkName(blockID.get().getLocalID() + "_chunk_" + ++chunkIndex)
.setOffset(offset)
@@ -1053,6 +1058,7 @@ private void updateBlockDataForWriteChunk(ChunkBuffer chunk)
lastChunkBuffer.capacity() - lastChunkBuffer.position();
appendLastChunkBuffer(chunk, 0, remainingBufferSize);
updateBlockDataWithLastChunkBuffer();
+ // TODO: Optional refactoring: Can attach ChecksumCache to lastChunkBuffer rather than Checksum
appendLastChunkBuffer(chunk, remainingBufferSize,
chunk.remaining() - remainingBufferSize);
}
@@ -1069,10 +1075,13 @@ private void updateBlockDataWithLastChunkBuffer()
LOG.debug("lastChunkInfo = {}", lastChunkInfo);
long lastChunkSize = lastChunkInfo.getLen();
addToBlockData(lastChunkInfo);

+ // Set ByteBuffer limit to capacity, pos to 0. Does not erase data
lastChunkBuffer.clear();

if (lastChunkSize == config.getStreamBufferSize()) {
lastChunkOffset += config.getStreamBufferSize();
+ // Reached stream buffer size (chunk size), starting new chunk, need to clear checksum cache
+ checksum.clearChecksumCache();
} else {
lastChunkBuffer.position((int) lastChunkSize);
}
@@ -1136,8 +1145,9 @@ private ChunkInfo createChunkInfo(long lastPartialChunkOffset)
lastChunkBuffer.flip();
int revisedChunkSize = lastChunkBuffer.remaining();
// create the chunk info to be sent in PutBlock.
- ChecksumData revisedChecksumData =
-     checksum.computeChecksum(lastChunkBuffer);
+ // checksum cache is utilized for this computation
+ // this checksum is stored in blockData and later transferred in PutBlock
+ ChecksumData revisedChecksumData = checksum.computeChecksum(lastChunkBuffer, true);

long chunkID = lastPartialChunkOffset / config.getStreamBufferSize();
ChunkInfo.Builder revisedChunkInfo = ChunkInfo.newBuilder()
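Taken together, the comments added above describe one mechanism: the last, still-growing chunk is re-checksummed on every PutBlock, so Checksum now takes a cache flag (the new three-argument constructor), computeChecksum(data, useCache) reuses checksums of already-completed units, and clearChecksumCache() resets the cache when a new chunk starts. Below is a minimal sketch of that idea; all names and structure are illustrative, not Ozone's actual Checksum API.

```java
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

/**
 * Illustrative sketch only, not Ozone's Checksum class. Checksums of already
 * completed bytesPerChecksum-sized units never change as the chunk grows, so
 * they are cached and only the trailing (possibly partial) unit is recomputed.
 */
class ChecksumCacheSketch {
  private final int bytesPerChecksum;
  private final Function<ByteBuffer, byte[]> checksumFn; // e.g. CRC32 per unit
  private final List<byte[]> cache = new ArrayList<>();  // completed units only

  ChecksumCacheSketch(int bytesPerChecksum, Function<ByteBuffer, byte[]> checksumFn) {
    this.bytesPerChecksum = bytesPerChecksum;
    this.checksumFn = checksumFn;
  }

  /** Checksums covering data[0, limit), reusing cached unit checksums. */
  List<byte[]> computeChecksum(ByteBuffer data, boolean useCache) {
    List<byte[]> result = useCache ? new ArrayList<>(cache) : new ArrayList<>();
    int start = useCache ? cache.size() * bytesPerChecksum : 0;
    for (int off = start; off < data.limit(); off += bytesPerChecksum) {
      int unitLen = Math.min(bytesPerChecksum, data.limit() - off);
      ByteBuffer unit = data.duplicate();
      unit.position(off);
      unit.limit(off + unitLen);
      byte[] sum = checksumFn.apply(unit);
      result.add(sum);
      if (useCache && unitLen == bytesPerChecksum) {
        cache.add(sum); // unit is complete; its checksum is final
      }
    }
    return result;
  }

  /** Reset when a new chunk starts (cf. updateBlockDataWithLastChunkBuffer). */
  void clearChecksumCache() {
    cache.clear();
  }
}
```

This matches the calls in the diff: WriteChunk passes false because its buffers are incremental slices of the chunk, while the PutBlock path (createChunkInfo) re-checksums the whole lastChunkBuffer with true and serves all completed units from the cache.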
ExtensibleParentCommand.java (new file)
@@ -0,0 +1,56 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.cli;

import picocli.CommandLine;

import java.util.ServiceLoader;

/**
* Interface for parent commands that accept subcommands to be dynamically registered.
* Subcommands should:
* <li>implement the interface returned by {@link #subcommandType()}</li>
* <li>be annotated with {@code MetaInfServices} parameterized with the same type</li>
*/
public interface ExtensibleParentCommand {

/** @return The class of the marker interface for subcommands. */
Class<?> subcommandType();

/** Recursively find and add subcommands to {@code cli}. */
static void addSubcommands(CommandLine cli) {
Object command = cli.getCommand();

// find and add subcommands
if (command instanceof ExtensibleParentCommand) {
ExtensibleParentCommand parentCommand = (ExtensibleParentCommand) command;
ServiceLoader<?> subcommands = ServiceLoader.load(parentCommand.subcommandType());
for (Object subcommand : subcommands) {
final CommandLine.Command commandAnnotation = subcommand.getClass().getAnnotation(CommandLine.Command.class);
CommandLine subcommandCommandLine = new CommandLine(subcommand, cli.getFactory());
cli.addSubcommand(commandAnnotation.name(), subcommandCommandLine);
}
}

// process subcommands recursively
for (CommandLine subcommand : cli.getSubcommands().values()) {
addSubcommands(subcommand);
}
}

}
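To make the contract concrete, a hypothetical wiring might look as follows. AdminSubcommand, AdminCommand, and ReportCommand are invented for illustration; @MetaInfServices is the org.kohsuke annotation that generates the META-INF/services entry ServiceLoader reads. Everything is shown in one file for brevity, though in practice ServiceLoader needs public provider classes with no-arg constructors.

```java
import org.apache.hadoop.hdds.cli.ExtensibleParentCommand;
import org.kohsuke.MetaInfServices;
import picocli.CommandLine;

/** Hypothetical marker interface: subcommands of 'admin' implement this. */
interface AdminSubcommand { }

@CommandLine.Command(name = "admin", description = "Hypothetical parent command")
class AdminCommand implements ExtensibleParentCommand, Runnable {
  @CommandLine.Spec
  private CommandLine.Model.CommandSpec spec;

  @Override
  public Class<?> subcommandType() {
    return AdminSubcommand.class; // discover implementors of the marker
  }

  @Override
  public void run() {
    spec.commandLine().usage(System.out); // no subcommand given: show help
  }
}

/** Registered in META-INF/services by the MetaInfServices annotation processor. */
@MetaInfServices(AdminSubcommand.class)
@CommandLine.Command(name = "report", description = "Hypothetical subcommand")
class ReportCommand implements AdminSubcommand, Runnable {
  @Override
  public void run() {
    System.out.println("reporting...");
  }
}

class Cli {
  public static void main(String[] args) {
    CommandLine cli = new CommandLine(new AdminCommand());
    ExtensibleParentCommand.addSubcommands(cli); // attaches 'report' dynamically
    cli.execute(args);
  }
}
```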
GenericCli.java
@@ -19,15 +19,13 @@
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
- import java.util.ServiceLoader;
import java.util.concurrent.Callable;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;

import com.google.common.annotations.VisibleForTesting;
import picocli.CommandLine;
- import picocli.CommandLine.Command;
import picocli.CommandLine.ExitCode;
import picocli.CommandLine.Model.CommandSpec;
import picocli.CommandLine.Option;
@@ -52,30 +50,24 @@ public class GenericCli implements Callable<Void>, GenericParentCommand {
private final CommandLine cmd;

public GenericCli() {
-   cmd = new CommandLine(this);
-   cmd.setExecutionExceptionHandler((ex, commandLine, parseResult) -> {
-     printError(ex);
-     return EXECUTION_ERROR_EXIT_CODE;
-   });
+   this(null);
}

public GenericCli(Class<?> type) {
-   this();
-   addSubcommands(getCmd(), type);
+   this(type, CommandLine.defaultFactory());
}

- private void addSubcommands(CommandLine cli, Class<?> type) {
-   ServiceLoader<SubcommandWithParent> registeredSubcommands =
-       ServiceLoader.load(SubcommandWithParent.class);
-   for (SubcommandWithParent subcommand : registeredSubcommands) {
-     if (subcommand.getParentType().equals(type)) {
-       final Command commandAnnotation =
-           subcommand.getClass().getAnnotation(Command.class);
-       CommandLine subcommandCommandLine = new CommandLine(subcommand);
-       addSubcommands(subcommandCommandLine, subcommand.getClass());
-       cli.addSubcommand(commandAnnotation.name(), subcommandCommandLine);
-     }
}
+ public GenericCli(Class<?> type, CommandLine.IFactory factory) {
+   cmd = new CommandLine(this, factory);
+   cmd.setExecutionExceptionHandler((ex, commandLine, parseResult) -> {
+     printError(ex);
+     return EXECUTION_ERROR_EXIT_CODE;
+   });

+   if (type != null) {
+     SubcommandWithParent.addSubcommands(getCmd(), type);
+   }
+   ExtensibleParentCommand.addSubcommands(cmd);
}

/**
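With the constructor chain above, a tool's entry point reduces to a few lines. A sketch under the assumptions that MyTool is hypothetical and that GenericCli.run(String[]) remains the launch helper (neither is confirmed by this diff):

```java
import org.apache.hadoop.hdds.cli.GenericCli;
import picocli.CommandLine;

// Hypothetical entry point built on the new constructor chain.
// Passing an IFactory lets picocli create subcommands via custom
// instantiation (e.g. dependency injection) instead of no-arg reflection.
@CommandLine.Command(name = "mytool", description = "Hypothetical Ozone CLI tool")
public class MyTool extends GenericCli {

  public MyTool() {
    // Equivalent to GenericCli(MyTool.class): registers SubcommandWithParent
    // implementations targeting MyTool, then ExtensibleParentCommand children.
    super(MyTool.class, CommandLine.defaultFactory());
  }

  public static void main(String[] args) {
    new MyTool().run(args); // run(String[]) assumed inherited from GenericCli
  }
}
```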
(remaining changed files not shown)