Add Tests for AwsChunkedDecodingInputStream
There were no unit or integration tests up until now.
Had trouble getting chunked signing to work over HTTPS in V2; it works over HTTP, though.
afranken committed Jul 2, 2023
1 parent 3401681 commit 7f9bf72
Showing 11 changed files with 338 additions and 80 deletions.
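
For context on the commit message above, a minimal sketch (not part of this commit; region, credentials and port are placeholders) of a v2 client pointed at a plain-HTTP endpoint, the setup under which the SDK sends the payload as signed, aws-chunked content rather than an unsigned payload:

import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
import software.amazon.awssdk.regions.Region
import software.amazon.awssdk.services.s3.S3Client
import software.amazon.awssdk.services.s3.S3Configuration
import java.net.URI

// Sketch only: the values below are placeholders, not S3Mock's actual test setup.
fun httpClientForChunkedSigning(): S3Client =
  S3Client.builder()
    .region(Region.US_EAST_1)
    .credentialsProvider(
      StaticCredentialsProvider.create(AwsBasicCredentials.create("accessKey", "secretKey"))
    )
    .serviceConfiguration(
      S3Configuration.builder()
        .pathStyleAccessEnabled(true)
        .chunkedEncodingEnabled(true) // on by default; shown here for clarity
        .build()
    )
    // http, not https: only then does the v2 SDK sign the individual payload chunks
    .endpointOverride(URI.create("http://localhost:9090"))
    .build()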
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
server/src/test/resources/com/adobe/testing/s3mock/util/* text eol=crlf
@@ -0,0 +1,69 @@
/*
* Copyright 2017-2023 Adobe.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.adobe.testing.s3mock.its

import com.adobe.testing.s3mock.util.DigestUtil
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInfo
import software.amazon.awssdk.core.sync.RequestBody
import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm
import software.amazon.awssdk.services.s3.model.GetObjectRequest
import software.amazon.awssdk.services.s3.model.PutObjectRequest
import java.io.File
import java.io.FileInputStream
import java.io.InputStream

/**
* Chunked encoding with signing is only active in AWS SDK v2 when the endpoint uses plain HTTP.
*/
internal class AwsChunkedEndcodingITV2 : S3TestBase() {

private val client = createS3ClientV2(serviceEndpointHttp)

/**
* Unfortunately, the S3 API does not persist or return data that would let us verify whether
* signed and chunked encoding was actually used for the putObject request.
* This was validated manually in the debugger.
*/
@Test
@S3VerifiedFailure(year = 2023,
reason = "Only works with http endpoints")
fun testPutObject_etagCreation(testInfo: TestInfo) {
val bucket = givenBucketV2(testInfo)
val uploadFile = File(UPLOAD_FILE_NAME)
val uploadFileIs: InputStream = FileInputStream(uploadFile)
val expectedEtag = "\"${DigestUtil.hexDigest(uploadFileIs)}\""

client.putObject(
PutObjectRequest.builder()
.bucket(bucket)
.key(UPLOAD_FILE_NAME)
.checksumAlgorithm(ChecksumAlgorithm.SHA256)
.build(),
RequestBody.fromFile(uploadFile))

val getObjectResponse = client.getObject(
GetObjectRequest.builder()
.bucket(bucket)
.key(UPLOAD_FILE_NAME)
.build()
)
assertThat(getObjectResponse.response().eTag()).isEqualTo(expectedEtag)
assertThat(getObjectResponse.response().contentLength()).isEqualTo(uploadFile.length())
}
}
@@ -33,7 +33,6 @@ import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.TestInfo
import org.mockito.kotlin.stub
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
@@ -148,14 +147,21 @@ internal abstract class S3TestBase {
protected val serviceEndpoint: String
get() = s3Endpoint ?: "https://$host:$port"

protected val serviceEndpointHttp: String
get() = s3Endpoint ?: "http://$host:$httpPort"

protected fun createS3ClientV2(): S3Client {
return createS3ClientV2(serviceEndpoint)
}

protected fun createS3ClientV2(endpoint: String): S3Client {
return S3Client.builder()
.region(Region.of(s3Region))
.credentialsProvider(
StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccessKey))
)
.serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
.endpointOverride(URI.create(serviceEndpoint))
.endpointOverride(URI.create(endpoint))
.httpClient(
ApacheHttpClient.builder().buildWithDefaults(
AttributeMap.builder()
@@ -226,7 +232,7 @@ internal abstract class S3TestBase {
)
.forcePathStyle(true)
//set endpoint to http(!)
.endpointOverride(URI.create("http://$host:$httpPort"))
.endpointOverride(URI.create(serviceEndpointHttp))
.targetThroughputInGbps(20.0)
.minimumPartSizeInBytes((8 * MB).toLong())
//S3Mock currently does not support checksum validation. See #1123
6 changes: 6 additions & 0 deletions server/pom.xml
@@ -57,6 +57,12 @@
<artifactId>utils</artifactId>
<version>${aws-v2.version}</version>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>auth</artifactId>
<version>${aws-v2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
@@ -0,0 +1,91 @@
/*
* Copyright 2017-2023 Adobe.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.adobe.testing.s3mock.util;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

abstract class AbstractAwsInputStream extends InputStream {
protected static final byte[] CRLF = "\r\n".getBytes(StandardCharsets.UTF_8);
protected static final byte[] DELIMITER = ";".getBytes(StandardCharsets.UTF_8);
protected final InputStream source;
protected long payloadLength = 0L;
/**
* That's the max chunk buffer size used in the AWS implementation.
*/
private static final int MAX_CHUNK_SIZE = 256 * 1024;
private final ByteBuffer byteBuffer = ByteBuffer.allocate(MAX_CHUNK_SIZE);

protected AbstractAwsInputStream(final InputStream source) {
this.source = new BufferedInputStream(source);
}

@Override
public void close() throws IOException {
source.close();
}

/**
* Reads this stream until the given byte sequence is found.
*
* @param endSequence The byte sequence to look for in the stream. The source stream is read
* until the last bytes read are equal to this sequence.
*
* @return The bytes read <em>before</em> the end sequence started.
*/
protected byte[] readUntil(final byte[] endSequence) throws IOException {
byteBuffer.clear();
while (!endsWith(byteBuffer.asReadOnlyBuffer(), endSequence)) {
final int c = source.read();
if (c < 0) {
return new byte[0];
}

final byte unsigned = (byte) (c & 0xFF);
byteBuffer.put(unsigned);
}

final byte[] result = new byte[byteBuffer.position() - endSequence.length];
byteBuffer.rewind();
byteBuffer.get(result);
return result;
}

protected boolean endsWith(final ByteBuffer buffer, final byte[] endSequence) {
final int pos = buffer.position();
if (pos >= endSequence.length) {
for (int i = 0; i < endSequence.length; i++) {
if (buffer.get(pos - endSequence.length + i) != endSequence[i]) {
return false;
}
}

return true;
}

return false;
}

protected void setPayloadLength(byte[] hexLengthBytes) {
payloadLength = Long.parseLong(new String(hexLengthBytes, StandardCharsets.UTF_8)
.replace("\n", "").replace("\r", "")
.trim(), 16);
}
}
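
A standalone sketch of what the helpers above compute for a single chunk header; the class itself is package-private, so this only mirrors readUntil and setPayloadLength, and the signature value is a placeholder:

fun main() {
  val header = "400;chunk-signature=" + "0".repeat(64) + "\r\n" // 0x400 = 1024 payload bytes
  val sizeHex = header.substringBefore(';')        // what readUntil(DELIMITER) returns
  val metadata = header.substringAfter(';').trim() // what readUntil(CRLF) reads past
  val payloadLength = sizeHex.trim().toLong(16)    // what setPayloadLength parses
  check(payloadLength == 1024L)
  println("next chunk carries $payloadLength payload bytes; skipped metadata: $metadata")
}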
@@ -16,11 +16,8 @@

package com.adobe.testing.s3mock.util;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

/**
* Skips V4 style signing metadata from input streams.
@@ -43,98 +40,36 @@
* <a href="http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/AwsChunkedEncodingInputStream.html">
* AwsChunkedEncodingInputStream</a>
*/
public class AwsChunkedDecodingInputStream extends InputStream {

/**
* That's the max chunk buffer size used in the AWS implementation.
*/
private static final int MAX_CHUNK_SIZE = 256 * 1024;

private static final byte[] CRLF = "\r\n".getBytes(StandardCharsets.UTF_8);

private static final byte[] DELIMITER = ";".getBytes(StandardCharsets.UTF_8);

private final InputStream source;

private int remainingInChunk = 0;

private final ByteBuffer byteBuffer = ByteBuffer.allocate(MAX_CHUNK_SIZE);
public class AwsChunkedDecodingInputStream extends AbstractAwsInputStream {

/**
* Constructs a new {@link AwsChunkedDecodingInputStream}.
*
* @param source The {@link InputStream} to wrap.
*/
public AwsChunkedDecodingInputStream(final InputStream source) {
this.source = new BufferedInputStream(source);
public AwsChunkedDecodingInputStream(InputStream source) {
super(source);
}

@Override
public int read() throws IOException {
if (remainingInChunk == 0) {
if (payloadLength == 0L) {
final byte[] hexLengthBytes = readUntil(DELIMITER);
if (hexLengthBytes == null) {
if (hexLengthBytes.length == 0) {
return -1;
}

remainingInChunk =
Integer.parseInt(new String(hexLengthBytes, StandardCharsets.UTF_8).trim(), 16);
setPayloadLength(hexLengthBytes);

if (remainingInChunk == 0) {
if (payloadLength == 0L) {
return -1;
}

readUntil(CRLF);
}

remainingInChunk--;
payloadLength--;

return source.read();
}

@Override
public void close() throws IOException {
source.close();
}

/**
* Reads this stream until the byte sequence was found.
*
* @param endSequence The byte sequence to look for in the stream. The source stream is read
* until the last bytes read are equal to this sequence.
*
* @return The bytes read <em>before</em> the end sequence started.
*/
private byte[] readUntil(final byte[] endSequence) throws IOException {
byteBuffer.clear();
while (!endsWith(byteBuffer.asReadOnlyBuffer(), endSequence)) {
final int c = source.read();
if (c < 0) {
return null;
}

final byte unsigned = (byte) (c & 0xFF);
byteBuffer.put(unsigned);
}

final byte[] result = new byte[byteBuffer.position() - endSequence.length];
byteBuffer.rewind();
byteBuffer.get(result);
return result;
}

private boolean endsWith(final ByteBuffer buffer, final byte[] endSequence) {
final int pos = buffer.position();
if (pos >= endSequence.length) {
for (int i = 0; i < endSequence.length; i++) {
if (buffer.get(pos - endSequence.length + i) != endSequence[i]) {
return false;
}
}

return true;
}

return false;
}
}
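
As a rough illustration (not part of the commit), the decoder only strips the per-chunk size and signature metadata and does not verify signatures, so a hand-built aws-chunked body is enough to exercise it; the chunk signature here is a placeholder:

import com.adobe.testing.s3mock.util.AwsChunkedDecodingInputStream
import java.io.ByteArrayInputStream

fun main() {
  val payload = "hello world"
  val fakeSignature = "0".repeat(64) // placeholder, not a real SigV4 chunk signature
  // One data chunk plus the terminating zero-length chunk, each framed as
  // "<hex size>;chunk-signature=<signature>\r\n<data>\r\n".
  val chunked = buildString {
    append(payload.length.toString(16)).append(";chunk-signature=").append(fakeSignature).append("\r\n")
    append(payload).append("\r\n")
    append("0;chunk-signature=").append(fakeSignature).append("\r\n")
    append("\r\n")
  }
  val decoded = AwsChunkedDecodingInputStream(ByteArrayInputStream(chunked.toByteArray()))
    .readBytes()
    .toString(Charsets.UTF_8)
  check(decoded == payload) // the signing metadata is gone, only the payload remains
}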
@@ -0,0 +1,51 @@
/*
* Copyright 2017-2023 Adobe.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.adobe.testing.s3mock.util;

import static com.adobe.testing.s3mock.util.TestUtil.getFileFromClasspath;
import static org.assertj.core.api.Assertions.assertThat;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import software.amazon.awssdk.auth.signer.internal.chunkedencoding.AwsS3V4ChunkSigner;
import software.amazon.awssdk.auth.signer.internal.chunkedencoding.AwsSignedChunkedEncodingInputStream;

class AwsChunkedDecodingInputStreamTest {

@Test
void testDecoding(TestInfo testInfo) throws IOException {
doTest(testInfo, "sampleFile.txt");
doTest(testInfo, "sampleFile_large.txt");
}

void doTest(TestInfo testInfo, String fileName) throws IOException {
File sampleFile = getFileFromClasspath(testInfo, fileName);
InputStream chunkedEncodingInputStream = AwsSignedChunkedEncodingInputStream
.builder()
.inputStream(Files.newInputStream(sampleFile.toPath()))
.awsChunkSigner(new AwsS3V4ChunkSigner("signingKey".getBytes(),
"dateTime",
"keyPath"))
.build();
InputStream iut = new AwsChunkedDecodingInputStream(chunkedEncodingInputStream);
assertThat(iut).hasSameContentAs(Files.newInputStream(sampleFile.toPath()));
}
}