Skip to content

Commit 8249bb8

Browse files
committed
HADOOP-19610. S3A: ITests to run under JUnit5: many more tests
* parameterize remaining ones * Rename Flaky to FlakyTest, same for @scaletest * new @loadtest tag * audit all subclasses of setup/teardown for @BeforeEach/@AfterEach
1 parent 6c71f1f commit 8249bb8

File tree

40 files changed

+246
-198
lines changed

40 files changed

+246
-198
lines changed

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
import java.util.concurrent.CompletableFuture;
3131

3232
import org.assertj.core.api.Assertions;
33+
import org.junit.jupiter.api.AfterEach;
3334
import org.junit.jupiter.api.BeforeEach;
3435
import org.junit.jupiter.api.Test;
3536
import org.slf4j.Logger;
@@ -99,6 +100,7 @@ public void setup() throws Exception {
99100
}
100101

101102
@Override
103+
@AfterEach
102104
public void teardown() throws Exception {
103105
MultipartUploader uploader = getUploader(1);
104106
if (uploader != null) {

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractUnbufferTest.java

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626

2727
import org.apache.hadoop.fs.FSDataInputStream;
2828
import org.apache.hadoop.fs.Path;
29-
import org.apache.hadoop.test.tags.Flaky;
29+
import org.apache.hadoop.test.tags.FlakyTest;
3030

3131
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
3232
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
@@ -50,7 +50,7 @@ public void setup() throws Exception {
5050
}
5151

5252
@Test
53-
@Flaky("buffer underflow")
53+
@FlakyTest("buffer underflow")
5454
public void testUnbufferAfterRead() throws IOException {
5555
describe("unbuffer a file after a single read");
5656
try (FSDataInputStream stream = getFileSystem().open(file)) {
@@ -60,7 +60,7 @@ public void testUnbufferAfterRead() throws IOException {
6060
}
6161

6262
@Test
63-
@Flaky("buffer underflow")
63+
@FlakyTest("buffer underflow")
6464
public void testUnbufferBeforeRead() throws IOException {
6565
describe("unbuffer a file before a read");
6666
try (FSDataInputStream stream = getFileSystem().open(file)) {
@@ -80,7 +80,7 @@ public void testUnbufferEmptyFile() throws IOException {
8080
}
8181

8282
@Test
83-
@Flaky("buffer underflow")
83+
@FlakyTest("buffer underflow")
8484
public void testUnbufferOnClosedFile() throws IOException {
8585
describe("unbuffer a file before a read");
8686
FSDataInputStream stream = null;
@@ -98,7 +98,7 @@ public void testUnbufferOnClosedFile() throws IOException {
9898
}
9999

100100
@Test
101-
@Flaky("buffer underflow")
101+
@FlakyTest("buffer underflow")
102102
public void testMultipleUnbuffers() throws IOException {
103103
describe("unbuffer a file multiple times");
104104
try (FSDataInputStream stream = getFileSystem().open(file)) {
@@ -110,7 +110,7 @@ public void testMultipleUnbuffers() throws IOException {
110110
}
111111
}
112112

113-
@Flaky("buffer underflow")
113+
@FlakyTest("buffer underflow")
114114
@Test
115115
public void testUnbufferMultipleReads() throws IOException {
116116
describe("unbuffer a file multiple times");

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/tags/Flaky.java renamed to hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/tags/FlakyTest.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
*/
3838
@Target({ElementType.METHOD, ElementType.TYPE})
3939
@Retention(RetentionPolicy.RUNTIME)
40-
@Tag("Flaky")
41-
public @interface Flaky {
40+
@Tag("flaky")
41+
public @interface FlakyTest {
4242
String value();
4343
}
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.hadoop.test.tags;
20+
21+
import java.lang.annotation.ElementType;
22+
import java.lang.annotation.Retention;
23+
import java.lang.annotation.RetentionPolicy;
24+
import java.lang.annotation.Target;
25+
26+
import org.junit.jupiter.api.Tag;
27+
28+
/**
29+
* JUnit 5 tag to indicate that a test suite is a load test suite, which is
30+
* designed to load/overload the target system.
31+
* <p> If this test is directed at cloud infrastructure the load may be significant
32+
 * enough to trigger throttling, which may be observed not only by other tests, but
33+
* by other users/applications using the same account.
34+
* <p> The test runner tag to filter on is {@code load}.
35+
* <p> Note: this annotation should be accompanied by the {@link ScaleTest}
36+
* tag to indicate it is a specific type of scale test.
37+
*/
38+
@Target({ElementType.METHOD, ElementType.TYPE})
39+
@Retention(RetentionPolicy.RUNTIME)
40+
@Tag("load")
41+
public @interface LoadTest {
42+
}

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/tags/Scale.java renamed to hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/tags/ScaleTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,5 +33,5 @@
3333
@Target({ElementType.METHOD, ElementType.TYPE})
3434
@Retention(RetentionPolicy.RUNTIME)
3535
@Tag("scale")
36-
public @interface Scale {
36+
public @interface ScaleTest {
3737
}

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/tags/package-info.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,10 @@
1818

1919
@org.apache.hadoop.classification.InterfaceStability.Unstable
2020
@org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate("test extenders")
21-
package org.apache.hadoop.test.tags;
2221
/**
2322
* JUnit 5 tags.
2423
* <p>
2524
* For use in Hadoop's own test suites, and those which extend them, such as FileSystem contract
2625
* tests.
27-
*/
26+
*/
27+
package org.apache.hadoop.test.tags;

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractAnalyticsStreamVectoredRead.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
import org.apache.hadoop.conf.Configuration;
2222
import org.apache.hadoop.fs.contract.AbstractContractVectoredReadTest;
2323
import org.apache.hadoop.fs.contract.AbstractFSContract;
24-
import org.junit.jupiter.api.BeforeEach;
2524
import org.junit.jupiter.params.ParameterizedClass;
2625
import org.junit.jupiter.params.provider.MethodSource;
2726

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractMultipartUploader.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222
import org.apache.hadoop.fs.contract.AbstractContractMultipartUploaderTest;
2323
import org.apache.hadoop.fs.contract.AbstractFSContract;
2424
import org.apache.hadoop.fs.s3a.S3AFileSystem;
25+
import org.apache.hadoop.test.tags.ScaleTest;
26+
2527
import org.junit.jupiter.api.BeforeEach;
2628

2729
import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
@@ -43,6 +45,7 @@
4345
* to enable it, and partition size option to control the size of
4446
* parts uploaded.
4547
*/
48+
@ScaleTest
4649
public class ITestS3AContractMultipartUploader extends
4750
AbstractContractMultipartUploaderTest {
4851

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ public void testReadPolicyInFS() throws Throwable {
223223
* in random IO mode, a subsequent GET.
224224
*/
225225

226-
@Test
226+
@Test
227227
public void testReadAcrossReadahead() throws Throwable {
228228
describe("Sets up a read which will span the active readahead"
229229
+ " and the rest of the file.");
@@ -254,7 +254,7 @@ public void testReadAcrossReadahead() throws Throwable {
254254
* which will read a single byte only.
255255
*/
256256

257-
@Test
257+
@Test
258258
public void testReadSingleByteAcrossReadahead() throws Throwable {
259259
describe("Read over boundary using read()/readByte() calls.");
260260
Path path = path("testReadSingleByteAcrossReadahead");
@@ -276,7 +276,7 @@ public void testReadSingleByteAcrossReadahead() throws Throwable {
276276
}
277277

278278

279-
@Test
279+
@Test
280280
public void testSeekToReadaheadAndRead() throws Throwable {
281281
describe("Seek to just before readahead limit and call"
282282
+ " InputStream.read(byte[])");
@@ -320,7 +320,7 @@ public void testSeekToReadaheadExactlyAndRead() throws Throwable {
320320
}
321321

322322

323-
@Test
323+
@Test
324324
public void testSeekToReadaheadExactlyAndReadByte() throws Throwable {
325325
describe("Seek to exactly the readahead limit and call"
326326
+ " readByte()");

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,7 @@
4646
import org.apache.hadoop.fs.statistics.IOStatisticAssertions;
4747
import org.apache.hadoop.fs.statistics.IOStatistics;
4848
import org.apache.hadoop.fs.store.audit.AuditSpan;
49+
import org.apache.hadoop.test.tags.ScaleTest;
4950

5051
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
5152
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
@@ -160,6 +161,7 @@ public void testDirectoryListingFileLengths() throws IOException {
160161
* verifying the contents of the uploaded file.
161162
*/
162163
@Test
164+
@ScaleTest
163165
public void testBigFilePutAndGet() throws IOException {
164166
maybeSkipTest();
165167
assume("Scale test disabled: to enable set property " +

0 commit comments

Comments
 (0)