Commit 8de04ba

Merge branch 'trunk' into HDFS-17438
2 parents: f681133 + 6a04df2

36 files changed: +1036 −219 lines


LICENSE-binary

Lines changed: 2 additions & 2 deletions
@@ -241,7 +241,7 @@ com.google.guava:guava:20.0
 com.google.guava:guava:27.0-jre
 com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.microsoft.azure:azure-storage:7.0.0
-com.nimbusds:nimbus-jose-jwt:9.31
+com.nimbusds:nimbus-jose-jwt:9.37.2
 com.zaxxer:HikariCP:4.0.3
 commons-beanutils:commons-beanutils:1.9.4
 commons-cli:commons-cli:1.5.0
@@ -340,7 +340,7 @@ org.apache.sshd:sshd-core:2.11.0
 org.apache.sshd:sshd-sftp:2.11.0
 org.apache.solr:solr-solrj:8.11.2
 org.apache.yetus:audience-annotations:0.5.0
-org.apache.zookeeper:zookeeper:3.8.3
+org.apache.zookeeper:zookeeper:3.8.4
 org.codehaus.jettison:jettison:1.5.4
 org.eclipse.jetty:jetty-annotations:9.4.53.v20231009
 org.eclipse.jetty:jetty-http:9.4.53.v20231009

dev-support/bin/hadoop.sh

Lines changed: 5 additions & 0 deletions
@@ -387,6 +387,11 @@ function personality_modules
       fi
     ;;
     unit)
+      if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 && (-z "$IS_NIGHTLY_BUILD" || "$IS_NIGHTLY_BUILD" == 0) ]]; then
+        echo "Won't run unit tests for Windows in pre-commit CI"
+        return
+      fi
+
       extra="-Dsurefire.rerunFailingTestsCount=2"
       if [[ "${BUILDMODE}" = full ]]; then
         ordering=mvnsrc

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 2 additions & 0 deletions
@@ -646,6 +646,7 @@
             <exclude>src/main/native/*</exclude>
             <exclude>src/main/native/config/*</exclude>
             <exclude>src/main/native/m4/*</exclude>
+            <exclude>src/main/winutils/winutils.sln</exclude>
             <exclude>src/test/empty-file</exclude>
             <exclude>src/test/all-tests</exclude>
             <exclude>src/main/native/gtest/**/*</exclude>
@@ -655,6 +656,7 @@
             <exclude>src/test/resources/test.har/_masterindex</exclude>
             <exclude>src/test/resources/test.har/part-0</exclude>
             <exclude>src/test/resources/javakeystoreprovider.password</exclude>
+            <exclude>src/test/resources/lz4/sequencefile</exclude>
             <exclude>dev-support/jdiff-workaround.patch</exclude>
           </excludes>
         </configuration>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java

Lines changed: 28 additions & 0 deletions
@@ -23,13 +23,17 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.StringReader;
+import java.nio.file.FileStore;
+import java.nio.file.Files;
 
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 
 import org.apache.hadoop.classification.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static java.nio.file.Files.createLink;
 
@@ -50,6 +54,11 @@ public class HardLink {
   private static HardLinkCommandGetter getHardLinkCommand;
 
   public final LinkStats linkStats; //not static
+
+  static final Logger LOG = LoggerFactory.getLogger(HardLink.class);
+
+  private static final String FILE_ATTRIBUTE_VIEW = "unix";
+  private static final String FILE_ATTRIBUTE = "unix:nlink";
 
   //initialize the command "getters" statically, so can use their
   //methods without instantiating the HardLink object
@@ -204,6 +213,21 @@ public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
     }
   }
 
+  /**
+   * Determines whether the system supports hardlinks.
+   * @param f - file to examine
+   * @return true if hardlinks are supported, false otherwise
+   */
+  public static boolean supportsHardLink(File f) {
+    try {
+      FileStore store = Files.getFileStore(f.toPath());
+      return store.supportsFileAttributeView(FILE_ATTRIBUTE_VIEW);
+    } catch (IOException e) {
+      LOG.warn("Failed to determine if hardlink is supported", e);
+      return false;
+    }
+  }
+
   /**
    * Retrieves the number of links to the specified file.
    *
@@ -220,6 +244,10 @@ public static int getLinkCount(File fileName) throws IOException {
       throw new FileNotFoundException(fileName + " not found.");
     }
 
+    if (supportsHardLink(fileName)) {
+      return (int) Files.getAttribute(fileName.toPath(), FILE_ATTRIBUTE);
+    }
+
     // construct and execute shell command
     String[] cmd = getHardLinkCommand.linkCount(fileName);
     String inpMsg = null;
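
In effect, getLinkCount() gains a fast path that reads the link count straight from the file system instead of forking a shell command, on file systems that expose the POSIX "unix" attribute view. A minimal usage sketch (the class name and file path below are hypothetical; assumes a POSIX file system):

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.HardLink;

public class LinkCountDemo {
  public static void main(String[] args) throws IOException {
    File f = new File("/tmp/example-file");  // hypothetical path
    if (HardLink.supportsHardLink(f)) {
      // Reads the unix:nlink attribute via java.nio instead of forking a shell.
      System.out.println("link count = " + HardLink.getLinkCount(f));
    }
  }
}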

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

Lines changed: 1 addition & 2 deletions
@@ -1128,8 +1128,7 @@ public void run() {
       synchronized (ipcStreams.out) {
         if (LOG.isDebugEnabled()) {
           Call call = pair.getLeft();
-          LOG.debug(getName() + "{} sending #{} {}", getName(), call.id,
-              call.rpcRequest);
+          LOG.debug("{} sending #{} {}", getName(), call.id, call.rpcRequest);
         }
         // RpcRequestHeader + RpcRequest
         ipcStreams.sendRequest(buf.toByteArray());
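
For context: SLF4J substitutes each {} placeholder with the corresponding argument, so the old call both concatenated getName() into the format string and passed it as the first argument, logging the connection name twice. A minimal sketch of the before/after behavior (class name and values are hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Slf4jPlaceholderDemo {
  private static final Logger LOG = LoggerFactory.getLogger(Slf4jPlaceholderDemo.class);

  public static void main(String[] args) {
    String name = "IPC Client connection";
    // Before: the name appears twice, once concatenated and once via the {}.
    LOG.debug(name + "{} sending #{} {}", name, 42, "rpcRequest");
    // After: the name appears exactly once.
    LOG.debug("{} sending #{} {}", name, 42, "rpcRequest");
  }
}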

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java

Lines changed: 25 additions & 3 deletions
@@ -40,6 +40,7 @@
 import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.log4j.LogManager;
@@ -79,6 +80,18 @@ public class StringUtils {
   public static final Pattern ENV_VAR_PATTERN = Shell.WINDOWS ?
       WIN_ENV_VAR_PATTERN : SHELL_ENV_VAR_PATTERN;
 
+  /**
+   * {@link #getTrimmedStringCollectionSplitByEquals(String)} throws
+   * {@link IllegalArgumentException} with error message starting with this string
+   * if the argument provided is not valid representation of non-empty key-value
+   * pairs.
+   * Value = {@value}
+   */
+  @VisibleForTesting
+  public static final String STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG =
+      "Trimmed string split by equals does not correctly represent "
+          + "non-empty key-value pairs.";
+
   /**
    * Make a string representation of the exception.
    * @param e The exception to stringify
@@ -494,10 +507,19 @@ public static Map<String, String> getTrimmedStringCollectionSplitByEquals(
     String[] trimmedList = getTrimmedStrings(str);
     Map<String, String> pairs = new HashMap<>();
     for (String s : trimmedList) {
-      String[] splitByKeyVal = getTrimmedStringsSplitByEquals(s);
-      if (splitByKeyVal.length == 2) {
-        pairs.put(splitByKeyVal[0], splitByKeyVal[1]);
+      if (s.isEmpty()) {
+        continue;
       }
+      String[] splitByKeyVal = getTrimmedStringsSplitByEquals(s);
+      Preconditions.checkArgument(
+          splitByKeyVal.length == 2,
+          STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG + " Input: " + str);
+      boolean emptyKey = org.apache.commons.lang3.StringUtils.isEmpty(splitByKeyVal[0]);
+      boolean emptyVal = org.apache.commons.lang3.StringUtils.isEmpty(splitByKeyVal[1]);
+      Preconditions.checkArgument(
+          !emptyKey && !emptyVal,
+          STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG + " Input: " + str);
+      pairs.put(splitByKeyVal[0], splitByKeyVal[1]);
     }
     return pairs;
   }
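
The net effect: blank entries between commas are now skipped, while malformed entries fail fast with IllegalArgumentException instead of being silently dropped. A short sketch of the tightened contract (keys and values below are made up for illustration):

import java.util.Map;
import org.apache.hadoop.util.StringUtils;

public class SplitByEqualsDemo {
  public static void main(String[] args) {
    // Blank entries between commas are ignored; well-formed pairs are trimmed.
    Map<String, String> pairs = StringUtils.getTrimmedStringCollectionSplitByEquals(
        " k1 = v1 ,, k2=v2 ,");
    System.out.println(pairs);  // {k1=v1, k2=v2} (HashMap, order not guaranteed)

    // A missing key or value now throws instead of being silently dropped.
    try {
      StringUtils.getTrimmedStringCollectionSplitByEquals("k1=");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}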

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java

Lines changed: 10 additions & 0 deletions
@@ -219,6 +219,16 @@ public void testGetLinkCount() throws IOException {
     assertEquals(1, getLinkCount(x3));
   }
 
+  @Test
+  public void testGetLinkCountFromFileAttribute() throws IOException {
+    assertTrue(supportsHardLink(x1));
+    assertEquals(1, getLinkCount(x1));
+    assertTrue(supportsHardLink(x2));
+    assertEquals(1, getLinkCount(x2));
+    assertTrue(supportsHardLink(x3));
+    assertEquals(1, getLinkCount(x3));
+  }
+
   /**
    * Test the single-file method HardLink.createHardLink().
    * Also tests getLinkCount() with values greater than one.

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java

Lines changed: 57 additions & 1 deletion
@@ -19,6 +19,9 @@
 package org.apache.hadoop.util;
 
 import java.util.Locale;
+
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
+import static org.apache.hadoop.util.StringUtils.STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long;
 import static org.junit.Assert.assertArrayEquals;
@@ -515,7 +518,7 @@ public void testCreateStartupShutdownMessage() {
   }
 
   @Test
-  public void testStringCollectionSplitByEquals() {
+  public void testStringCollectionSplitByEqualsSuccess() {
     Map<String, String> splitMap =
         StringUtils.getTrimmedStringCollectionSplitByEquals("");
     Assertions
@@ -566,6 +569,59 @@ public void testStringCollectionSplitByEquals() {
         .containsEntry("element.xyz.key5", "element.abc.val5")
         .containsEntry("element.xyz.key6", "element.abc.val6")
         .containsEntry("element.xyz.key7", "element.abc.val7");
+
+    splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals(
+        "element.first.key1 = element.first.val2 ,element.first.key1 =element.first.val1");
+    Assertions
+        .assertThat(splitMap)
+        .describedAs("Map of key value pairs split by equals(=) and comma(,)")
+        .hasSize(1)
+        .containsEntry("element.first.key1", "element.first.val1");
+
+    splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals(
+        ",,, , ,, ,element.first.key1 = element.first.val2 ,"
+            + "element.first.key1 = element.first.val1 , ,,, ,");
+    Assertions
+        .assertThat(splitMap)
+        .describedAs("Map of key value pairs split by equals(=) and comma(,)")
+        .hasSize(1)
+        .containsEntry("element.first.key1", "element.first.val1");
+
+    splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals(
+        ",, , , ,, ,");
+    Assertions
+        .assertThat(splitMap)
+        .describedAs("Map of key value pairs split by equals(=) and comma(,)")
+        .hasSize(0);
+
+  }
+
+  @Test
+  public void testStringCollectionSplitByEqualsFailure() throws Exception {
+    intercept(
+        IllegalArgumentException.class,
+        STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG,
+        () -> StringUtils.getTrimmedStringCollectionSplitByEquals(" = element.abc.val1"));
+
+    intercept(
+        IllegalArgumentException.class,
+        STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG,
+        () -> StringUtils.getTrimmedStringCollectionSplitByEquals("element.abc.key1="));
+
+    intercept(
+        IllegalArgumentException.class,
+        STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG,
+        () -> StringUtils.getTrimmedStringCollectionSplitByEquals("="));
+
+    intercept(
+        IllegalArgumentException.class,
+        STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG,
+        () -> StringUtils.getTrimmedStringCollectionSplitByEquals("== = = ="));
+
+    intercept(
+        IllegalArgumentException.class,
+        STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG,
+        () -> StringUtils.getTrimmedStringCollectionSplitByEquals(",="));
   }
 
   // Benchmark for StringUtils split

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Sender.java

Lines changed: 5 additions & 0 deletions
@@ -57,11 +57,16 @@
 import org.apache.hadoop.tracing.TraceUtils;
 
 import org.apache.hadoop.thirdparty.protobuf.Message;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 /** Sender */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class Sender implements DataTransferProtocol {
+  private static final Logger LOG = LoggerFactory.getLogger(Sender.class);
+
   private final DataOutputStream out;
 
   /** Create a sender for DataTransferProtocol with a output stream. */

hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml

Lines changed: 10 additions & 0 deletions
@@ -172,6 +172,11 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
       <artifactId>assertj-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.junit.jupiter</groupId>
       <artifactId>junit-jupiter-params</artifactId>
@@ -187,6 +192,11 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
       <artifactId>junit-vintage-engine</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
