Fix JVM locale dependent casing (#10521)
findepi authored Jun 18, 2024
1 parent 9dbfbbb commit 7911406
Showing 18 changed files with 48 additions and 26 deletions.
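The pattern in every file below is the same: String.toLowerCase() and String.toUpperCase() without an explicit Locale use the JVM's default locale, so the result can differ between machines. The canonical failure is the Turkish locale, where "I" lower-cases to dotless "ı" (U+0131) and "i" upper-cases to dotted "İ" (U+0130), breaking comparisons against ASCII constants such as URI schemes and enum names. A minimal sketch of the failure mode (illustrative only, not part of the diff; the class name and string values are made up):

import java.util.Locale;

public class LocaleCasingDemo {
  public static void main(String[] args) {
    Locale turkish = Locale.forLanguageTag("tr-TR");

    // Explicit locales make the difference visible:
    System.out.println("FILE".toLowerCase(turkish));     // fıle (dotless i, U+0131)
    System.out.println("FILE".toLowerCase(Locale.ROOT)); // file

    // The bug pattern: no-arg casing silently uses the JVM default locale.
    Locale.setDefault(turkish);
    String scheme = "FILE";
    System.out.println("file".equals(scheme.toLowerCase()));            // false on a Turkish JVM
    System.out.println("file".equals(scheme.toLowerCase(Locale.ROOT))); // true everywhere
  }
}

Passing Locale.ROOT pins the locale-neutral casing rules, which is what the commit does throughout.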
@@ -19,6 +19,7 @@
package org.apache.iceberg.aliyun.oss;

import com.aliyun.oss.internal.OSSUtils;
+import java.util.Locale;
import java.util.Set;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -64,7 +65,7 @@ public OSSURI(String location) {

String scheme = schemeSplit[0];
ValidationException.check(
-VALID_SCHEMES.contains(scheme.toLowerCase()),
+VALID_SCHEMES.contains(scheme.toLowerCase(Locale.ROOT)),
"Invalid scheme: %s in OSS location %s",
scheme,
location);
@@ -19,6 +19,7 @@
package org.apache.iceberg.catalog;

import java.util.Arrays;
+import java.util.Locale;
import java.util.Objects;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.base.Splitter;
@@ -80,7 +81,7 @@ public String name() {
public TableIdentifier toLowerCase() {
String[] newLevels =
Arrays.stream(namespace().levels()).map(String::toLowerCase).toArray(String[]::new);
-String newName = name().toLowerCase();
+String newName = name().toLowerCase(Locale.ROOT);
return TableIdentifier.of(Namespace.of(newLevels), newName);
}

@@ -31,6 +31,7 @@
import java.time.ZoneId;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
@@ -185,12 +186,12 @@ private S3SignResponse signRequest(S3SignRequest request) {

Map<String, List<String>> unsignedHeaders =
request.headers().entrySet().stream()
-.filter(e -> UNSIGNED_HEADERS.contains(e.getKey().toLowerCase()))
+.filter(e -> UNSIGNED_HEADERS.contains(e.getKey().toLowerCase(Locale.ROOT)))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

Map<String, List<String>> signedHeaders =
request.headers().entrySet().stream()
-.filter(e -> !UNSIGNED_HEADERS.contains(e.getKey().toLowerCase()))
+.filter(e -> !UNSIGNED_HEADERS.contains(e.getKey().toLowerCase(Locale.ROOT)))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

SdkHttpFullRequest sign =
@@ -23,6 +23,7 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.stream.Collectors;
@@ -327,7 +328,10 @@ private SdkHttpFullRequest signWithAwsSigner(
// back after signing
Map<String, List<String>> unsignedHeaders =
request.headers().entrySet().stream()
-.filter(e -> S3SignerServlet.UNSIGNED_HEADERS.contains(e.getKey().toLowerCase()))
+.filter(
+    e ->
+        S3SignerServlet.UNSIGNED_HEADERS.contains(
+            e.getKey().toLowerCase(Locale.ROOT)))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

SdkHttpFullRequest.Builder builder = request.toBuilder();
@@ -19,6 +19,7 @@
package org.apache.iceberg.avro;

import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
@@ -528,6 +529,6 @@ private static String sanitize(char character) {
if (Character.isDigit(character)) {
return "_" + character;
}
return "_x" + Integer.toHexString(character).toUpperCase();
return "_x" + Integer.toHexString(character).toUpperCase(Locale.ROOT);
}
}
@@ -28,6 +28,7 @@
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.util.Enumeration;
+import java.util.Locale;
import java.util.Map;
import javax.crypto.SecretKey;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -50,13 +51,13 @@ public class KeyStoreKmsClient extends MemoryMockKMS {
@Override
public ByteBuffer wrapKey(ByteBuffer key, String wrappingKeyId) {
// keytool keeps key names in lower case
-return super.wrapKey(key, wrappingKeyId.toLowerCase());
+return super.wrapKey(key, wrappingKeyId.toLowerCase(Locale.ROOT));
}

@Override
public ByteBuffer unwrapKey(ByteBuffer wrappedKey, String wrappingKeyId) {
// keytool keeps key names in lower case
-return super.unwrapKey(wrappedKey, wrappingKeyId.toLowerCase());
+return super.unwrapKey(wrappedKey, wrappingKeyId.toLowerCase(Locale.ROOT));
}

@Override
@@ -21,6 +21,7 @@
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

+import java.util.Locale;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.StaticTableOperations;
@@ -113,8 +114,8 @@ public void testImmutable() {
@Test
public void testMetadataTables() {
for (MetadataTableType type : MetadataTableType.values()) {
-String enumName = type.name().replace("_", "").toLowerCase();
-assertThat(getStaticTable(type).getClass().getName().toLowerCase())
+String enumName = type.name().replace("_", "").toLowerCase(Locale.ROOT);
+assertThat(getStaticTable(type).getClass().getName().toLowerCase(Locale.ROOT))
.as("Should be able to get MetadataTable of type : " + type)
.contains(enumName);
}
@@ -27,6 +27,7 @@
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.stream.Collectors;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Parameter;
@@ -175,7 +176,7 @@ public void testRollIfExceedTargetFileSize() throws IOException {

int id = record.get(0, Integer.class);
String data = record.get(1, String.class);
-Record newRecord = createRecord(id, data.toUpperCase());
+Record newRecord = createRecord(id, data.toUpperCase(Locale.ROOT));
expected.add(newRecord);
taskWriter.write(newRecord);
}
5 changes: 4 additions & 1 deletion dell/src/main/java/org/apache/iceberg/dell/ecs/EcsURI.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.dell.ecs;

import java.net.URI;
+import java.util.Locale;
import java.util.Set;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -40,7 +41,9 @@ class EcsURI {

URI uri = URI.create(location);
ValidationException.check(
-VALID_SCHEME.contains(uri.getScheme().toLowerCase()), "Invalid ecs location: %s", location);
+VALID_SCHEME.contains(uri.getScheme().toLowerCase(Locale.ROOT)),
+"Invalid ecs location: %s",
+location);
this.bucket = uri.getHost();
this.name = uri.getPath().replaceAll("^/*", "");
}
@@ -24,6 +24,7 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
@@ -201,7 +202,7 @@ private Set<String> scanDataFiles() throws IOException {
LocatedFileStatus status = iterators.next();
if (status.isFile()) {
Path path = status.getPath();
-if (path.getName().endsWith("." + format.toString().toLowerCase())) {
+if (path.getName().endsWith("." + format.toString().toLowerCase(Locale.ROOT))) {
paths.add(path.toString());
}
}
@@ -24,6 +24,7 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
@@ -201,7 +202,7 @@ private Set<String> scanDataFiles() throws IOException {
LocatedFileStatus status = iterators.next();
if (status.isFile()) {
Path path = status.getPath();
-if (path.getName().endsWith("." + format.toString().toLowerCase())) {
+if (path.getName().endsWith("." + format.toString().toLowerCase(Locale.ROOT))) {
paths.add(path.toString());
}
}
@@ -24,6 +24,7 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
@@ -201,7 +202,7 @@ private Set<String> scanDataFiles() throws IOException {
LocatedFileStatus status = iterators.next();
if (status.isFile()) {
Path path = status.getPath();
-if (path.getName().endsWith("." + format.toString().toLowerCase())) {
+if (path.getName().endsWith("." + format.toString().toLowerCase(Locale.ROOT))) {
paths.add(path.toString());
}
}
@@ -148,7 +148,7 @@ static Key extractKey(String cacheKeys, Configuration conf) {
!confElements.containsKey(key), "Conf key element %s already specified", key);
confElements.put(key, conf.get(key));
} else {
-KeyElementType type = KeyElementType.valueOf(trimmed.toUpperCase());
+KeyElementType type = KeyElementType.valueOf(trimmed.toUpperCase(Locale.ROOT));
switch (type) {
case UGI:
case USER_NAME:
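Why the valueOf call above needs a fixed locale: Enum.valueOf requires an exact name match, so a default-locale toUpperCase() can produce a constant name that does not exist. A self-contained sketch (the enum is a hypothetical stand-in for KeyElementType, not the commit's code):

import java.util.Locale;

public class EnumCasingDemo {
  // Hypothetical stand-in for the KeyElementType enum referenced above.
  enum KeyElementType { UGI, USER_NAME }

  public static void main(String[] args) {
    Locale.setDefault(Locale.forLanguageTag("tr-TR"));
    try {
      // "ugi".toUpperCase() under the Turkish default yields "UGİ" (dotted capital I),
      // which matches no constant, so valueOf throws.
      KeyElementType.valueOf("ugi".toUpperCase());
    } catch (IllegalArgumentException e) {
      System.out.println("default-locale lookup failed: " + e.getMessage());
    }
    // Locale-independent casing matches the constant on any JVM.
    System.out.println(KeyElementType.valueOf("ugi".toUpperCase(Locale.ROOT)));
  }
}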
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hive.ql.exec.vector;

+import java.util.Locale;
import java.util.Map;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;

@@ -30,7 +31,7 @@ public enum Support {
final String lowerCaseName;

Support() {
-this.lowerCaseName = name().toLowerCase();
+this.lowerCaseName = name().toLowerCase(Locale.ROOT);
}

public static final Map<String, Support> nameToSupportMap = Maps.newHashMap();
@@ -19,6 +19,7 @@
package org.apache.iceberg.mr.hive;

import java.util.List;
+import java.util.Locale;
import java.util.Properties;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
@@ -84,7 +85,7 @@ public void preCreateTable(org.apache.hadoop.hive.metastore.api.Table hmsTable)
.getParameters()
.put(
BaseMetastoreTableOperations.TABLE_TYPE_PROP,
-BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE.toUpperCase());
+BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE.toUpperCase(Locale.ROOT));

if (!Catalogs.hiveCatalog(conf, catalogProperties)) {
// For non-HiveCatalog tables too, we should set the input and output format
@@ -20,6 +20,7 @@

import java.util.Collections;
import java.util.List;
+import java.util.Locale;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -52,7 +53,7 @@ public IcebergRecordObjectInspector(
Types.NestedField.of(
field.fieldId(),
field.isOptional(),
-field.name().toLowerCase(),
+field.name().toLowerCase(Locale.ROOT),
field.type(),
field.doc());
IcebergRecordStructField structField =
@@ -28,6 +28,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
@@ -785,7 +786,7 @@ public void testIcebergAndHmsTableProperties() throws Exception {
hive_metastoreConstants.META_TABLE_STORAGE, HiveIcebergStorageHandler.class.getName())
.containsEntry(
BaseMetastoreTableOperations.TABLE_TYPE_PROP,
-BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE.toUpperCase())
+BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE.toUpperCase(Locale.ROOT))
.containsEntry(
BaseMetastoreTableOperations.METADATA_LOCATION_PROP,
getCurrentSnapshotForHiveCatalogTable(icebergTable))
@@ -29,6 +29,7 @@
import java.time.LocalDateTime;
import java.util.Collection;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@@ -311,8 +312,8 @@ public void testJoinTablesSupportedTypes() throws IOException {
if (type == Types.UUIDType.get() && fileFormat == FileFormat.PARQUET) {
continue;
}
-String tableName = type.typeId().toString().toLowerCase() + "_table_" + i;
-String columnName = type.typeId().toString().toLowerCase() + "_column";
+String tableName = type.typeId().toString().toLowerCase(Locale.ROOT) + "_table_" + i;
+String columnName = type.typeId().toString().toLowerCase(Locale.ROOT) + "_column";

Schema schema = new Schema(required(1, columnName, type));
List<Record> records = TestHelper.generateRandomRecords(schema, 1, 0L);
@@ -349,8 +350,8 @@ public void testSelectDistinctFromTable() throws IOException {
if (type == Types.UUIDType.get() && fileFormat == FileFormat.PARQUET) {
continue;
}
-String tableName = type.typeId().toString().toLowerCase() + "_table_" + i;
-String columnName = type.typeId().toString().toLowerCase() + "_column";
+String tableName = type.typeId().toString().toLowerCase(Locale.ROOT) + "_table_" + i;
+String columnName = type.typeId().toString().toLowerCase(Locale.ROOT) + "_column";

Schema schema = new Schema(required(1, columnName, type));
List<Record> records = TestHelper.generateRandomRecords(schema, 4, 0L);
@@ -411,7 +412,7 @@ public void testInsertSupportedTypes() throws IOException {
if (type.equals(Types.BinaryType.get()) || type.equals(Types.FixedType.ofLength(5))) {
continue;
}
-String columnName = type.typeId().toString().toLowerCase() + "_column";
+String columnName = type.typeId().toString().toLowerCase(Locale.ROOT) + "_column";

Schema schema =
new Schema(required(1, "id", Types.LongType.get()), required(2, columnName, type));
@@ -420,7 +421,7 @@
Table table =
testTables.createTable(
shell,
-type.typeId().toString().toLowerCase() + "_table_" + i,
+type.typeId().toString().toLowerCase(Locale.ROOT) + "_table_" + i,
schema,
PartitionSpec.unpartitioned(),
fileFormat,
