Use collections conveniences in static initializers (elastic#41374)
This commit replaces the construction of some collections in static
initializers with new collection convenience methods that are available
now that we have bumped the minimum Java language level to be higher
than Java 8.
jasontedor authored and Gurkan Kaymak committed May 27, 2019
1 parent e0085ef commit dcd1631
Showing 15 changed files with 287 additions and 285 deletions.
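The changes below all follow the same shape. As a standalone sketch of that before/after pattern (hypothetical class and values, not taken from the commit itself): a mutable collection built up in a static initializer and wrapped with Collections.unmodifiableMap gives way to a single Java 9+ factory-method expression.

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class BeforeAndAfterExample {

        // Before: build a mutable map in a static initializer, then wrap it.
        private static final Map<String, Integer> OLD_STYLE;
        static {
            Map<String, Integer> map = new HashMap<>();
            map.put("one", 1);
            map.put("two", 2);
            OLD_STYLE = Collections.unmodifiableMap(map);
        }

        // After: a single expression using the Java 9+ collection factory method.
        // Map.of returns an immutable map and rejects null keys/values and duplicate keys.
        private static final Map<String, Integer> NEW_STYLE = Map.of(
            "one", 1,
            "two", 2);
    }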
@@ -44,6 +44,8 @@
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.hash.MessageDigests;
+import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.env.Environment;
 
@@ -52,6 +54,7 @@
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.UncheckedIOException;
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -82,7 +85,6 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.TreeSet;
 import java.util.stream.Collectors;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
@@ -130,36 +132,28 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
     static final int PLUGIN_MALFORMED = 2;
 
     /** The builtin modules, which are plugins, but cannot be installed or removed. */
-    static final Set<String> MODULES;
+    private static final Set<String> MODULES;
     static {
-        try (InputStream stream = InstallPluginCommand.class.getResourceAsStream("/modules.txt");
-             BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
-            Set<String> modules = new HashSet<>();
-            String line = reader.readLine();
-            while (line != null) {
-                modules.add(line.trim());
-                line = reader.readLine();
-            }
-            MODULES = Collections.unmodifiableSet(modules);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
+        try (var stream = InstallPluginCommand.class.getResourceAsStream("/modules.txt")) {
+            MODULES = Streams.readAllLines(stream)
+                .stream()
+                .map(String::trim)
+                .collect(Collectors.toUnmodifiableSet());
+        } catch (final IOException e) {
+            throw new UncheckedIOException(e);
         }
     }
 
     /** The official plugins that can be installed simply by name. */
     static final Set<String> OFFICIAL_PLUGINS;
     static {
-        try (InputStream stream = InstallPluginCommand.class.getResourceAsStream("/plugins.txt");
-             BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
-            Set<String> plugins = new TreeSet<>(); // use tree set to get sorting for help command
-            String line = reader.readLine();
-            while (line != null) {
-                plugins.add(line.trim());
-                line = reader.readLine();
-            }
-            OFFICIAL_PLUGINS = Collections.unmodifiableSet(plugins);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
+        try (var stream = InstallPluginCommand.class.getResourceAsStream("/plugins.txt")) {
+            OFFICIAL_PLUGINS = Streams.readAllLines(stream)
+                .stream()
+                .map(String::trim)
+                .collect(Sets.toUnmodifiableSortedSet());
+        } catch (final IOException e) {
+            throw new UncheckedIOException(e);
         }
     }
 
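Streams.readAllLines and Sets.toUnmodifiableSortedSet above are Elasticsearch helper utilities rather than JDK APIs. A rough JDK-only equivalent of such a sorted-set collector could be sketched as follows (an illustration under that assumption, not the project's actual implementation). The removed comment in the old code ("use tree set to get sorting for help command") is why the replacement still needs a sorted collector rather than Collectors.toUnmodifiableSet.

    import java.util.Collections;
    import java.util.SortedSet;
    import java.util.TreeSet;
    import java.util.stream.Collector;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class SortedSetCollectorSketch {

        // Collect into a TreeSet to get natural ordering, then wrap it so callers cannot modify it.
        static <T> Collector<T, ?, SortedSet<T>> toUnmodifiableSortedSet() {
            return Collectors.collectingAndThen(
                Collectors.toCollection(TreeSet::new),
                Collections::unmodifiableSortedSet);
        }

        public static void main(String[] args) {
            SortedSet<String> sorted = Stream.of("cherry", "apple", "banana")
                .collect(toUnmodifiableSortedSet());
            System.out.println(sorted); // prints [apple, banana, cherry]
        }
    }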
@@ -29,11 +29,8 @@
 import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
 import org.elasticsearch.index.analysis.Analysis;
 
-import java.util.HashMap;
 import java.util.Map;
 
-import static java.util.Collections.unmodifiableMap;
-
 /**
  * Creates a SnowballAnalyzer initialized with stopwords and Snowball filter. Only
  * supports Dutch, English (default), French, German and German2 where stopwords
@@ -48,25 +45,20 @@
  *
  */
 public class SnowballAnalyzerProvider extends AbstractIndexAnalyzerProvider<SnowballAnalyzer> {
-    private static final Map<String, CharArraySet> DEFAULT_LANGUAGE_STOPWORDS;
-
-    static {
-        Map<String, CharArraySet> defaultLanguageStopwords = new HashMap<>();
-        defaultLanguageStopwords.put("English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET);
-        defaultLanguageStopwords.put("Dutch", DutchAnalyzer.getDefaultStopSet());
-        defaultLanguageStopwords.put("German", GermanAnalyzer.getDefaultStopSet());
-        defaultLanguageStopwords.put("German2", GermanAnalyzer.getDefaultStopSet());
-        defaultLanguageStopwords.put("French", FrenchAnalyzer.getDefaultStopSet());
-        DEFAULT_LANGUAGE_STOPWORDS = unmodifiableMap(defaultLanguageStopwords);
-    }
+    private static final Map<String, CharArraySet> DEFAULT_LANGUAGE_STOP_WORDS = Map.of(
+        "English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET,
+        "Dutch", DutchAnalyzer.getDefaultStopSet(),
+        "German", GermanAnalyzer.getDefaultStopSet(),
+        "German2", GermanAnalyzer.getDefaultStopSet(),
+        "French", FrenchAnalyzer.getDefaultStopSet());
 
     private final SnowballAnalyzer analyzer;
 
     SnowballAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
 
         String language = settings.get("language", settings.get("name", "English"));
-        CharArraySet defaultStopwords = DEFAULT_LANGUAGE_STOPWORDS.getOrDefault(language, CharArraySet.EMPTY_SET);
+        CharArraySet defaultStopwords = DEFAULT_LANGUAGE_STOP_WORDS.getOrDefault(language, CharArraySet.EMPTY_SET);
         CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);
 
         analyzer = new SnowballAnalyzer(language, stopWords);
@@ -55,7 +55,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -64,38 +63,34 @@
 import java.util.function.BiFunction;
 import java.util.stream.Collectors;
 
+import static java.util.Map.entry;
 import static java.util.stream.Collectors.toSet;
 
 final class QueryAnalyzer {
 
-    private static final Map<Class<? extends Query>, BiFunction<Query, Version, Result>> queryProcessors;
-
-    static {
-        Map<Class<? extends Query>, BiFunction<Query, Version, Result>> map = new HashMap<>();
-        map.put(MatchNoDocsQuery.class, matchNoDocsQuery());
-        map.put(MatchAllDocsQuery.class, matchAllDocsQuery());
-        map.put(ConstantScoreQuery.class, constantScoreQuery());
-        map.put(BoostQuery.class, boostQuery());
-        map.put(TermQuery.class, termQuery());
-        map.put(TermInSetQuery.class, termInSetQuery());
-        map.put(CommonTermsQuery.class, commonTermsQuery());
-        map.put(BlendedTermQuery.class, blendedTermQuery());
-        map.put(PhraseQuery.class, phraseQuery());
-        map.put(MultiPhraseQuery.class, multiPhraseQuery());
-        map.put(SpanTermQuery.class, spanTermQuery());
-        map.put(SpanNearQuery.class, spanNearQuery());
-        map.put(SpanOrQuery.class, spanOrQuery());
-        map.put(SpanFirstQuery.class, spanFirstQuery());
-        map.put(SpanNotQuery.class, spanNotQuery());
-        map.put(BooleanQuery.class, booleanQuery());
-        map.put(DisjunctionMaxQuery.class, disjunctionMaxQuery());
-        map.put(SynonymQuery.class, synonymQuery());
-        map.put(FunctionScoreQuery.class, functionScoreQuery());
-        map.put(PointRangeQuery.class, pointRangeQuery());
-        map.put(IndexOrDocValuesQuery.class, indexOrDocValuesQuery());
-        map.put(ESToParentBlockJoinQuery.class, toParentBlockJoinQuery());
-        queryProcessors = Collections.unmodifiableMap(map);
-    }
+    private static final Map<Class<? extends Query>, BiFunction<Query, Version, Result>> QUERY_PROCESSORS = Map.ofEntries(
+        entry(MatchNoDocsQuery.class, matchNoDocsQuery()),
+        entry(MatchAllDocsQuery.class, matchAllDocsQuery()),
+        entry(ConstantScoreQuery.class, constantScoreQuery()),
+        entry(BoostQuery.class, boostQuery()),
+        entry(TermQuery.class, termQuery()),
+        entry(TermInSetQuery.class, termInSetQuery()),
+        entry(CommonTermsQuery.class, commonTermsQuery()),
+        entry(BlendedTermQuery.class, blendedTermQuery()),
+        entry(PhraseQuery.class, phraseQuery()),
+        entry(MultiPhraseQuery.class, multiPhraseQuery()),
+        entry(SpanTermQuery.class, spanTermQuery()),
+        entry(SpanNearQuery.class, spanNearQuery()),
+        entry(SpanOrQuery.class, spanOrQuery()),
+        entry(SpanFirstQuery.class, spanFirstQuery()),
+        entry(SpanNotQuery.class, spanNotQuery()),
+        entry(BooleanQuery.class, booleanQuery()),
+        entry(DisjunctionMaxQuery.class, disjunctionMaxQuery()),
+        entry(SynonymQuery.class, synonymQuery()),
+        entry(FunctionScoreQuery.class, functionScoreQuery()),
+        entry(PointRangeQuery.class, pointRangeQuery()),
+        entry(IndexOrDocValuesQuery.class, indexOrDocValuesQuery()),
+        entry(ESToParentBlockJoinQuery.class, toParentBlockJoinQuery()));
 
     private QueryAnalyzer() {
     }
@@ -130,11 +125,11 @@ private QueryAnalyzer() {
     static Result analyze(Query query, Version indexVersion) {
         Class<?> queryClass = query.getClass();
         if (queryClass.isAnonymousClass()) {
-            // Sometimes queries have anonymous classes in that case we need the direct super class.
-            // (for example blended term query)
+            // sometimes queries have anonymous classes in that case we need the direct super class (e.g., blended term query)
             queryClass = queryClass.getSuperclass();
         }
-        BiFunction<Query, Version, Result> queryProcessor = queryProcessors.get(queryClass);
+        assert Query.class.isAssignableFrom(queryClass) : query.getClass();
+        BiFunction<Query, Version, Result> queryProcessor = QUERY_PROCESSORS.get(queryClass);
         if (queryProcessor != null) {
             return queryProcessor.apply(query, indexVersion);
         } else {
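A note on why QueryAnalyzer uses Map.ofEntries while the other files in this commit use Map.of: the Map.of overloads only go up to ten key-value pairs, whereas Map.ofEntries accepts any number of entries. A minimal sketch with hypothetical names:

    import java.util.Map;

    import static java.util.Map.entry;

    class MapFactorySketch {

        // Map.of has fixed-arity overloads for zero through ten key-value pairs.
        private static final Map<String, Integer> SMALL = Map.of("a", 1, "b", 2);

        // Map.ofEntries takes a varargs array of entries, so it scales to any size,
        // such as the 22 query processors registered above.
        private static final Map<String, Integer> LARGE = Map.ofEntries(
            entry("a", 1),
            entry("b", 2),
            entry("c", 3));
    }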
@@ -18,8 +18,8 @@
  */
 package org.elasticsearch.http;
 
-import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.Setting;
@@ -33,8 +33,6 @@
 import org.elasticsearch.rest.RestStatus;
 
 import java.io.IOException;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -58,17 +56,10 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler {
         Setting.boolSetting("test.setting.not_deprecated", false,
             Setting.Property.NodeScope, Setting.Property.Dynamic);
 
-    private static final Map<String, Setting<?>> SETTINGS_MAP;
-
-    static {
-        Map<String, Setting<?>> settingsMap = new HashMap<>(3);
-
-        settingsMap.put(TEST_DEPRECATED_SETTING_TRUE1.getKey(), TEST_DEPRECATED_SETTING_TRUE1);
-        settingsMap.put(TEST_DEPRECATED_SETTING_TRUE2.getKey(), TEST_DEPRECATED_SETTING_TRUE2);
-        settingsMap.put(TEST_NOT_DEPRECATED_SETTING.getKey(), TEST_NOT_DEPRECATED_SETTING);
-
-        SETTINGS_MAP = Collections.unmodifiableMap(settingsMap);
-    }
+    private static final Map<String, Setting<?>> SETTINGS_MAP = Map.of(
+        TEST_DEPRECATED_SETTING_TRUE1.getKey(), TEST_DEPRECATED_SETTING_TRUE1,
+        TEST_DEPRECATED_SETTING_TRUE2.getKey(), TEST_DEPRECATED_SETTING_TRUE2,
+        TEST_NOT_DEPRECATED_SETTING.getKey(), TEST_NOT_DEPRECATED_SETTING);
 
     public static final String DEPRECATED_ENDPOINT = "[/_test_cluster/deprecated_settings] exists for deprecated tests";
     public static final String DEPRECATED_USAGE = "[deprecated_settings] usage is deprecated. use [settings] instead";
@@ -123,7 +123,7 @@ public VotingConfiguration reconfigure(Set<DiscoveryNode> liveNodes, Set<String>
         final Set<String> liveInConfigIds = new TreeSet<>(currentConfig.getNodeIds());
         liveInConfigIds.retainAll(liveNodeIds);
 
-        final Set<String> inConfigNotLiveIds = Sets.sortedDifference(currentConfig.getNodeIds(), liveInConfigIds);
+        final Set<String> inConfigNotLiveIds = Sets.unmodifiableSortedDifference(currentConfig.getNodeIds(), liveInConfigIds);
         final Set<String> nonRetiredInConfigNotLiveIds = new TreeSet<>(inConfigNotLiveIds);
         nonRetiredInConfigNotLiveIds.removeAll(retiredNodeIds);
 
@@ -32,13 +32,11 @@
 import java.lang.reflect.TypeVariable;
 import java.lang.reflect.WildcardType;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Objects;
 
 import static java.util.Collections.singleton;
-import static java.util.Collections.unmodifiableMap;
 
 /**
  * Static methods for working with types that we aren't publishing in the
@@ -53,20 +51,16 @@ public class MoreTypes {
     private MoreTypes() {
     }
 
-    private static final Map<TypeLiteral<?>, TypeLiteral<?>> PRIMITIVE_TO_WRAPPER;
-    static {
-        Map<TypeLiteral<?>, TypeLiteral<?>> primitiveToWrapper = new HashMap<>();
-        primitiveToWrapper.put(TypeLiteral.get(boolean.class), TypeLiteral.get(Boolean.class));
-        primitiveToWrapper.put(TypeLiteral.get(byte.class), TypeLiteral.get(Byte.class));
-        primitiveToWrapper.put(TypeLiteral.get(short.class), TypeLiteral.get(Short.class));
-        primitiveToWrapper.put(TypeLiteral.get(int.class), TypeLiteral.get(Integer.class));
-        primitiveToWrapper.put(TypeLiteral.get(long.class), TypeLiteral.get(Long.class));
-        primitiveToWrapper.put(TypeLiteral.get(float.class), TypeLiteral.get(Float.class));
-        primitiveToWrapper.put(TypeLiteral.get(double.class), TypeLiteral.get(Double.class));
-        primitiveToWrapper.put(TypeLiteral.get(char.class), TypeLiteral.get(Character.class));
-        primitiveToWrapper.put(TypeLiteral.get(void.class), TypeLiteral.get(Void.class));
-        PRIMITIVE_TO_WRAPPER = unmodifiableMap(primitiveToWrapper);
-    }
+    private static final Map<TypeLiteral<?>, TypeLiteral<?>> PRIMITIVE_TO_WRAPPER = Map.of(
+        TypeLiteral.get(boolean.class), TypeLiteral.get(Boolean.class),
+        TypeLiteral.get(byte.class), TypeLiteral.get(Byte.class),
+        TypeLiteral.get(short.class), TypeLiteral.get(Short.class),
+        TypeLiteral.get(int.class), TypeLiteral.get(Integer.class),
+        TypeLiteral.get(long.class), TypeLiteral.get(Long.class),
+        TypeLiteral.get(float.class), TypeLiteral.get(Float.class),
+        TypeLiteral.get(double.class), TypeLiteral.get(Double.class),
+        TypeLiteral.get(char.class), TypeLiteral.get(Character.class),
+        TypeLiteral.get(void.class), TypeLiteral.get(Void.class));
 
     /**
      * Returns an equivalent type that's safe for use in a key. The returned type will be free of
