Remove and forbid use of the type-unsafe empty Collections fields
This commit removes and now forbids all uses of the type-unsafe empty Collections fields Collections#EMPTY_LIST, Collections#EMPTY_MAP, and Collections#EMPTY_SET. The type-safe methods Collections#emptyList, Collections#emptyMap, and Collections#emptySet should be used instead.
parent 7854368180
commit 05430a788a
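For context on why the raw fields trip unchecked warnings while the factory methods do not, here is a minimal sketch; the class and helper names are hypothetical and not part of this commit:

import java.util.Collections;
import java.util.List;
import java.util.Map;

class EmptyCollectionsSketch {                     // hypothetical, for illustration only

    static void rawFields() {
        // Collections.EMPTY_LIST and EMPTY_MAP are declared with raw types, so these
        // assignments compile only with "unchecked conversion" warnings.
        List<String> names = Collections.EMPTY_LIST;
        Map<String, Integer> counts = Collections.EMPTY_MAP;
    }

    static void typedFactories() {
        // The generic factory methods infer <String> and <String, Integer> from the
        // assignment targets, so the same code is warning-free.
        List<String> names = Collections.emptyList();
        Map<String, Integer> counts = Collections.emptyMap();

        // An explicit type witness can also be supplied when inference alone is not enough.
        printAll(Collections.<String>emptyList());
    }

    static void printAll(List<String> values) {    // hypothetical helper
        values.forEach(System.out::println);
    }
}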
@@ -112,3 +112,7 @@ java.lang.System#setProperty(java.lang.String,java.lang.String)
 java.lang.System#clearProperty(java.lang.String)
 java.lang.System#getProperties() @ Use BootstrapInfo.getSystemProperties for a read-only view
 
+@defaultMessage Avoid unchecked warnings by using Collections#empty(List|Map|Set) methods
+java.util.Collections#EMPTY_LIST
+java.util.Collections#EMPTY_MAP
+java.util.Collections#EMPTY_SET

@@ -299,7 +299,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
         // Set up everything, now locally create the index to see that things are ok, and apply
         final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build();
         // create the index here (on the master) to validate it can be created, as well as adding the mapping
-        indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.EMPTY_LIST);
+        indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList());
         indexCreated = true;
         // now add the mappings
         IndexService indexService = indicesService.indexServiceSafe(request.index());

@@ -99,7 +99,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
         if (indexService == null) {
             // temporarily create the index and add mappings so we can parse the filter
             try {
-                indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
+                indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList());
                 if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) {
                     indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, false);
                 }

@@ -218,8 +218,8 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
         try {
             // We cannot instantiate real analysis server at this point because the node might not have
             // been started yet. However, we don't really need real analyzers at this stage - so we can fake it
-            IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST);
-            SimilarityService similarityService = new SimilarityService(indexSettings, Collections.EMPTY_MAP);
+            IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList());
+            SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
 
             try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) {
                 try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry)) {

@@ -256,7 +256,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
         };
 
         public FakeAnalysisService(IndexSettings indexSettings) {
-            super(indexSettings, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
+            super(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
         }
 
         @Override

@@ -219,7 +219,7 @@ public class MetaDataMappingService extends AbstractComponent {
             IndexService indexService;
             if (indicesService.hasIndex(index) == false) {
                 indicesToClose.add(index);
-                indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
+                indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList());
                 // add mappings for all types, we need them for cross-type validation
                 for (ObjectCursor<MappingMetaData> mapping : indexMetaData.getMappings().values()) {
                     indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), false, request.updateAllTypes());

@@ -123,7 +123,7 @@ public abstract class ExtensionPoint {
     public static final class SelectedType<T> extends ClassMap<T> {
 
         public SelectedType(String name, Class<T> extensionClass) {
-            super(name, extensionClass, Collections.EMPTY_SET);
+            super(name, extensionClass, Collections.emptySet());
         }
 
         /**

@@ -131,7 +131,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
             if (metaData != null) {
                 ShardPath shardPath = null;
                 try {
-                    IndexSettings indexSettings = new IndexSettings(metaData, settings, Collections.EMPTY_LIST);
+                    IndexSettings indexSettings = new IndexSettings(metaData, settings, Collections.emptyList());
                     shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings);
                     if (shardPath == null) {
                         throw new IllegalStateException(shardId + " no shard path found");

@@ -55,7 +55,7 @@ public final class AnalysisRegistry implements Closeable {
     private final Environment environemnt;
 
     public AnalysisRegistry(HunspellService hunspellService, Environment environment) {
-        this(hunspellService, environment, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
+        this(hunspellService, environment, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
     }
 
     public AnalysisRegistry(HunspellService hunspellService, Environment environment,

@@ -166,7 +166,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             current = createWriter(checkpoint.generation + 1);
             this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration;
         } else {
-            this.recoveredTranslogs = Collections.EMPTY_LIST;
+            this.recoveredTranslogs = Collections.emptyList();
             IOUtils.rm(location);
             logger.debug("wipe translog location - creating new translog");
             Files.createDirectories(location);

@@ -582,7 +582,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
      * and updated with any future translog.
      */
     public static final class View implements Closeable {
-        public static final Translog.View EMPTY_VIEW = new View(Collections.EMPTY_LIST, null);
+        public static final Translog.View EMPTY_VIEW = new View(Collections.emptyList(), null);
 
         boolean closed;
         // last in this list is always FsTranslog.current

@@ -264,7 +264,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
         }
         final String indexName = indexMetaData.getIndex();
         final Predicate<String> indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state());
-        final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST, indexNameMatcher);
+        final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList(), indexNameMatcher);
         Index index = new Index(indexMetaData.getIndex());
         if (indices.containsKey(index.name())) {
             throw new IndexAlreadyExistsException(index);

@@ -562,7 +562,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
         // play safe here and make sure that we take node level settings into account.
         // we might run on nodes where we use shard FS and then in the future don't delete
         // actual content.
-        return new IndexSettings(metaData, settings, Collections.EMPTY_LIST);
+        return new IndexSettings(metaData, settings, Collections.emptyList());
     }
 
     /**

@@ -245,7 +245,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
             } else {
                 final IndexMetaData metaData = previousState.metaData().index(index);
                 assert metaData != null;
-                indexSettings = new IndexSettings(metaData, settings, Collections.EMPTY_LIST);
+                indexSettings = new IndexSettings(metaData, settings, Collections.emptyList());
                 indicesService.deleteClosedIndex("closed index no longer part of the metadata", metaData, event.state());
             }
             try {

@@ -171,7 +171,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
             if (!storeType.contains("fs")) {
                 return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);
             }
-            final IndexSettings indexSettings = indexService != null ? indexService.getIndexSettings() : new IndexSettings(metaData, settings, Collections.EMPTY_LIST);
+            final IndexSettings indexSettings = indexService != null ? indexService.getIndexSettings() : new IndexSettings(metaData, settings, Collections.emptyList());
             final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings);
             if (shardPath == null) {
                 return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);

@@ -729,7 +729,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public Set<String> getHeaders() {
-        return Collections.EMPTY_SET;
+        return Collections.emptySet();
     }
 
     @Override

@@ -294,7 +294,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
         if (readOnly()) {
             throw new RepositoryException(this.repositoryName, "cannot delete snapshot from a readonly repository");
         }
-        List<String> indices = Collections.EMPTY_LIST;
+        List<String> indices = Collections.emptyList();
         Snapshot snapshot = null;
         try {
             snapshot = readSnapshot(snapshotId);

@@ -121,12 +121,12 @@ public class Template extends Script {
 
     @SuppressWarnings("unchecked")
     public static Script parse(Map<String, Object> config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) {
-        return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries, parseFieldMatcher);
+        return new TemplateParser(Collections.emptyMap(), MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries, parseFieldMatcher);
     }
 
     @SuppressWarnings("unchecked")
     public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
-        return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher);
+        return new TemplateParser(Collections.emptyMap(), MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher);
     }
 
     @Deprecated

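The @SuppressWarnings("unchecked") annotations in the context above are the kind of suppression a raw EMPTY_MAP argument forces: passing a raw map where a parameterized map is expected is an unchecked conversion. Whether these particular suppressions can now be dropped depends on the rest of each method, but the mechanism is easy to show with a hypothetical parser class (not the real TemplateParser):

import java.util.Collections;
import java.util.Map;

class SuppressionSketch {                          // hypothetical, for illustration only

    static final class Parser {                    // hypothetical stand-in for a parser type
        Parser(Map<String, String> defaults) {}
    }

    // Before: the raw EMPTY_MAP argument is an unchecked conversion to Map<String, String>,
    // which is why call sites like this tend to carry the suppression.
    @SuppressWarnings("unchecked")
    static Parser legacy() {
        return new Parser(Collections.EMPTY_MAP);
    }

    // After: emptyMap() infers <String, String> from the constructor parameter,
    // so no warning is produced and no suppression is needed for this call.
    static Parser current() {
        return new Parser(Collections.emptyMap());
    }
}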
@@ -31,7 +31,7 @@ import java.util.Map;
  */
 public class SignificanceHeuristicStreams {
 
-    private static Map<String, Stream> STREAMS = Collections.EMPTY_MAP;
+    private static Map<String, Stream> STREAMS = Collections.emptyMap();
 
     static {
         HashMap<String, Stream> map = new HashMap<>();

@@ -78,7 +78,7 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg
                 }
             }
         }
-        return buildAggregation(Collections.EMPTY_LIST, metaData());
+        return buildAggregation(Collections.emptyList(), metaData());
     }
 
     /**

@@ -123,4 +123,4 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg
         gapPolicy.writeTo(out);
     }
 
-}
+}

@@ -90,7 +90,7 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator
     @Override
     protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
         String[] keys = maxBucketKeys.toArray(new String[maxBucketKeys.size()]);
-        return new InternalBucketMetricValue(name(), keys, maxValue, formatter, Collections.EMPTY_LIST, metaData());
+        return new InternalBucketMetricValue(name(), keys, maxValue, formatter, Collections.emptyList(), metaData());
     }
 
     public static class Factory extends PipelineAggregatorFactory {

@@ -91,7 +91,7 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator
     protected InternalAggregation buildAggregation(java.util.List<PipelineAggregator> pipelineAggregators,
             java.util.Map<String, Object> metadata) {
         String[] keys = minBucketKeys.toArray(new String[minBucketKeys.size()]);
-        return new InternalBucketMetricValue(name(), keys, minValue, formatter, Collections.EMPTY_LIST, metaData());
+        return new InternalBucketMetricValue(name(), keys, minValue, formatter, Collections.emptyList(), metaData());
     };
 
     public static class Factory extends PipelineAggregatorFactory {

@@ -32,7 +32,7 @@ import java.util.Map;
  */
 public class MovAvgModelStreams {
 
-    private static Map<String, Stream> STREAMS = Collections.EMPTY_MAP;
+    private static Map<String, Stream> STREAMS = Collections.emptyMap();
 
     static {
         HashMap<String, Stream> map = new HashMap<>();

@@ -35,7 +35,7 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
     private final Map<String, Suggester> parsers;
 
     public Suggesters() {
-        this(Collections.EMPTY_MAP);
+        this(Collections.emptyMap());
     }
 
     public Suggesters(Map<String, Suggester> suggesters) {

@@ -38,10 +38,10 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest
     private CompletionFieldMapper.CompletionFieldType fieldType;
     private CompletionSuggestionBuilder.FuzzyOptionsBuilder fuzzyOptionsBuilder;
     private CompletionSuggestionBuilder.RegexOptionsBuilder regexOptionsBuilder;
-    private Map<String, List<ContextMapping.QueryContext>> queryContexts = Collections.EMPTY_MAP;
+    private Map<String, List<ContextMapping.QueryContext>> queryContexts = Collections.emptyMap();
     private final MapperService mapperService;
     private final IndexFieldDataService indexFieldDataService;
-    private Set<String> payloadFields = Collections.EMPTY_SET;
+    private Set<String> payloadFields = Collections.emptySet();
 
     CompletionSuggestionContext(Suggester suggester, MapperService mapperService, IndexFieldDataService indexFieldDataService) {
         super(suggester);

@@ -29,8 +29,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import static java.util.Collections.*;
-
 /**
  * Represent information about snapshot
  */

@@ -93,7 +91,7 @@ public class Snapshot implements Comparable<Snapshot>, ToXContent, FromXContentB
      * Special constructor for the prototype object
      */
     private Snapshot() {
-        this("", (List<String>) EMPTY_LIST, 0);
+        this("", Collections.emptyList(), 0);
     }
 
     private static SnapshotState snapshotState(String reason, List<SnapshotShardFailure> shardFailures) {

@@ -154,7 +154,7 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
 
     @Override
     public Map<String, BoundTransportAddress> profileBoundAddresses() {
-        return Collections.EMPTY_MAP;
+        return Collections.emptyMap();
     }
 
     @Override

@@ -189,6 +189,6 @@ abstract class FailAndRetryMockTransport<Response extends TransportResponse> imp
 
     @Override
     public Map<String, BoundTransportAddress> profileBoundAddresses() {
-        return Collections.EMPTY_MAP;
+        return Collections.emptyMap();
     }
 }

@@ -63,7 +63,7 @@ public class TransportClientNodesServiceTests extends ESTestCase {
         transport = new FailAndRetryMockTransport<TestResponse>(getRandom()) {
             @Override
             public List<String> getLocalAddresses() {
-                return Collections.EMPTY_LIST;
+                return Collections.emptyList();
             }
 
             @Override

@@ -104,8 +104,8 @@ public class IndexModuleTests extends ESTestCase {
         Set<ScriptEngineService> scriptEngines = new HashSet<>();
         scriptEngines.add(new MustacheScriptEngineService(settings));
         scriptEngines.addAll(Arrays.asList(scriptEngineServices));
-        ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.EMPTY_LIST));
-        IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.EMPTY_SET, new NamedWriteableRegistry());
+        ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.emptyList()));
+        IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.emptySet(), new NamedWriteableRegistry());
         return new NodeServicesProvider(threadPool, indicesQueryCache, null, warmer, bigArrays, client, scriptService, indicesQueriesRegistry, indicesFieldDataCache, circuitBreakerService);
     }
 

@@ -251,7 +251,7 @@ public class IndexModuleTests extends ESTestCase {
             assertEquals("Unknown Similarity type [test_similarity] for [my_similarity]", ex.getMessage());
         }
     }
-
+
     public void testSetupWithoutType() throws IOException {
         Settings indexSettings = Settings.settingsBuilder()
                 .put("index.similarity.my_similarity.foo", "bar")

@@ -95,7 +95,7 @@ public class IndexSettingsTests extends ESTestCase {
     public void testSettingsConsistency() {
         Version version = VersionUtils.getPreviousVersion();
         IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build());
-        IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.EMPTY_LIST);
+        IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList());
         assertEquals(version, settings.getIndexVersionCreated());
         assertEquals("_na_", settings.getUUID());
         try {

@@ -106,7 +106,7 @@ public class IndexSettingsTests extends ESTestCase {
         }
 
         metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build());
-        settings = new IndexSettings(metaData, Settings.EMPTY, Collections.EMPTY_LIST);
+        settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList());
         try {
             settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("index.test.setting.int", 42).build()));
             fail("uuid missing/change");

@@ -72,7 +72,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
 
     public AnalysisRegistry getNewRegistry(Settings settings) {
         return new AnalysisRegistry(null, new Environment(settings),
-                Collections.EMPTY_MAP, Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.EMPTY_MAP, Collections.EMPTY_MAP);
+                Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.emptyMap(), Collections.emptyMap());
     }
 
     private Settings loadFromClasspath(String path) {

@@ -51,6 +51,6 @@ public class AnalysisTestsHelper {
         }
         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
         Environment environment = new Environment(settings);
-        return new AnalysisRegistry(new HunspellService(settings, environment, Collections.EMPTY_MAP), environment).build(idxSettings);
+        return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings);
     }
 }

@@ -52,7 +52,7 @@ public class CompoundAnalysisTests extends ESTestCase {
         Settings settings = getJsonSettings();
         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
         AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings),
-                Collections.EMPTY_MAP,Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.EMPTY_MAP,Collections.EMPTY_MAP).build(idxSettings);
+                Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings);
 
         TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec");
         MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class));

@@ -71,7 +71,7 @@ public class CompoundAnalysisTests extends ESTestCase {
         Index index = new Index("test");
         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
         AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings),
-                Collections.EMPTY_MAP, Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.EMPTY_MAP,Collections.EMPTY_MAP).build(idxSettings);
+                Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings);
 
         Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer();
 

@@ -108,7 +108,7 @@ public class CodecTests extends ESTestCase {
                 .put("path.home", createTempDir())
                 .build();
         IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings);
-        SimilarityService similarityService = new SimilarityService(settings, Collections.EMPTY_MAP);
+        SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap());
         AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings);
         MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap());
         MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry);

@@ -1617,7 +1617,7 @@ public class InternalEngineTests extends ESTestCase {
         // now it should be OK.
         IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetaData(),
                 Settings.builder().put(defaultSettings.getSettings()).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build(),
-                Collections.EMPTY_LIST);
+                Collections.emptyList());
         engine = createEngine(indexSettings, store, primaryTranslogDir, new MergeSchedulerConfig(indexSettings), newMergePolicy());
     }
 

@@ -1901,8 +1901,8 @@ public class InternalEngineTests extends ESTestCase {
         RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
         Index index = new Index(indexName);
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
-        AnalysisService analysisService = new AnalysisService(indexSettings, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
-        SimilarityService similarityService = new SimilarityService(indexSettings, Collections.EMPTY_MAP);
+        AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
+        SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
         MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry();
         MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry);
         DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService);

@@ -241,7 +241,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
         ).createInjector();
         AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
         ScriptService scriptService = injector.getInstance(ScriptService.class);
-        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.EMPTY_MAP);
+        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
         MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class);
         MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry);
         indexFieldDataService = new IndexFieldDataService(idxSettings, injector.getInstance(IndicesFieldDataCache.class), injector.getInstance(CircuitBreakerService.class), mapperService);

@@ -120,7 +120,7 @@ public class TemplateQueryParserTests extends ESTestCase {
 
         AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
         ScriptService scriptService = injector.getInstance(ScriptService.class);
-        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.EMPTY_MAP);
+        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
         MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry();
         MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry);
         IndexFieldDataService indexFieldDataService =new IndexFieldDataService(idxSettings, injector.getInstance(IndicesFieldDataCache.class), injector.getInstance(CircuitBreakerService.class), mapperService);

@@ -103,13 +103,13 @@ public class SignificanceHeuristicTests extends ESTestCase {
         if (randomBoolean()) {
             buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null));
             sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets,
-                    Collections.EMPTY_LIST, null);
+                    Collections.emptyList(), null);
             sTerms[1] = new SignificantLongTerms();
         } else {
 
             BytesRef term = new BytesRef("someterm");
             buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4, InternalAggregations.EMPTY));
-            sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.EMPTY_LIST,
+            sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.emptyList(),
                     null);
             sTerms[1] = new SignificantStringTerms();
         }

@@ -79,14 +79,15 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute
                     return list.execute();
                 }
             });
-            if (instanceList.isEmpty()) {
-                return Collections.EMPTY_LIST;
-            }
-            return instanceList.getItems();
+            // assist type inference
+            List<Instance> items = instanceList.isEmpty() ? Collections.emptyList() : instanceList.getItems();
+            return items;
         } catch (PrivilegedActionException e) {
             logger.warn("Problem fetching instance list for zone {}", zoneId);
             logger.debug("Full exception:", e);
-            return Collections.EMPTY_LIST;
+            // assist type inference
+            List<Instance> items = Collections.emptyList();
+            return items;
         }
     }).reduce(new ArrayList<>(), (a, b) -> {
         a.addAll(b);

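The "assist type inference" comments above mark the one place where the switch is not a drop-in replacement: inside a lambda whose own return type is still being inferred, a bare return of Collections.emptyList() gives the compiler little to anchor the element type on, so the commit pins it with an explicitly typed local. A simplified sketch of the same pattern follows; the names and stream shape are hypothetical, not the GCE code, and an explicit witness such as Collections.<Instance>emptyList() would be an equivalent fix:

import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class InferenceSketch {                            // hypothetical, for illustration only

    // Flattens per-zone results, substituting an empty list for missing zones.
    static List<String> flatten(Stream<List<String>> perZone) {
        return perZone
                .map(zone -> {
                    // A typed local gives the conditional (and the lambda) a concrete
                    // target type; returning Collections.emptyList() directly would leave
                    // the element type to be inferred through the surrounding generic calls.
                    List<String> items = (zone == null) ? Collections.emptyList() : zone;
                    return items;
                })
                .flatMap(List::stream)
                .collect(Collectors.toList());
    }
}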
@@ -54,6 +54,6 @@ public class IndexSettingsModule extends AbstractModule {
                 .put(settings)
                 .build();
         IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build();
-        return new IndexSettings(metaData, Settings.EMPTY, Collections.EMPTY_LIST);
+        return new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList());
     }
 }

@@ -650,7 +650,7 @@ public class TestSearchContext extends SearchContext {
 
     @Override
     public Set<String> getHeaders() {
-        return Collections.EMPTY_SET;
+        return Collections.emptySet();
     }
 
     @Override

@@ -173,7 +173,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
     private FsDirectoryService randomDirectorService(IndexStore indexStore, ShardPath path) {
         final IndexSettings indexSettings = indexStore.getIndexSettings();
         final IndexMetaData build = IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.STORE_TYPE, RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build();
-        final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings(), Collections.EMPTY_LIST);
+        final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings(), Collections.emptyList());
         return new FsDirectoryService(newIndexSettings, indexStore, path);
     }
 

@@ -178,6 +178,6 @@ public class CapturingTransport implements Transport {
 
     @Override
     public List<String> getLocalAddresses() {
-        return Collections.EMPTY_LIST;
+        return Collections.emptyList();
     }
 }