Use CollectionUtils.isEmpty where appropriate (#55910)
This commit uses the isEmpty utility method for arrays in place of explicit null and length-greater-than-zero checks.
parent 32471abc0e
commit 8e96e5c936
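For context, every change below follows the same pattern: an explicit null check combined with a length-greater-than-zero check on an array is collapsed into a single negated call to CollectionUtils.isEmpty. The sketch below only illustrates the idea; the isEmpty helper is a local stand-in for the assumed null-or-zero-length semantics of org.elasticsearch.common.util.CollectionUtils.isEmpty, and the indices variable is hypothetical, not taken from the diff.

    // Minimal, self-contained sketch of the before/after pattern (assumptions noted above).
    public class IsEmptyPatternSketch {

        // Stand-in mirroring the assumed semantics of CollectionUtils.isEmpty for arrays:
        // true when the array is null or has no elements.
        static <T> boolean isEmpty(T[] array) {
            return array == null || array.length == 0;
        }

        public static void main(String[] args) {
            String[] indices = {"index-1", "index-2"};

            // Before: explicit null and length checks.
            if (indices != null && indices.length > 0) {
                System.out.println("before: " + String.join(",", indices));
            }

            // After: a single null-safe emptiness check, negated to guard the non-empty branch.
            if (isEmpty(indices) == false) {
                System.out.println("after:  " + String.join(",", indices));
            }
        }
    }

Both branches print the same joined string; the only difference is that the null and length checks are folded into one call.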
@@ -64,6 +64,7 @@ import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -855,10 +856,10 @@ final class RequestConverters {
     if (fetchSourceContext.fetchSource() == false) {
         putParam("_source", Boolean.FALSE.toString());
     }
-    if (fetchSourceContext.includes() != null && fetchSourceContext.includes().length > 0) {
+    if (CollectionUtils.isEmpty(fetchSourceContext.includes()) == false) {
         putParam("_source_includes", String.join(",", fetchSourceContext.includes()));
     }
-    if (fetchSourceContext.excludes() != null && fetchSourceContext.excludes().length > 0) {
+    if (CollectionUtils.isEmpty(fetchSourceContext.excludes()) == false) {
         putParam("_source_excludes", String.join(",", fetchSourceContext.excludes()));
     }
 }
@@ -866,7 +867,7 @@ final class RequestConverters {
 }

 Params withFields(String[] fields) {
-    if (fields != null && fields.length > 0) {
+    if (CollectionUtils.isEmpty(fields) == false) {
         return putParam("fields", String.join(",", fields));
     }
     return this;
@@ -967,7 +968,7 @@ final class RequestConverters {
 }

 Params withStoredFields(String[] storedFields) {
-    if (storedFields != null && storedFields.length > 0) {
+    if (CollectionUtils.isEmpty(storedFields) == false) {
         return putParam("stored_fields", String.join(",", storedFields));
     }
     return this;
@@ -29,6 +29,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.client.cluster.RemoteInfoRequest;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.CoreMatchers;
 import org.junit.Assert;
@@ -142,7 +143,7 @@ public class ClusterRequestConvertersTests extends ESTestCase {
     Assert.assertThat(request, CoreMatchers.notNullValue());
     Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
     Assert.assertThat(request.getEntity(), nullValue());
-    if (indices != null && indices.length > 0) {
+    if (CollectionUtils.isEmpty(indices) == false) {
         Assert.assertThat(request.getEndpoint(), equalTo("/_cluster/health/" + String.join(",", indices)));
     } else {
         Assert.assertThat(request.getEndpoint(), equalTo("/_cluster/health"));
@@ -474,7 +474,7 @@ public class IndicesRequestConvertersTests extends ESTestCase {
     }

     StringJoiner endpoint = new StringJoiner("/", "/", "");
-    if (indicesUnderTest != null && indicesUnderTest.length > 0) {
+    if (CollectionUtils.isEmpty(indicesUnderTest) == false) {
         endpoint.add(String.join(",", indicesUnderTest));
     }
     endpoint.add("_settings");
@@ -487,7 +487,7 @@ public class IndicesRequestConvertersTests extends ESTestCase {
         }
     }
     getSettingsRequest.names(names);
-    if (names != null && names.length > 0) {
+    if (CollectionUtils.isEmpty(names) == false) {
         endpoint.add(String.join(",", names));
     }
 }
@@ -1207,7 +1207,7 @@ public class IndicesRequestConvertersTests extends ESTestCase {
     public void testReloadAnalyzers() {
         String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
         StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indices != null && indices.length > 0) {
+        if (CollectionUtils.isEmpty(indices) == false) {
             endpoint.add(String.join(",", indices));
         }
         ReloadAnalyzersRequest reloadRequest = new ReloadAnalyzersRequest(indices);
@@ -33,6 +33,7 @@ import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.shard.DocsStats;
@@ -393,7 +394,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
     logger.info("iteration [{}] - successful shards: {} (expected {})", iteration,
         searchResponse.getSuccessfulShards(), numberOfShards);
     logger.info("iteration [{}] - failed shards: {} (expected 0)", iteration, searchResponse.getFailedShards());
-    if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) {
+    if (CollectionUtils.isEmpty(searchResponse.getShardFailures()) == false) {
         logger.info("iteration [{}] - shard failures: {}", iteration, Arrays.toString(searchResponse.getShardFailures()));
     }
     logger.info("iteration [{}] - returned documents: {} (expected {})", iteration,
@@ -26,6 +26,7 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.util.CollectionUtils;

 import java.util.ArrayList;
 import java.util.HashSet;
@@ -184,7 +185,7 @@ public final class CollapseTopFieldDocs extends TopFieldDocs {
     if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
         totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
     }
-    if (shard.scoreDocs != null && shard.scoreDocs.length > 0) {
+    if (CollectionUtils.isEmpty(shard.scoreDocs) == false) {
         availHitCount += shard.scoreDocs.length;
         queue.add(new ShardRef(shardIDX, setShardIndex == false));
     }
@@ -42,6 +42,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -228,7 +229,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
     if (request.waitForNodes().isEmpty() == false) {
         waitCount++;
     }
-    if (request.indices() != null && request.indices().length > 0) { // check that they actually exists in the meta data
+    if (CollectionUtils.isEmpty(request.indices()) == false) { // check that they actually exists in the meta data
         waitCount++;
     }
     return waitCount;
@@ -282,7 +283,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
             waitForCounter++;
         }
     }
-    if (request.indices() != null && request.indices().length > 0) {
+    if (CollectionUtils.isEmpty(request.indices()) == false) {
         try {
             indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), true, request.indices());
             waitForCounter++;
@@ -39,6 +39,7 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
@@ -208,7 +209,7 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction<Sn
     }
     // Now add snapshots on disk that are not currently running
     final String repositoryName = request.repository();
-    if (Strings.hasText(repositoryName) && request.snapshots() != null && request.snapshots().length > 0) {
+    if (Strings.hasText(repositoryName) && CollectionUtils.isEmpty(request.snapshots()) == false) {
         loadRepositoryData(snapshotsInProgress, request, builder, currentSnapshotNames, repositoryName, listener);
     } else {
         listener.onResponse(new SnapshotsStatusResponse(Collections.unmodifiableList(builder)));
@@ -27,6 +27,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.Index;
@@ -203,7 +204,7 @@ public class CloseIndexResponse extends ShardsAcknowledgedResponse {
     }

     public boolean hasFailures() {
-        return failures != null && failures.length > 0;
+        return CollectionUtils.isEmpty(failures) == false;
     }

     public int getId() {
@@ -33,6 +33,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -118,7 +119,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
     } else if (source.isEmpty()) {
         validationException = addValidationError("mapping source is empty", validationException);
     }
-    if (concreteIndex != null && (indices != null && indices.length > 0)) {
+    if (concreteIndex != null && CollectionUtils.isEmpty(indices) == false) {
         validationException = addValidationError("either concrete index or unresolved indices can be set, concrete index: ["
             + concreteIndex + "] and indices: " + Arrays.asList(indices) , validationException);
     }
@@ -24,6 +24,7 @@ import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestStatus;

@@ -119,7 +120,7 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
     private static String buildMessage(String phaseName, String msg, ShardSearchFailure[] shardFailures) {
         StringBuilder sb = new StringBuilder();
         sb.append("Failed to execute phase [").append(phaseName).append("], ").append(msg);
-        if (shardFailures != null && shardFailures.length > 0) {
+        if (CollectionUtils.isEmpty(shardFailures) == false) {
             sb.append("; shardFailures ");
             for (ShardSearchFailure shardFailure : shardFailures) {
                 if (shardFailure.shard() != null) {
@@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;

@@ -159,7 +160,7 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
     }

     public boolean match(Task task) {
-        if (getActions() != null && getActions().length > 0 && Regex.simpleMatch(getActions(), task.getAction()) == false) {
+        if (CollectionUtils.isEmpty(getActions()) == false && Regex.simpleMatch(getActions(), task.getAction()) == false) {
             return false;
         }
         if (getTaskId().isSet()) {
@@ -31,6 +31,7 @@ import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.DateMathParser;
 import org.elasticsearch.common.time.DateUtils;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
@@ -311,7 +312,7 @@ public class IndexNameExpressionResolver {
      * @return the concrete index obtained as a result of the index resolution
      */
     public Index concreteSingleIndex(ClusterState state, IndicesRequest request) {
-        String indexExpression = request.indices() != null && request.indices().length > 0 ? request.indices()[0] : null;
+        String indexExpression = CollectionUtils.isEmpty(request.indices()) ? null : request.indices()[0];
         Index[] indices = concreteIndices(state, request.indicesOptions(), indexExpression);
         if (indices.length != 1) {
             throw new IllegalArgumentException("unable to return a single index as the index and options" +
@@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
+import org.elasticsearch.common.util.CollectionUtils;

 import java.io.Reader;

@@ -97,7 +98,7 @@ public final class CustomAnalyzer extends Analyzer implements AnalyzerComponents
     @Override
     protected Reader initReader(String fieldName, Reader reader) {
         CharFilterFactory[] charFilters = charFilters();
-        if (charFilters != null && charFilters.length > 0) {
+        if (CollectionUtils.isEmpty(charFilters) == false) {
             for (CharFilterFactory charFilter : charFilters) {
                 reader = charFilter.create(reader);
             }
@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.util.CloseableThreadLocal;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;

 import java.io.Reader;
 import java.util.Map;
@@ -152,7 +153,7 @@ public final class ReloadableCustomAnalyzer extends Analyzer implements Analyzer
     @Override
     protected Reader initReader(String fieldName, Reader reader) {
         final AnalyzerComponents components = getStoredComponents();
-        if (components.getCharFilters() != null && components.getCharFilters().length > 0) {
+        if (CollectionUtils.isEmpty(components.getCharFilters()) == false) {
             for (CharFilterFactory charFilter : components.getCharFilters()) {
                 reader = charFilter.create(reader);
             }
@@ -31,6 +31,7 @@ import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.common.FieldMemoryStats;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.search.suggest.completion.CompletionStats;

 import java.util.function.Supplier;
@@ -123,7 +124,7 @@ class CompletionStatsCache implements ReferenceManager.RefreshListener {

     private static CompletionStats filterCompletionStatsByFieldName(String[] fieldNamePatterns, CompletionStats fullCompletionStats) {
         final FieldMemoryStats fieldMemoryStats;
-        if (fieldNamePatterns != null && fieldNamePatterns.length > 0) {
+        if (CollectionUtils.isEmpty(fieldNamePatterns) == false) {
             final ObjectLongHashMap<String> completionFields = new ObjectLongHashMap<>(fieldNamePatterns.length);
             for (ObjectLongCursor<String> fieldCursor : fullCompletionStats.getFields()) {
                 if (Regex.simpleMatch(fieldNamePatterns, fieldCursor.key)) {
@@ -24,6 +24,7 @@ import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.FieldMemoryStats;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.index.shard.ShardId;

@@ -38,7 +39,7 @@ public class ShardFieldData implements IndexFieldDataCache.Listener {

     public FieldDataStats stats(String... fields) {
         ObjectLongHashMap<String> fieldTotals = null;
-        if (fields != null && fields.length > 0) {
+        if (CollectionUtils.isEmpty(fields) == false) {
             fieldTotals = new ObjectLongHashMap<>();
             for (Map.Entry<String, CounterMetric> entry : perFieldTotals.entrySet()) {
                 if (Regex.simpleMatch(fields, entry.getKey())) {
@@ -31,6 +31,7 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -192,7 +193,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         this.enabled = enabled;
         this.includes = includes;
         this.excludes = excludes;
-        final boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
+        final boolean filtered = CollectionUtils.isEmpty(includes) == false || CollectionUtils.isEmpty(excludes) == false;
         this.filter = enabled && filtered && fieldType().stored() ? XContentMapValues.filter(includes, excludes) : null;
         this.complete = enabled && includes == null && excludes == null;
     }
@@ -29,6 +29,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.search.QueryParserHelper;
@@ -273,7 +274,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
      * none are specified.
      */
     public SimpleQueryStringBuilder flags(SimpleQueryStringFlag... flags) {
-        if (flags != null && flags.length > 0) {
+        if (CollectionUtils.isEmpty(flags) == false) {
             int value = 0;
             for (SimpleQueryStringFlag flag : flags) {
                 value |= flag.value;
@@ -23,6 +23,7 @@ import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.shard.SearchOperationListener;
 import org.elasticsearch.search.internal.SearchContext;

@@ -47,7 +48,7 @@ public final class ShardSearchStats implements SearchOperationListener {
     public SearchStats stats(String... groups) {
         SearchStats.Stats total = totalStats.stats();
         Map<String, SearchStats.Stats> groupsSt = null;
-        if (groups != null && groups.length > 0) {
+        if (CollectionUtils.isEmpty(groups) == false) {
             groupsSt = new HashMap<>(groupsStats.size());
             if (groups.length == 1 && groups[0].equals("_all")) {
                 for (Map.Entry<String, StatsHolder> entry : groupsStats.entrySet()) {
@@ -28,6 +28,7 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.lucene.uid.Versions;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -88,7 +89,7 @@ public class RestActions {
         builder.field(SKIPPED_FIELD.getPreferredName(), skipped);
     }
     builder.field(FAILED_FIELD.getPreferredName(), failed);
-    if (shardFailures != null && shardFailures.length > 0) {
+    if (CollectionUtils.isEmpty(shardFailures) == false) {
         builder.startArray(FAILURES_FIELD.getPreferredName());
         for (ShardOperationFailedException shardFailure : ExceptionsHelper.groupBy(shardFailures)) {
             shardFailure.toXContent(builder, params);
@@ -45,6 +45,7 @@ import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
 import org.elasticsearch.core.internal.io.IOUtils;
@@ -967,7 +968,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
     if (source.stats() != null) {
         context.groupStats(source.stats());
     }
-    if (source.searchAfter() != null && source.searchAfter().length > 0) {
+    if (CollectionUtils.isEmpty(source.searchAfter()) == false) {
         if (context.scrollContext() != null) {
             throw new SearchException(shardTarget, "`search_after` cannot be used in a scroll context.");
         }
@@ -36,6 +36,7 @@ import org.apache.lucene.search.vectorhighlight.SingleFragListBuilder;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
@@ -182,7 +183,7 @@ public class FastVectorHighlighter implements Highlighter {
             entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
     }

-    if (fragments != null && fragments.length > 0) {
+    if (CollectionUtils.isEmpty(fragments) == false) {
         return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
     }

@@ -195,7 +196,7 @@ public class FastVectorHighlighter implements Highlighter {
     fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(),
         fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(),
         field.fieldOptions().postTags(), encoder);
-    if (fragments != null && fragments.length > 0) {
+    if (CollectionUtils.isEmpty(fragments) == false) {
         return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
     }
 }
@@ -21,6 +21,7 @@ package org.elasticsearch.watcher;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.util.CollectionUtils;

 import java.io.IOException;
 import java.nio.file.Files;
@@ -182,7 +183,7 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {

     private FileObserver[] listChildren(boolean initial) throws IOException {
         Path[] files = listFiles();
-        if (files != null && files.length > 0) {
+        if (CollectionUtils.isEmpty(files) == false) {
             FileObserver[] children = new FileObserver[files.length];
             for (int i = 0; i < files.length; i++) {
                 children[i] = createChild(files[i], initial);
@@ -195,7 +196,7 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {

     private void updateChildren() throws IOException {
         Path[] files = listFiles();
-        if (files != null && files.length > 0) {
+        if (CollectionUtils.isEmpty(files) == false) {
             FileObserver[] newChildren = new FileObserver[files.length];
             int child = 0;
             int file = 0;
@@ -37,6 +37,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -161,7 +162,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
     } else {
         likeItems = randomLikeItems;
     }
-    if (randomBoolean() && likeItems != null && likeItems.length > 0) { // for the default field
+    if (randomBoolean() && CollectionUtils.isEmpty(likeItems) == false) { // for the default field
         queryBuilder = new MoreLikeThisQueryBuilder(null, likeItems);
     } else {
         queryBuilder = new MoreLikeThisQueryBuilder(randomFields, likeTexts, likeItems);
@@ -268,7 +269,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik

     @Override
     protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
-        if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
+        if (CollectionUtils.isEmpty(queryBuilder.likeItems()) == false) {
             assertThat(query, instanceOf(BooleanQuery.class));
             BooleanQuery booleanQuery = (BooleanQuery) query;
             for (BooleanClause booleanClause : booleanQuery) {
@@ -49,6 +49,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -511,7 +512,7 @@ public class JobResultsProvider {
     SearchResponse searchResponse = itemResponse.getResponse();
     ShardSearchFailure[] shardFailures = searchResponse.getShardFailures();
     int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards();
-    if (shardFailures != null && shardFailures.length > 0) {
+    if (CollectionUtils.isEmpty(shardFailures) == false) {
         LOGGER.error("[{}] Search request returned shard failures: {}", jobId, Arrays.toString(shardFailures));
         listener.onFailure(
             new ElasticsearchException(ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures)));
@@ -617,7 +618,7 @@ public class JobResultsProvider {
     SearchResponse searchResponse = itemResponse.getResponse();
     ShardSearchFailure[] shardFailures = searchResponse.getShardFailures();
     int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards();
-    if (shardFailures != null && shardFailures.length > 0) {
+    if (CollectionUtils.isEmpty(shardFailures) == false) {
         LOGGER.error("[{}] Search request returned shard failures: {}", jobId,
             Arrays.toString(shardFailures));
         errorHandler.accept(new ElasticsearchException(