Forbid usage of Character.codePoint(At|Before) and Collections.sort.
Character.codePointAt and codePointBefore have two versions: one which only accepts an offset, and one which accepts an offset and a limit. The former can be dangerous when working with buffers of characters because if the offset is the last char of the buffer, a char outside the buffer might be used to compute the code point; one should therefore always use the version which accepts a limit. Collections.sort is wasteful on random-access lists: it dumps the data into an array, sorts the array, and then copies the elements back into the list. However, the sorting can easily be performed in-place by using Lucene's CollectionUtil.(merge|quick|tim)Sort.
This commit is contained in:
parent
6d8a85c6af
commit
c20d44a1ff
|
@ -25,3 +25,11 @@ java.util.concurrent.Executors#newSingleThreadScheduledExecutor()
|
|||
java.util.concurrent.Executors#newScheduledThreadPool(int)
|
||||
java.util.concurrent.Executors#defaultThreadFactory()
|
||||
java.util.concurrent.Executors#privilegedThreadFactory()
|
||||
|
||||
java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars
|
||||
java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
|
||||
|
||||
@defaultMessage Collections.sort dumps data into an array, sorts the array and reinserts data into the list, one should rather use Lucene's CollectionUtil sort methods which sort in place
|
||||
|
||||
java.util.Collections#sort(java.util.List)
|
||||
java.util.Collections#sort(java.util.List,java.util.Comparator)
|
||||
|
|
|
@ -24,7 +24,7 @@ import com.google.common.collect.Lists;
|
|||
import com.google.common.collect.Maps;
|
||||
import com.google.common.collect.Sets;
|
||||
import com.google.common.io.Closeables;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
|
@ -64,10 +64,8 @@ import org.elasticsearch.threadpool.ThreadPool;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
@ -481,7 +479,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
}
|
||||
}
|
||||
|
||||
Collections.sort(templates, new Comparator<IndexTemplateMetaData>() {
|
||||
CollectionUtil.quickSort(templates, new Comparator<IndexTemplateMetaData>() {
|
||||
@Override
|
||||
public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) {
|
||||
return o2.order() - o1.order();
|
||||
|
|
|
@ -19,6 +19,7 @@ package org.elasticsearch.common.inject.internal;
|
|||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.common.inject.*;
|
||||
import org.elasticsearch.common.inject.spi.*;
|
||||
|
||||
|
@ -439,7 +440,7 @@ public final class Errors implements Serializable {
|
|||
}
|
||||
|
||||
List<Message> result = Lists.newArrayList(root.errors);
|
||||
Collections.sort(result, new Comparator<Message>() {
|
||||
CollectionUtil.timSort(result, new Comparator<Message>() {
|
||||
public int compare(Message a, Message b) {
|
||||
return a.getSource().compareTo(b.getSource());
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.common.network;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
|
@ -109,7 +110,7 @@ public abstract class NetworkUtils {
|
|||
final Method getIndexMethod = NetworkInterface.class.getDeclaredMethod("getIndex");
|
||||
getIndexMethod.setAccessible(true);
|
||||
|
||||
Collections.sort(intfsList, new Comparator<NetworkInterface>() {
|
||||
CollectionUtil.timSort(intfsList, new Comparator<NetworkInterface>() {
|
||||
@Override
|
||||
public int compare(NetworkInterface o1, NetworkInterface o2) {
|
||||
try {
|
||||
|
|
|
@ -20,11 +20,11 @@
|
|||
package org.elasticsearch.discovery.zen.elect;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -110,7 +110,7 @@ public class ElectMasterService extends AbstractComponent {
|
|||
it.remove();
|
||||
}
|
||||
}
|
||||
Collections.sort(possibleNodes, nodeComparator);
|
||||
CollectionUtil.quickSort(possibleNodes, nodeComparator);
|
||||
return possibleNodes;
|
||||
}
|
||||
|
||||
|
|
|
@ -21,13 +21,13 @@ package org.elasticsearch.index.gateway;
|
|||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -40,7 +40,7 @@ public class CommitPoints implements Iterable<CommitPoint> {
|
|||
private final ImmutableList<CommitPoint> commitPoints;
|
||||
|
||||
public CommitPoints(List<CommitPoint> commitPoints) {
|
||||
Collections.sort(commitPoints, new Comparator<CommitPoint>() {
|
||||
CollectionUtil.quickSort(commitPoints, new Comparator<CommitPoint>() {
|
||||
@Override
|
||||
public int compare(CommitPoint o1, CommitPoint o2) {
|
||||
return (o2.version() < o1.version() ? -1 : (o2.version() == o1.version() ? 0 : 1));
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.monitor.jvm;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
|
@ -130,7 +131,7 @@ public class HotThreads {
|
|||
// sort by delta CPU time on thread.
|
||||
List<MyThreadInfo> hotties = new ArrayList<MyThreadInfo>(threadInfos.values());
|
||||
// skip that for now
|
||||
Collections.sort(hotties, new Comparator<MyThreadInfo>() {
|
||||
CollectionUtil.quickSort(hotties, new Comparator<MyThreadInfo>() {
|
||||
public int compare(MyThreadInfo o1, MyThreadInfo o2) {
|
||||
if ("cpu".equals(type)) {
|
||||
return (int) (o2.cpuTime - o1.cpuTime);
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.facet.datehistogram;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -163,7 +164,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
|
|||
// we need to sort it
|
||||
InternalFullDateHistogramFacet internalFacet = (InternalFullDateHistogramFacet) facets.get(0);
|
||||
List<FullEntry> entries = internalFacet.getEntries();
|
||||
Collections.sort(entries, comparatorType.comparator());
|
||||
CollectionUtil.timSort(entries, comparatorType.comparator());
|
||||
internalFacet.releaseCache();
|
||||
return internalFacet;
|
||||
}
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.facet.histogram;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -160,7 +161,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
|
|||
// we need to sort it
|
||||
InternalFullHistogramFacet internalFacet = (InternalFullHistogramFacet) facets.get(0);
|
||||
List<FullEntry> entries = internalFacet.getEntries();
|
||||
Collections.sort(entries, comparatorType.comparator());
|
||||
CollectionUtil.timSort(entries, comparatorType.comparator());
|
||||
internalFacet.releaseCache();
|
||||
return internalFacet;
|
||||
}
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package org.elasticsearch.search.facet.termsstats.doubles;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
@ -175,7 +177,7 @@ public class InternalTermsStatsDoubleFacet extends InternalTermsStatsFacet {
|
|||
InternalTermsStatsDoubleFacet tsFacet = (InternalTermsStatsDoubleFacet) facets.get(0);
|
||||
if (!tsFacet.entries.isEmpty()) {
|
||||
List<DoubleEntry> entries = tsFacet.mutableList();
|
||||
Collections.sort(entries, comparatorType.comparator());
|
||||
CollectionUtil.timSort(entries, comparatorType.comparator());
|
||||
}
|
||||
}
|
||||
return facets.get(0);
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.facet.termsstats.longs;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -175,7 +176,7 @@ public class InternalTermsStatsLongFacet extends InternalTermsStatsFacet {
|
|||
InternalTermsStatsLongFacet tsFacet = (InternalTermsStatsLongFacet) facets.get(0);
|
||||
if (!tsFacet.entries.isEmpty()) {
|
||||
List<LongEntry> entries = tsFacet.mutableList();
|
||||
Collections.sort(entries, comparatorType.comparator());
|
||||
CollectionUtil.timSort(entries, comparatorType.comparator());
|
||||
}
|
||||
}
|
||||
return facets.get(0);
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.facet.termsstats.strings;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
|
@ -180,7 +181,7 @@ public class InternalTermsStatsStringFacet extends InternalTermsStatsFacet {
|
|||
InternalTermsStatsStringFacet tsFacet = (InternalTermsStatsStringFacet) facets.get(0);
|
||||
if (!tsFacet.entries.isEmpty()) {
|
||||
List<StringEntry> entries = tsFacet.mutableList();
|
||||
Collections.sort(entries, comparatorType.comparator());
|
||||
CollectionUtil.timSort(entries, comparatorType.comparator());
|
||||
}
|
||||
}
|
||||
return facets.get(0);
|
||||
|
|
|
@ -19,14 +19,13 @@
|
|||
package org.elasticsearch.search.highlight;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.Maps;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.highlight.*;
|
||||
import org.apache.lucene.search.highlight.Formatter;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||
import org.elasticsearch.common.io.FastStringReader;
|
||||
import org.elasticsearch.common.text.StringText;
|
||||
|
@ -37,7 +36,10 @@ import org.elasticsearch.search.fetch.FetchSubPhase;
|
|||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -135,7 +137,7 @@ public class PlainHighlighter implements Highlighter {
|
|||
throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
|
||||
}
|
||||
if (field.scoreOrdered()) {
|
||||
Collections.sort(fragsList, new Comparator<TextFragment>() {
|
||||
CollectionUtil.quickSort(fragsList, new Comparator<TextFragment>() {
|
||||
public int compare(TextFragment o1, TextFragment o2) {
|
||||
return Math.round(o2.getScore() - o1.getScore());
|
||||
}
|
||||
|
|
|
@ -20,28 +20,20 @@
|
|||
|
||||
package org.elasticsearch.search.highlight.vectorhighlight;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
|
||||
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo;
|
||||
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo.SubInfo;
|
||||
import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.apache.lucene.util.Version;
|
||||
import org.elasticsearch.index.analysis.CustomAnalyzer;
|
||||
import org.elasticsearch.index.analysis.EdgeNGramTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory;
|
||||
import org.elasticsearch.index.analysis.NGramTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.NGramTokenizerFactory;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.*;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Simple helper class for {@link FastVectorHighlighter} {@link FragmentsBuilder} implemenations.
|
||||
*/
|
||||
|
@ -65,7 +57,7 @@ public final class FragmentBuilderHelper {
|
|||
* the FastVectorHighlighter. Yet, this is really a lucene problem and should be fixed in lucene rather
|
||||
* than in this hack... aka. "we are are working on in!" */
|
||||
final List<SubInfo> subInfos = fragInfo.getSubInfos();
|
||||
Collections.sort(subInfos, new Comparator<SubInfo>() {
|
||||
CollectionUtil.quickSort(subInfos, new Comparator<SubInfo>() {
|
||||
@Override
|
||||
public int compare(SubInfo o1, SubInfo o2) {
|
||||
int startOffset = o1.getTermsOffsets().get(0).getStartOffset();
|
||||
|
|
|
@ -18,15 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.suggest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -39,6 +31,9 @@ import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
|
|||
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
|
||||
import org.elasticsearch.search.suggest.term.TermSuggestion;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Top level suggest result, containing the result for each suggestion.
|
||||
*/
|
||||
|
@ -354,7 +349,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
|
|||
}
|
||||
|
||||
protected void sort(Comparator<O> comparator) {
|
||||
Collections.sort(options, comparator);
|
||||
CollectionUtil.timSort(options, comparator);
|
||||
}
|
||||
|
||||
protected Entry<O> reduce(List<Entry<O>> toReduce) {
|
||||
|
|
Loading…
Reference in New Issue