LUCENE-6892: various lucene.index initialCapacity tweaks

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1713903 13f79535-47bb-0310-9956-ffa450edef68
Christine Poerschke 2015-11-11 17:52:00 +00:00
parent da18097f85
commit a4b4c75dcf
6 changed files with 11 additions and 8 deletions
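Every hunk below applies the same pattern: where the number of elements a list will hold is known before it is filled, the ArrayList is constructed with that number as its initial capacity instead of relying on the default sizing (in typical OpenJDK implementations, a capacity of 10 allocated on the first add, then roughly 1.5x growth with an array copy each time). A minimal sketch of the before/after shape, with illustrative names that are not taken from the Lucene sources:

import java.util.ArrayList;
import java.util.List;

public class InitialCapacitySketch {

  // Before: the list starts at the default capacity and is grown
  // (allocate a larger backing array, then copy) as elements are appended.
  static List<String> copyWithDefaultCapacity(String[] values) {
    List<String> out = new ArrayList<>();
    for (String v : values) {
      out.add(v);
    }
    return out;
  }

  // After: the final size is known up front, so the backing array is
  // allocated once at exactly the right size and never resized.
  static List<String> copyPresized(String[] values) {
    List<String> out = new ArrayList<>(values.length);
    for (String v : values) {
      out.add(v);
    }
    return out;
  }

  public static void main(String[] args) {
    String[] values = {"postings", "norms", "docvalues"};
    // Both variants produce the same list; only the allocation behaviour differs.
    System.out.println(copyWithDefaultCapacity(values).equals(copyPresized(values))); // true
  }
}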

View File

@@ -203,6 +203,9 @@ Optimizations
 * LUCENE-6863: Optimized storage requirements of doc values fields when less
   than 1% of documents have a value. (Adrien Grand)
 
+* LUCENE-6892: various lucene.index initialCapacity tweaks
+  (Christine Poerschke)
+
 Bug Fixes
 
 * LUCENE-6817: ComplexPhraseQueryParser.ComplexPhraseQuery does not display

View File

@@ -333,7 +333,7 @@ public abstract class CodecReader extends LeafReader implements Accountable {
   @Override
   public Collection<Accountable> getChildResources() {
     ensureOpen();
-    List<Accountable> resources = new ArrayList<>();
+    final List<Accountable> resources = new ArrayList<>(6);
     // terms/postings
     resources.add(Accountables.namedAccountable("postings", getPostingsReader()));

View File

@@ -2390,7 +2390,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
    * to match with a call to {@link IOUtils#close} in a
    * finally clause. */
   private List<Lock> acquireWriteLocks(Directory... dirs) throws IOException {
-    List<Lock> locks = new ArrayList<>();
+    List<Lock> locks = new ArrayList<>(dirs.length);
     for(int i=0;i<dirs.length;i++) {
       boolean success = false;
       try {
@@ -3967,7 +3967,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       infoStream.message("IW", "merging " + segString(merge.segments));
     }
-    merge.readers = new ArrayList<>();
+    merge.readers = new ArrayList<>(sourceSegments.size());
     // This is try/finally to make sure merger's readers are
     // closed:

View File

@@ -461,7 +461,7 @@ public abstract class LogMergePolicy extends MergePolicy {
     // Compute levels, which is just log (base mergeFactor)
     // of the size of each segment
-    final List<SegmentInfoAndLevel> levels = new ArrayList<>();
+    final List<SegmentInfoAndLevel> levels = new ArrayList<>(numSegments);
     final float norm = (float) Math.log(mergeFactor);
     final Collection<SegmentCommitInfo> mergingSegments = writer.getMergingSegments();
@@ -563,7 +563,7 @@ public abstract class LogMergePolicy extends MergePolicy {
       } else if (!anyTooLarge) {
         if (spec == null)
           spec = new MergeSpecification();
-        final List<SegmentCommitInfo> mergeInfos = new ArrayList<>();
+        final List<SegmentCommitInfo> mergeInfos = new ArrayList<>(end-start);
         for(int i=start;i<end;i++) {
           mergeInfos.add(levels.get(i).info);
           assert infos.contains(levels.get(i).info);
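The comment kept as context in the first LogMergePolicy hunk describes the level computation: each segment's level is the logarithm of its size in base mergeFactor, i.e. Math.log(size) / Math.log(mergeFactor), and numSegments such levels are collected, which is why the list can be sized exactly. A small worked example of just that arithmetic (illustrative only, not the LogMergePolicy code itself):

public class LevelArithmeticSketch {
  public static void main(String[] args) {
    final int mergeFactor = 10;
    final float norm = (float) Math.log(mergeFactor);
    // A segment of 1,000,000 size units sits at level 6 when mergeFactor is 10,
    // because log base 10 of 1,000,000 is 6.
    final long segmentSize = 1_000_000L;
    final float level = (float) Math.log(segmentSize) / norm;
    System.out.println(level); // prints approximately 6.0
  }
}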

View File

@@ -66,8 +66,8 @@ public final class MultiFields extends Fields {
         // already an atomic reader / reader with one leaf
         return leaves.get(0).reader().fields();
       default:
-        final List<Fields> fields = new ArrayList<>();
-        final List<ReaderSlice> slices = new ArrayList<>();
+        final List<Fields> fields = new ArrayList<>(leaves.size());
+        final List<ReaderSlice> slices = new ArrayList<>(leaves.size());
        for (final LeafReaderContext ctx : leaves) {
          final LeafReader r = ctx.reader();
          final Fields f = r.fields();

View File

@@ -161,7 +161,7 @@ class SegmentDocValuesProducer extends DocValuesProducer {
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
+    final List<Accountable> resources = new ArrayList<>(dvProducers.size());
     for (Accountable producer : dvProducers) {
       resources.add(Accountables.namedAccountable("delegate", producer));
     }