Upgrade to Lucene 6.5.0 (#23750)

Jim Ferenczi authored on 2017-03-27 15:57:54 +02:00, committed by GitHub
parent 677fd68f16
commit 0e95c90e9f
51 changed files with 105 additions and 69 deletions


@ -1,6 +1,6 @@
# When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
elasticsearch = 6.0.0-alpha1
lucene = 6.5.0-snapshot-d00c5ca
lucene = 6.5.0
# optional dependencies
spatial4j = 0.6


@ -1 +0,0 @@
9ad2a7bd252cbdb76ac121287e670d75f4db2cd3


@ -0,0 +1 @@
3989779b05ecd0ace6affe19223b1c27156604f1


@ -1 +0,0 @@
c6a940eff8a87df40262b752ed7b135e448b7873


@ -0,0 +1 @@
6a8660e7133f357ef40d9cac26316ccd9937a2eb


@ -1 +0,0 @@
6ef5ad88141760c00ea041da1535f3ffc364d67d


@ -0,0 +1 @@
ff176c9bde4228b43827849f5d2ff2e2717e3297


@ -1 +0,0 @@
f15775571fb5762dfc92e00c3909cb8db8ff1d53


@ -0,0 +1 @@
10d2e5b36f460527ac9b948be0ec3077bde5b0ca


@ -1 +0,0 @@
051d793aa64257beead4ccc7432eb5df81d17f23


@ -0,0 +1 @@
0019bb6a631ea0123e8e553b0510fa81c9d3c3eb


@ -1 +0,0 @@
5bc4cba55670c14ea812ff5de65edad4c312fdf6


@ -0,0 +1 @@
dad85baba266793b9ceb80a9b08c4ee9838e09df


@ -1 +0,0 @@
68cf08bcd8414a57493debf3a6a509d78a9abb56


@ -0,0 +1 @@
938f9f7efe8a403fd57c99aedd75d040d9caa896


@ -1 +0,0 @@
f5d90756dbeda1218d723b7bea0799c88d621adb


@ -0,0 +1 @@
afdff39ecb30f6e2c6f056a5bdfcb13d928a25af


@ -1 +0,0 @@
9298e7d1ed96e7beb63d7ccdce1a4502eb0fe484


@ -0,0 +1 @@
8e3971a008070712d57b59cf1f7b44c0d9d3df25


@ -1 +0,0 @@
918de18963607af69dff38e4773c0bde89c73ae3


@ -0,0 +1 @@
225b904edf91ccdffffa398e1924ebadd5677c09


@ -1 +0,0 @@
a311a7d9f3e9a8fbf3a367a4e2731f9d4579732b


@ -0,0 +1 @@
5c994fc5dc4f37133a861571211303d81c5d51ff


@ -1 +0,0 @@
693bc4cb0e2e4465e0173c67ed0818071c4b460b


@ -0,0 +1 @@
553b7b13bef994f14076a85557df03cad67322e9


@ -1 +0,0 @@
0326f31e63c76d476c23488c7354265cf915350f


@ -0,0 +1 @@
73deae791d861820974600705ba06e9f801cbe56


@ -1 +0,0 @@
69a3a86e9d045f872408793ea411d49e0c577268


@ -0,0 +1 @@
c2aad69500dac79338ef45f570cab47bec3d2724


@ -1 +0,0 @@
fabc05ca175150171cf60370877276b933716bcd


@ -0,0 +1 @@
acf211f2bf901dfc8155a46c5a42c5650edf74ef


@ -19,6 +19,8 @@
package org.apache.lucene.index;
import java.io.IOException;
/**
* Allows pkg private access
*/
@ -27,4 +29,33 @@ public class OneMergeHelper {
public static String getSegmentName(MergePolicy.OneMerge merge) {
return merge.info != null ? merge.info.info.name : "_na_";
}
/**
* The current MB per second rate limit for this merge.
**/
public static double getMbPerSec(Thread thread, MergePolicy.OneMerge merge) {
if (thread instanceof ConcurrentMergeScheduler.MergeThread) {
return ((ConcurrentMergeScheduler.MergeThread) thread).rateLimiter.getMBPerSec();
}
assert false : "this is not a merge thread";
return Double.POSITIVE_INFINITY;
}
/**
* Returns total bytes written by this merge.
**/
public static long getTotalBytesWritten(Thread thread,
MergePolicy.OneMerge merge) throws IOException {
/**
* TODO: The number of bytes written during the merge should be accessible in OneMerge.
*/
if (thread instanceof ConcurrentMergeScheduler.MergeThread) {
return ((ConcurrentMergeScheduler.MergeThread) thread).rateLimiter
.getTotalBytesWritten();
}
assert false : "this is not a merge thread";
return merge.totalBytesSize();
}
}
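
In Lucene 6.5.0 the per-merge rate limiter moved from OneMerge onto the ConcurrentMergeScheduler merge thread, which is why these helpers now take the current Thread. A minimal sketch of a caller, assuming it runs inside doMerge() on a merge thread; the ReportingMergeScheduler class and its log line are illustrative, not part of this commit:

```java
import java.io.IOException;

import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.OneMergeHelper;

/**
 * Illustrative scheduler (not part of this commit) that reports merge
 * throughput through the updated OneMergeHelper API. doMerge() runs on a
 * ConcurrentMergeScheduler.MergeThread, so the thread-based accessors apply.
 */
public class ReportingMergeScheduler extends ConcurrentMergeScheduler {
    @Override
    protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
        super.doMerge(writer, merge);
        Thread thread = Thread.currentThread();
        long bytesWritten = OneMergeHelper.getTotalBytesWritten(thread, merge);
        double mbPerSec = OneMergeHelper.getMbPerSec(thread, merge);
        System.out.printf("merge %s wrote %.1f MB at %.1f MB/s%n",
            OneMergeHelper.getSegmentName(merge), bytesWritten / 1024.0 / 1024.0, mbPerSec);
    }
}
```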


@ -110,10 +110,15 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
totalMergesNumDocs.inc(totalNumDocs);
totalMergesSizeInBytes.inc(totalSizeInBytes);
totalMerges.inc(tookMS);
long stoppedMS = TimeValue.nsecToMSec(merge.rateLimiter.getTotalStoppedNS());
long throttledMS = TimeValue.nsecToMSec(merge.rateLimiter.getTotalPausedNS());
long stoppedMS = TimeValue.nsecToMSec(
merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.STOPPED)
);
long throttledMS = TimeValue.nsecToMSec(
merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.PAUSED)
);
final Thread thread = Thread.currentThread();
long totalBytesWritten = OneMergeHelper.getTotalBytesWritten(thread, merge);
double mbPerSec = OneMergeHelper.getMbPerSec(thread, merge);
totalMergeStoppedTime.inc(stoppedMS);
totalMergeThrottledTime.inc(throttledMS);
@ -125,8 +130,8 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
totalNumDocs,
TimeValue.timeValueMillis(stoppedMS),
TimeValue.timeValueMillis(throttledMS),
merge.rateLimiter.getTotalBytesWritten()/1024f/1024f,
merge.rateLimiter.getMBPerSec());
totalBytesWritten/1024f/1024f,
mbPerSec);
if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it
logger.debug("{}", message);
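
With OneMerge.rateLimiter gone, stop and throttle times are now read from OneMerge.getMergeProgress().getPauseTimes(), a map of nanosecond totals keyed by PauseReason. A minimal sketch of consuming that map after a merge finishes, using only the Lucene API visible in the diff above; the class and method names are made up for illustration:

```java
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergePolicy.OneMergeProgress.PauseReason;

/** Illustrative only; not part of this commit. */
public final class MergePauseStats {
    /** Reports how long a finished merge was fully stopped versus throttled. */
    public static String describe(MergePolicy.OneMerge merge) {
        // getPauseTimes() reports cumulative pause time in nanoseconds per PauseReason.
        Map<PauseReason, Long> pauseTimes = merge.getMergeProgress().getPauseTimes();
        long stoppedMs = TimeUnit.NANOSECONDS.toMillis(pauseTimes.getOrDefault(PauseReason.STOPPED, 0L));
        long throttledMs = TimeUnit.NANOSECONDS.toMillis(pauseTimes.getOrDefault(PauseReason.PAUSED, 0L));
        return "stopped " + stoppedMs + "ms, throttled " + throttledMs + "ms";
    }
}
```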


@ -19,10 +19,10 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.DoubleRangeField;
import org.apache.lucene.document.FloatRangeField;
import org.apache.lucene.document.IntRangeField;
import org.apache.lucene.document.LongRangeField;
import org.apache.lucene.document.DoubleRange;
import org.apache.lucene.document.FloatRange;
import org.apache.lucene.document.IntRange;
import org.apache.lucene.document.LongRange;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@ -430,7 +430,7 @@ public class RangeFieldMapper extends FieldMapper {
DATE("date_range", NumberType.LONG) {
@Override
public Field getRangeField(String name, Range r) {
return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()});
return new LongRange(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()});
}
private Number parse(DateMathParser dateMathParser, String dateStr) {
return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");});
@ -516,7 +516,7 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Field getRangeField(String name, Range r) {
return new FloatRangeField(name, new float[] {r.from.floatValue()}, new float[] {r.to.floatValue()});
return new FloatRange(name, new float[] {r.from.floatValue()}, new float[] {r.to.floatValue()});
}
@Override
public byte[] getBytes(Range r) {
@ -527,19 +527,19 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return FloatRangeField.newWithinQuery(field,
return FloatRange.newWithinQuery(field,
new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)},
new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)});
}
@Override
public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return FloatRangeField.newContainsQuery(field,
return FloatRange.newContainsQuery(field,
new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)},
new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)});
}
@Override
public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return FloatRangeField.newIntersectsQuery(field,
return FloatRange.newIntersectsQuery(field,
new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)},
new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)});
}
@ -563,7 +563,7 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Field getRangeField(String name, Range r) {
return new DoubleRangeField(name, new double[] {r.from.doubleValue()}, new double[] {r.to.doubleValue()});
return new DoubleRange(name, new double[] {r.from.doubleValue()}, new double[] {r.to.doubleValue()});
}
@Override
public byte[] getBytes(Range r) {
@ -574,19 +574,19 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return DoubleRangeField.newWithinQuery(field,
return DoubleRange.newWithinQuery(field,
new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)},
new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)});
}
@Override
public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return DoubleRangeField.newContainsQuery(field,
return DoubleRange.newContainsQuery(field,
new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)},
new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)});
}
@Override
public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return DoubleRangeField.newIntersectsQuery(field,
return DoubleRange.newIntersectsQuery(field,
new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)},
new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)});
}
@ -612,7 +612,7 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Field getRangeField(String name, Range r) {
return new IntRangeField(name, new int[] {r.from.intValue()}, new int[] {r.to.intValue()});
return new IntRange(name, new int[] {r.from.intValue()}, new int[] {r.to.intValue()});
}
@Override
public byte[] getBytes(Range r) {
@ -623,17 +623,17 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return IntRangeField.newWithinQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
return IntRange.newWithinQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
new int[] {(Integer)to - (includeTo ? 0 : 1)});
}
@Override
public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return IntRangeField.newContainsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
return IntRange.newContainsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
new int[] {(Integer)to - (includeTo ? 0 : 1)});
}
@Override
public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return IntRangeField.newIntersectsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
return IntRange.newIntersectsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
new int[] {(Integer)to - (includeTo ? 0 : 1)});
}
},
@ -656,7 +656,7 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Field getRangeField(String name, Range r) {
return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()});
return new LongRange(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()});
}
@Override
public byte[] getBytes(Range r) {
@ -669,17 +669,17 @@ public class RangeFieldMapper extends FieldMapper {
}
@Override
public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return LongRangeField.newWithinQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
return LongRange.newWithinQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
new long[] {(Long)to - (includeTo ? 0 : 1)});
}
@Override
public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return LongRangeField.newContainsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
return LongRange.newContainsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
new long[] {(Long)to - (includeTo ? 0 : 1)});
}
@Override
public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
return LongRangeField.newIntersectsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
return LongRange.newIntersectsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
new long[] {(Long)to - (includeTo ? 0 : 1)});
}
};
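
The Lucene 6.5.0 rename is mechanical: DoubleRangeField, FloatRangeField, IntRangeField and LongRangeField become DoubleRange, FloatRange, IntRange and LongRange, with the same constructors and query factory methods. A minimal sketch of indexing and querying with the renamed LongRange class; the field name and bounds are invented for the example:

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongRange;
import org.apache.lucene.search.Query;

/** Illustrative only; not part of this commit. */
public final class LongRangeExample {
    public static void main(String[] args) {
        // Index a single-dimension long range [10, 20] under a hypothetical field name.
        Document doc = new Document();
        doc.add(new LongRange("duration", new long[] {10L}, new long[] {20L}));

        // Query for ranges intersecting [15, 25]; the other factories
        // (newWithinQuery, newContainsQuery) follow the same shape.
        Query query = LongRange.newIntersectsQuery("duration", new long[] {15L}, new long[] {25L});
        System.out.println(query);
    }
}
```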


@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
grant codeBase "${codebase.lucene-core-6.5.0-snapshot-d00c5ca.jar}" {
grant codeBase "${codebase.lucene-core-6.5.0.jar}" {
// needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
// java 8 package
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.5.0-snapshot-d00c5ca.jar}" {
permission java.lang.RuntimePermission "accessDeclaredMembers";
};
grant codeBase "${codebase.lucene-misc-6.5.0-snapshot-d00c5ca.jar}" {
grant codeBase "${codebase.lucene-misc-6.5.0.jar}" {
// needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper
permission java.nio.file.LinkPermission "hard";
};


@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
grant codeBase "${codebase.lucene-test-framework-6.5.0-snapshot-d00c5ca.jar}" {
grant codeBase "${codebase.lucene-test-framework-6.5.0.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
// needed for testing hardlinks in StoreRecoveryTests since we install MockFS


@ -19,10 +19,10 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.DoubleRangeField;
import org.apache.lucene.document.FloatRangeField;
import org.apache.lucene.document.IntRangeField;
import org.apache.lucene.document.LongRangeField;
import org.apache.lucene.document.DoubleRange;
import org.apache.lucene.document.FloatRange;
import org.apache.lucene.document.IntRange;
import org.apache.lucene.document.LongRange;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
@ -113,44 +113,44 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
int[] lower = new int[] {from + (includeLower ? 0 : 1)};
int[] upper = new int[] {to - (includeUpper ? 0 : 1)};
if (relation == ShapeRelation.WITHIN) {
return IntRangeField.newWithinQuery(FIELDNAME, lower, upper);
return IntRange.newWithinQuery(FIELDNAME, lower, upper);
} else if (relation == ShapeRelation.CONTAINS) {
return IntRangeField.newContainsQuery(FIELDNAME, lower, upper);
return IntRange.newContainsQuery(FIELDNAME, lower, upper);
}
return IntRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
return IntRange.newIntersectsQuery(FIELDNAME, lower, upper);
}
private Query getLongRangeQuery(ShapeRelation relation, long from, long to, boolean includeLower, boolean includeUpper) {
long[] lower = new long[] {from + (includeLower ? 0 : 1)};
long[] upper = new long[] {to - (includeUpper ? 0 : 1)};
if (relation == ShapeRelation.WITHIN) {
return LongRangeField.newWithinQuery(FIELDNAME, lower, upper);
return LongRange.newWithinQuery(FIELDNAME, lower, upper);
} else if (relation == ShapeRelation.CONTAINS) {
return LongRangeField.newContainsQuery(FIELDNAME, lower, upper);
return LongRange.newContainsQuery(FIELDNAME, lower, upper);
}
return LongRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
return LongRange.newIntersectsQuery(FIELDNAME, lower, upper);
}
private Query getFloatRangeQuery(ShapeRelation relation, float from, float to, boolean includeLower, boolean includeUpper) {
float[] lower = new float[] {includeLower ? from : Math.nextUp(from)};
float[] upper = new float[] {includeUpper ? to : Math.nextDown(to)};
if (relation == ShapeRelation.WITHIN) {
return FloatRangeField.newWithinQuery(FIELDNAME, lower, upper);
return FloatRange.newWithinQuery(FIELDNAME, lower, upper);
} else if (relation == ShapeRelation.CONTAINS) {
return FloatRangeField.newContainsQuery(FIELDNAME, lower, upper);
return FloatRange.newContainsQuery(FIELDNAME, lower, upper);
}
return FloatRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
return FloatRange.newIntersectsQuery(FIELDNAME, lower, upper);
}
private Query getDoubleRangeQuery(ShapeRelation relation, double from, double to, boolean includeLower, boolean includeUpper) {
double[] lower = new double[] {includeLower ? from : Math.nextUp(from)};
double[] upper = new double[] {includeUpper ? to : Math.nextDown(to)};
if (relation == ShapeRelation.WITHIN) {
return DoubleRangeField.newWithinQuery(FIELDNAME, lower, upper);
return DoubleRange.newWithinQuery(FIELDNAME, lower, upper);
} else if (relation == ShapeRelation.CONTAINS) {
return DoubleRangeField.newContainsQuery(FIELDNAME, lower, upper);
return DoubleRange.newContainsQuery(FIELDNAME, lower, upper);
}
return DoubleRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
return DoubleRange.newIntersectsQuery(FIELDNAME, lower, upper);
}
private Object nextFrom() {


@ -1 +0,0 @@
eb201cc666e834f5f128cea00acdf2c046fcbb87


@ -0,0 +1 @@
5dfd44932fc77187a233a1cbf228c1a96ac8924f


@ -1 +0,0 @@
165f826617aa6cb7af67b2c3f87df3b46216a155


@ -0,0 +1 @@
3a71465f63887f871bc377d87a0838c29b0a857d


@ -1 +0,0 @@
50ed8c505a120bfcd1d5a7d3fae837027153f0dd


@ -0,0 +1 @@
03353b0d030f6d5a63c4c0d5b64c770f5ba9d829


@ -1 +0,0 @@
f4c04ecad541aa9526c4e2bd4e98aa08898ffa1c


@ -0,0 +1 @@
77ce4fb8c62688d8a094f08a07685c464ec46345


@ -1 +0,0 @@
bc5ca65f0db1ec9f71481c6ad4e146bbf56df32e


@ -0,0 +1 @@
60a780d900e48b0cead42d82fe405ad54bd658c3


@ -1 +0,0 @@
dae2a3e6b79197d4e48ee1ae8d0ef31b8b20069e


@ -0,0 +1 @@
894c42c011d291e72d14db660499c75281de9efd


@ -1 +0,0 @@
318fcd0d1d33d45088ac3f4ab8291a4a22060078


@ -0,0 +1 @@
72f0172cf947ab563a7c8166855cf7cbdfe33136