Upgrade lucene to r1702090

The semantics of the `boost` parameter for `function_score` changed. This is
due to the fact that Lucene now requires that query boosts and top-level boosts
are applied the same way.
This commit is contained in:
Robert Muir 2015-09-09 16:35:21 -04:00 committed by Adrien Grand
parent c598271076
commit c1f2fc76c2
61 changed files with 145 additions and 247 deletions

View File

@@ -58,28 +58,28 @@ public class CustomFieldQuery extends FieldQuery {
}
@Override
void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries) throws IOException {
void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries, float boost) throws IOException {
if (sourceQuery instanceof SpanTermQuery) {
super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries);
super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries, boost);
} else if (sourceQuery instanceof ConstantScoreQuery) {
flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries);
flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries, boost);
} else if (sourceQuery instanceof FunctionScoreQuery) {
flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost);
} else if (sourceQuery instanceof FilteredQuery) {
flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries);
flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries, boost);
flatten(((FilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
} else if (sourceQuery instanceof MultiPhrasePrefixQuery) {
flatten(sourceQuery.rewrite(reader), reader, flatQueries);
flatten(sourceQuery.rewrite(reader), reader, flatQueries, boost);
} else if (sourceQuery instanceof FiltersFunctionScoreQuery) {
flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost);
} else if (sourceQuery instanceof MultiPhraseQuery) {
MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery);
convertMultiPhraseQuery(0, new int[q.getTermArrays().size()], q, q.getTermArrays(), q.getPositions(), reader, flatQueries);
} else if (sourceQuery instanceof BlendedTermQuery) {
final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery;
flatten(blendedTermQuery.rewrite(reader), reader, flatQueries);
flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost);
} else {
super.flatten(sourceQuery, reader, flatQueries);
super.flatten(sourceQuery, reader, flatQueries, boost);
}
}
@@ -93,7 +93,7 @@ public class CustomFieldQuery extends FieldQuery {
if (numTerms > 16) {
for (Term[] currentPosTerm : terms) {
for (Term term : currentPosTerm) {
super.flatten(new TermQuery(term), reader, flatQueries);
super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost());
}
}
return;
@@ -111,7 +111,7 @@ public class CustomFieldQuery extends FieldQuery {
}
PhraseQuery query = queryBuilder.build();
query.setBoost(orig.getBoost());
this.flatten(query, reader, flatQueries);
this.flatten(query, reader, flatQueries, orig.getBoost());
} else {
Term[] t = terms.get(currentPos);
for (int i = 0; i < t.length; i++) {
@@ -127,7 +127,7 @@ public class CustomFieldQuery extends FieldQuery {
return;
}
if (sourceFilter instanceof QueryWrapperFilter) {
flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries);
flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries, 1.0F);
}
}
}

View File

@@ -121,8 +121,8 @@ final class Security {
private static final Map<Pattern,String> SPECIAL_JARS;
static {
Map<Pattern,String> m = new IdentityHashMap<>();
m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
SPECIAL_JARS = Collections.unmodifiableMap(m);
}

View File

@@ -64,6 +64,9 @@ public final class AllTermQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1f) {
return super.rewrite(reader);
}
boolean fieldExists = false;
boolean hasPayloads = false;
for (LeafReaderContext context : reader.leaves()) {
@@ -98,7 +101,7 @@ public final class AllTermQuery extends Query {
final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
final TermStatistics termStats = searcher.termStatistics(term, termStates);
final Similarity similarity = searcher.getSimilarity(needsScores);
final SimWeight stats = similarity.computeWeight(getBoost(), collectionStats, termStats);
final SimWeight stats = similarity.computeWeight(collectionStats, termStats);
return new Weight(this) {
@Override

View File

@@ -120,6 +120,9 @@ public class MultiPhrasePrefixQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
if (termArrays.isEmpty()) {
return new MatchNoDocsQuery();
}

View File

@@ -24,8 +24,8 @@ import org.apache.lucene.search.Explanation;
public enum CombineFunction {
MULT {
@Override
public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
return toFloat(queryBoost * queryScore * Math.min(funcScore, maxBoost));
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat(queryScore * Math.min(funcScore, maxBoost));
}
@Override
@@ -34,21 +34,20 @@ public enum CombineFunction {
}
@Override
public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost) * queryExpl.getValue();
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
Explanation minExpl = Explanation.match(
Math.min(funcExpl.getValue(), maxBoost),
"min of:",
funcExpl, boostExpl);
return Explanation.match(score, "function score, product of:",
queryExpl, minExpl, Explanation.match(queryBoost, "queryBoost"));
return Explanation.match(queryExpl.getValue() * minExpl.getValue(),
"function score, product of:", queryExpl, minExpl);
}
},
REPLACE {
@Override
public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
return toFloat(queryBoost * Math.min(funcScore, maxBoost));
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat(Math.min(funcScore, maxBoost));
}
@Override
@@ -57,22 +56,19 @@ public enum CombineFunction {
}
@Override
public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost);
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
Explanation minExpl = Explanation.match(
return Explanation.match(
Math.min(funcExpl.getValue(), maxBoost),
"min of:",
funcExpl, boostExpl);
return Explanation.match(score, "function score, product of:",
minExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
SUM {
@Override
public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
return toFloat(queryBoost * (queryScore + Math.min(funcScore, maxBoost)));
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat(queryScore + Math.min(funcScore, maxBoost));
}
@Override
@@ -81,21 +77,18 @@ public enum CombineFunction {
}
@Override
public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = queryBoost * (Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue());
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
funcExpl, Explanation.match(maxBoost, "maxBoost"));
Explanation sumExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of",
return Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of",
queryExpl, minExpl);
return Explanation.match(score, "function score, product of:",
sumExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
AVG {
@Override
public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
return toFloat((queryBoost * (Math.min(funcScore, maxBoost) + queryScore) / 2.0));
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat((Math.min(funcScore, maxBoost) + queryScore) / 2.0);
}
@Override
@@ -104,22 +97,19 @@ public enum CombineFunction {
}
@Override
public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = toFloat(queryBoost * (queryExpl.getValue() + Math.min(funcExpl.getValue(), maxBoost)) / 2.0);
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
funcExpl, Explanation.match(maxBoost, "maxBoost"));
Explanation avgExpl = Explanation.match(
return Explanation.match(
toFloat((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of",
queryExpl, minExpl);
return Explanation.match(score, "function score, product of:",
avgExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
MIN {
@Override
public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
return toFloat(queryBoost * Math.min(queryScore, Math.min(funcScore, maxBoost)));
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat(Math.min(queryScore, Math.min(funcScore, maxBoost)));
}
@Override
@@ -128,23 +118,20 @@ public enum CombineFunction {
}
@Override
public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = toFloat(queryBoost * Math.min(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost)));
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation innerMinExpl = Explanation.match(
Math.min(funcExpl.getValue(), maxBoost), "min of:",
funcExpl, Explanation.match(maxBoost, "maxBoost"));
Explanation outerMinExpl = Explanation.match(
return Explanation.match(
Math.min(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "min of",
queryExpl, innerMinExpl);
return Explanation.match(score, "function score, product of:",
outerMinExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
MAX {
@Override
public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
return toFloat(queryBoost * (Math.max(queryScore, Math.min(funcScore, maxBoost))));
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat(Math.max(queryScore, Math.min(funcScore, maxBoost)));
}
@Override
@@ -153,21 +140,18 @@ public enum CombineFunction {
}
@Override
public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = toFloat(queryBoost * Math.max(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost)));
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation innerMinExpl = Explanation.match(
Math.min(funcExpl.getValue(), maxBoost), "min of:",
funcExpl, Explanation.match(maxBoost, "maxBoost"));
Explanation outerMaxExpl = Explanation.match(
return Explanation.match(
Math.max(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "max of:",
queryExpl, innerMinExpl);
return Explanation.match(score, "function score, product of:",
outerMaxExpl, Explanation.match(queryBoost, "queryBoost"));
}
};
public abstract float combine(double queryBoost, double queryScore, double funcScore, double maxBoost);
public abstract float combine(double queryScore, double funcScore, double maxBoost);
public abstract String getName();
@@ -181,5 +165,5 @@ public enum CombineFunction {
return Double.compare(floatVersion, input) == 0 || input == 0.0d ? 0 : 1.d - (floatVersion) / input;
}
public abstract Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost);
public abstract Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost);
}

View File

@@ -21,13 +21,11 @@ package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
abstract class CustomBoostFactorScorer extends Scorer {
final float subQueryBoost;
final Scorer scorer;
final float maxBoost;
final CombineFunction scoreCombiner;
@@ -43,7 +41,6 @@ abstract class CustomBoostFactorScorer extends Scorer {
} else {
nextDoc = new MinScoreNextDoc();
}
this.subQueryBoost = w.getQuery().getBoost();
this.scorer = scorer;
this.maxBoost = maxBoost;
this.scoreCombiner = scoreCombiner;

View File

@@ -114,6 +114,9 @@ public class FiltersFunctionScoreQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
Query newQ = subQuery.rewrite(reader);
if (newQ == subQuery)
return this;
@@ -158,14 +161,12 @@ public class FiltersFunctionScoreQuery extends Query {
@Override
public float getValueForNormalization() throws IOException {
float sum = subQueryWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
return subQueryWeight.getValueForNormalization();
}
@Override
public void normalize(float norm, float topLevelBoost) {
subQueryWeight.normalize(norm, topLevelBoost * getBoost());
public void normalize(float norm, float boost) {
subQueryWeight.normalize(norm, boost);
}
@Override
@@ -219,10 +220,7 @@ public class FiltersFunctionScoreQuery extends Query {
}
}
if (filterExplanations.size() == 0) {
float sc = getBoost() * subQueryExpl.getValue();
return Explanation.match(sc, "function score, no filter match, product of:",
subQueryExpl,
Explanation.match(getBoost(), "queryBoost"));
return subQueryExpl;
}
// Second: Compute the factor that would have been computed by the
@@ -266,7 +264,7 @@ public class FiltersFunctionScoreQuery extends Query {
CombineFunction.toFloat(factor),
"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
filterExplanations);
return combineFunction.explain(getBoost(), subQueryExpl, factorExplanation, maxBoost);
return combineFunction.explain(subQueryExpl, factorExplanation, maxBoost);
}
}
@@ -348,7 +346,7 @@ public class FiltersFunctionScoreQuery extends Query {
}
}
}
return scoreCombiner.combine(subQueryBoost, subQueryScore, factor, maxBoost);
return scoreCombiner.combine(subQueryScore, factor, maxBoost);
}
}

View File

@@ -76,6 +76,9 @@ public class FunctionScoreQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
Query newQ = subQuery.rewrite(reader);
if (newQ == subQuery) {
return this;
@@ -117,14 +120,12 @@ public class FunctionScoreQuery extends Query {
@Override
public float getValueForNormalization() throws IOException {
float sum = subQueryWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
return subQueryWeight.getValueForNormalization();
}
@Override
public void normalize(float norm, float topLevelBoost) {
subQueryWeight.normalize(norm, topLevelBoost * getBoost());
public void normalize(float norm, float boost) {
subQueryWeight.normalize(norm, boost);
}
@Override
@@ -148,7 +149,7 @@ public class FunctionScoreQuery extends Query {
}
if (function != null) {
Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
return combineFunction.explain(getBoost(), subQueryExpl, functionExplanation, maxBoost);
return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost);
} else {
return subQueryExpl;
}
@@ -174,9 +175,9 @@ public class FunctionScoreQuery extends Query {
// are needed
float score = needsScores ? scorer.score() : 0f;
if (function == null) {
return subQueryBoost * score;
return score;
} else {
return scoreCombiner.combine(subQueryBoost, score,
return scoreCombiner.combine(score,
function.score(scorer.docID(), score), maxBoost);
}
}

View File

@@ -219,6 +219,9 @@ public class DateFieldMapper extends NumberFieldMapper {
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
}

View File

@@ -209,8 +209,9 @@ public class HasChildQueryParser implements QueryParser {
@Override
public Query rewrite(IndexReader reader) throws IOException {
IndexSearcher indexSearcher = new IndexSearcher(reader);
indexSearcher.setQueryCache(null);
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
String joinField = ParentFieldMapper.joinField(parentType);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);

View File

@@ -79,15 +79,15 @@ public final class CustomQueryScorer extends QueryScorer {
Map<String, WeightedSpanTerm> terms) throws IOException {
if (query instanceof FunctionScoreQuery) {
query = ((FunctionScoreQuery) query).getSubQuery();
extract(query, terms);
extract(query, query.getBoost(), terms);
} else if (query instanceof FiltersFunctionScoreQuery) {
query = ((FiltersFunctionScoreQuery) query).getSubQuery();
extract(query, terms);
extract(query, query.getBoost(), terms);
} else if (query instanceof FilteredQuery) {
query = ((FilteredQuery) query).getQuery();
extract(query, terms);
extract(query, 1F, terms);
} else {
extractWeightedTerms(terms, query);
extractWeightedTerms(terms, query, query.getBoost());
}
}

View File

@@ -36,6 +36,10 @@ grant codeBase "${es.security.jar.lucene.core}" {
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
};
//// test framework permissions.
//// These are mock objects and test management that we allow test framework libs
//// to provide on our behalf. But tests themselves cannot do this stuff!
grant codeBase "${es.security.jar.elasticsearch.securemock}" {
// needed to support creation of mocks
permission java.lang.RuntimePermission "reflectionFactoryAccess";
@@ -80,8 +84,6 @@ grant {
permission java.lang.RuntimePermission "getProtectionDomain";
// reflection hacks:
// needed for mock filesystems in tests (to capture implCloseChannel)
permission java.lang.RuntimePermission "accessClassInPackage.sun.nio.ch";
// needed by groovy engine
permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
// needed by aws core sdk (TODO: look into this)

View File

@@ -115,7 +115,7 @@ public class BootstrapForTesting {
perms.add(new FilePermission(coverageDir.resolve("jacoco-it.exec").toString(), "read,write"));
}
Policy.setPolicy(new ESPolicy(perms));
System.setSecurityManager(new XTestSecurityManager());
System.setSecurityManager(new TestSecurityManager());
Security.selfTest();
} catch (Exception e) {
throw new RuntimeException("unable to install test security manager", e);

View File

@@ -1,113 +0,0 @@
package org.elasticsearch.bootstrap;
import java.security.AccessController;
import java.security.PrivilegedAction;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// the above license header is a lie, here is the real one.
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A {@link SecurityManager} that prevents tests calling {@link System#exit(int)}.
* Only the test runner itself is allowed to exit the JVM.
* All other security checks are handled by the default security policy.
* <p>
* Use this with {@code -Djava.security.manager=org.apache.lucene.util.TestSecurityManager}.
*/
// TODO: remove me when https://issues.apache.org/jira/browse/LUCENE-6794 is committed
public final class XTestSecurityManager extends SecurityManager {
static final String JUNIT4_TEST_RUNNER_PACKAGE = "com.carrotsearch.ant.tasks.junit4.";
static final String ECLIPSE_TEST_RUNNER_PACKAGE = "org.eclipse.jdt.internal.junit.runner.";
static final String IDEA_TEST_RUNNER_PACKAGE = "com.intellij.rt.execution.junit.";
/**
* Creates a new TestSecurityManager. This ctor is called on JVM startup,
* when {@code -Djava.security.manager=org.apache.lucene.util.TestSecurityManager}
* is passed to JVM.
*/
public XTestSecurityManager() {
super();
}
/**
* {@inheritDoc}
* <p>This method inspects the stack trace and checks who is calling
* {@link System#exit(int)} and similar methods
* @throws SecurityException if the caller of this method is not the test runner itself.
*/
@Override
public void checkExit(final int status) {
AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
final String systemClassName = System.class.getName(),
runtimeClassName = Runtime.class.getName();
String exitMethodHit = null;
for (final StackTraceElement se : Thread.currentThread().getStackTrace()) {
final String className = se.getClassName(), methodName = se.getMethodName();
if (
("exit".equals(methodName) || "halt".equals(methodName)) &&
(systemClassName.equals(className) || runtimeClassName.equals(className))
) {
exitMethodHit = className + '#' + methodName + '(' + status + ')';
continue;
}
if (exitMethodHit != null) {
if (className.startsWith(JUNIT4_TEST_RUNNER_PACKAGE) ||
className.startsWith(ECLIPSE_TEST_RUNNER_PACKAGE) ||
className.startsWith(IDEA_TEST_RUNNER_PACKAGE)) {
// this exit point is allowed, we return normally from closure:
return /*void*/ null;
} else {
// anything else in stack trace is not allowed, break and throw SecurityException below:
break;
}
}
}
if (exitMethodHit == null) {
// should never happen, only if JVM hides stack trace - replace by generic:
exitMethodHit = "JVM exit method";
}
throw new SecurityException(exitMethodHit + " calls are not allowed because they terminate the test runner's JVM.");
});
// we passed the stack check, delegate to super, so default policy can still deny permission:
super.checkExit(status);
}
}

View File

@@ -198,25 +198,30 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
public void testQueryStringBoostsBuilder() throws Exception {
IndexQueryParserService queryParser = queryParser();
QueryStringQueryBuilder builder = queryStringQuery("field:boosted^2");
Query expected = new BoostQuery(new TermQuery(new Term("field", "boosted")), 2);
Query parsedQuery = queryParser.parse(builder).query();
assertThat(parsedQuery, instanceOf(TermQuery.class));
assertThat(((TermQuery) parsedQuery).getTerm(), equalTo(new Term("field", "boosted")));
assertThat(parsedQuery.getBoost(), equalTo(2.0f));
assertEquals(expected, parsedQuery);
builder.boost(2.0f);
expected = new BoostQuery(new TermQuery(new Term("field", "boosted")), 4);
parsedQuery = queryParser.parse(builder).query();
assertThat(parsedQuery.getBoost(), equalTo(4.0f));
assertEquals(expected, parsedQuery);
builder = queryStringQuery("((field:boosted^2) AND (field:foo^1.5))^3");
expected = new BoostQuery(new BooleanQuery.Builder()
.add(new BoostQuery(new TermQuery(new Term("field", "boosted")), 2), Occur.MUST)
.add(new BoostQuery(new TermQuery(new Term("field", "foo")), 1.5f), Occur.MUST)
.build(), 3);
parsedQuery = queryParser.parse(builder).query();
assertThat(parsedQuery, instanceOf(BooleanQuery.class));
assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), equalTo(new Term("field", "boosted")));
assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getBoost(), equalTo(2.0f));
assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm(), equalTo(new Term("field", "foo")));
assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getBoost(), equalTo(1.5f));
assertThat(parsedQuery.getBoost(), equalTo(3.0f));
assertEquals(expected, parsedQuery);
builder.boost(2.0f);
expected = new BoostQuery(new BooleanQuery.Builder()
.add(new BoostQuery(new TermQuery(new Term("field", "boosted")), 2), Occur.MUST)
.add(new BoostQuery(new TermQuery(new Term("field", "foo")), 1.5f), Occur.MUST)
.build(), 6);
parsedQuery = queryParser.parse(builder).query();
assertThat(parsedQuery.getBoost(), equalTo(6.0f));
assertEquals(expected, parsedQuery);
}
@Test
@@ -1947,10 +1952,8 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
BooleanQuery.Builder expected = new BooleanQuery.Builder();
expected.add(new TermQuery(new Term("foobar", "banon")), Occur.SHOULD);
TermQuery tq1 = new TermQuery(new Term("name.first", "banon"));
tq1.setBoost(2);
TermQuery tq2 = new TermQuery(new Term("name.last", "banon"));
tq2.setBoost(3);
Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2);
Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3);
expected.add(new DisjunctionMaxQuery(Arrays.<Query>asList(tq1, tq2), 0f), Occur.SHOULD);
assertEquals(expected.build(), rewrittenQuery);
}

View File

@@ -54,6 +54,7 @@ public class DirectBufferNetworkIT extends ESIntegTestCase {
*/
@Test
public void verifySaneDirectBufferAllocations() throws Exception {
assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null);
createIndex("test");
int estimatedBytesSize = scaledRandomIntBetween(ByteSizeValue.parseBytesSizeValue("1.1mb", "estimatedBytesSize").bytesAsInt(),

View File

@@ -53,6 +53,7 @@ import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
public class DecayFunctionScoreIT extends ESIntegTestCase {
@Test
@@ -348,7 +349,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
SearchHits sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5));
assertThat((double) sh.getAt(0).score(), closeTo(0.153426408, 1.e-5));
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
@@ -359,7 +360,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5));
assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5));
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
@@ -370,7 +371,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo(2.0 * (0.30685282 + 0.5), 1.e-5));
assertThat((double) sh.getAt(0).score(), closeTo(0.30685282 + 0.5, 1.e-5));
logger.info("--> Hit[0] {} Explanation:\n {}", sr.getHits().getAt(0).id(), sr.getHits().getAt(0).explanation());
response = client().search(
@@ -382,7 +383,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5), 1.e-5));
assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5) / 2, 1.e-5));
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
@@ -393,7 +394,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo(2.0 * (0.30685282), 1.e-5));
assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5));
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
@@ -404,7 +405,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5));
assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5));
}

View File

@@ -178,7 +178,7 @@ public class FunctionScoreIT extends ESIntegTestCase {
assertThat(
responseWithWeights.getHits().getAt(0).getExplanation().toString(),
equalTo("6.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = min of:\n 6.0 = function score, score mode [multiply]\n 1.0 = function score, product of:\n 1.0 = match filter: *:*\n 1.0 = Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = function score, product of:\n 1.0 = match filter: *:*\n 2.0 = product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = function score, product of:\n 1.0 = match filter: *:*\n 3.0 = product of:\n 1.0 = script score function, computed with script:\"[script: _index['text_field']['value'].tf(), type: inline, lang: null, params: null]\n 1.0 = _score: \n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n"));
equalTo("6.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = min of:\n 6.0 = function score, score mode [multiply]\n 1.0 = function score, product of:\n 1.0 = match filter: *:*\n 1.0 = Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = function score, product of:\n 1.0 = match filter: *:*\n 2.0 = product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = function score, product of:\n 1.0 = match filter: *:*\n 3.0 = product of:\n 1.0 = script score function, computed with script:\"[script: _index['text_field']['value'].tf(), type: inline, lang: null, params: null]\n 1.0 = _score: \n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n"));
responseWithWeights = client().search(
searchRequest().source(
searchSource().query(
@ -186,7 +186,7 @@ public class FunctionScoreIT extends ESIntegTestCase {
.explain(true))).actionGet();
assertThat(
responseWithWeights.getHits().getAt(0).getExplanation().toString(),
equalTo("4.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = min of:\n 4.0 = product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n"));
equalTo("4.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = min of:\n 4.0 = product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n"));
}

View File

@ -442,4 +442,4 @@ public class TribeIT extends ESIntegTestCase {
}
return unicastHosts.toArray(new String[unicastHosts.size()]);
}
}
}

View File

@ -0,0 +1,14 @@
#!/bin/sh
# Rebuild elasticsearch and refresh the recorded license/SHA files for the
# core distribution zip and every bundled analysis plugin.
#
# Fail fast: if the build (or any checker run) fails, do not keep updating
# SHAs against stale or missing artifacts.
set -e

VERSION=3.0.0-SNAPSHOT
CHECKER=dev-tools/src/main/resources/license-check/check_license_and_sha.pl

mvn install -DskipTests

# Core distribution.
perl "$CHECKER" \
    --update distribution/licenses/ "distribution/zip/target/releases/elasticsearch-$VERSION.zip" "elasticsearch-$VERSION"

# All analysis plugins follow the same directory layout; update each in turn.
for plugin in analysis-icu analysis-kuromoji analysis-phonetic analysis-smartcn analysis-stempel; do
    perl "$CHECKER" \
        --update "plugins/$plugin/licenses/" "plugins/$plugin/target/releases/$plugin-$VERSION.zip" "$plugin-$VERSION"
done

View File

@ -1 +0,0 @@
35fca29c4597a15ce4d4eb7dc73a517038684a27

View File

@ -0,0 +1 @@
8243b938b75818e86aa8d270d8d99529e1847578

View File

@ -1 +0,0 @@
e4769b5c05fad8339f4eaf9cfa9e850cbeaa10ec

View File

@ -0,0 +1 @@
ba85c6e5e77e1f76c52c31d34a59558afa135d47

View File

@ -1 +0,0 @@
3bbab9d7a395bd0b6cc8b5bee26287105c8659e8

View File

@ -0,0 +1 @@
f8a38658b6393015c9b33c16b1b4122167b526b2

View File

@ -1 +0,0 @@
d60476428e7d3d8a68fe491d42dbda0d4024f589

View File

@ -0,0 +1 @@
fa5d27ecadbe346caaf5a571ba71944b51761acf

View File

@ -1 +0,0 @@
8618da3f400f0a4b140f196bbbecb0686fe754db

View File

@ -0,0 +1 @@
2c1464fcf6ede7819f8ba434b9bc7c79f5968407

View File

@ -1 +0,0 @@
c7db4fe5587d08ab23b253c622566462aab6796a

View File

@ -0,0 +1 @@
a40f9a3ef224bc042ef2ad1b713e318911b6057a

View File

@ -1 +0,0 @@
f9c8d435d3e1d553b0dca05c99b1fa377568eed0

View File

@ -0,0 +1 @@
0a7642c9b98cb3d9013fb33be5c0751baf9f0b31

View File

@ -1 +0,0 @@
571dd2e4363f0a0410de04b3f3f4bbf66e782c31

View File

@ -0,0 +1 @@
a0d6461ab9cda93ea530560b0c074a28fe0dd717

View File

@ -1 +0,0 @@
423264f839aace3b9159a0dd54f56c250458fd46

View File

@ -0,0 +1 @@
85c5c7b78715c50157700c90ffd101537446533d

View File

@ -1 +0,0 @@
872530eeac156faa0989eb87145bbef74a72e66f

View File

@ -0,0 +1 @@
70ca782d6ed458b5f777141353e09600083ed4fe

View File

@ -1 +0,0 @@
6f6b6a024ca96017252efea6d2fc7dc97c69febd

View File

@ -0,0 +1 @@
b4832cdfe7a6cc7c586a3e28d7cd530acb182232

View File

@ -1 +0,0 @@
a6f5a5c84b165ebde104cdcde46fa9c5948650f0

View File

@ -0,0 +1 @@
bde73ae2b2324e1576c5789a7e6dd88b6543b939

View File

@ -1 +0,0 @@
a305601f93b6cb02444816c96276a74f91ac7d40

View File

@ -0,0 +1 @@
8d261ff1c2333ce1e040c3aefca9784d1ae71acc

View File

@ -1 +0,0 @@
ef1fcaa5b6663dd9382719a1ad40d86fc962c690

View File

@ -0,0 +1 @@
ee041e52dfcdb33a1aa6fab112042b5f33fc0c0c

View File

@ -1 +0,0 @@
3698e0623f45e181d2ceead46e48a6dd8c2867dd

View File

@ -0,0 +1 @@
a8ceb11b26e53612eee9a265ff454351f6dc99f2

View File

@ -1 +0,0 @@
b7f57ef60f302b30e88196d4f0d11f789c5cfabd

View File

@ -0,0 +1 @@
1f92d0376ca9219b0bf96fe5bd9a913089608d6a

View File

@ -1 +0,0 @@
5d1023fc3f28a42357d44d3a330ac0df1df4bf42

View File

@ -0,0 +1 @@
60ee5bc1ac8ec102434e7064141a1f40281918b5

View File

@ -1 +0,0 @@
654c3e345ffdd74605582d1320c51c1c550a5cca

View File

@ -0,0 +1 @@
977aa506485d358b40602347c11238b0f912fe2c

View File

@ -1 +0,0 @@
80c09e367abf2ad936c86cf74a16ae2b4e805b81

View File

@ -0,0 +1 @@
61911b8400160bd206ea6ea46ba08fd9ba09e72b

View File

@ -1 +0,0 @@
7c6ae4fc7e8e1d39c155068fea67b7fabb12c444

View File

@ -0,0 +1 @@
5a9bdf48b63562bf1ac8a73c1c6bdb4cc450439e

View File

@ -45,7 +45,7 @@
<!-- libraries -->
<lucene.version>5.4.0</lucene.version>
<lucene.snapshot.revision>1701068</lucene.snapshot.revision>
<lucene.snapshot.revision>1702265</lucene.snapshot.revision>
<lucene.maven.version>5.4.0-snapshot-${lucene.snapshot.revision}</lucene.maven.version>
<testframework.version>2.1.16</testframework.version>
<jackson.version>2.5.3</jackson.version>