mirror of https://github.com/apache/lucene.git

commit 5e5462e408

    Merge branch 'master' into feature/autoscaling

    # Conflicts:
    #	solr/CHANGES.txt

@@ -138,7 +138,7 @@
   'java', 'jflex', 'py', 'pl', 'g4', 'jj', 'html', 'js',
   'css', 'xml', 'xsl', 'vm', 'sh', 'cmd', 'bat', 'policy',
   'properties', 'mdtext',
-  'template', 'adoc',
+  'template', 'adoc', 'json',
 ];
 def invalidPatterns = [
   (~$/@author\b/$) : '@author javadoc tag',

@@ -24,11 +24,11 @@ Optimizations
 * LUCENE-7655: Speed up geo-distance queries in case of dense single-valued
   fields when most documents match. (Maciej Zasada via Adrien Grand)
 
-Bug Fixes
+* LUCENE-7897: IndexOrDocValuesQuery now requires the range cost to be more
+  than 8x greater than the cost of the lead iterator in order to use doc values.
+  (Murali Krishna P via Adrien Grand)
 
-* LUCENE-7914: Add a maximum recursion level in automaton recursive
-  functions (Operations.isFinite and Operations.topsortState) to prevent
-  large automaton to overflow the stack (Robert Muir, Adrien Grand, Jim Ferenczi)
+Bug Fixes
 
 * LUCENE-7916: Prevent ArrayIndexOutOfBoundsException if ICUTokenizer is used
   with a different ICU JAR version than it is compiled against. Note, this is

@@ -151,6 +151,10 @@ Bug Fixes
 * LUCENE-7871: fix false positive match in BlockJoinSelector when children have no value, introducing
   wrap methods accepting children as DISI. Extracting ToParentDocValues (Mikhail Khludnev)
 
+* LUCENE-7914: Add a maximum recursion level in automaton recursive
+  functions (Operations.isFinite and Operations.topsortState) to prevent
+  large automaton to overflow the stack (Robert Muir, Adrien Grand, Jim Ferenczi)
+
 Improvements
 
 * LUCENE-7489: Better storage of sparse doc-values fields with the default

@@ -188,6 +192,8 @@ Optimizations
 
 Other
 
+* LUCENE-7923: Removed FST.Arc.node field (unused). (Dawid Weiss)
+
 * LUCENE-7328: Remove LegacyNumericEncoding from GeoPointField. (Nick Knize)
 
 * LUCENE-7360: Remove Explanation.toHtml() (Alan Woodward)

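Note on the LUCENE-7897 entry above: IndexOrDocValuesQuery wraps an index-structure query and a doc-values query over the same field and lets the search-time cost comparison pick between them. A minimal construction sketch in Java, under stated assumptions: the field name and bounds are invented, and the doc-values factory method shown (newRangeQuery, later renamed newSlowRangeQuery) is assumed from the 7.x API.

    import org.apache.lucene.document.LongPoint;
    import org.apache.lucene.document.SortedNumericDocValuesField;
    import org.apache.lucene.search.IndexOrDocValuesQuery;
    import org.apache.lucene.search.Query;

    class IndexOrDocValuesSketch {
      static Query newTimestampRange(long min, long max) {
        // Points-based implementation: fastest when this query leads iteration.
        Query indexQuery = LongPoint.newRangeQuery("timestamp", min, max);
        // Doc-values implementation: cheaper when another clause leads and only
        // a few candidate documents need to be verified.
        Query dvQuery = SortedNumericDocValuesField.newRangeQuery("timestamp", min, max);
        // After LUCENE-7897, the points query is used unless its cost exceeds
        // 8x the cost of the lead iterator.
        return new IndexOrDocValuesQuery(indexQuery, dvQuery);
      }
    }
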
@@ -68,7 +68,7 @@ public class LowerCaseTokenizerFactory extends TokenizerFactory implements Multi
 
   @Override
   public AbstractAnalysisFactory getMultiTermComponent() {
-    Map map = new HashMap<>(getOriginalArgs());
+    Map<String,String> map = new HashMap<>(getOriginalArgs());
     map.remove("maxTokenLen"); //removing "maxTokenLen" argument for LowerCaseFilterFactory init
     return new LowerCaseFilterFactory(map);
   }

@@ -312,7 +312,7 @@ abstract class RangeFieldQuery extends Query {
         if (allDocsMatch) {
           return new ScorerSupplier() {
             @Override
-            public Scorer get(boolean randomAccess) {
+            public Scorer get(long leadCost) {
               return new ConstantScoreScorer(weight, score(), DocIdSetIterator.all(reader.maxDoc()));
             }
 
@@ -329,7 +329,7 @@ abstract class RangeFieldQuery extends Query {
             long cost = -1;
 
             @Override
-            public Scorer get(boolean randomAccess) throws IOException {
+            public Scorer get(long leadCost) throws IOException {
               values.intersect(visitor);
               DocIdSetIterator iterator = result.build().iterator();
               return new ConstantScoreScorer(weight, score(), iterator);

@@ -354,7 +354,7 @@ abstract class RangeFieldQuery extends Query {
       if (scorerSupplier == null) {
         return null;
       }
-      return scorerSupplier.get(false);
+      return scorerSupplier.get(Long.MAX_VALUE);
     }
   };
 }

@@ -26,7 +26,6 @@ import java.util.OptionalLong;
 import java.util.stream.Stream;
 
 import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.util.PriorityQueue;
 
 final class Boolean2ScorerSupplier extends ScorerSupplier {
 
@@ -84,17 +83,18 @@ final class Boolean2ScorerSupplier extends ScorerSupplier {
   }
 
   @Override
-  public Scorer get(boolean randomAccess) throws IOException {
+  public Scorer get(long leadCost) throws IOException {
     // three cases: conjunction, disjunction, or mix
+    leadCost = Math.min(leadCost, cost());
 
     // pure conjunction
     if (subs.get(Occur.SHOULD).isEmpty()) {
-      return excl(req(subs.get(Occur.FILTER), subs.get(Occur.MUST), randomAccess), subs.get(Occur.MUST_NOT));
+      return excl(req(subs.get(Occur.FILTER), subs.get(Occur.MUST), leadCost), subs.get(Occur.MUST_NOT), leadCost);
     }
 
     // pure disjunction
     if (subs.get(Occur.FILTER).isEmpty() && subs.get(Occur.MUST).isEmpty()) {
-      return excl(opt(subs.get(Occur.SHOULD), minShouldMatch, needsScores, randomAccess), subs.get(Occur.MUST_NOT));
+      return excl(opt(subs.get(Occur.SHOULD), minShouldMatch, needsScores, leadCost), subs.get(Occur.MUST_NOT), leadCost);
     }
 
     // conjunction-disjunction mix:
@@ -103,38 +103,23 @@ final class Boolean2ScorerSupplier extends ScorerSupplier {
     // optional side must match. otherwise it's required + optional
 
     if (minShouldMatch > 0) {
-      boolean reqRandomAccess = true;
-      boolean msmRandomAccess = true;
-      if (randomAccess == false) {
-        // We need to figure out whether the MUST/FILTER or the SHOULD clauses would lead the iteration
-        final long reqCost = Stream.concat(
-            subs.get(Occur.MUST).stream(),
-            subs.get(Occur.FILTER).stream())
-            .mapToLong(ScorerSupplier::cost)
-            .min().getAsLong();
-        final long msmCost = MinShouldMatchSumScorer.cost(
-            subs.get(Occur.SHOULD).stream().mapToLong(ScorerSupplier::cost),
-            subs.get(Occur.SHOULD).size(), minShouldMatch);
-        reqRandomAccess = reqCost > msmCost;
-        msmRandomAccess = msmCost > reqCost;
-      }
-      Scorer req = excl(req(subs.get(Occur.FILTER), subs.get(Occur.MUST), reqRandomAccess), subs.get(Occur.MUST_NOT));
-      Scorer opt = opt(subs.get(Occur.SHOULD), minShouldMatch, needsScores, msmRandomAccess);
+      Scorer req = excl(req(subs.get(Occur.FILTER), subs.get(Occur.MUST), leadCost), subs.get(Occur.MUST_NOT), leadCost);
+      Scorer opt = opt(subs.get(Occur.SHOULD), minShouldMatch, needsScores, leadCost);
       return new ConjunctionScorer(weight, Arrays.asList(req, opt), Arrays.asList(req, opt));
     } else {
       assert needsScores;
       return new ReqOptSumScorer(
-          excl(req(subs.get(Occur.FILTER), subs.get(Occur.MUST), randomAccess), subs.get(Occur.MUST_NOT)),
-          opt(subs.get(Occur.SHOULD), minShouldMatch, needsScores, true));
+          excl(req(subs.get(Occur.FILTER), subs.get(Occur.MUST), leadCost), subs.get(Occur.MUST_NOT), leadCost),
+          opt(subs.get(Occur.SHOULD), minShouldMatch, needsScores, leadCost));
     }
   }
 
   /** Create a new scorer for the given required clauses. Note that
    * {@code requiredScoring} is a subset of {@code required} containing
    * required clauses that should participate in scoring. */
-  private Scorer req(Collection<ScorerSupplier> requiredNoScoring, Collection<ScorerSupplier> requiredScoring, boolean randomAccess) throws IOException {
+  private Scorer req(Collection<ScorerSupplier> requiredNoScoring, Collection<ScorerSupplier> requiredScoring, long leadCost) throws IOException {
     if (requiredNoScoring.size() + requiredScoring.size() == 1) {
-      Scorer req = (requiredNoScoring.isEmpty() ? requiredScoring : requiredNoScoring).iterator().next().get(randomAccess);
+      Scorer req = (requiredNoScoring.isEmpty() ? requiredScoring : requiredNoScoring).iterator().next().get(leadCost);
 
       if (needsScores == false) {
         return req;
@@ -158,16 +143,13 @@ final class Boolean2ScorerSupplier extends ScorerSupplier {
 
       return req;
     } else {
-      long minCost = Math.min(
-          requiredNoScoring.stream().mapToLong(ScorerSupplier::cost).min().orElse(Long.MAX_VALUE),
-          requiredScoring.stream().mapToLong(ScorerSupplier::cost).min().orElse(Long.MAX_VALUE));
       List<Scorer> requiredScorers = new ArrayList<>();
       List<Scorer> scoringScorers = new ArrayList<>();
       for (ScorerSupplier s : requiredNoScoring) {
-        requiredScorers.add(s.get(randomAccess || s.cost() > minCost));
+        requiredScorers.add(s.get(leadCost));
       }
       for (ScorerSupplier s : requiredScoring) {
-        Scorer scorer = s.get(randomAccess || s.cost() > minCost);
+        Scorer scorer = s.get(leadCost);
         requiredScorers.add(scorer);
         scoringScorers.add(scorer);
       }
@@ -175,42 +157,28 @@ final class Boolean2ScorerSupplier extends ScorerSupplier {
     }
   }
 
-  private Scorer excl(Scorer main, Collection<ScorerSupplier> prohibited) throws IOException {
+  private Scorer excl(Scorer main, Collection<ScorerSupplier> prohibited, long leadCost) throws IOException {
     if (prohibited.isEmpty()) {
       return main;
     } else {
-      return new ReqExclScorer(main, opt(prohibited, 1, false, true));
+      return new ReqExclScorer(main, opt(prohibited, 1, false, leadCost));
     }
   }
 
   private Scorer opt(Collection<ScorerSupplier> optional, int minShouldMatch,
-      boolean needsScores, boolean randomAccess) throws IOException {
+      boolean needsScores, long leadCost) throws IOException {
     if (optional.size() == 1) {
-      return optional.iterator().next().get(randomAccess);
-    } else if (minShouldMatch > 1) {
-      final List<Scorer> optionalScorers = new ArrayList<>();
-      final PriorityQueue<ScorerSupplier> pq = new PriorityQueue<ScorerSupplier>(subs.get(Occur.SHOULD).size() - minShouldMatch + 1) {
-        @Override
-        protected boolean lessThan(ScorerSupplier a, ScorerSupplier b) {
-          return a.cost() > b.cost();
-        }
-      };
-      for (ScorerSupplier scorer : subs.get(Occur.SHOULD)) {
-        ScorerSupplier overflow = pq.insertWithOverflow(scorer);
-        if (overflow != null) {
-          optionalScorers.add(overflow.get(true));
-        }
-      }
-      for (ScorerSupplier scorer : pq) {
-        optionalScorers.add(scorer.get(randomAccess));
-      }
-      return new MinShouldMatchSumScorer(weight, optionalScorers, minShouldMatch);
+      return optional.iterator().next().get(leadCost);
     } else {
       final List<Scorer> optionalScorers = new ArrayList<>();
       for (ScorerSupplier scorer : optional) {
-        optionalScorers.add(scorer.get(randomAccess));
+        optionalScorers.add(scorer.get(leadCost));
       }
-      return new DisjunctionSumScorer(weight, optionalScorers, needsScores);
+      if (minShouldMatch > 1) {
+        return new MinShouldMatchSumScorer(weight, optionalScorers, minShouldMatch);
+      } else {
+        return new DisjunctionSumScorer(weight, optionalScorers, needsScores);
+      }
     }
   }
 

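The shape of the change above, distilled: every call site that used to receive a boolean randomAccess hint now receives the estimated cost of the lead iterator, clamped via Math.min(leadCost, cost()) at the top of get(). A minimal sketch of how a two-clause conjunction consumes the new API; the helper class and method names are hypothetical, not part of the commit.

    import java.io.IOException;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.ScorerSupplier;

    final class ConjunctionSketch {
      static Scorer[] materialize(ScorerSupplier a, ScorerSupplier b) throws IOException {
        // The cheaper clause leads iteration, so its cost bounds how many
        // times the other clause will be advanced.
        long leadCost = Math.min(a.cost(), b.cost());
        // Both clauses learn the lead cost, mirroring Boolean2ScorerSupplier.req():
        // each supplier may pick a different execution strategy (e.g. doc values
        // instead of points) when leadCost is much smaller than its own cost.
        return new Scorer[] { a.get(leadCost), b.get(leadCost) };
      }
    }
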
@@ -296,7 +296,7 @@ final class BooleanWeight extends Weight {
     if (scorerSupplier == null) {
       return null;
     }
-    return scorerSupplier.get(false);
+    return scorerSupplier.get(Long.MAX_VALUE);
   }
 
   @Override

@@ -132,8 +132,8 @@ public final class ConstantScoreQuery extends Query {
       }
       return new ScorerSupplier() {
         @Override
-        public Scorer get(boolean randomAccess) throws IOException {
-          final Scorer innerScorer = innerScorerSupplier.get(randomAccess);
+        public Scorer get(long leadCost) throws IOException {
+          final Scorer innerScorer = innerScorerSupplier.get(leadCost);
           final float score = score();
           return new FilterScorer(innerScorer) {
             @Override

@@ -164,7 +164,7 @@ public final class ConstantScoreQuery extends Query {
       if (scorerSupplier == null) {
         return null;
       }
-      return scorerSupplier.get(false);
+      return scorerSupplier.get(Long.MAX_VALUE);
     }
 
   };

@@ -141,13 +141,22 @@ public final class IndexOrDocValuesQuery extends Query {
         }
         return new ScorerSupplier() {
           @Override
-          public Scorer get(boolean randomAccess) throws IOException {
-            return (randomAccess ? dvScorerSupplier : indexScorerSupplier).get(randomAccess);
+          public Scorer get(long leadCost) throws IOException {
+            // At equal costs, doc values tend to be worse than points since they
+            // still need to perform one comparison per document while points can
+            // do much better than that given how values are organized. So we give
+            // an arbitrary 8x penalty to doc values.
+            final long threshold = cost() >>> 3;
+            if (threshold <= leadCost) {
+              return indexScorerSupplier.get(leadCost);
+            } else {
+              return dvScorerSupplier.get(leadCost);
+            }
           }
 
           @Override
           public long cost() {
-            return Math.min(indexScorerSupplier.cost(), dvScorerSupplier.cost());
+            return indexScorerSupplier.cost();
           }
         };
       }

@@ -158,7 +167,7 @@ public final class IndexOrDocValuesQuery extends Query {
         if (scorerSupplier == null) {
           return null;
         }
-        return scorerSupplier.get(false);
+        return scorerSupplier.get(Long.MAX_VALUE);
       }
     };
   }

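Worked numbers for the 8x rule above, directly from the threshold arithmetic in the hunk; all concrete values here are invented for illustration.

    class EightXRuleExample {
      public static void main(String[] args) {
        long cost = 100_000L;          // estimated matches of the points query
        long threshold = cost >>> 3;   // 12_500: the "8x penalty" for doc values
        // A lead iterator matching 50_000 docs: threshold <= leadCost, so the
        // points-based scorer is used.
        System.out.println(threshold <= 50_000L);  // true  -> index query
        // A lead iterator matching only 1_000 docs: verifying those candidates
        // against doc values beats building the full points bitset.
        System.out.println(threshold <= 1_000L);   // false -> doc-values query
      }
    }
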
@@ -767,7 +767,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
 
       return new ScorerSupplier() {
         @Override
-        public Scorer get(boolean randomAccess) throws IOException {
+        public Scorer get(long LeadCost) throws IOException {
           return new ConstantScoreScorer(CachingWrapperWeight.this, 0f, disi);
         }
 
@@ -785,7 +785,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
       if (scorerSupplier == null) {
         return null;
       }
-      return scorerSupplier.get(false);
+      return scorerSupplier.get(Long.MAX_VALUE);
     }
 
     @Override

@@ -262,7 +262,7 @@ public abstract class PointRangeQuery extends Query {
           // all docs have a value and all points are within bounds, so everything matches
           return new ScorerSupplier() {
             @Override
-            public Scorer get(boolean randomAccess) {
+            public Scorer get(long leadCost) {
               return new ConstantScoreScorer(weight, score(),
                   DocIdSetIterator.all(reader.maxDoc()));
             }

@@ -280,7 +280,7 @@ public abstract class PointRangeQuery extends Query {
             long cost = -1;
 
             @Override
-            public Scorer get(boolean randomAccess) throws IOException {
+            public Scorer get(long leadCost) throws IOException {
               if (values.getDocCount() == reader.maxDoc()
                   && values.getDocCount() == values.size()
                   && cost() > reader.maxDoc() / 2) {

@@ -319,7 +319,7 @@ public abstract class PointRangeQuery extends Query {
       if (scorerSupplier == null) {
         return null;
       }
-      return scorerSupplier.get(false);
+      return scorerSupplier.get(Long.MAX_VALUE);
     }
   };
 }

@@ -27,15 +27,14 @@ public abstract class ScorerSupplier {
   /**
    * Get the {@link Scorer}. This may not return {@code null} and must be called
    * at most once.
-   * @param randomAccess A hint about the expected usage of the {@link Scorer}.
-   * If {@link DocIdSetIterator#advance} or {@link TwoPhaseIterator} will be
-   * used to check whether given doc ids match, then pass {@code true}.
-   * Otherwise if the {@link Scorer} will be mostly used to lead the iteration
-   * using {@link DocIdSetIterator#nextDoc()}, then {@code false} should be
-   * passed. Under doubt, pass {@code false} which usually has a better
-   * worst-case.
+   * @param leadCost Cost of the scorer that will be used in order to lead
+   * iteration. This can be interpreted as an upper bound of the number of times
+   * that {@link DocIdSetIterator#nextDoc}, {@link DocIdSetIterator#advance}
+   * and {@link TwoPhaseIterator#matches} will be called. Under doubt, pass
+   * {@link Long#MAX_VALUE}, which will produce a {@link Scorer} that has good
+   * iteration capabilities.
    */
-  public abstract Scorer get(boolean randomAccess) throws IOException;
+  public abstract Scorer get(long leadCost) throws IOException;
 
   /**
    * Get an estimate of the {@link Scorer} that would be returned by {@link #get}.

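The updated contract in practice: callers that will consume the scorer exhaustively pass Long.MAX_VALUE (the pattern behind the many get(Long.MAX_VALUE) call sites in this commit), while callers that intersect with a cheaper lead pass that lead's cost. A short sketch; the wrapper class is hypothetical.

    import java.io.IOException;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.ScorerSupplier;

    final class ScorerSupplierUsage {
      // Exhaustive consumption: no cheaper lead exists, so ask for a scorer
      // with good nextDoc() iteration capabilities.
      static Scorer forFullIteration(ScorerSupplier supplier) throws IOException {
        return supplier.get(Long.MAX_VALUE);
      }

      // Verification against a known lead: the supplier may return an
      // advance()-oriented implementation when leadCost is small.
      static Scorer forIntersection(ScorerSupplier supplier, long leadCost) throws IOException {
        return supplier.get(leadCost);
      }
    }
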
@@ -116,7 +116,7 @@ public abstract class Weight {
     }
     return new ScorerSupplier() {
       @Override
-      public Scorer get(boolean randomAccess) {
+      public Scorer get(long leadCost) {
         return scorer;
       }
 

@@ -161,10 +161,6 @@ public final class FST<T> implements Accountable {
     public int label;
     public T output;
 
-    // From node (ord or address); currently only used when
-    // building an FST w/ willPackFST=true:
-    long node;
-
     /** To node (ord or address) */
     public long target;
 

@@ -193,7 +189,6 @@ public final class FST<T> implements Accountable {
 
     /** Returns this */
     public Arc<T> copyFrom(Arc<T> other) {
-      node = other.node;
       label = other.label;
       target = other.target;
       flags = other.flags;

@@ -224,7 +219,6 @@ public final class FST<T> implements Accountable {
     @Override
     public String toString() {
       StringBuilder b = new StringBuilder();
-      b.append("node=" + node);
       b.append(" target=" + target);
       b.append(" label=0x" + Integer.toHexString(label));
       if (flag(BIT_FINAL_ARC)) {

@@ -770,7 +764,6 @@ public final class FST<T> implements Accountable {
       return arc;
     } else {
       in.setPosition(follow.target);
-      arc.node = follow.target;
       final byte b = in.readByte();
       if (b == ARCS_AS_FIXED_ARRAY) {
         // array: jump straight to end

@@ -842,7 +835,6 @@ public final class FST<T> implements Accountable {
       if (follow.target <= 0) {
         arc.flags |= BIT_LAST_ARC;
       } else {
-        arc.node = follow.target;
         // NOTE: nextArc is a node (not an address!) in this case:
         arc.nextArc = follow.target;
       }

@@ -860,7 +852,6 @@ public final class FST<T> implements Accountable {
     //System.out.println("  readFirstRealTargtArc address="
     //+ address);
     //System.out.println("  flags=" + arc.flags);
-    arc.node = node;
 
     if (in.readByte() == ARCS_AS_FIXED_ARRAY) {
       //System.out.println("  fixedArray");

@@ -1035,7 +1026,6 @@ public final class FST<T> implements Accountable {
     assert cachedArc.label == result.label;
     assert cachedArc.nextArc == result.nextArc;
     assert cachedArc.nextFinalOutput.equals(result.nextFinalOutput);
-    assert cachedArc.node == result.node;
     assert cachedArc.numArcs == result.numArcs;
     assert cachedArc.output.equals(result.output);
     assert cachedArc.posArcsStart == result.posArcsStart;

@@ -1066,7 +1056,6 @@ public final class FST<T> implements Accountable {
       arc.flags = 0;
       // NOTE: nextArc is a node (not an address!) in this case:
       arc.nextArc = follow.target;
-      arc.node = follow.target;
     }
     arc.output = follow.nextFinalOutput;
     arc.label = END_LABEL;

@@ -1098,8 +1087,6 @@ public final class FST<T> implements Accountable {
 
     in.setPosition(follow.target);
 
-    arc.node = follow.target;
-
     // System.out.println("fta label=" + (char) labelToMatch);
 
     if (in.readByte() == ARCS_AS_FIXED_ARRAY) {

@@ -620,8 +620,7 @@ public final class Util {
  *
  * <p>
  * Note: larger FSTs (a few thousand nodes) won't even
- * render, don't bother. If the FST is > 2.1 GB in size
- * then this method will throw strange exceptions.
+ * render, don't bother.
  *
  * @param sameRank
  *          If <code>true</code>, the resulting <code>dot</code> file will try

@@ -945,7 +944,6 @@ public final class Util {
       arc.flags = 0;
       // NOTE: nextArc is a node (not an address!) in this case:
       arc.nextArc = follow.target;
-      arc.node = follow.target;
     }
     arc.output = follow.nextFinalOutput;
     arc.label = FST.END_LABEL;

@@ -70,22 +70,22 @@ public class TestBoolean2ScorerSupplier extends LuceneTestCase {
   private static class FakeScorerSupplier extends ScorerSupplier {
 
     private final long cost;
-    private final Boolean randomAccess;
+    private final Long leadCost;
 
     FakeScorerSupplier(long cost) {
       this.cost = cost;
-      this.randomAccess = null;
+      this.leadCost = null;
     }
 
-    FakeScorerSupplier(long cost, boolean randomAccess) {
+    FakeScorerSupplier(long cost, long leadCost) {
       this.cost = cost;
-      this.randomAccess = randomAccess;
+      this.leadCost = leadCost;
     }
 
     @Override
-    public Scorer get(boolean randomAccess) throws IOException {
-      if (this.randomAccess != null) {
-        assertEquals(this.toString(), this.randomAccess, randomAccess);
+    public Scorer get(long leadCost) throws IOException {
+      if (this.leadCost != null) {
+        assertEquals(this.toString(), this.leadCost.longValue(), leadCost);
       }
       return new FakeScorer(cost);
     }

@@ -97,7 +97,7 @@ public class TestBoolean2ScorerSupplier extends LuceneTestCase {
 
     @Override
     public String toString() {
-      return "FakeLazyScorer(cost=" + cost + ",randomAccess=" + randomAccess + ")";
+      return "FakeLazyScorer(cost=" + cost + ",leadCost=" + leadCost + ")";
     }
 
   }

@@ -127,17 +127,17 @@ public class TestBoolean2ScorerSupplier extends LuceneTestCase {
     subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42));
     ScorerSupplier s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0);
     assertEquals(42, s.cost());
-    assertEquals(42, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(42, s.get(random().nextInt(100)).iterator().cost());
 
     subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12));
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0);
     assertEquals(42 + 12, s.cost());
-    assertEquals(42 + 12, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(42 + 12, s.get(random().nextInt(100)).iterator().cost());
 
     subs.get(Occur.SHOULD).add(new FakeScorerSupplier(20));
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0);
     assertEquals(42 + 12 + 20, s.cost());
-    assertEquals(42 + 12 + 20, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(42 + 12 + 20, s.get(random().nextInt(100)).iterator().cost());
   }
 
   public void testDisjunctionWithMinShouldMatchCost() throws IOException {

@@ -150,26 +150,26 @@ public class TestBoolean2ScorerSupplier extends LuceneTestCase {
     subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12));
     ScorerSupplier s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 1);
     assertEquals(42 + 12, s.cost());
-    assertEquals(42 + 12, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(42 + 12, s.get(random().nextInt(100)).iterator().cost());
 
     subs.get(Occur.SHOULD).add(new FakeScorerSupplier(20));
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 1);
     assertEquals(42 + 12 + 20, s.cost());
-    assertEquals(42 + 12 + 20, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(42 + 12 + 20, s.get(random().nextInt(100)).iterator().cost());
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2);
     assertEquals(12 + 20, s.cost());
-    assertEquals(12 + 20, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(12 + 20, s.get(random().nextInt(100)).iterator().cost());
 
     subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30));
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 1);
     assertEquals(42 + 12 + 20 + 30, s.cost());
-    assertEquals(42 + 12 + 20 + 30, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(42 + 12 + 20 + 30, s.get(random().nextInt(100)).iterator().cost());
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2);
     assertEquals(12 + 20 + 30, s.cost());
-    assertEquals(12 + 20 + 30, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(12 + 20 + 30, s.get(random().nextInt(100)).iterator().cost());
     s = new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 3);
     assertEquals(12 + 20, s.cost());
-    assertEquals(12 + 20, s.get(random().nextBoolean()).iterator().cost());
+    assertEquals(12 + 20, s.get(random().nextInt(100)).iterator().cost());
   }
 
   public void testDuelCost() throws Exception {

@@ -205,128 +205,149 @@ public class TestBoolean2ScorerSupplier extends LuceneTestCase {
       Boolean2ScorerSupplier supplier = new Boolean2ScorerSupplier(null,
           subs, needsScores, minShouldMatch);
       long cost1 = supplier.cost();
-      long cost2 = supplier.get(false).iterator().cost();
+      long cost2 = supplier.get(Long.MAX_VALUE).iterator().cost();
       assertEquals("clauses=" + subs + ", minShouldMatch=" + minShouldMatch, cost1, cost2);
     }
   }
 
   // test the tester...
   public void testFakeScorerSupplier() {
-    FakeScorerSupplier randomAccessSupplier = new FakeScorerSupplier(random().nextInt(100), true);
-    expectThrows(AssertionError.class, () -> randomAccessSupplier.get(false));
-    FakeScorerSupplier sequentialSupplier = new FakeScorerSupplier(random().nextInt(100), false);
-    expectThrows(AssertionError.class, () -> sequentialSupplier.get(true));
+    FakeScorerSupplier randomAccessSupplier = new FakeScorerSupplier(random().nextInt(100), 30);
+    expectThrows(AssertionError.class, () -> randomAccessSupplier.get(70));
+    FakeScorerSupplier sequentialSupplier = new FakeScorerSupplier(random().nextInt(100), 70);
+    expectThrows(AssertionError.class, () -> sequentialSupplier.get(30));
   }
 
-  public void testConjunctionRandomAccess() throws IOException {
+  public void testConjunctionLeadCost() throws IOException {
     Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
     for (Occur occur : Occur.values()) {
       subs.put(occur, new ArrayList<>());
     }
 
-    // If sequential access is required, only the least costly clause does not use random-access
-    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(42, true));
-    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(12, false));
-    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(false); // triggers assertions as a side-effect
+    // If the clauses are less costly than the lead cost, the min cost is the new lead cost
+    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(42, 12));
+    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(12, 12));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(Long.MAX_VALUE); // triggers assertions as a side-effect
 
     subs = new EnumMap<>(Occur.class);
     for (Occur occur : Occur.values()) {
      subs.put(occur, new ArrayList<>());
    }
 
-    // If random access is required, then we propagate to sub clauses
-    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(42, true));
-    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(12, true));
-    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(true); // triggers assertions as a side-effect
+    // If the lead cost is less that the clauses' cost, then we don't modify it
+    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(42, 7));
+    subs.get(RandomPicks.randomFrom(random(), Arrays.asList(Occur.FILTER, Occur.MUST))).add(new FakeScorerSupplier(12, 7));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(7); // triggers assertions as a side-effect
   }
 
-  public void testDisjunctionRandomAccess() throws IOException {
-    // disjunctions propagate
-    for (boolean randomAccess : new boolean[] {false, true}) {
-      Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
-      for (Occur occur : Occur.values()) {
-        subs.put(occur, new ArrayList<>());
-      }
-      subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, randomAccess));
-      subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, randomAccess));
-      new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(randomAccess); // triggers assertions as a side-effect
+  public void testDisjunctionLeadCost() throws IOException {
+    Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
+    for (Occur occur : Occur.values()) {
+      subs.put(occur, new ArrayList<>());
     }
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, 54));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, 54));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(100); // triggers assertions as a side-effect
+
+    subs.get(Occur.SHOULD).clear();
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, 20));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, 20));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(20); // triggers assertions as a side-effect
   }
 
-  public void testDisjunctionWithMinShouldMatchRandomAccess() throws IOException {
+  public void testDisjunctionWithMinShouldMatchLeadCost() throws IOException {
     Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
     for (Occur occur : Occur.values()) {
       subs.put(occur, new ArrayList<>());
     }
 
-    // Only the most costly clause uses random-access in that case:
-    // most of time, we will find agreement between the 2 least costly
-    // clauses and only then check whether the 3rd one matches too
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, true));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, false));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, false));
-    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2).get(false); // triggers assertions as a side-effect
+    // minShouldMatch is 2 so the 2 least costly clauses will lead iteration
+    // and their cost will be 30+12=42
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(50, 42));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, 42));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, 42));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2).get(100); // triggers assertions as a side-effect
 
     subs = new EnumMap<>(Occur.class);
     for (Occur occur : Occur.values()) {
       subs.put(occur, new ArrayList<>());
     }
 
-    // When random-access is true, just propagate
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, true));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, true));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, true));
-    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2).get(true); // triggers assertions as a side-effect
+    // If the leadCost is less than the msm cost, then it wins
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, 20));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, 20));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, 20));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2).get(20); // triggers assertions as a side-effect
 
     subs = new EnumMap<>(Occur.class);
     for (Occur occur : Occur.values()) {
       subs.put(occur, new ArrayList<>());
     }
 
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, true));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, false));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, false));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(20, false));
-    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2).get(false); // triggers assertions as a side-effect
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, 62));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, 62));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, 62));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(20, 62));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 2).get(100); // triggers assertions as a side-effect
 
     subs = new EnumMap<>(Occur.class);
     for (Occur occur : Occur.values()) {
       subs.put(occur, new ArrayList<>());
     }
 
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, true));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, false));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, true));
-    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(20, false));
-    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 3).get(false); // triggers assertions as a side-effect
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(42, 32));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(12, 32));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, 32));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(20, 32));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 3).get(100); // triggers assertions as a side-effect
   }
 
-  public void testProhibitedRandomAccess() throws IOException {
-    for (boolean randomAccess : new boolean[] {false, true}) {
-      Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
-      for (Occur occur : Occur.values()) {
-        subs.put(occur, new ArrayList<>());
-      }
-
-      // The MUST_NOT clause always uses random-access
-      subs.get(Occur.MUST).add(new FakeScorerSupplier(42, randomAccess));
-      subs.get(Occur.MUST_NOT).add(new FakeScorerSupplier(TestUtil.nextInt(random(), 1, 100), true));
-      new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(randomAccess); // triggers assertions as a side-effect
+  public void testProhibitedLeadCost() throws IOException {
+    Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
+    for (Occur occur : Occur.values()) {
+      subs.put(occur, new ArrayList<>());
     }
+
+    // The MUST_NOT clause is called with the same lead cost as the MUST clause
+    subs.get(Occur.MUST).add(new FakeScorerSupplier(42, 42));
+    subs.get(Occur.MUST_NOT).add(new FakeScorerSupplier(30, 42));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(100); // triggers assertions as a side-effect
+
+    subs.get(Occur.MUST).clear();
+    subs.get(Occur.MUST_NOT).clear();
+    subs.get(Occur.MUST).add(new FakeScorerSupplier(42, 42));
+    subs.get(Occur.MUST_NOT).add(new FakeScorerSupplier(80, 42));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(100); // triggers assertions as a side-effect
+
+    subs.get(Occur.MUST).clear();
+    subs.get(Occur.MUST_NOT).clear();
+    subs.get(Occur.MUST).add(new FakeScorerSupplier(42, 20));
+    subs.get(Occur.MUST_NOT).add(new FakeScorerSupplier(30, 20));
+    new Boolean2ScorerSupplier(null, subs, random().nextBoolean(), 0).get(20); // triggers assertions as a side-effect
   }
 
-  public void testMixedRandomAccess() throws IOException {
-    for (boolean randomAccess : new boolean[] {false, true}) {
-      Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
-      for (Occur occur : Occur.values()) {
-        subs.put(occur, new ArrayList<>());
-      }
-
-      // The SHOULD clause always uses random-access if there is a MUST clause
-      subs.get(Occur.MUST).add(new FakeScorerSupplier(42, randomAccess));
-      subs.get(Occur.SHOULD).add(new FakeScorerSupplier(TestUtil.nextInt(random(), 1, 100), true));
-      new Boolean2ScorerSupplier(null, subs, true, 0).get(randomAccess); // triggers assertions as a side-effect
+  public void testMixedLeadCost() throws IOException {
+    Map<Occur, Collection<ScorerSupplier>> subs = new EnumMap<>(Occur.class);
+    for (Occur occur : Occur.values()) {
+      subs.put(occur, new ArrayList<>());
     }
+
+    // The SHOULD clause is always called with the same lead cost as the MUST clause
+    subs.get(Occur.MUST).add(new FakeScorerSupplier(42, 42));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(30, 42));
+    new Boolean2ScorerSupplier(null, subs, true, 0).get(100); // triggers assertions as a side-effect
+
+    subs.get(Occur.MUST).clear();
+    subs.get(Occur.SHOULD).clear();
+    subs.get(Occur.MUST).add(new FakeScorerSupplier(42, 42));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(80, 42));
+    new Boolean2ScorerSupplier(null, subs, true, 0).get(100); // triggers assertions as a side-effect
+
+    subs.get(Occur.MUST).clear();
+    subs.get(Occur.SHOULD).clear();
+    subs.get(Occur.MUST).add(new FakeScorerSupplier(42, 20));
+    subs.get(Occur.SHOULD).add(new FakeScorerSupplier(80, 20));
+    new Boolean2ScorerSupplier(null, subs, true, 0).get(20); // triggers assertions as a side-effect
   }
 
 }

@@ -238,8 +238,8 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
         "ConjunctionScorer\n" +
             "    MUST ConstantScoreScorer\n" +
             "    MUST MinShouldMatchSumScorer\n" +
-            "            SHOULD TermScorer body:web\n" +
             "            SHOULD TermScorer body:crawler\n" +
+            "            SHOULD TermScorer body:web\n" +
             "            SHOULD TermScorer body:nutch",
         summary);
   }

@@ -1289,14 +1289,14 @@ public class TestLRUQueryCache extends LuceneTestCase {
       return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
-          return scorerSupplier(context).get(false);
+          return scorerSupplier(context).get(Long.MAX_VALUE);
         }
         @Override
         public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
           final Weight weight = this;
           return new ScorerSupplier() {
             @Override
-            public Scorer get(boolean randomAccess) throws IOException {
+            public Scorer get(long leadCost) throws IOException {
               scorerCreated.set(true);
               return new ConstantScoreScorer(weight, boost, DocIdSetIterator.all(1));
             }

@@ -1344,7 +1344,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     Weight weight = searcher.createNormalizedWeight(query, false);
     ScorerSupplier supplier = weight.scorerSupplier(searcher.getIndexReader().leaves().get(0));
     assertFalse(scorerCreated.get());
-    supplier.get(random().nextBoolean());
+    supplier.get(random().nextLong() & 0x7FFFFFFFFFFFFFFFL);
     assertTrue(scorerCreated.get());
 
     reader.close();

@@ -109,7 +109,7 @@ public class ToParentBlockJoinQuery extends Query {
       if (scorerSupplier == null) {
         return null;
       }
-      return scorerSupplier.get(false);
+      return scorerSupplier.get(Long.MAX_VALUE);
     }
 
     // NOTE: acceptDocs applies (and is checked) only in the

@@ -132,8 +132,8 @@ public class ToParentBlockJoinQuery extends Query {
       return new ScorerSupplier() {
 
        @Override
-        public Scorer get(boolean randomAccess) throws IOException {
-          return new BlockJoinScorer(BlockJoinWeight.this, childScorerSupplier.get(randomAccess), parents, scoreMode);
+        public Scorer get(long leadCost) throws IOException {
+          return new BlockJoinScorer(BlockJoinWeight.this, childScorerSupplier.get(leadCost), parents, scoreMode);
        }
 
        @Override

@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queries.payloads;
+
+/**
+ * Calculate the final score as the sum of scores of all payloads seen.
+ * <p>
+ * Is thread safe and completely reusable.
+ *
+ **/
+public class SumPayloadFunction extends PayloadFunction {
+
+  @Override
+  public float currentScore(int docId, String field, int start, int end, int numPayloadsSeen, float currentScore, float currentPayloadScore) {
+    return currentPayloadScore + currentScore;
+  }
+
+  @Override
+  public float docScore(int docId, String field, int numPayloadsSeen, float payloadScore) {
+    return numPayloadsSeen > 0 ? payloadScore : 1;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + this.getClass().hashCode();
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    return true;
+  }
+}

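A walk-through of the new function above, assuming the usual PayloadFunction callback sequence (one currentScore call per payload seen, then a final docScore call); the field name and payload values are invented.

    import org.apache.lucene.queries.payloads.SumPayloadFunction;

    class SumPayloadWalkthrough {
      public static void main(String[] args) {
        SumPayloadFunction sum = new SumPayloadFunction();
        float score = 0f;
        // Three payloads of 1.0, 2.0 and 3.0 on matching positions accumulate:
        score = sum.currentScore(0, "body", 0, 1, 0, score, 1.0f); // 1.0
        score = sum.currentScore(0, "body", 1, 2, 1, score, 2.0f); // 3.0
        score = sum.currentScore(0, "body", 2, 3, 2, score, 3.0f); // 6.0
        // With payloads seen, docScore returns the accumulated sum; with
        // none seen it falls back to 1.
        System.out.println(sum.docScore(0, "body", 3, score)); // 6.0
      }
    }
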
@@ -114,7 +114,7 @@ final class LatLonPointDistanceQuery extends Query {
       if (scorerSupplier == null) {
         return null;
       }
-      return scorerSupplier.get(false);
+      return scorerSupplier.get(Long.MAX_VALUE);
     }
 
     @Override

@@ -142,7 +142,7 @@ final class LatLonPointDistanceQuery extends Query {
           long cost = -1;
 
           @Override
-          public Scorer get(boolean randomAccess) throws IOException {
+          public Scorer get(long leadCost) throws IOException {
             if (values.getDocCount() == reader.maxDoc()
                 && values.getDocCount() == values.size()
                 && cost() > reader.maxDoc() / 2) {

@@ -108,7 +108,7 @@ public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoSh
 
   @Override
   public int hashCode() {
-    return super.hashCode() * 31 + shapes.hashCode();//TODO cache
+    return shapes.hashCode();
   }
 
   @Override

@@ -116,6 +116,6 @@ public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoSh
     if (!(o instanceof GeoBaseCompositeShape<?>))
       return false;
     GeoBaseCompositeShape<?> other = (GeoBaseCompositeShape<?>) o;
-    return super.equals(o) && shapes.equals(other.shapes);
+    return shapes.equals(other.shapes);
   }
 }

@@ -27,6 +27,7 @@ import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.suggest.Input;
 import org.apache.lucene.search.suggest.InputArrayIterator;

@@ -302,17 +303,17 @@ public class BlendedInfixSuggesterTest extends LuceneTestCase {
     assertEquals(2, responses.size());
 
 
-    responses = suggester.lookup(term, (Map) null, 1, false, false);
+    responses = suggester.lookup(term, (Map<BytesRef, BooleanClause.Occur>) null, 1, false, false);
     assertEquals(1, responses.size());
 
-    responses = suggester.lookup(term, (Map) null, 2, false, false);
+    responses = suggester.lookup(term, (Map<BytesRef, BooleanClause.Occur>) null, 2, false, false);
     assertEquals(2, responses.size());
 
 
-    responses = suggester.lookup(term, (Set) null, 1, false, false);
+    responses = suggester.lookup(term, (Set<BytesRef>) null, 1, false, false);
     assertEquals(1, responses.size());
 
-    responses = suggester.lookup(term, (Set) null, 2, false, false);
+    responses = suggester.lookup(term, (Set<BytesRef>) null, 2, false, false);
     assertEquals(2, responses.size());
 
 

@@ -719,13 +719,15 @@ public class TestSuggestField extends LuceneTestCase {
     for (int i = 0; i < num; i++) {
       Document document = lineFileDocs.nextDoc();
       String title = document.getField("title").stringValue();
+      int maxLen = Math.min(title.length(), 500);
+      String prefix = title.substring(0, maxLen);
       int weight = random().nextInt(Integer.MAX_VALUE);
-      Integer prevWeight = mappings.get(title);
+      Integer prevWeight = mappings.get(prefix);
       if (prevWeight == null || prevWeight < weight) {
-        mappings.put(title, weight);
+        mappings.put(prefix, weight);
       }
       Document doc = new Document();
-      doc.add(new SuggestField("suggest_field", title, weight));
+      doc.add(new SuggestField("suggest_field", prefix, weight));
       iw.addDocument(doc);
 
       if (rarely()) {

@@ -46,7 +46,7 @@ class AssertingWeight extends FilterWeight {
       // Evil: make sure computing the cost has no side effects
       scorerSupplier.cost();
     }
-    return scorerSupplier.get(false);
+    return scorerSupplier.get(Long.MAX_VALUE);
   }
 }
 

@@ -59,10 +59,11 @@ class AssertingWeight extends FilterWeight {
     return new ScorerSupplier() {
       private boolean getCalled = false;
       @Override
-      public Scorer get(boolean randomAccess) throws IOException {
+      public Scorer get(long leadCost) throws IOException {
         assert getCalled == false;
         getCalled = true;
-        return AssertingScorer.wrap(new Random(random.nextLong()), inScorerSupplier.get(randomAccess), needsScores);
+        assert leadCost >= 0 : leadCost;
+        return AssertingScorer.wrap(new Random(random.nextLong()), inScorerSupplier.get(leadCost), needsScores);
       }
 
       @Override

@@ -61,6 +61,12 @@ Upgrade Notes
 2.  Deprecations: Enabling/disabling autoAddReplicas cluster wide with the API will be deprecated; use
     suspend/resume trigger APIs with name='.auto_add_replicas' instead.
 
+* SOLR-11195: shard and cluster metric reporter configuration now requires a class attribute.
+  If a reporter configures the group="shard" attribute then please also configure the
+  class="org.apache.solr.metrics.reporters.solr.SolrShardReporter" attribute.
+  If a reporter configures the group="cluster" attribute then please also configure the
+  class="org.apache.solr.metrics.reporters.solr.SolrClusterReporter" attribute.
+
 New Features
 ----------------------
 

@@ -78,6 +84,11 @@ New Features
 
 * SOLR-11205: Any metrics value can be directly accessed in autoscaling policies (noble)
 
+* SOLR-11199: Payloads supports an "operator" param. Supported operators are 'or', "phrase" ( default ).
+  A new "sum" function is also added. Example :
+  {!payload_score f=payload_field func=sum operator=or}A B C" (Varun Thacker)
+
+
 Bug Fixes
 ----------------------
 

@@ -101,6 +112,14 @@ Bug Fixes
   may not have a registered searcher. This causes spikes in response times when adding a replica
   in busy clusters. (Ludovic Boutros, Timothy Potter, shalin)
 
+* SOLR-11190: GraphQuery also supports string fields which are indexed=false and docValues=true. Please refer to the
+  Javadocs for DocValuesTermsQuery for it's performance characteristics. (Karthik Ramachandran, Varun Thacker)
+
+* SOLR-11228: Exclude static html files in the partials directory from authentication and authorization checks. The UI
+  will open correctly with kerberos enabled (Ishan Chattopadhyaya, Varun Thacker)
+
+* SOLR-11084: Issue with starting script with solr.home (-s) == solr (Leil Ireson, Amrit Sarkar via Erick Erickson)
+
 Optimizations
 ----------------------
 

@@ -152,6 +171,14 @@ Other Changes
 
 * SOLR-11157: remove-policy must fail if a policy to be deleted is used by a collection (noble)
 
+* SOLR-11090: Add Replica.getProperty accessor. (Christine Poerschke)
+
+* SOLR-11061: Add a spins metric for data directory paths. (ab)
+
+* SOLR-11071: Improve TestIntervalFacets.testRandom (Tomás Fernández Löbbe)
+
+* SOLR-11195: Require class attribute for shard and cluster metric reporter configuration. (Christine Poerschke)
+
 ================== 7.0.0 ==================
 
 Versions of Major Components

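A hedged SolrJ sketch of the SOLR-11199 syntax documented above; the base URL, collection name and field name are invented for illustration.

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    class PayloadScoreExample {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
          // operator=or scores every matching term's payload; the default
          // 'phrase' requires the terms to match as a phrase. func=sum adds
          // the payloads of all matches.
          SolrQuery q = new SolrQuery("{!payload_score f=payload_field func=sum operator=or}A B C");
          QueryResponse rsp = client.query("mycollection", q);
          System.out.println(rsp.getResults().getNumFound());
        }
      }
    }
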
@@ -338,7 +338,8 @@ function print_usage() {
   echo "  while reusing the same server directory set using the -d parameter. If set, the"
   echo "  specified directory should contain a solr.xml file, unless solr.xml exists in Zookeeper."
   echo "  This parameter is ignored when running examples (-e), as the solr.solr.home depends"
-  echo "  on which example is run. The default value is server/solr."
+  echo "  on which example is run. The default value is server/solr. If passed relative dir,"
+  echo "  validation with current dir will be done, before trying default server/<dir>"
   echo ""
   echo "  -t <dir>  Sets the solr.data.home system property, where Solr will store data (index)."
   echo "  If not set, Solr uses solr.solr.home for config and data."

@@ -481,23 +482,23 @@ function print_usage() {
   print_short_zk_usage ""
   echo "         Be sure to check the Solr logs in case of errors."
   echo ""
-  echo "   -z zkHost   Optional Zookeeper connection string for all commands. If specified it"
+  echo "   -z zkHost   Optional Zookeeper connection string for all commands. If specified it"
   echo "               overrides the 'ZK_HOST=...'' defined in solr.in.sh."
   echo ""
   echo "   upconfig uploads a configset from the local machine to Zookeeper. (Backcompat: -upconfig)"
   echo ""
   echo "   downconfig downloads a configset from Zookeeper to the local machine. (Backcompat: -downconfig)"
   echo ""
-  echo "   -n configName   Name of the configset in Zookeeper that will be the destination of"
+  echo "   -n configName   Name of the configset in Zookeeper that will be the destination of"
   echo "                   'upconfig' and the source for 'downconfig'."
   echo ""
-  echo "   -d confdir      The local directory the configuration will be uploaded from for"
+  echo "   -d confdir      The local directory the configuration will be uploaded from for"
   echo "                   'upconfig' or downloaded to for 'downconfig'. If 'confdir' is a child of"
   echo "                   ...solr/server/solr/configsets' then the configs will be copied from/to"
   echo "                   that directory. Otherwise it is interpreted as a simple local path."
   echo ""
   echo "   cp copies files or folders to/from Zookeeper or Zokeeper -> Zookeeper"
-  echo "   -r   Recursively copy <src> to <dst>. Command will fail if <src> has children and "
+  echo "   -r   Recursively copy <src> to <dst>. Command will fail if <src> has children and "
   echo "        -r is not specified. Optional"
   echo ""
   echo "   <src>, <dest> : [file:][/]path/to/local/file or zk:/path/to/zk/node"

@@ -527,9 +528,9 @@ function print_usage() {
   echo "             Wildcards are supported when copying from local, trailing only and must be quoted."
   echo ""
   echo "   rm deletes files or folders on Zookeeper"
-  echo "   -r   Recursively delete if <path> is a directory. Command will fail if <path>"
+  echo "   -r   Recursively delete if <path> is a directory. Command will fail if <path>"
   echo "        has children and -r is not specified. Optional"
-  echo "   <path> : [zk:]/path/to/zk/node. <path> may not be the root ('/')"
+  echo "   <path> : [zk:]/path/to/zk/node. <path> may not be the root ('/')"
   echo ""
   echo "   mv moves (renames) znodes on Zookeeper"
   echo "   <src>, <dest> : Zookeeper nodes, the 'zk:' prefix is optional."

@@ -1328,14 +1329,15 @@ if [[ "$SCRIPT_CMD" == "auth" ]]; then
     echo -e "\nSolr server directory $SOLR_SERVER_DIR not found!\n"
     exit 1
   fi
 
   if [ -z "$SOLR_HOME" ]; then
     SOLR_HOME="$SOLR_SERVER_DIR/solr"
-  else
-    if [[ $SOLR_HOME != /* ]] && [[ -d "$SOLR_SERVER_DIR/$SOLR_HOME" ]]; then
+  elif [[ $SOLR_HOME != /* ]]; then
+    if [[ -d "`pwd`/$SOLR_HOME" ]]; then
+      SOLR_HOME="$(pwd)/$SOLR_HOME"
+    elif [[ -d "$SOLR_SERVER_DIR/$SOLR_HOME" ]]; then
       SOLR_HOME="$SOLR_SERVER_DIR/$SOLR_HOME"
       SOLR_PID_DIR="$SOLR_HOME"
-    elif [[ $SOLR_HOME != /* ]] && [[ -d "`pwd`/$SOLR_HOME" ]]; then
-      SOLR_HOME="$(pwd)/$SOLR_HOME"
     fi
   fi
 

@@ -1674,12 +1676,12 @@ fi
 
 if [ -z "$SOLR_HOME" ]; then
   SOLR_HOME="$SOLR_SERVER_DIR/solr"
-else
-  if [[ $SOLR_HOME != /* ]] && [[ -d "$SOLR_SERVER_DIR/$SOLR_HOME" ]]; then
+elif [[ $SOLR_HOME != /* ]]; then
+  if [[ -d "`pwd`/$SOLR_HOME" ]]; then
+    SOLR_HOME="$(pwd)/$SOLR_HOME"
+  elif [[ -d "$SOLR_SERVER_DIR/$SOLR_HOME" ]]; then
     SOLR_HOME="$SOLR_SERVER_DIR/$SOLR_HOME"
     SOLR_PID_DIR="$SOLR_HOME"
-  elif [[ $SOLR_HOME != /* ]] && [[ -d "`pwd`/$SOLR_HOME" ]]; then
-    SOLR_HOME="$(pwd)/$SOLR_HOME"
   fi
 fi
 

@@ -310,7 +310,8 @@ goto done
 @echo   while reusing the same server directory set using the -d parameter. If set, the
 @echo   specified directory should contain a solr.xml file, unless solr.xml exists in Zookeeper.
 @echo   This parameter is ignored when running examples (-e), as the solr.solr.home depends
-@echo   on which example is run. The default value is server/solr.
+@echo   on which example is run. The default value is server/solr. If passed a relative dir
+@echo   validation with the current dir will be done before trying the default server/<dir>
 @echo.
 @echo   -t dir  Sets the solr.data.home system property, used as root for ^<instance_dir^>/data directories.
 @echo           If not set, Solr uses solr.solr.home for both config and data.

@@ -1426,7 +1427,6 @@ if "!CREATE_PORT!"=="" (
   goto err
 )
 
-
 if "!CREATE_CONFDIR!"=="_default" (
   echo WARNING: Using _default configset. Data driven schema functionality is enabled by default, which is
   echo NOT RECOMMENDED for production use.

|
|||
{
|
||||
"class":"org.apache.solr.ltr.model.LinearModel",
|
||||
"name":"fqmodel",
|
||||
"features":[
|
||||
{
|
||||
"name":"matchedTitle",
|
||||
"norm": {
|
||||
"class":"org.apache.solr.ltr.norm.MinMaxNormalizer",
|
||||
"params":{ "min":"0.0f", "max":"10.0f" }
|
||||
}
|
||||
},
|
||||
{ "name":"popularity"}
|
||||
],
|
||||
"params":{
|
||||
"weights": {
|
||||
"matchedTitle": 0.5,
|
||||
"popularity": 0.5
|
||||
}
|
||||
}
|
||||
"class":"org.apache.solr.ltr.model.LinearModel",
|
||||
"name":"fqmodel",
|
||||
"features":[
|
||||
{
|
||||
"name": "matchedTitle",
|
||||
"norm": {
|
||||
"class": "org.apache.solr.ltr.norm.MinMaxNormalizer",
|
||||
"params": {
|
||||
"min": "0.0f",
|
||||
"max": "10.0f"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "popularity"
|
||||
}
|
||||
],
|
||||
"params": {
|
||||
"weights": {
|
||||
"matchedTitle": 0.5,
|
||||
"popularity": 0.5
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,30 +1,45 @@
{
"class":"org.apache.solr.ltr.model.LinearModel",
"name":"6029760550880411648",
"features":[
{"name":"title"},
{"name":"description"},
{"name":"keywords"},
{
"name":"popularity",
"norm": {
"class":"org.apache.solr.ltr.norm.MinMaxNormalizer",
"params":{ "min":"0.0f", "max":"10.0f" }
}
},
{"name":"text"},
{"name":"queryIntentPerson"},
{"name":"queryIntentCompany"}
],
"params":{
"weights": {
"title": 0.0000000000,
"description": 0.1000000000,
"keywords": 0.2000000000,
"popularity": 0.3000000000,
"text": 0.4000000000,
"queryIntentPerson":0.1231231,
"queryIntentCompany":0.12121211
}
}
"class": "org.apache.solr.ltr.model.LinearModel",
"name": "6029760550880411648",
"features": [
{
"name": "title"
},
{
"name": "description"
},
{
"name": "keywords"
},
{
"name": "popularity",
"norm": {
"class": "org.apache.solr.ltr.norm.MinMaxNormalizer",
"params": {
"min": "0.0f",
"max": "10.0f"
}
}
},
{
"name": "text"
},
{
"name": "queryIntentPerson"
},
{
"name": "queryIntentCompany"
}
],
"params": {
"weights": {
"title": 0.0000000000,
"description": 0.1000000000,
"keywords": 0.2000000000,
"popularity": 0.3000000000,
"text": 0.4000000000,
"queryIntentPerson": 0.1231231,
"queryIntentCompany": 0.12121211
}
}
}
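For context, a model file like the one above is pushed to the LTR model store over HTTP. A minimal sketch of that upload, assuming a local Solr with the LTR plugin enabled; the collection name and file path are placeholders:

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class UploadLtrModel {
      public static void main(String[] args) throws Exception {
        // Hypothetical collection name and file path.
        URL url = new URL("http://localhost:8983/solr/techproducts/schema/model-store");
        byte[] body = Files.readAllBytes(Paths.get("/path/to/linear-model.json"));
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json");
        try (OutputStream out = conn.getOutputStream()) {
          out.write(body);   // send the model JSON as the request body
        }
        System.out.println("HTTP " + conn.getResponseCode());
      }
    }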
@ -91,6 +91,19 @@ public class TestLTROnSolrCloud extends TestRerankBase {
assertEquals("2", queryResponse.getResults().get(1).get("id").toString());
assertEquals("3", queryResponse.getResults().get(2).get("id").toString());
assertEquals("4", queryResponse.getResults().get(3).get("id").toString());
assertEquals("5", queryResponse.getResults().get(4).get("id").toString());
assertEquals("6", queryResponse.getResults().get(5).get("id").toString());
assertEquals("7", queryResponse.getResults().get(6).get("id").toString());
assertEquals("8", queryResponse.getResults().get(7).get("id").toString());

final Float original_result0_score = (Float)queryResponse.getResults().get(0).get("score");
final Float original_result1_score = (Float)queryResponse.getResults().get(1).get("score");
final Float original_result2_score = (Float)queryResponse.getResults().get(2).get("score");
final Float original_result3_score = (Float)queryResponse.getResults().get(3).get("score");
final Float original_result4_score = (Float)queryResponse.getResults().get(4).get("score");
final Float original_result5_score = (Float)queryResponse.getResults().get(5).get("score");
final Float original_result6_score = (Float)queryResponse.getResults().get(6).get("score");
final Float original_result7_score = (Float)queryResponse.getResults().get(7).get("score");

final String result0_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","64.0", "c3","2.0");
@ -100,9 +113,40 @@ public class TestLTROnSolrCloud extends TestRerankBase {
"powpularityS","36.0", "c3","2.0");
final String result3_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","25.0", "c3","2.0");
final String result4_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","16.0", "c3","2.0");
final String result5_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS", "9.0", "c3","2.0");
final String result6_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS", "4.0", "c3","2.0");
final String result7_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS", "1.0", "c3","2.0");

// Test re-rank and feature vectors returned
// Test feature vectors returned (without re-ranking)
query.setFields("*,score,features:[fv]");
queryResponse =
solrCluster.getSolrClient().query(COLLECTION,query);
assertEquals(8, queryResponse.getResults().getNumFound());
assertEquals("1", queryResponse.getResults().get(0).get("id").toString());
assertEquals("2", queryResponse.getResults().get(1).get("id").toString());
assertEquals("3", queryResponse.getResults().get(2).get("id").toString());
assertEquals("4", queryResponse.getResults().get(3).get("id").toString());
assertEquals("5", queryResponse.getResults().get(4).get("id").toString());
assertEquals("6", queryResponse.getResults().get(5).get("id").toString());
assertEquals("7", queryResponse.getResults().get(6).get("id").toString());
assertEquals("8", queryResponse.getResults().get(7).get("id").toString());

assertEquals(original_result0_score, queryResponse.getResults().get(0).get("score"));
assertEquals(original_result1_score, queryResponse.getResults().get(1).get("score"));
assertEquals(original_result2_score, queryResponse.getResults().get(2).get("score"));
assertEquals(original_result3_score, queryResponse.getResults().get(3).get("score"));
assertEquals(original_result4_score, queryResponse.getResults().get(4).get("score"));
assertEquals(original_result5_score, queryResponse.getResults().get(5).get("score"));
assertEquals(original_result6_score, queryResponse.getResults().get(6).get("score"));
assertEquals(original_result7_score, queryResponse.getResults().get(7).get("score"));

// Test feature vectors returned (with re-ranking)
query.add("rq", "{!ltr model=powpularityS-model reRankDocs=8}");
queryResponse =
solrCluster.getSolrClient().query(COLLECTION,query);
@ -119,6 +163,18 @@ public class TestLTROnSolrCloud extends TestRerankBase {
assertEquals("5", queryResponse.getResults().get(3).get("id").toString());
assertEquals(result3_features,
queryResponse.getResults().get(3).get("features").toString());
assertEquals("4", queryResponse.getResults().get(4).get("id").toString());
assertEquals(result4_features,
queryResponse.getResults().get(4).get("features").toString());
assertEquals("3", queryResponse.getResults().get(5).get("id").toString());
assertEquals(result5_features,
queryResponse.getResults().get(5).get("features").toString());
assertEquals("2", queryResponse.getResults().get(6).get("id").toString());
assertEquals(result6_features,
queryResponse.getResults().get(6).get("features").toString());
assertEquals("1", queryResponse.getResults().get(7).get("id").toString());
assertEquals(result7_features,
queryResponse.getResults().get(7).get("features").toString());
}

private void setupSolrCluster(int numShards, int numReplicas, int numServers, int maxShardsPerNode) throws Exception {
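A minimal client-side sketch of the same request pattern the test exercises, assuming a running Solr and reusing the collection/model names from this test (base URL is a placeholder):

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class FeatureVectorQueryExample {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
          SolrQuery query = new SolrQuery("*:*");
          // Log the feature vector without re-ranking first ...
          query.setFields("id,score,features:[fv]");
          QueryResponse plain = client.query(query);
          // ... then add the LTR re-rank query on top of the same request.
          query.add("rq", "{!ltr model=powpularityS-model reRankDocs=8}");
          QueryResponse reranked = client.query(query);
          System.out.println(plain.getResults().get(0).get("features"));
          System.out.println(reranked.getResults().get(0).get("features"));
        }
      }
    }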
@ -74,7 +74,7 @@ public class EmbeddedSolrServer extends SolrClient {
* Create an EmbeddedSolrServer using a NodeConfig
*
* @param nodeConfig the configuration
* @param defaultCoreName the core to route requests to be default
* @param defaultCoreName the core to route requests to by default
*/
public EmbeddedSolrServer(NodeConfig nodeConfig, String defaultCoreName) {
this(load(new CoreContainer(nodeConfig)), defaultCoreName);
@ -99,7 +99,7 @@ public class EmbeddedSolrServer extends SolrClient {
* {@link #close()} is called.
*
* @param coreContainer the core container
* @param coreName the core to route requests to be default
* @param coreName the core to route requests to by default
*/
public EmbeddedSolrServer(CoreContainer coreContainer, String coreName) {
if (coreContainer == null) {
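A minimal usage sketch of the constructor family documented above, via the Path-based convenience overload; the Solr home path and core name are hypothetical:

    import java.nio.file.Paths;
    import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
    import org.apache.solr.common.SolrInputDocument;

    public class EmbeddedExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical Solr home; it must contain solr.xml and the core's config.
        try (EmbeddedSolrServer server =
                 new EmbeddedSolrServer(Paths.get("/path/to/solr/home"), "core1")) {
          SolrInputDocument doc = new SolrInputDocument();
          doc.addField("id", "1");
          server.add(doc);   // requests route to core1 by default
          server.commit();
        }
      }
    }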
@ -569,12 +569,30 @@ public class CoreContainer {
true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
metricManager.registerGauge(null, registryName, () -> dataHome.toAbsolutePath().toString(),
true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs");
metricManager.registerGauge(null, registryName, () -> {
try {
return org.apache.lucene.util.IOUtils.spins(dataHome.toAbsolutePath());
} catch (IOException e) {
// default to spinning
return true;
}
},
true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs");
metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toAbsolutePath().toString(),
true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
metricManager.registerGauge(null, registryName, () -> {
try {
return org.apache.lucene.util.IOUtils.spins(cfg.getCoreRootDirectory().toAbsolutePath());
} catch (IOException e) {
// default to spinning
return true;
}
},
true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
// add version information
metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(),
true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version");
@ -1163,6 +1163,15 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
File dataDirFile = dataDirPath.toFile();
manager.registerGauge(this, registry, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
manager.registerGauge(this, registry, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
manager.registerGauge(this, registry, () -> dataDirPath.toAbsolutePath().toString(), true, "path", Category.CORE.toString(), "fs");
manager.registerGauge(this, registry, () -> {
try {
return org.apache.lucene.util.IOUtils.spins(dataDirPath.toAbsolutePath());
} catch (IOException e) {
// default to spinning
return true;
}
}, true, "spins", Category.CORE.toString(), "fs");
}

private void checkVersionFieldExistsInSchema(IndexSchema schema, CoreDescriptor coreDescriptor) {
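The gauges registered above are plain Dropwizard Metrics gauges under the hood. A standalone sketch of the same pattern using the Dropwizard API directly; the registry names and path here are illustrative, not Solr's:

    import java.io.File;
    import com.codahale.metrics.Gauge;
    import com.codahale.metrics.MetricRegistry;

    public class FsGaugeExample {
      public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        File dataDir = new File("/var/solr/data");   // hypothetical data directory
        // Gauges are evaluated lazily on every read, like the registerGauge calls above.
        registry.register("fs.totalSpace", (Gauge<Long>) dataDir::getTotalSpace);
        registry.register("fs.usableSpace", (Gauge<Long>) dataDir::getUsableSpace);
        System.out.println(registry.getGauges().get("fs.usableSpace").getValue());
      }
    }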
@ -224,6 +224,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("kolmogorovSmirnov", KolmogorovSmirnovEvaluator.class)
.withFunctionName("ks", KolmogorovSmirnovEvaluator.class)
.withFunctionName("asc", AscEvaluator.class)
.withFunctionName("cumulativeProbability", CumulativeProbabilityEvaluator.class)

// Boolean Stream Evaluators
.withFunctionName("and", AndEvaluator.class)
@ -1003,7 +1003,7 @@ public class SolrMetricManager {
}
}

private List<PluginInfo> prepareCloudPlugins(PluginInfo[] pluginInfos, String group, String className,
private List<PluginInfo> prepareCloudPlugins(PluginInfo[] pluginInfos, String group,
Map<String, String> defaultAttributes,
Map<String, Object> defaultInitArgs) {
List<PluginInfo> result = new ArrayList<>();
@ -1015,7 +1015,7 @@ public class SolrMetricManager {
if (!group.equals(groupAttr)) {
continue;
}
info = preparePlugin(info, className, defaultAttributes, defaultInitArgs);
info = preparePlugin(info, defaultAttributes, defaultInitArgs);
if (info != null) {
result.add(info);
}
@ -1023,18 +1023,12 @@ public class SolrMetricManager {
return result;
}

private PluginInfo preparePlugin(PluginInfo info, String className, Map<String, String> defaultAttributes,
private PluginInfo preparePlugin(PluginInfo info, Map<String, String> defaultAttributes,
Map<String, Object> defaultInitArgs) {
if (info == null) {
return null;
}
String classNameAttr = info.attributes.get("class");
if (className != null) {
if (classNameAttr != null && !className.equals(classNameAttr)) {
log.warn("Conflicting class name attributes, expected " + className + " but was " + classNameAttr + ", skipping " + info);
return null;
}
}

Map<String, String> attrs = new HashMap<>(info.attributes);
defaultAttributes.forEach((k, v) -> {
@ -1042,7 +1036,7 @@ public class SolrMetricManager {
attrs.put(k, v);
}
});
attrs.put("class", className);
attrs.put("class", classNameAttr);
Map<String, Object> initArgs = new HashMap<>();
if (info.initArgs != null) {
initArgs.putAll(info.initArgs.asMap(10));
@ -1069,7 +1063,7 @@ public class SolrMetricManager {

String registryName = core.getCoreMetricManager().getRegistryName();
// collect infos and normalize
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.shard.toString(), SolrShardReporter.class.getName(),
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.shard.toString(),
attrs, initArgs);
for (PluginInfo info : infos) {
try {
@ -1092,7 +1086,7 @@ public class SolrMetricManager {
attrs.put("group", SolrInfoBean.Group.cluster.toString());
Map<String, Object> initArgs = new HashMap<>();
initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.cluster.toString(),
attrs, initArgs);
String registryName = getRegistryName(SolrInfoBean.Group.cluster);
for (PluginInfo info : infos) {
@ -37,11 +37,12 @@ import org.apache.solr.util.PayloadUtils;
* <br>Other parameters:
* <br><code>f</code>, the field (required)
* <br><code>func</code>, payload function (min, max, or average; required)
* <br><code>includeSpanScore</code>, multiple payload function result by similarity score or not (default: false)
* <br><code>includeSpanScore</code>, multiply payload function result by similarity score or not (default: false)
* <br>Example: <code>{!payload_score f=weighted_terms_dpf}Foo Bar</code> creates a SpanNearQuery with "Foo" followed by "Bar"
*/
public class PayloadScoreQParserPlugin extends QParserPlugin {
public static final String NAME = "payload_score";
public static final String DEFAULT_OPERATOR = "phrase";

@Override
public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
@ -51,6 +52,10 @@ public class PayloadScoreQParserPlugin extends QParserPlugin {
String field = localParams.get(QueryParsing.F);
String value = localParams.get(QueryParsing.V);
String func = localParams.get("func");
String operator = localParams.get("operator", DEFAULT_OPERATOR);
if (!(operator.equalsIgnoreCase(DEFAULT_OPERATOR) || operator.equalsIgnoreCase("or"))) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Supported operators are : or , phrase");
}
boolean includeSpanScore = localParams.getBool("includeSpanScore", false);

if (field == null) {
@ -63,9 +68,9 @@ public class PayloadScoreQParserPlugin extends QParserPlugin {

FieldType ft = req.getCore().getLatestSchema().getFieldType(field);
Analyzer analyzer = ft.getQueryAnalyzer();
SpanQuery query = null;
SpanQuery query;
try {
query = PayloadUtils.createSpanQuery(field, value, analyzer);
query = PayloadUtils.createSpanQuery(field, value, analyzer, operator);
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,e);
}
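With the new operator local parameter shown above, a client can choose between the phrase (SpanNear) and or (SpanOr) forms. A minimal SolrJ sketch, assuming a collection with a delimited-payload field named vals_dpf; the base URL is a placeholder:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class PayloadScoreExample {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
          // "operator=or" builds a SpanOrQuery over the tokens
          // instead of the default zero-slop SpanNearQuery.
          SolrQuery q = new SolrQuery("{!payload_score f=vals_dpf func=max operator=or}A B");
          System.out.println(client.query(q).getResults().getNumFound());
        }
      }
    }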
@ -19,6 +19,7 @@ package org.apache.solr.search.join;
import org.apache.lucene.search.Query;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.StrField;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SyntaxError;
@ -45,6 +46,9 @@ public class GraphQueryParser extends QParser {
String fromField = localParams.get("from", "node_id");
String toField = localParams.get("to", "edge_ids");

validateFields(fromField);
validateFields(toField);

// only documents that do not have values in the edge id fields.
boolean onlyLeafNodes = localParams.getBool("returnOnlyLeaf", false);
// choose if you want to return documents that match the initial query or not.
@ -65,5 +69,31 @@ public class GraphQueryParser extends QParser {
// return the parsed graph query.
return gq;
}

public void validateFields(String field) throws SyntaxError {

if (req.getSchema().getField(field) == null) {
throw new SyntaxError("field " + field + " not defined in schema");
}

if (req.getSchema().getField(field).getType().isPointField()) {
if (req.getSchema().getField(field).hasDocValues()) {
return;
} else {
throw new SyntaxError("point field " + field + " must have docValues=true");
}
}

if (req.getSchema().getField(field).getType() instanceof StrField) {
if ((req.getSchema().getField(field).hasDocValues() || req.getSchema().getField(field).indexed())) {
return;
} else {
throw new SyntaxError("string field " + field + " must have indexed=true or docValues=true");
}
}

throw new SyntaxError("FieldType for field=" + field + " not supported");

}

}
@ -27,6 +27,7 @@ import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.AutomatonQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TermInSetQuery;
@ -173,7 +174,9 @@ class GraphTermsCollector extends GraphEdgeCollector {
collectorTerms.get(i, ref);
termList.add(ref);
}
q = new TermInSetQuery(matchField.getName(), termList);
q = (matchField.hasDocValues() && !matchField.indexed())
? new DocValuesTermsQuery(matchField.getName(), termList)
: new TermInSetQuery(matchField.getName(), termList);
}

return q;
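The change above prefers a docValues-based query when the match field is not indexed. A small standalone sketch of the two alternatives; the field name and terms are illustrative:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.lucene.search.DocValuesTermsQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermInSetQuery;
    import org.apache.lucene.util.BytesRef;

    public class TermsQueryChoice {
      // Postings-based TermInSetQuery needs an indexed field; the docValues
      // variant checks per-document values instead, suiting docValues-only fields.
      static Query termsQuery(String field, List<BytesRef> terms,
                              boolean hasDocValues, boolean indexed) {
        return (hasDocValues && !indexed)
            ? new DocValuesTermsQuery(field, terms)
            : new TermInSetQuery(field, terms);
      }

      public static void main(String[] args) {
        List<BytesRef> terms = Arrays.asList(new BytesRef("a"), new BytesRef("b"));
        System.out.println(termsQuery("node_id", terms, true, false));
      }
    }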
@ -46,6 +46,8 @@ import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkACLProvider;
import org.apache.solr.common.cloud.ZkCredentialsProvider;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.ACL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -65,8 +67,12 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
if (conf != null && "zookeeper".equals(conf.getInitParameter("signer.secret.provider"))) {
SolrZkClient zkClient =
(SolrZkClient)conf.getServletContext().getAttribute(KerberosPlugin.DELEGATION_TOKEN_ZK_CLIENT);
conf.getServletContext().setAttribute("signer.secret.provider.zookeeper.curator.client",
getCuratorClient(zkClient));
try {
conf.getServletContext().setAttribute("signer.secret.provider.zookeeper.curator.client",
getCuratorClient(zkClient));
} catch (InterruptedException | KeeperException e) {
throw new ServletException(e);
}
}
super.init(conf);
}
@ -147,7 +153,7 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
newAuthHandler.setAuthHandler(authHandler);
}

protected CuratorFramework getCuratorClient(SolrZkClient zkClient) {
protected CuratorFramework getCuratorClient(SolrZkClient zkClient) throws InterruptedException, KeeperException {
// should we try to build a RetryPolicy off of the ZkController?
RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
if (zkClient == null) {
@ -161,6 +167,17 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
SolrZkToCuratorCredentialsACLs curatorToSolrZk = new SolrZkToCuratorCredentialsACLs(zkClient);
final int connectionTimeoutMs = 30000; // this value is currently hard coded, see SOLR-7561.

// Create /security znode upfront. Without this, the curator framework creates this directory path
// without the appropriate ACL configuration. This issue is possibly related to HADOOP-11973
try {
zkClient.makePath(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, CreateMode.PERSISTENT, true);
} catch (KeeperException ex) {
if (ex.code() != KeeperException.Code.NODEEXISTS) {
throw ex;
}
}

curatorFramework = CuratorFrameworkFactory.builder()
.namespace(zkNamespace)
.connectString(zkConnectionString)
@ -178,12 +195,15 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
* Convert Solr Zk Credentials/ACLs to Curator versions
*/
protected static class SolrZkToCuratorCredentialsACLs {
private final String zkChroot;
private final ACLProvider aclProvider;
private final List<AuthInfo> authInfos;

public SolrZkToCuratorCredentialsACLs(SolrZkClient zkClient) {
this.aclProvider = createACLProvider(zkClient);
this.authInfos = createAuthInfo(zkClient);
String zkHost = zkClient.getZkServerAddress();
this.zkChroot = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): null;
}

public ACLProvider getACLProvider() { return aclProvider; }
@ -199,8 +219,20 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication

@Override
public List<ACL> getAclForPath(String path) {
List<ACL> acls = zkACLProvider.getACLsToAdd(path);
return acls;
List<ACL> acls = null;

// The logic in SecurityAwareZkACLProvider does not work when
// the Solr zkPath is chrooted (e.g. /solr instead of /). This is
// due to the fact that the getACLsToAdd(..) callback provides
// an absolute path (instead of relative path to the chroot) and
// the string comparison in SecurityAwareZkACLProvider fails.
if (zkACLProvider instanceof SecurityAwareZkACLProvider && zkChroot != null) {
acls = zkACLProvider.getACLsToAdd(path.replace(zkChroot, ""));
} else {
acls = zkACLProvider.getACLsToAdd(path);
}

return acls;
}
};
}
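A small self-contained sketch of the chroot derivation and path-stripping used above; the ZK address is a hypothetical example:

    public class ZkChrootExample {
      public static void main(String[] args) {
        // Hypothetical ZK address with a /solr chroot.
        String zkHost = "zk1:2181,zk2:2181/solr";
        String zkChroot = zkHost.contains("/") ? zkHost.substring(zkHost.indexOf("/")) : null;
        System.out.println(zkChroot);                                     // /solr
        // The absolute path handed back is stripped of the chroot before the ACL check.
        System.out.println("/solr/security/token".replace(zkChroot, "")); // /security/token
      }
    }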
@ -43,6 +43,8 @@ import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkACLProvider;
import org.apache.solr.common.cloud.ZkCredentialsProvider;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.ACL;

/**
@ -62,8 +64,12 @@ public class HadoopAuthFilter extends DelegationTokenAuthenticationFilter {
if (conf != null && "zookeeper".equals(conf.getInitParameter("signer.secret.provider"))) {
SolrZkClient zkClient =
(SolrZkClient)conf.getServletContext().getAttribute(DELEGATION_TOKEN_ZK_CLIENT);
conf.getServletContext().setAttribute("signer.secret.provider.zookeeper.curator.client",
getCuratorClient(zkClient));
try {
conf.getServletContext().setAttribute("signer.secret.provider.zookeeper.curator.client",
getCuratorClient(zkClient));
} catch (KeeperException | InterruptedException e) {
throw new ServletException(e);
}
}
super.init(conf);
}
@ -125,7 +131,7 @@ public class HadoopAuthFilter extends DelegationTokenAuthenticationFilter {
newAuthHandler.setAuthHandler(authHandler);
}

protected CuratorFramework getCuratorClient(SolrZkClient zkClient) {
protected CuratorFramework getCuratorClient(SolrZkClient zkClient) throws KeeperException, InterruptedException {
// should we try to build a RetryPolicy off of the ZkController?
RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
if (zkClient == null) {
@ -139,6 +145,17 @@ public class HadoopAuthFilter extends DelegationTokenAuthenticationFilter {
SolrZkToCuratorCredentialsACLs curatorToSolrZk = new SolrZkToCuratorCredentialsACLs(zkClient);
final int connectionTimeoutMs = 30000; // this value is currently hard coded, see SOLR-7561.

// Create /security znode upfront. Without this, the curator framework creates this directory path
// without the appropriate ACL configuration. This issue is possibly related to HADOOP-11973
try {
zkClient.makePath(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, CreateMode.PERSISTENT, true);
} catch (KeeperException ex) {
if (ex.code() != KeeperException.Code.NODEEXISTS) {
throw ex;
}
}

curatorFramework = CuratorFrameworkFactory.builder()
.namespace(zkNamespace)
.connectString(zkConnectionString)
@ -156,12 +173,15 @@ public class HadoopAuthFilter extends DelegationTokenAuthenticationFilter {
* Convert Solr Zk Credentials/ACLs to Curator versions
*/
protected static class SolrZkToCuratorCredentialsACLs {
private final String zkChroot;
private final ACLProvider aclProvider;
private final List<AuthInfo> authInfos;

public SolrZkToCuratorCredentialsACLs(SolrZkClient zkClient) {
this.aclProvider = createACLProvider(zkClient);
this.authInfos = createAuthInfo(zkClient);
String zkHost = zkClient.getZkServerAddress();
this.zkChroot = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): null;
}

public ACLProvider getACLProvider() { return aclProvider; }
@ -177,8 +197,20 @@ public class HadoopAuthFilter extends DelegationTokenAuthenticationFilter {

@Override
public List<ACL> getAclForPath(String path) {
List<ACL> acls = zkACLProvider.getACLsToAdd(path);
return acls;
List<ACL> acls = null;

// The logic in SecurityAwareZkACLProvider does not work when
// the Solr zkPath is chrooted (e.g. /solr instead of /). This is
// due to the fact that the getACLsToAdd(..) callback provides
// an absolute path (instead of relative path to the chroot) and
// the string comparison in SecurityAwareZkACLProvider fails.
if (zkACLProvider instanceof SecurityAwareZkACLProvider && zkChroot != null) {
acls = zkACLProvider.getACLsToAdd(path.replace(zkChroot, ""));
} else {
acls = zkACLProvider.getACLsToAdd(path);
}

return acls;
}
};
}
@ -142,6 +142,7 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
authFilter.init(conf);

} catch (ServletException e) {
log.error("Error initializing " + getClass().getSimpleName(), e);
throw new SolrException(ErrorCode.SERVER_ERROR, "Error initializing " + getClass().getName() + ": "+e);
}
}
@ -266,6 +266,14 @@ public class PeerSync implements SolrMetricProducer {
requestVersions(replica);
}

try {
// wait a little bit; there is a chance that an update is being sent from the leader,
// so it will be present in the response, but not in our recent updates (SOLR-10126)
Thread.sleep(300);
} catch (InterruptedException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
}

try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) {
ourUpdates = recentUpdates.getVersions(nUpdates);
}
@ -33,12 +33,15 @@ import org.apache.lucene.queries.payloads.AveragePayloadFunction;
import org.apache.lucene.queries.payloads.MaxPayloadFunction;
import org.apache.lucene.queries.payloads.MinPayloadFunction;
import org.apache.lucene.queries.payloads.PayloadFunction;
import org.apache.lucene.queries.payloads.SumPayloadFunction;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.schema.FieldType;
import org.apache.solr.search.PayloadScoreQParserPlugin;

public class PayloadUtils {
public static String getPayloadEncoder(FieldType fieldType) {
@ -95,15 +98,22 @@ public class PayloadUtils {
if ("average".equals(func)) {
payloadFunction = new AveragePayloadFunction();
}

if ("sum".equals(func)) {
payloadFunction = new SumPayloadFunction();
}
return payloadFunction;
}

public static SpanQuery createSpanQuery(String field, String value, Analyzer analyzer) throws IOException {
return createSpanQuery(field, value, analyzer, PayloadScoreQParserPlugin.DEFAULT_OPERATOR);
}

/**
* The generated SpanQuery will be either a SpanTermQuery or an ordered, zero slop SpanNearQuery, depending
* on how many tokens are emitted.
*/
public static SpanQuery createSpanQuery(String field, String value, Analyzer analyzer) throws IOException {
public static SpanQuery createSpanQuery(String field, String value, Analyzer analyzer, String operator) throws IOException {
// adapted this from QueryBuilder.createSpanQuery (which isn't currently public) and added reset(), end(), and close() calls
List<SpanTermQuery> terms = new ArrayList<>();
try (TokenStream in = analyzer.tokenStream(field, value)) {
@ -121,9 +131,11 @@ public class PayloadUtils {
query = null;
} else if (terms.size() == 1) {
query = terms.get(0);
} else if (operator != null && operator.equalsIgnoreCase("or")) {
query = new SpanOrQuery(terms.toArray(new SpanTermQuery[terms.size()]));
} else {
query = new SpanNearQuery(terms.toArray(new SpanTermQuery[terms.size()]), 0, true);
}
query = new SpanNearQuery(terms.toArray(new SpanTermQuery[terms.size()]), 0, true);
}
return query;
}
}
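A minimal sketch of the new overload, assuming a whitespace analyzer: two tokens with operator "or" yield a SpanOrQuery, while the default "phrase" operator yields an ordered zero-slop SpanNearQuery:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.search.spans.SpanQuery;
    import org.apache.solr.util.PayloadUtils;

    public class SpanQueryOperatorExample {
      public static void main(String[] args) throws Exception {
        Analyzer analyzer = new WhitespaceAnalyzer();
        SpanQuery or = PayloadUtils.createSpanQuery("f", "A B", analyzer, "or");
        SpanQuery phrase = PayloadUtils.createSpanQuery("f", "A B", analyzer, "phrase");
        System.out.println(or);      // expected: spanOr([f:A, f:B])
        System.out.println(phrase);  // expected: spanNear([f:A, f:B], 0, true)
      }
    }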
@ -28,7 +28,7 @@
<fieldType name="plong" class="solr.LongPointField"/>
<fieldType name="pdouble" class="solr.DoublePointField"/>
<fieldType name="pfloat" class="solr.FloatPointField"/>
<!-- fieldType name="pdate" class="solr.DatePointField"/ -->
<fieldType name="pdate" class="solr.DatePointField"/>

<field name="id" type="string" indexed="true" stored="true" docValues="false" multiValued="false" required="true"/>
<field name="id_dv" type="string" indexed="false" stored="false" docValues="true" multiValued="false"
@ -63,7 +63,10 @@
<dynamicField name="*_ds_p" type="pdouble" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="false" docValues="${solr.tests.numeric.dv}"/>
<dynamicField name="*_dt_dv" type="date" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_dt_p" type="pdate" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_dts" type="date" indexed="true" stored="false" docValues="${solr.tests.numeric.dv}" multiValued="true"/>
<dynamicField name="*_dts_dv" type="date" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_dts_p" type="pdate" indexed="true" stored="false" docValues="true" multiValued="true"/>

<uniqueKey>id</uniqueKey>

@ -78,6 +81,8 @@
<copyField source="*_l" dest="*_l_p"/>
<copyField source="*_d" dest="*_d_dv"/>
<copyField source="*_d" dest="*_d_p"/>
<copyField source="*_dt" dest="*_dt_dv"/>
<copyField source="*_dt" dest="*_dt_p"/>
<copyField source="*_ss" dest="*_ss_dv"/>
<copyField source="*_fs" dest="*_fs_dv"/>
<copyField source="*_fs" dest="*_fs_p"/>
@ -85,5 +90,7 @@
<copyField source="*_ls" dest="*_ls_p"/>
<copyField source="*_ds" dest="*_ds_dv"/>
<copyField source="*_ds" dest="*_ds_p"/>
<copyField source="*_dts" dest="*_dts_dv"/>
<copyField source="*_dts" dest="*_dts_p"/>
<copyField source="id" dest="id_dv"/>
</schema>
@ -239,6 +239,10 @@
<dynamicField name="*_ddsS" type="double" indexed="true" stored="true" multiValued="true" docValues="true"/>
<dynamicField name="*_dtdS" type="date" indexed="true" stored="true" docValues="true"/>
<dynamicField name="*_dtdsS" type="date" indexed="true" stored="true" multiValued="true" docValues="true"/>

<!-- docvalues, not indexed (N suffix) and not stored -->
<dynamicField name="*_sdN" type="string" indexed="false" stored="false" docValues="true"/>
<dynamicField name="*_sdsN" type="string" indexed="false" stored="false" multiValued="true" docValues="true"/>

<!-- explicit points with docValues (since they can't be uninverted with FieldCache) -->
<dynamicField name="*_ip" type="pint" indexed="true" stored="true" docValues="true" multiValued="false"/>
@ -42,12 +42,12 @@
<reporter name="defaultJmx" class="org.apache.solr.metrics.reporters.SolrJmxReporter">
<bool name="enabled">false</bool>
</reporter>
<reporter name="test" group="shard">
<reporter name="test" group="shard" class="org.apache.solr.metrics.reporters.solr.SolrShardReporter">
<int name="period">5</int>
<str name="filter">UPDATE\./update/.*requests</str>
<str name="filter">QUERY\./select.*requests</str>
</reporter>
<reporter name="test" group="cluster">
<reporter name="test" group="cluster" class="org.apache.solr.metrics.reporters.solr.SolrClusterReporter">
<str name="handler">/admin/metrics/collector</str>
<int name="period">5</int>
<lst name="report">
@ -369,14 +369,14 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
assertEquals(0, response.getStatus());

waitForState("Expecting property 'preferredleader' to appear on replica " + replica.getName(), collection,
(n, c) -> "true".equals(c.getReplica(replica.getName()).getStr("property.preferredleader")));
(n, c) -> "true".equals(c.getReplica(replica.getName()).getProperty("preferredleader")));

response = CollectionAdminRequest.deleteReplicaProperty(collection, "shard1", replica.getName(), "property.preferredleader")
.process(cluster.getSolrClient());
assertEquals(0, response.getStatus());

waitForState("Expecting property 'preferredleader' to be removed from replica " + replica.getName(), collection,
(n, c) -> c.getReplica(replica.getName()).getStr("property.preferredleader") == null);
(n, c) -> c.getReplica(replica.getName()).getProperty("preferredleader") == null);

}

@ -396,7 +396,7 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
for (Slice slice : c) {
int count = 0;
for (Replica replica : slice) {
if ("true".equals(replica.getStr("property.preferredleader")))
if ("true".equals(replica.getProperty("preferredleader")))
count += 1;
}
if (count != 1)
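The switch from getStr("property.preferredleader") to getProperty("preferredleader") seen throughout these tests relies on getProperty adding the "property." prefix internally. A tiny sketch of that lookup behavior (the helper below mirrors it over a plain map and is not Solr's code):

    import java.util.Collections;
    import java.util.Map;

    public class ReplicaPropertyLookup {
      // Sketch of the lookup the tests now rely on: the unprefixed name is
      // resolved against the "property."-prefixed key in the replica's map.
      static String getProperty(Map<String, Object> replicaProps, String propertyName) {
        String key = propertyName.startsWith("property.") ? propertyName : "property." + propertyName;
        Object value = replicaProps.get(key);
        return value == null ? null : value.toString();
      }

      public static void main(String[] args) {
        Map<String, Object> props = Collections.singletonMap("property.preferredleader", (Object) "true");
        System.out.println(getProperty(props, "preferredleader"));          // true
        System.out.println(getProperty(props, "property.preferredleader")); // true
      }
    }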
@ -37,7 +37,6 @@ import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.UpdateRequest;
@ -49,6 +48,8 @@ import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.util.TimeOut;
import org.junit.Test;
import org.slf4j.Logger;
@ -63,7 +64,6 @@ import static java.util.concurrent.TimeUnit.SECONDS;
* This test is modeled after SyncSliceTest
*/
@Slow
@BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-10126")
public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {

private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@ -78,6 +78,7 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
if (!success) {
printLayoutOnTearDown = true;
}
System.clearProperty("distribUpdateSoTimeout");
System.clearProperty("solr.directoryFactory");
System.clearProperty("solr.ulog.numRecordsToKeep");
System.clearProperty("tests.zk.violationReportAction");
@ -96,6 +97,8 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {

@Override
public void distribSetUp() throws Exception {
// set socket timeout small, so replicas won't be put into LIR state when they restart
System.setProperty("distribUpdateSoTimeout", "3000");
// tlog gets deleted after node restarts if we use CachingDirectoryFactory.
// make sure that tlog stays intact after we restart a node
System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
@ -181,13 +184,21 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
assertEquals(nodePeerSynced, shardToLeaderJetty.get("shard1"));

// assert metrics
MetricRegistry registry = nodePeerSynced.jetty.getCoreContainer().getMetricManager().registry("solr.core.collection1");
SolrMetricManager manager = nodePeerSynced.jetty.getCoreContainer().getMetricManager();
MetricRegistry registry = null;
for (String name : manager.registryNames()) {
if (name.startsWith("solr.core.collection1")) {
registry = manager.registry(name);
break;
}
}
assertNotNull(registry);
Map<String, Metric> metrics = registry.getMetrics();
assertTrue("REPLICATION.time present", metrics.containsKey("REPLICATION.time"));
assertTrue("REPLICATION.errors present", metrics.containsKey("REPLICATION.errors"));
Timer timer = (Timer)metrics.get("REPLICATION.time");
assertTrue("REPLICATION.peerSync.time present", metrics.containsKey("REPLICATION.peerSync.time"));
assertTrue("REPLICATION.peerSync.errors present", metrics.containsKey("REPLICATION.peerSync.errors"));
Timer timer = (Timer)metrics.get("REPLICATION.peerSync.time");
assertEquals(1L, timer.getCount());
Counter counter = (Counter)metrics.get("REPLICATION.errors");
Counter counter = (Counter)metrics.get("REPLICATION.peerSync.errors");
assertEquals(0L, counter.getCount());
success = true;
} finally {
@ -197,14 +208,18 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {

class IndexInBackGround extends Thread {
private int numDocs;
private CloudJettyRunner runner;

public IndexInBackGround(int numDocs) {
public IndexInBackGround(int numDocs, CloudJettyRunner nodeToBringUp) {
super(getClassName());
this.numDocs = numDocs;
this.runner = nodeToBringUp;
}

public void run() {
try {
// If we don't wait for cores to get loaded, the leader may put this replica into LIR state
waitForCoreLoading();
for (int i = 0; i < numDocs; i++) {
indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId);
docId++;
@ -216,6 +231,17 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
//Throwing an error here will kill the thread
}
}

private void waitForCoreLoading() throws InterruptedException {
while (true) {
if (runner.jetty.getCoreContainer() != null) {
CoreContainer cc = runner.jetty.getCoreContainer();
cc.waitForLoadingCoresToFinish(20000);
break;
}
Thread.sleep(100);
}
}
}

@ -271,8 +297,9 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
throws Exception {
// disable fingerprint check if needed
System.setProperty("solr.disableFingerprint", String.valueOf(disableFingerprint));

IndexInBackGround iib = new IndexInBackGround(50);
// wait a little bit so the socket between leader -> replica will time out
Thread.sleep(3000);
IndexInBackGround iib = new IndexInBackGround(50, nodeToBringUp);
iib.start();

// bring back dead node and ensure it recovers
@ -62,7 +62,7 @@ public abstract class ReplicaPropertiesBase extends AbstractFullDistribZkTestBas
if (replica == null) {
fail("Could not find collection/replica pair! " + collectionName + "/" + replicaName);
}
if (StringUtils.isBlank(replica.getStr(property))) return;
if (StringUtils.isBlank(replica.getProperty(property))) return;
Thread.sleep(100);
}
fail("Property " + property + " not set correctly for collection/replica pair: " +
@ -88,11 +88,11 @@ public abstract class ReplicaPropertiesBase extends AbstractFullDistribZkTestBas
if (replica == null) {
fail("Could not find collection/replica pair! " + collectionName + "/" + replicaName);
}
if (StringUtils.equals(val, replica.getStr(property))) return;
if (StringUtils.equals(val, replica.getProperty(property))) return;
Thread.sleep(100);
}

fail("Property '" + property + "' with value " + replica.getStr(property) +
fail("Property '" + property + "' with value " + replica.getProperty(property) +
" not set correctly for collection/replica pair: " + collectionName + "/" + replicaName + " property map is " +
replica.getProperties().toString() + ".");

@ -131,7 +131,7 @@ public abstract class ReplicaPropertiesBase extends AbstractFullDistribZkTestBas
int propCount = 0;
for (Replica replica : slice.getReplicas()) {
uniqueNodes.add(replica.getNodeName());
String propVal = replica.getStr(property);
String propVal = replica.getProperty(property);
if (StringUtils.isNotBlank(propVal)) {
++propCount;
if (counts.containsKey(replica.getNodeName()) == false) {
@ -401,8 +401,8 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
client.request(request);

// The above should have set exactly one preferredleader...
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
@ -412,8 +412,8 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"property", "preferredLeader",
"property.value", "true");
// The preferred leader property for shard1 should have switched to the other replica.
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
@ -424,9 +424,9 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"property.value", "true");

// Now we should have a preferred leader in both shards...
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
@ -437,11 +437,11 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"property.value", "true");

// Now we should have three preferred leaders.
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME1, c2_s1_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME1, c2_s1_r1, "preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toString(),
@ -452,10 +452,10 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {

// Now we should have two preferred leaders.
// But first we have to wait for the overseer to finish the action
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

// Try adding an arbitrary property to one that has the leader property
doPropertyAction(client,
@ -466,11 +466,11 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"property", "testprop",
"property.value", "true");

verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "testprop", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
|
@ -480,12 +480,12 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
|
|||
"property", "prop",
|
||||
"property.value", "silly");
|
||||
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "true");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
|
||||
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
|
||||
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "testprop", "true");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "prop", "silly");
|
||||
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
|
||||
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");
|
||||
|
||||
doPropertyAction(client,
|
||||
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
|
||||
|
@ -496,12 +496,12 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"property.value", "nonsense",
SHARD_UNIQUE, "true");

verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "nonsense");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "testprop", "nonsense");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

doPropertyAction(client,
@ -513,12 +513,12 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"property.value", "true",
SHARD_UNIQUE, "false");

verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "testprop", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
@ -527,12 +527,12 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
"replica", c1_s1_r1,
"property", "property.testprop");

verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

try {
doPropertyAction(client,
@ -549,12 +549,12 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
se.getMessage().contains("with the shardUnique parameter set to something other than 'true'"));
}

verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader");

Map<String, String> origProps = getProps(client, COLLECTION_NAME, c1_s1_r1,
"state", "core", "node_name", "base_url");
@ -592,10 +592,10 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
|
|||
"property.value", "base_url_bad");
|
||||
|
// The above should be on new properties.
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.state", "state_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.core", "core_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.node_name", "node_name_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.base_url", "base_url_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "state", "state_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "core", "core_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "node_name", "node_name_bad");
|
||||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "base_url", "base_url_bad");
|
||||
|
||||
doPropertyAction(client,
|
||||
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
|
||||
|
@ -630,10 +630,10 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
|
|||
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, ent.getKey(), ent.getValue());
|
||||
}
|
||||
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.state");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.core");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.node_name");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.base_url");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "state");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "core");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "node_name");
|
||||
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "base_url");
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -776,7 +776,7 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
|
|||
Replica replica = docCollection.getReplica(replicaName);
|
||||
Map<String, String> propMap = new HashMap<>();
|
||||
for (String prop : props) {
|
||||
propMap.put(prop, replica.getStr(prop));
|
||||
propMap.put(prop, replica.getProperty(prop));
|
||||
}
|
||||
return propMap;
|
||||
}
|
||||
|
|
|
@@ -103,7 +103,7 @@ public class TestReplicaProperties extends ReplicaPropertiesBase {
"collection", COLLECTION_NAME,
"property", "preferredLeader");

verifyUniqueAcrossCollection(client, COLLECTION_NAME, "property.preferredleader");
verifyUniqueAcrossCollection(client, COLLECTION_NAME, "preferredleader");

doPropertyAction(client,
"action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(),

@@ -170,7 +170,7 @@ public class TestReplicaProperties extends ReplicaPropertiesBase {
"shardUnique", "true");

verifyPropertyVal(client, COLLECTION_NAME,
c1_s1_r1, "property.bogus1", "true");
c1_s1_r1, "bogus1", "true");
verifyPropertyVal(client, COLLECTION_NAME,
c1_s1_r2, "property.bogus1", "whatever");
@@ -27,6 +27,7 @@ import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.core.CoreContainer;

@@ -166,12 +167,24 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
assertTrue(metrics.containsKey("CONTAINER.fs.totalSpace"));
assertTrue(metrics.containsKey("CONTAINER.fs.usableSpace"));
assertTrue(metrics.containsKey("CONTAINER.fs.path"));
assertTrue(metrics.containsKey("CONTAINER.fs.spins"));
assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.totalSpace"));
assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.usableSpace"));
assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.path"));
assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.spins"));
assertTrue(metrics.containsKey("CONTAINER.version.specification"));
assertTrue(metrics.containsKey("CONTAINER.version.implementation"));
Gauge<?> g = (Gauge<?>)metrics.get("CONTAINER.fs.path");
assertEquals(g.getValue(), cc.getResourceLoader().getInstancePath().toAbsolutePath().toString());
boolean spins = IOUtils.spins(cc.getCoreRootDirectory());
g = (Gauge<?>)metrics.get("CONTAINER.fs.coreRoot.spins");
assertEquals(spins, g.getValue());
g = (Gauge<?>)metrics.get("CONTAINER.fs.spins");
if (cc.getConfig().getSolrDataHome() != null) {
spins = IOUtils.spins(cc.getConfig().getSolrDataHome());
assertEquals(spins, g.getValue());
} else {
assertEquals(spins, g.getValue());
}
}
}
@@ -16,6 +16,13 @@
*/
package org.apache.solr.request;

import java.lang.invoke.MethodHandles;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;

@@ -28,7 +35,10 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.IntervalFacets.FacetInterval;
import org.apache.solr.request.IntervalFacets.IntervalCompareResult;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.NumberType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.StrField;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.util.RefCounted;

@@ -37,13 +47,13 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.invoke.MethodHandles;
import java.util.Arrays;

public class TestIntervalFaceting extends SolrTestCaseJ4 {

private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

private final static long DATE_START_TIME_RANDOM_TEST = 1499797224224L;
private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ROOT);

@BeforeClass
public static void beforeTests() throws Exception {
// we need DVs on point fields to compute stats & facets
@@ -245,13 +255,14 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
@Slow
public void testRandom() throws Exception {
// All field values will be a number between 0 and cardinality
int cardinality = 100000;
int cardinality = 10000;
// Fields to use for interval faceting
String[] fields = new String[]{
"test_s_dv", "test_i_dv", "test_l_dv", "test_f_dv", "test_d_dv",
"test_ss_dv", "test_is_dv", "test_fs_dv", "test_ls_dv", "test_ds_dv", "test_s", "test_i",
"test_l", "test_f", "test_d", "test_ss", "test_is", "test_fs", "test_ls", "test_ds",
"test_i_p", "test_is_p", "test_l_p", "test_ls_p", "test_f_p", "test_fs_p", "test_d_p", "test_ds_p"};
"test_s_dv", "test_i_dv", "test_l_dv", "test_f_dv", "test_d_dv", "test_dt_dv",
"test_ss_dv", "test_is_dv", "test_fs_dv", "test_ls_dv", "test_ds_dv", "test_dts_dv", "test_s", "test_i",
"test_l", "test_f", "test_d", "test_dt", "test_ss", "test_is", "test_fs", "test_ls", "test_ds", "test_dts",
"test_i_p", "test_is_p", "test_l_p", "test_ls_p", "test_f_p", "test_fs_p", "test_d_p", "test_ds_p", "test_dts_p"
};
for (int i = 0; i < atLeast(500); i++) {
if (random().nextInt(50) == 0) {
//have some empty docs

@@ -263,30 +274,34 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
//delete some docs
assertU(delI(String.valueOf(i - 1)));
}
String[] docFields = new String[(random().nextInt(5)) * 10 + 12];
String[] docFields = new String[(random().nextInt(5)) * 12 + 14];
docFields[0] = "id";
docFields[1] = String.valueOf(i);
docFields[1] = String.valueOf(i * (random().nextBoolean()?1:-1)); // in the queries we do positive and negative
docFields[2] = "test_s";
docFields[3] = String.valueOf(random().nextInt(cardinality));
docFields[3] = String.valueOf(randomInt(cardinality));
docFields[4] = "test_i";
docFields[5] = String.valueOf(random().nextInt(cardinality));
docFields[5] = String.valueOf(randomInt(cardinality));
docFields[6] = "test_l";
docFields[7] = String.valueOf(random().nextInt(cardinality));
docFields[7] = String.valueOf(randomLong(cardinality));
docFields[8] = "test_f";
docFields[9] = String.valueOf(random().nextFloat() * cardinality);
docFields[9] = String.valueOf(randomFloat(cardinality));
docFields[10] = "test_d";
docFields[11] = String.valueOf(random().nextDouble() * cardinality);
for (int j = 12; j < docFields.length; ) {
docFields[11] = String.valueOf(raondomDouble(cardinality));
docFields[12] = "test_dt";
docFields[13] = dateFormat.format(new Date(randomMs(cardinality)));
for (int j = 14; j < docFields.length; ) {
docFields[j++] = "test_ss";
docFields[j++] = String.valueOf(random().nextInt(cardinality));
docFields[j++] = String.valueOf(randomInt(cardinality));
docFields[j++] = "test_is";
docFields[j++] = String.valueOf(random().nextInt(cardinality));
docFields[j++] = String.valueOf(randomInt(cardinality));
docFields[j++] = "test_ls";
docFields[j++] = String.valueOf(random().nextInt(cardinality));
docFields[j++] = String.valueOf(randomLong(cardinality));
docFields[j++] = "test_fs";
docFields[j++] = String.valueOf(random().nextFloat() * cardinality);
docFields[j++] = String.valueOf(randomFloat(cardinality));
docFields[j++] = "test_ds";
docFields[j++] = String.valueOf(random().nextDouble() * cardinality);
docFields[j++] = String.valueOf(raondomDouble(cardinality));
docFields[j++] = "test_dts";
docFields[j++] = dateFormat.format(new Date(randomMs(cardinality)));
}
assertU(adoc(docFields));
if (random().nextInt(50) == 0) {

@@ -295,12 +310,64 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
}
assertU(commit());

for (int i = 0; i < atLeast(100); i++) {
for (int i = 0; i < atLeast(10000); i++) {
doTestQuery(cardinality, fields);
}

}

long randomMs(int cardinality) {
return DATE_START_TIME_RANDOM_TEST + random().nextInt(cardinality) * 1000 * (random().nextBoolean()?1:-1);
}

double raondomDouble(int cardinality) {
if (rarely()) {
int num = random().nextInt(4);
if (num == 0) return Double.NEGATIVE_INFINITY;
if (num == 1) return Double.POSITIVE_INFINITY;
if (num == 2) return Double.MIN_VALUE;
if (num == 3) return Double.MAX_VALUE;
}
Double d = Double.NaN;
while (d.isNaN()) {
d = random().nextDouble();
}
return d * cardinality * (random().nextBoolean()?1:-1);
}

float randomFloat(int cardinality) {
if (rarely()) {
int num = random().nextInt(4);
if (num == 0) return Float.NEGATIVE_INFINITY;
if (num == 1) return Float.POSITIVE_INFINITY;
if (num == 2) return Float.MIN_VALUE;
if (num == 3) return Float.MAX_VALUE;
}
Float f = Float.NaN;
while (f.isNaN()) {
f = random().nextFloat();
}
return f * cardinality * (random().nextBoolean()?1:-1);
}

int randomInt(int cardinality) {
if (rarely()) {
int num = random().nextInt(2);
if (num == 0) return Integer.MAX_VALUE;
if (num == 1) return Integer.MIN_VALUE;
}
return random().nextInt(cardinality) * (random().nextBoolean()?1:-1);
}

long randomLong(int cardinality) {
if (rarely()) {
int num = random().nextInt(2);
if (num == 0) return Long.MAX_VALUE;
if (num == 1) return Long.MIN_VALUE;
}
return randomInt(cardinality);
}

/**
* Executes one query using interval faceting and compares with the same query using
* facet query with the same range
@@ -309,18 +376,22 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
private void doTestQuery(int cardinality, String[] fields) throws Exception {
String[] startOptions = new String[]{"(", "["};
String[] endOptions = new String[]{")", "]"};
// the query should match some documents in most cases
Integer[] qRange = getRandomRange(cardinality, "id");
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q", "id:[" + qRange[0] + " TO " + qRange[1] + "]");
if (rarely()) {
params.set("q", "*:*");
} else {
// the query should match some documents in most cases
String[] qRange = getRandomRange(cardinality, "id");
params.set("q", "id:[" + qRange[0] + " TO " + qRange[1] + "]");
}
params.set("facet", "true");
String field = fields[random().nextInt(fields.length)]; //choose from any of the fields
String field = pickRandom(fields); //choose from any of the fields
params.set("facet.interval", field);
// number of intervals
for (int i = 0; i < 1 + random().nextInt(20); i++) {
Integer[] interval = getRandomRange(cardinality, field);
String open = startOptions[interval[0] % 2];
String close = endOptions[interval[1] % 2];
String[] interval = getRandomRange(cardinality, field);
String open = pickRandom(startOptions);
String close = pickRandom(endOptions);
params.add("f." + field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close);
params.add("facet.query", field + ":" + open.replace('(', '{') + interval[0] + " TO " + interval[1] + close.replace(')', '}'));
}

@@ -331,10 +402,11 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
NamedList<Object> facetIntervals = (NamedList<Object>) ((NamedList<Object>) (NamedList<Object>) ((NamedList<Object>) rsp.getValues().get("facet_counts"))
.get("facet_intervals")).get(field);
assertEquals("Responses don't have the same number of facets: \n" + facetQueries + "\n" + facetIntervals,
facetQueries.size(), facetIntervals.size());
facetQueries.size(), getCountDistinctIntervals(facetIntervals));
for (int i = 0; i < facetIntervals.size(); i++) {
assertEquals("Interval did not match: " + facetIntervals.getName(i), facetIntervals.getVal(i).toString(),
facetQueries.get(field + ":" + facetIntervals.getName(i).replace(",", " TO ").replace('(', '{').replace(')', '}')).toString());
assertEquals("Interval did not match: " + field + ": " + facetIntervals.getName(i) + "\nResponse: " + rsp.getValues().get("facet_counts"),
facetQueries.get(field + ":" + facetIntervals.getName(i).replace(",", " TO ").replace('(', '{').replace(')', '}')).toString(),
facetIntervals.getVal(i).toString());
}
} finally {
req.close();
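The comparison above hinges on a syntax mapping: an interval `(a,b]` corresponds to the range facet query `{a TO b]`, with `(`/`)` rewritten to the exclusive `{`/`}` bounds. A stand-alone sketch with a hypothetical interval on `test_i_dv`:

----
import org.apache.solr.common.params.ModifiableSolrParams;

// Sketch: the interval (5,20] must count the same documents as the
// equivalent exclusive/inclusive range query {5 TO 20].
public class IntervalMappingSketch {
  public static void main(String[] args) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.add("f.test_i_dv.facet.interval.set", "(5,20]");
    params.add("facet.query", "test_i_dv:{5 TO 20]");
    System.out.println(params);
  }
}
----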
@@ -342,24 +414,80 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {

}

private int getCountDistinctIntervals(NamedList<Object> facetIntervals) {
Set<String> distinctIntervals = new HashSet<>(facetIntervals.size());
for (int i = 0; i < facetIntervals.size(); i++) {
distinctIntervals.add(facetIntervals.getName(i));
}
return distinctIntervals.size();
}

/**
* Returns a random range. It's guaranteed that the first
* number will be lower than the second, and both of them
* between 0 (inclusive) and <code>max</code> (exclusive).
* number will be lower than the second. The range could have values greater than "max",
* for example [Integer/Long/Float/Double].[MIN/MAX_VALUE,POSITIVE/NEGATIVE_INFINITY]
* If the fieldName is "test_s_dv" or "test_ss_dv" (the
* two fields used for Strings), the comparison will be done
* alphabetically
* If the field is a Date, a date range will be returned
* The range could also contain "*" as beginning and/or end of the range
*/
private Integer[] getRandomRange(int max, String fieldName) {
Integer[] values = new Integer[2];
values[0] = random().nextInt(max);
values[1] = random().nextInt(max);
if (fieldName.startsWith("test_s")) {
private String[] getRandomRange(int max, String fieldName) {
Number[] values = new Number[2];
FieldType ft = h.getCore().getLatestSchema().getField(fieldName).getType();
if (ft.getNumberType() == null) {
assert ft instanceof StrField;
values[0] = randomInt(max);
values[1] = randomInt(max);
Arrays.sort(values, (o1, o2) -> String.valueOf(o1).compareTo(String.valueOf(o2)));
} else {
switch (ft.getNumberType()) {
case DOUBLE:
values[0] = raondomDouble(max);
values[1] = raondomDouble(max);
break;
case FLOAT:
values[0] = randomFloat(max);
values[1] = randomFloat(max);
break;
case INTEGER:
values[0] = randomInt(max);
values[1] = randomInt(max);
break;
case LONG:
values[0] = randomLong(max);
values[1] = randomLong(max);
break;
case DATE:
values[0] = randomMs(max);
values[1] = randomMs(max);
break;
default:
throw new AssertionError("Unexpected number type");

}
Arrays.sort(values);
}
return values;
String[] stringValues = new String[2];
if (rarely()) {
stringValues[0] = "*";
} else {
if (ft.getNumberType() == NumberType.DATE) {
stringValues[0] = dateFormat.format(values[0]);
} else {
stringValues[0] = String.valueOf(values[0]);
}
}
if (rarely()) {
stringValues[1] = "*";
} else {
if (ft.getNumberType() == NumberType.DATE) {
stringValues[1] = dateFormat.format(values[1]);
} else {
stringValues[1] = String.valueOf(values[1]);
}
}
return stringValues;
}

@Test

@@ -772,7 +900,6 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
assertIntervalQuery(field, "(0, " + Double.POSITIVE_INFINITY + ")", "2");
assertIntervalQuery(field, "(0, " + Double.POSITIVE_INFINITY + "]", "3");
}

}

@Test
@@ -35,7 +35,6 @@ public class TestPayloadScoreQParserPlugin extends SolrTestCaseJ4 {

@Test
public void test() {
clearIndex();

assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf v=B func=min}"), "//float[@name='score']='2.0'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf v=mult func=min}"), "//float[@name='score']='50.0'");

@@ -47,6 +46,15 @@ public class TestPayloadScoreQParserPlugin extends SolrTestCaseJ4 {
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=average}B C"), "//float[@name='score']='2.5'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=max}A B C"), "//float[@name='score']='3.0'");

assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=sum}A B C"), "//float[@name='score']='6.0'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=sum operator=or}A C"), "//float[@name='score']='4.0'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=sum operator=or}A"), "//float[@name='score']='1.0'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=sum operator=or}foo"), "//result[@numFound='0']");

assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=max operator=or}A C"), "//float[@name='score']='3.0'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=min operator=or}A x"), "//float[@name='score']='1.0'");
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf func=average operator=or}A C"), "//float[@name='score']='2.0'");
// TODO: fix this includeSpanScore test to be less brittle - score result is score of "A" (via BM25) multiplied by 1.0 (payload value)
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf v=A func=min}"), "//float[@name='score']='1.0'");
|
||||
assertQ(req("fl","*,score", "q", "{!payload_score f=vals_dpf v=A func=min includeSpanScore=true}"), "//float[@name='score']='0.2876821'");
|
||||
|
|
|
@@ -17,6 +17,7 @@
package org.apache.solr.search.join;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.junit.BeforeClass;
import org.junit.Test;

@@ -25,7 +26,6 @@ public class GraphQueryTest extends SolrTestCaseJ4 {

@BeforeClass
public static void beforeTests() throws Exception {

initCore("solrconfig.xml","schema_latest.xml");
}

@@ -44,6 +44,9 @@ public class GraphQueryTest extends SolrTestCaseJ4 {
doGraph( params("node_id","node_fps", "edge_id","edge_fps") );
doGraph( params("node_id","node_dp", "edge_id","edge_dps") );
doGraph( params("node_id","node_dps", "edge_id","edge_dps") );

// string with indexed=false and docValues=true
doGraph( params("node_id","node_sdN", "edge_id","edge_sdsN") );
}

public void doGraph(SolrParams p) throws Exception {

@@ -118,4 +121,23 @@ public class GraphQueryTest extends SolrTestCaseJ4 {
);
}

@Test
public void testGraphQueryParserValidation() throws Exception {
// from schema field existence
doGraphQuery( params("node_id","node_nothere", "edge_id","edge_ss",
"message", "field node_nothere not defined in schema", "errorCode", String.valueOf(SolrException.ErrorCode.BAD_REQUEST.code)) );

// to schema field existence
doGraphQuery( params("node_id","node_s", "edge_id","edge_notthere",
"message", "field node_nothere not defined in schema", "errorCode", String.valueOf(SolrException.ErrorCode.BAD_REQUEST.code)) );
}

public void doGraphQuery(SolrParams p) throws Exception {
String message = p.get("message");
int errorCode = p.getInt("errorCode", SolrException.ErrorCode.UNKNOWN.code);

assertQEx(message , req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=false maxDepth=1}id:doc_1")
, errorCode
);
}
}
@@ -0,0 +1,165 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.security.hadoop;

import static org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME;
import static org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME;
import static org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME;
import static org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME;

import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

import org.apache.lucene.util.Constants;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider;
import org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.server.auth.DigestAuthenticationProvider;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestZkAclsWithHadoopAuth extends SolrCloudTestCase {
protected static final int NUM_SERVERS = 1;
protected static final int NUM_SHARDS = 1;
protected static final int REPLICATION_FACTOR = 1;
private static final String SOLR_PASSWD = "solr";
private static final String FOO_PASSWD = "foo";
private static final Id SOLR_ZK_ID = new Id("digest", digest ("solr", SOLR_PASSWD));
private static final Id FOO_ZK_ID = new Id("digest", digest ("foo", FOO_PASSWD));

@BeforeClass
public static void setupClass() throws Exception {
assumeFalse("Hadoop does not work on Windows", Constants.WINDOWS);
assumeFalse("FIXME: SOLR-8182: This test fails under Java 9", Constants.JRE_IS_MINIMUM_JAVA9);

System.setProperty(SolrZkClient.ZK_ACL_PROVIDER_CLASS_NAME_VM_PARAM_NAME,
VMParamsAllAndReadonlyDigestZkACLProvider.class.getName());
System.setProperty(SolrZkClient.ZK_CRED_PROVIDER_CLASS_NAME_VM_PARAM_NAME,
VMParamsSingleSetCredentialsDigestZkCredentialsProvider.class.getName());
System.setProperty(DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, "solr");
System.setProperty(DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, SOLR_PASSWD);
System.setProperty(DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, "foo");
System.setProperty(DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME, FOO_PASSWD);

configureCluster(NUM_SERVERS)// nodes
.withSolrXml(MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML)
.withSecurityJson(TEST_PATH().resolve("security").resolve("hadoop_simple_auth_with_delegation.json"))
.addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
.configure();
}

@AfterClass
public static void tearDownClass() {
System.clearProperty(SolrZkClient.ZK_ACL_PROVIDER_CLASS_NAME_VM_PARAM_NAME);
System.clearProperty(SolrZkClient.ZK_CRED_PROVIDER_CLASS_NAME_VM_PARAM_NAME);
System.clearProperty(DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME);
System.clearProperty(DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME);
System.clearProperty(DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME);
System.clearProperty(DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME);
}

@Test
public void testZkAcls() throws Exception {
ZooKeeper keeper = null;
try {
keeper = new ZooKeeper(cluster.getZkServer().getZkAddress(), (int) TimeUnit.MINUTES.toMillis(1), new Watcher() {
@Override
public void process(WatchedEvent arg0) {
// Do nothing
}
});

keeper.addAuthInfo("digest", ("solr:"+SOLR_PASSWD).getBytes(StandardCharsets.UTF_8));

// Test well known paths.
checkNonSecurityACLs(keeper, "/solr.xml");
checkSecurityACLs(keeper, "/security/token");
checkSecurityACLs(keeper, "/security");

// Now test all ZK tree.
String zkHost = cluster.getSolrClient().getZkHost();
String zkChroot = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): null;
walkZkTree(keeper, zkChroot, "/");

} finally {
if (keeper != null) {
keeper.close();
}
}
}

private void walkZkTree (ZooKeeper keeper, String zkChroot, String path) throws Exception {
if (isSecurityZNode(zkChroot, path)) {
checkSecurityACLs(keeper, path);
} else {
checkNonSecurityACLs(keeper, path);
}

List<String> children = keeper.getChildren(path, false);
for (String child : children) {
String subpath = path.endsWith("/") ? path + child : path + "/" + child;
walkZkTree(keeper, zkChroot, subpath);
}
}

private boolean isSecurityZNode(String zkChroot, String path) {
String temp = path;
if (zkChroot != null) {
temp = path.replace(zkChroot, "");
}
return !ZkStateReader.SOLR_SECURITY_CONF_PATH.equals(path) &&
temp.startsWith(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH);
}

private void checkSecurityACLs(ZooKeeper keeper, String path) throws Exception {
List<ACL> acls = keeper.getACL(path, new Stat());
String message = String.format(Locale.ROOT, "Path %s ACLs found %s", path, acls);
assertEquals(message, 1, acls.size());
assertTrue(message, acls.contains(new ACL(ZooDefs.Perms.ALL, SOLR_ZK_ID)));
}

private void checkNonSecurityACLs(ZooKeeper keeper, String path) throws Exception {
List<ACL> acls = keeper.getACL(path, new Stat());
String message = String.format(Locale.ROOT, "Path %s ACLs found %s", path, acls);
assertEquals(message, 2, acls.size());
assertTrue(message, acls.contains(new ACL(ZooDefs.Perms.ALL, SOLR_ZK_ID)));
assertTrue(message, acls.contains(new ACL(ZooDefs.Perms.READ, FOO_ZK_ID)));
}

private static String digest (String userName, String passwd) {
try {
return DigestAuthenticationProvider.generateDigest(userName+":"+passwd);
} catch (NoSuchAlgorithmException ex) {
throw new RuntimeException(ex);
}
}
}
@@ -91,6 +91,76 @@ public class SoftAutoCommitTest extends AbstractSolrTestCase {
super.setUp();

}

public void testSoftAndHardCommitMaxDocs() throws Exception {

// NOTE WHEN READING THIS TEST...
// The maxDocs settings on the CommitTrackers are the "upper bound"
// of how many docs can be added without doing a commit.
// That means they are one less than the actual number of docs that will trigger a commit.
final int softCommitMaxDocs = 5;
final int hardCommitMaxDocs = 7;

assert softCommitMaxDocs < hardCommitMaxDocs; // remainder of test designed with these assumptions

CommitTracker hardTracker = updater.commitTracker;
CommitTracker softTracker = updater.softCommitTracker;

// wait out any leaked commits
monitor.hard.poll(3000, MILLISECONDS);
monitor.soft.poll(0, MILLISECONDS);
monitor.clear();

softTracker.setDocsUpperBound(softCommitMaxDocs);
softTracker.setTimeUpperBound(-1);
hardTracker.setDocsUpperBound(hardCommitMaxDocs);
hardTracker.setTimeUpperBound(-1);
// simplify what's going on by only having soft auto commits trigger new searchers
hardTracker.setOpenSearcher(false);

// Note: doc id counting starts at 0, see comment at start of test regarding "upper bound"

// add num docs up to the soft commit upper bound
for (int i = 0; i < softCommitMaxDocs; i++) {
assertU(adoc("id", ""+(8000 + i), "subject", "testMaxDocs"));
}
// the first soft commit we see must be after this.
final long minSoftCommitNanos = System.nanoTime();

// now add the doc that will trigger the soft commit,
// as well as additional docs up to the hard commit upper bound
for (int i = softCommitMaxDocs; i < hardCommitMaxDocs; i++) {
assertU(adoc("id", ""+(8000 + i), "subject", "testMaxDocs"));
}
// the first hard commit we see must be after this.
final long minHardCommitNanos = System.nanoTime();

// a final doc to trigger the hard commit
assertU(adoc("id", ""+(8000 + hardCommitMaxDocs), "subject", "testMaxDocs"));

// now poll our monitors for the timestamps on the first commits
final Long firstSoftNanos = monitor.soft.poll(5000, MILLISECONDS);
final Long firstHardNanos = monitor.hard.poll(5000, MILLISECONDS);

assertNotNull("didn't get a single softCommit after adding the max docs", firstSoftNanos);
assertNotNull("didn't get a single hardCommit after adding the max docs", firstHardNanos);

assertTrue("softCommit @ " + firstSoftNanos + "ns is before the maxDocs should have triggered it @ " +
minSoftCommitNanos + "ns",
minSoftCommitNanos < firstSoftNanos);
assertTrue("hardCommit @ " + firstHardNanos + "ns is before the maxDocs should have triggered it @ " +
minHardCommitNanos + "ns",
minHardCommitNanos < firstHardNanos);

// wait a bit, w/o other action we shouldn't see any new hard/soft commits
assertNull("Got a hard commit we weren't expecting",
monitor.hard.poll(1000, MILLISECONDS));
assertNull("Got a soft commit we weren't expecting",
monitor.soft.poll(0, MILLISECONDS));

monitor.assertSaneOffers();
monitor.clear();
}

public void testSoftAndHardCommitMaxTimeMixedAdds() throws Exception {
final int softCommitWaitMillis = 500;
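The "upper bound" comments above carry the test's arithmetic, so here is a stand-alone sketch of that off-by-one (the counter is an assumption for illustration, not CommitTracker's actual implementation):

----
// Sketch: with docsUpperBound = 5, the 6th added document is the one that
// triggers the commit -- the bound is "one less than" the triggering count.
public class UpperBoundSketch {
  public static void main(String[] args) {
    int docsUpperBound = 5;
    int pending = 0;
    for (int doc = 1; doc <= 6; doc++) {
      pending++;
      if (pending > docsUpperBound) {
        System.out.println("commit triggered by doc " + doc); // prints: doc 6
        pending = 0;
      }
    }
  }
}
----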
@@ -146,3 +146,8 @@ For example, to use a blob named test, you would configure `solrconfig.xml` like
----

If there are parameters available in the custom handler, you can define them in the same way as any other request handler definition.

[NOTE]
====

Blob store can only be used to dynamically load components configured in `solrconfig.xml`. Components specified in `schema.xml` cannot be loaded from the blob store.
@@ -307,6 +307,10 @@ The `graph` query parser does a breadth first, cyclic aware, graph traversal of

The graph is built according to linkages between documents based on the terms found in `from` and `to` fields that you specify as part of the query.

The supported fieldTypes are point fields with docValues enabled or string fields with indexed=true or docValues=true.
For string fields which are indexed=false and docValues=true, please refer to the javadocs for `DocValuesTermsQuery` for its performance characteristics; indexed=true will perform better for most use cases.

=== Graph Query Parameters

`to`::
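For orientation, a minimal SolrJ sketch of issuing a graph query (the base URL and the field names `node_id`/`edge_id` are placeholders, echoing the parameters used in GraphQueryTest earlier in this commit):

----
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

// Sketch: traverse node_id -> edge_id linkages starting from id:doc_1,
// excluding the root document and stopping after one hop.
public class GraphQueryExample {
  public static void main(String[] args) throws Exception {
    try (HttpSolrClient client =
             new HttpSolrClient.Builder("http://localhost:8983/solr/myCollection").build()) {
      SolrQuery q =
          new SolrQuery("{!graph from=node_id to=edge_id returnRoot=false maxDepth=1}id:doc_1");
      QueryResponse rsp = client.query(q);
      System.out.println("matched: " + rsp.getResults().getNumFound());
    }
  }
}
----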
@@ -657,7 +661,11 @@ This parser accepts the following parameters:
The field to use (required).

`func`::
Payload function: min, max, average (required).
Payload function: min, max, average, sum (required).

`operator`::
Search operator: `or`, `phrase` (default) (optional). This defines whether the search query should be an OR query or a phrase query.

`includeSpanScore`::
If `true`, multiplies the computed payload factor by the score of the original query. If `false`, the default, the computed payload factor is the score.
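As a sketch of the newly documented `sum` function and `operator` parameter in use (the field name `vals_dpf` and the URL are assumptions carried over from TestPayloadScoreQParserPlugin above):

----
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

// Sketch: sum the payloads of terms A and C, matched as an OR query rather
// than as a phrase.
public class PayloadScoreExample {
  public static void main(String[] args) throws Exception {
    try (HttpSolrClient client =
             new HttpSolrClient.Builder("http://localhost:8983/solr/myCollection").build()) {
      SolrQuery q = new SolrQuery("{!payload_score f=vals_dpf func=sum operator=or}A C");
      q.setFields("*", "score"); // expose the payload-derived score
      System.out.println(client.query(q).getResults());
    }
  }
}
----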
@@ -0,0 +1,67 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.client.solrj.io.eval;

import java.io.IOException;
import java.util.Locale;

import org.apache.commons.math3.distribution.RealDistribution;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class CumulativeProbabilityEvaluator extends ComplexEvaluator implements Expressible {

private static final long serialVersionUID = 1;

public CumulativeProbabilityEvaluator(StreamExpression expression, StreamFactory factory) throws IOException {
super(expression, factory);

if(2 != subEvaluators.size()){
throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting two values (regression result and a number) but found %d",expression,subEvaluators.size()));
}
}

public Number evaluate(Tuple tuple) throws IOException {
StreamEvaluator a = subEvaluators.get(0);
StreamEvaluator b = subEvaluators.get(1);
Number number1 = (Number)b.evaluate(tuple);
RealDistribution rd= (RealDistribution)a.evaluate(tuple);

return rd.cumulativeProbability(number1.doubleValue());
}

@Override
public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass()));
return expression;
}

@Override
public Explanation toExplanation(StreamFactory factory) throws IOException {
return new Explanation(nodeId.toString())
.withExpressionType(ExpressionType.EVALUATOR)
.withFunctionName(factory.getFunctionName(getClass()))
.withImplementingClass(getClass().getName())
.withExpression(toExpression(factory).toString());
}
}
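For context, a minimal SolrJ sketch of invoking the new evaluator through the `/stream` handler (the URL and collection name are placeholders; the expression mirrors the test added to StreamExpressionTest further below, where P(X <= 500) for a normal distribution centered at 500 is 0.5):

----
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.SolrStream;
import org.apache.solr.common.params.ModifiableSolrParams;

// Minimal sketch, assuming a running SolrCloud collection named "myCollection".
public class CumulativeProbabilityExample {
  public static void main(String[] args) throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("expr", "cumulativeProbability(normalDistribution(500, 40), 500)");
    params.set("qt", "/stream");
    SolrStream stream = new SolrStream("http://localhost:8983/solr/myCollection", params);
    try {
      stream.open();
      Tuple tuple = stream.read();
      System.out.println(tuple.get("return-value")); // expected: 0.5
    } finally {
      stream.close();
    }
  }
}
----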
@@ -19,6 +19,8 @@ package org.apache.solr.client.solrj.io.eval;

import java.io.IOException;
import java.util.Locale;
import java.util.List;
import java.util.ArrayList;

import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;

@@ -38,17 +40,27 @@ public class PredictEvaluator extends ComplexEvaluator implements Expressible {
if(2 != subEvaluators.size()){
throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting two values (regression result and a number) but found %d",expression,subEvaluators.size()));
}

}

public Number evaluate(Tuple tuple) throws IOException {
public Object evaluate(Tuple tuple) throws IOException {

StreamEvaluator r = subEvaluators.get(0);
StreamEvaluator d = subEvaluators.get(1);

RegressionEvaluator.RegressionTuple rt= (RegressionEvaluator.RegressionTuple)r.evaluate(tuple);
Number n = (Number)d.evaluate(tuple);
return rt.predict(n.doubleValue());

Object o = d.evaluate(tuple);
if(o instanceof Number) {
Number n = (Number)o;
return rt.predict(n.doubleValue());
} else {
List<Number> list = (List<Number>)o;
List<Number> predications = new ArrayList();
for(Number n : list) {
predications.add(rt.predict(n.doubleValue()));
}
return predications;
}
}

@Override
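The substance of the change above is the number-or-list dispatch in `evaluate`. A stand-alone sketch of that pattern (the `DoubleUnaryOperator` stands in for `RegressionTuple.predict` and is an assumption for illustration):

----
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.DoubleUnaryOperator;

// Sketch: a single Number yields one prediction; a List<Number> yields a list.
public class PredictDispatchSketch {
  @SuppressWarnings("unchecked")
  static Object predict(DoubleUnaryOperator model, Object o) {
    if (o instanceof Number) {
      return model.applyAsDouble(((Number) o).doubleValue());
    }
    List<Number> out = new ArrayList<>();
    for (Number n : (List<Number>) o) {
      out.add(model.applyAsDouble(n.doubleValue()));
    }
    return out;
  }

  public static void main(String[] args) {
    DoubleUnaryOperator model = x -> 2 * x; // hypothetical fitted line y = 2x
    System.out.println(predict(model, 300));                     // 600.0
    System.out.println(predict(model, Arrays.asList(100, 200))); // [200.0, 400.0]
  }
}
----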
@@ -14,9 +14,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.apache.solr.client.solrj.io.eval;

import java.io.IOException;
@@ -157,6 +157,17 @@ public class Replica extends ZkNodeProps {
return this.type;
}

public String getProperty(String propertyName) {
final String propertyKey;
if (!propertyName.startsWith(ZkStateReader.PROPERTY_PROP_PREFIX)) {
propertyKey = ZkStateReader.PROPERTY_PROP_PREFIX+propertyName;
} else {
propertyKey = propertyName;
}
final String propertyValue = getStr(propertyKey);
return propertyValue;
}

@Override
public String toString() {
return name + ':' + JSONUtil.toJSON(propMap, -1); // small enough, keep it on one line (i.e. no indent)
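A quick stand-alone sketch of the key normalization the new accessor performs (`PREFIX` mirrors `ZkStateReader.PROPERTY_PROP_PREFIX`; `normalize` stands in for the prefixing logic above, with the actual lookup done via `getStr`):

----
// Sketch: both the short and the fully prefixed property name resolve to the
// same stored key, so callers no longer need to remember the "property." prefix.
public class PropertyKeySketch {
  static final String PREFIX = "property."; // ZkStateReader.PROPERTY_PROP_PREFIX

  static String normalize(String propertyName) {
    return propertyName.startsWith(PREFIX) ? propertyName : PREFIX + propertyName;
  }

  public static void main(String[] args) {
    System.out.println(normalize("preferredleader"));          // property.preferredleader
    System.out.println(normalize("property.preferredleader")); // property.preferredleader
  }
}
----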
@@ -88,6 +88,7 @@ public class ZkStateReader implements Closeable {
public static final String NUM_SHARDS_PROP = "numShards";
public static final String LEADER_PROP = "leader";
public static final String PROPERTY_PROP = "property";
public static final String PROPERTY_PROP_PREFIX = "property.";
public static final String PROPERTY_VALUE_PROP = "property.value";
public static final String MAX_AT_ONCE_PROP = "maxAtOnce";
public static final String MAX_WAIT_SECONDS_PROP = "maxWaitSeconds";
@@ -1,5 +1,5 @@
{
"documentation": "TODO NOCOMMIT",
"documentation": "https://lucene.apache.org/solr/guide/solrcloud-autoscaling-api.html",
"description": "The Scaling API provides API for adding cluster level scaling rules, triggers and event listeners",
"methods": [
"GET",
@@ -5287,6 +5287,23 @@ public class StreamExpressionTest extends SolrCloudTestCase {
}

@Test
public void testCumulativeProbability() throws Exception {
String expr = "cumulativeProbability(normalDistribution(500, 40), 500)";
ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
paramsLoc.set("expr", expr);
paramsLoc.set("qt", "/stream");

String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
TupleStream solrStream = new SolrStream(url, paramsLoc);

StreamContext context = new StreamContext();
solrStream.setStreamContext(context);
List<Tuple> tuples = getTuples(solrStream);
assertTrue(tuples.size() == 1);
Number number = (Number)tuples.get(0).get("return-value");
assertTrue(number.doubleValue() == .5D);
}

@@ -6270,7 +6287,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
String expr1 = "search("+COLLECTIONORALIAS+", q=\"col_s:a\", fl=\"price_f, order_i\", sort=\"order_i asc\")";
String expr2 = "search("+COLLECTIONORALIAS+", q=\"col_s:b\", fl=\"price_f, order_i\", sort=\"order_i asc\")";

String cexpr = "let(a="+expr1+", b="+expr2+", c=col(a, price_f), d=col(b, price_f), e=regress(c, d), tuple(regress=e, p=predict(e, 300)))";
String cexpr = "let(a="+expr1+", b="+expr2+", c=col(a, price_f), d=col(b, price_f), e=regress(c, d), tuple(regress=e, p=predict(e, 300), pl=predict(e, c)))";

ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
paramsLoc.set("expr", cexpr);

@@ -6293,6 +6310,8 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertTrue(rSquare == 1.0D);
double prediction = tuple.getDouble("p");
assertTrue(prediction == 600.0D);
List<Number> predictions = (List<Number>)tuple.get("pl");
assertList(predictions, 200.0, 400.0, 600.0, 200.0, 400.0, 800.0, 1200.0);
}
@@ -86,6 +86,8 @@ public class MiniSolrCloudCluster {
"    <int name=\"leaderVoteWait\">10000</int>\n" +
"    <int name=\"distribUpdateConnTimeout\">${distribUpdateConnTimeout:45000}</int>\n" +
"    <int name=\"distribUpdateSoTimeout\">${distribUpdateSoTimeout:340000}</int>\n" +
"    <str name=\"zkCredentialsProvider\">${zkCredentialsProvider:org.apache.solr.common.cloud.DefaultZkCredentialsProvider}</str> \n" +
"    <str name=\"zkACLProvider\">${zkACLProvider:org.apache.solr.common.cloud.DefaultZkACLProvider}</str> \n" +
"  </solrcloud>\n" +
"  <metrics>\n" +
"    <reporter name=\"default\" class=\"org.apache.solr.metrics.reporters.SolrJmxReporter\">\n" +
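Because the new entries use `${name:default}` substitution, tests can swap the ZooKeeper providers via plain system properties before the cluster starts; a hedged sketch (property names taken from the snippet above, provider classes from TestZkAclsWithHadoopAuth earlier in this commit):

----
// Sketch: must run before MiniSolrCloudCluster parses its embedded solr.xml.
public class ZkProviderPropsSketch {
  public static void main(String[] args) {
    System.setProperty("zkCredentialsProvider",
        "org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider");
    System.setProperty("zkACLProvider",
        "org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider");
  }
}
----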
@@ -56,7 +56,7 @@
-->
<init-param>
<param-name>excludePatterns</param-name>
<param-value>/libs/.+,/css/.+,/js/.+,/img/.+,/tpl/.+</param-value>
<param-value>/partials/.+,/libs/.+,/css/.+,/js/.+,/img/.+,/tpl/.+</param-value>
</init-param>
</filter>