Merge remote-tracking branch 'origin/master'

Noble Paul 2017-01-03 17:53:34 +10:30
commit 33304f202c
7 changed files with 34 additions and 26 deletions

solr/CHANGES.txt

@@ -208,7 +208,7 @@ New Features
* SOLR-9668,SOLR-7197: introduce cursorMark='true' in SolrEntityProcessor (Yegor Kozlov, Raveendra Yerraguntl via Mikhail Khludnev)
- * SOLR-9684: Add schedule Streaming Expression (Joel Bernstein)
+ * SOLR-9684: Add priority Streaming Expression (Joel Bernstein, David Smiley)
Optimizations
----------------------
@@ -299,7 +299,7 @@ Bug Fixes
* SOLR-9699,SOLR-4668: fix exception from core status in parallel with core reload (Mikhail Khludnev)
* SOLR-9859: replication.properties cannot be updated after being written and neither replication.properties or
index.properties are durable in the face of a crash. (Pushkar Raste, Chris de Kok, Cao Manh Dat, Mark Miller)
* SOLR-9901: Implement move in HdfsDirectoryFactory. (Mark Miller)
@@ -308,6 +308,8 @@ Bug Fixes
* SOLR-9495: AIOBE with confusing message for incomplete sort spec in Streaming Expression (Gus Heck, Joel Bernstein)
* SOLR-9154: Fix DirectSolrSpellChecker to work when added through the Config API. (Anshum Gupta)
Other Changes
----------------------

StreamHandler.java

@@ -140,7 +140,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("fetch", FetchStream.class)
.withFunctionName("executor", ExecutorStream.class)
.withFunctionName("null", NullStream.class)
.withFunctionName("schedule", SchedulerStream.class)
.withFunctionName("priority", PriorityStream.class)
// metrics
.withFunctionName("min", MinMetric.class)
.withFunctionName("max", MaxMetric.class)

DirectSolrSpellChecker.java

@@ -29,6 +29,7 @@ import org.apache.lucene.search.spell.StringDistance;
import org.apache.lucene.search.spell.SuggestWord;
import org.apache.lucene.search.spell.SuggestWordFrequencyComparator;
import org.apache.lucene.search.spell.SuggestWordQueue;
+ import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.search.SolrIndexSearcher;
@@ -93,6 +94,9 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
@Override
public String init(NamedList config, SolrCore core) {
+ SolrParams params = SolrParams.toSolrParams(config);
LOG.info("init: " + config);
String name = super.init(config, core);
@@ -113,37 +117,37 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
sd = core.getResourceLoader().newInstance(distClass, StringDistance.class);
float minAccuracy = DEFAULT_ACCURACY;
- Float accuracy = (Float) config.get(ACCURACY);
+ Float accuracy = params.getFloat(ACCURACY);
if (accuracy != null)
minAccuracy = accuracy;
int maxEdits = DEFAULT_MAXEDITS;
- Integer edits = (Integer) config.get(MAXEDITS);
+ Integer edits = params.getInt(MAXEDITS);
if (edits != null)
maxEdits = edits;
int minPrefix = DEFAULT_MINPREFIX;
- Integer prefix = (Integer) config.get(MINPREFIX);
+ Integer prefix = params.getInt(MINPREFIX);
if (prefix != null)
minPrefix = prefix;
int maxInspections = DEFAULT_MAXINSPECTIONS;
- Integer inspections = (Integer) config.get(MAXINSPECTIONS);
+ Integer inspections = params.getInt(MAXINSPECTIONS);
if (inspections != null)
maxInspections = inspections;
float minThreshold = DEFAULT_THRESHOLD_TOKEN_FREQUENCY;
- Float threshold = (Float) config.get(THRESHOLD_TOKEN_FREQUENCY);
+ Float threshold = params.getFloat(THRESHOLD_TOKEN_FREQUENCY);
if (threshold != null)
minThreshold = threshold;
int minQueryLength = DEFAULT_MINQUERYLENGTH;
- Integer queryLength = (Integer) config.get(MINQUERYLENGTH);
+ Integer queryLength = params.getInt(MINQUERYLENGTH);
if (queryLength != null)
minQueryLength = queryLength;
float maxQueryFrequency = DEFAULT_MAXQUERYFREQUENCY;
- Float queryFreq = (Float) config.get(MAXQUERYFREQUENCY);
+ Float queryFreq = params.getFloat(MAXQUERYFREQUENCY);
if (queryFreq != null)
maxQueryFrequency = queryFreq;
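
All seven settings move from direct casts on the NamedList to typed getters on SolrParams. The motivation (SOLR-9154): configuration supplied through the Config API arrives as Strings, so a cast like (Float) config.get(ACCURACY) throws ClassCastException, while SolrParams.toSolrParams renders each value as text and getFloat parses it. A minimal sketch of the difference, using a hypothetical "accuracy" entry:

import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;

NamedList<Object> config = new NamedList<>();
config.add("accuracy", "0.5");  // the Config API delivers the value as a String

// Old approach: the cast assumes a typed value and fails here.
// Float accuracy = (Float) config.get("accuracy");  // ClassCastException

// New approach: convert once, then parse each setting from its text form.
SolrParams params = SolrParams.toSolrParams(config);
Float accuracy = params.getFloat("accuracy");  // 0.5f, or null if absent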

SolrGangliaReporterTest.java

@@ -73,7 +73,8 @@ public class SolrGangliaReporterTest extends SolrTestCaseJ4 {
gangliaReporter.start();
Thread.sleep(5000);
assertTrue(names.size() >= 3);
- for (String name : names) {
+ String[] frozenNames = (String[])names.toArray(new String[names.size()]);
+ for (String name : frozenNames) {
assertTrue(name, name.startsWith("test.solr.node.cores."));
}
}
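
The reporter keeps appending to names from its own thread while the assertions run, so iterating the live list can fail with ConcurrentModificationException; copying to an array first walks a stable snapshot. The Graphite test below receives the same fix. A minimal sketch of the hazard, with a stand-in writer thread in place of the reporter:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public static void main(String[] args) {
  // A background writer keeps appending, as the Ganglia reporter does.
  List<String> names = Collections.synchronizedList(new ArrayList<>());
  Thread writer = new Thread(() -> {
    for (int i = 0; ; i++) {
      names.add("test.solr.node.cores." + i);
    }
  });
  writer.setDaemon(true);
  writer.start();

  // Iterating the live list races with the writer; even a synchronized
  // list's iterator can throw ConcurrentModificationException.
  // for (String name : names) { ... }

  // toArray holds the list's lock while copying, so the loop below
  // runs over a stable snapshot.
  String[] frozen = names.toArray(new String[0]);
  for (String name : frozen) {
    System.out.println(name);
  }
}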

SolrGraphiteReporterTest.java

@@ -69,7 +69,8 @@ public class SolrGraphiteReporterTest extends SolrTestCaseJ4 {
assertTrue(reporter instanceof SolrGraphiteReporter);
Thread.sleep(5000);
assertTrue(mock.lines.size() >= 3);
- for (String line : mock.lines) {
+ String[] frozenLines = (String[])mock.lines.toArray(new String[mock.lines.size()]);
+ for (String line : frozenLines) {
assertTrue(line, line.startsWith("test.solr.node.cores."));
}
} finally {

SchedulerStream.java → PriorityStream.java

@@ -35,8 +35,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * The scheduler wraps two topics that represent high priority and low priority task queues.
- * Each time the scheduler is called it will check to see if there are any high priority tasks in the queue. If there
+ * The priority function wraps two topics that represent high priority and low priority task queues.
+ * Each time the priority function is called it will check to see if there are any high priority tasks in the queue. If there
* are high priority tasks, then the high priority queue will be read until it returns the EOF Tuple.
*
* If there are no tasks in the high priority queue, then the lower priority task queue will be opened and read until the EOF Tuple is
@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;
* The scheduler is designed to be wrapped by the executor function and a daemon function can be used to call the executor iteratively.
**/
- public class SchedulerStream extends TupleStream implements Expressible {
+ public class PriorityStream extends TupleStream implements Expressible {
private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -53,7 +53,7 @@ public class PriorityStream extends TupleStream implements Expressible {
private PushBackStream tasks;
private TupleStream currentStream;
- public SchedulerStream(StreamExpression expression, StreamFactory factory) throws IOException {
+ public PriorityStream(StreamExpression expression, StreamFactory factory) throws IOException {
// grab all parameters out
List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
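
Per the javadoc above, the priority function drains the high priority topic to its EOF tuple before opening the low priority topic. Reading any such stream follows the usual TupleStream open/read/close cycle; a minimal sketch of that contract (the drain helper is hypothetical, error handling beyond close elided):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.TupleStream;

// Drain a stream until the EOF marker tuple, the same contract the
// priority function relies on for its wrapped topics.
public static List<Tuple> drain(TupleStream stream) throws IOException {
  List<Tuple> tuples = new ArrayList<>();
  try {
    stream.open();
    for (Tuple tuple = stream.read(); !tuple.EOF; tuple = stream.read()) {
      tuples.add(tuple);
    }
  } finally {
    stream.close();
  }
  return tuples;
}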

StreamExpressionTest.java

@@ -2826,7 +2826,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
}
@Test
- public void testSchedulerStream() throws Exception {
+ public void testPriorityStream() throws Exception {
Assume.assumeTrue(!useAlias);
new UpdateRequest()
@@ -2845,7 +2845,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
.withFunctionName("topic", TopicStream.class)
.withFunctionName("schedule", SchedulerStream.class);
.withFunctionName("priority", PriorityStream.class);
StreamExpression expression;
TupleStream stream;
@@ -2856,7 +2856,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
try {
FieldComparator comp = new FieldComparator("a_i", ComparatorOrder.ASCENDING);
expression = StreamExpressionParser.parse("schedule(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0)," +
expression = StreamExpressionParser.parse("priority(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0)," +
"topic(collection1, collection1, q=\"a_s:hello1\", fl=\"id,a_i\", id=2000000, initialCheckpoint=0))");
stream = factory.constructStream(expression);
StreamContext context = new StreamContext();
@@ -2870,7 +2870,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertEquals(tuples.size(), 4);
assertOrder(tuples, 5, 6, 7, 8);
expression = StreamExpressionParser.parse("schedule(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0)," +
expression = StreamExpressionParser.parse("priority(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0)," +
"topic(collection1, collection1, q=\"a_s:hello1\", fl=\"id,a_i\", id=2000000, initialCheckpoint=0))");
stream = factory.constructStream(expression);
context = new StreamContext();
@@ -2883,7 +2883,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertEquals(tuples.size(), 6);
assertOrder(tuples, 0, 1, 2, 3, 4, 9);
expression = StreamExpressionParser.parse("schedule(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0)," +
expression = StreamExpressionParser.parse("priority(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0)," +
"topic(collection1, collection1, q=\"a_s:hello1\", fl=\"id,a_i\", id=2000000, initialCheckpoint=0))");
stream = factory.constructStream(expression);
context = new StreamContext();
@@ -2900,7 +2900,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
}
@Test
- public void testParallelSchedulerStream() throws Exception {
+ public void testParallelPriorityStream() throws Exception {
Assume.assumeTrue(!useAlias);
new UpdateRequest()
@@ -2920,7 +2920,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
.withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
.withFunctionName("topic", TopicStream.class)
.withFunctionName("parallel", ParallelStream.class)
.withFunctionName("schedule", SchedulerStream.class);
.withFunctionName("priority", PriorityStream.class);
StreamExpression expression;
TupleStream stream;
@@ -2931,7 +2931,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
try {
FieldComparator comp = new FieldComparator("a_i", ComparatorOrder.ASCENDING);
expression = StreamExpressionParser.parse("parallel(collection1, workers=2, sort=\"_version_ asc\", schedule(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0, partitionKeys=id)," +
expression = StreamExpressionParser.parse("parallel(collection1, workers=2, sort=\"_version_ asc\", priority(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0, partitionKeys=id)," +
"topic(collection1, collection1, q=\"a_s:hello1\", fl=\"id,a_i\", id=2000000, initialCheckpoint=0, partitionKeys=id)))");
stream = factory.constructStream(expression);
StreamContext context = new StreamContext();
@@ -2945,7 +2945,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertEquals(tuples.size(), 4);
assertOrder(tuples, 5, 6, 7, 8);
expression = StreamExpressionParser.parse("parallel(collection1, workers=2, sort=\"_version_ asc\", schedule(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0, partitionKeys=id)," +
expression = StreamExpressionParser.parse("parallel(collection1, workers=2, sort=\"_version_ asc\", priority(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0, partitionKeys=id)," +
"topic(collection1, collection1, q=\"a_s:hello1\", fl=\"id,a_i\", id=2000000, initialCheckpoint=0, partitionKeys=id)))");
stream = factory.constructStream(expression);
context = new StreamContext();
@@ -2958,7 +2958,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertEquals(tuples.size(), 6);
assertOrder(tuples, 0, 1, 2, 3, 4, 9);
expression = StreamExpressionParser.parse("parallel(collection1, workers=2, sort=\"_version_ asc\", schedule(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0, partitionKeys=id)," +
expression = StreamExpressionParser.parse("parallel(collection1, workers=2, sort=\"_version_ asc\", priority(topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_i\", id=1000000, initialCheckpoint=0, partitionKeys=id)," +
"topic(collection1, collection1, q=\"a_s:hello1\", fl=\"id,a_i\", id=2000000, initialCheckpoint=0, partitionKeys=id)))");
stream = factory.constructStream(expression);
context = new StreamContext();