merge trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4446@1397893 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2012-10-13 16:54:02 +00:00
commit d2be37f187
23 changed files with 201 additions and 122 deletions

View File

@@ -271,9 +271,6 @@ public final class Util {
     final Comparator<T> comparator;
 
-    // Set once the queue has filled:
-    FSTPath<T> bottom = null;
-
     TreeSet<FSTPath<T>> queue = null;
 
     public TopNSearcher(FST<T> fst, int topN, Comparator<T> comparator) {
@@ -291,9 +288,10 @@ public final class Util {
       assert queue != null;
 
       T cost = fst.outputs.add(path.cost, path.arc.output);
-      //System.out.println("  addIfCompetitive bottom=" + bottom + " queue.size()=" + queue.size());
-      if (bottom != null) {
+      //System.out.println("  addIfCompetitive queue.size()=" + queue.size() + " path=" + path + " + label=" + path.arc.label);
+      if (queue.size() == topN) {
+        FSTPath<T> bottom = queue.last();
         int comp = comparator.compare(cost, bottom.cost);
         if (comp > 0) {
           // Doesn't compete
@@ -323,23 +321,10 @@ public final class Util {
       newInput.length = path.input.length+1;
       final FSTPath<T> newPath = new FSTPath<T>(cost, path.arc, comparator, newInput);
 
-      // this is pointless right?  we do it above already:
-      //newPath.input.grow(path.input.length+1);
-      //System.arraycopy(path.input.ints, 0, newPath.input.ints, 0, path.input.length);
-      //newPath.input.ints[path.input.length] = path.arc.label;
-      //newPath.input.length = path.input.length+1;
-      //System.out.println("    add path=" + newPath);
       queue.add(newPath);
-      if (bottom != null) {
-        final FSTPath<T> removed = queue.pollLast();
-        assert removed == bottom;
-        bottom = queue.last();
-        //System.out.println("    now re-set bottom: " + bottom + " queue=" + queue);
-      } else if (queue.size() == topN) {
-        // Queue just filled up:
-        bottom = queue.last();
-        //System.out.println("    now set bottom: " + bottom);
+
+      if (queue.size() == topN+1) {
+        queue.pollLast();
       }
     }
@@ -387,7 +372,7 @@ public final class Util {
       // For each top N path:
       while (results.size() < topN) {
-        //System.out.println("\nfind next path");
+        //System.out.println("\nfind next path: queue.size=" + queue.size());
 
         FSTPath<T> path;
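
The rewrite above drops the cached bottom field and instead derives the worst competitive path from queue.last() whenever the queue is full. A minimal self-contained sketch of that bounded top-N idiom (illustrative names, not Lucene's actual classes):

import java.util.Comparator;
import java.util.TreeSet;

// Keeps at most topN elements ordered by comparator; the current "worst"
// competitive element is always queue.last().
class BoundedTopN<T> {
  private final int topN;
  private final Comparator<T> comparator;
  private final TreeSet<T> queue;

  BoundedTopN(int topN, Comparator<T> comparator) {
    this.topN = topN;
    this.comparator = comparator;
    this.queue = new TreeSet<T>(comparator);
  }

  void offer(T candidate) {
    // Once full, reject anything that compares worse than the current worst:
    if (queue.size() == topN && comparator.compare(candidate, queue.last()) > 0) {
      return;
    }
    queue.add(candidate);
    // Trim back to the bound after each insert, mirroring the patched
    // addIfCompetitive:
    if (queue.size() == topN + 1) {
      queue.pollLast();
    }
  }
}

One caveat this idiom carries: a TreeSet silently drops elements that compare equal, so the comparator must break ties between distinct candidates or paths can be lost.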

View File

@@ -533,9 +533,6 @@ public class AnalyzingSuggester extends Lookup {
     if (exactFirst) {
 
-      Util.TopNSearcher<Pair<Long,BytesRef>> searcher;
-      searcher = new Util.TopNSearcher<Pair<Long,BytesRef>>(fst, num, weightComparator);
-
       int count = 0;
       for (FSTUtil.Path<Pair<Long,BytesRef>> path : prefixPaths) {
         if (fst.findTargetArc(END_BYTE, path.fstNode, scratchArc, bytesReader) != null) {
@@ -545,6 +542,9 @@ public class AnalyzingSuggester extends Lookup {
         }
       }
 
+      // Searcher just to find the single exact only
+      // match, if present:
+      Util.TopNSearcher<Pair<Long,BytesRef>> searcher;
       searcher = new Util.TopNSearcher<Pair<Long,BytesRef>>(fst, count * maxSurfaceFormsPerAnalyzedForm, weightComparator);
 
       // NOTE: we could almost get away with only using

View File

@@ -789,4 +789,18 @@ public class AnalyzingSuggesterTest extends LuceneTestCase {
     assertEquals("a ", results.get(1).key);
     assertEquals(50, results.get(1).value);
   }
+
+  public void testQueueExhaustion() throws Exception {
+    Analyzer a = new MockAnalyzer(random());
+    AnalyzingSuggester suggester = new AnalyzingSuggester(a, a, AnalyzingSuggester.EXACT_FIRST, 256, -1);
+    suggester.build(new TermFreqArrayIterator(new TermFreq[] {
+        new TermFreq("a", 2),
+        new TermFreq("a b c", 3),
+        new TermFreq("a c a", 1),
+        new TermFreq("a c b", 1),
+      }));
+
+    List<LookupResult> results = suggester.lookup("a", false, 4);
+  }
 }

View File

@@ -42,6 +42,8 @@ New Features
   values of a multiValued field in their original order when highlighting.
   (Joel Bernstein via yonik)
 
+* SOLR-3929: Support configuring IndexWriter max thread count in solrconfig.
+  (phunt via Mark Miller)
 
 Optimizations
 ----------------------
@@ -59,6 +61,9 @@ Optimizations
 * SOLR-3734: Improve Schema-Browser Handling for CopyField using
   dynamicField's (steffkes)
 
+* SOLR-3941: The "commitOnLeader" part of distributed recovery can use
+  openSearcher=false. (Tomas Fernandez Lobbe via Mark Miller)
+
 Bug Fixes
 ----------------------
@@ -70,6 +75,12 @@ Bug Fixes
 * SOLR-3917: Partial State on Schema-Browser UI is not defined for Dynamic
   Fields & Types (steffkes)
 
+* SOLR-3939: Consider a sync attempt from leader to replica that fails due
+  to 404 a success. (Mark Miller, Joel Bernstein)
+
+* SOLR-3940: Rejoining the leader election incorrectly triggers the code path
+  for a fresh cluster start rather than fail over. (Mark Miller)
+
 Other Changes
 ----------------------

View File

@@ -18,9 +18,10 @@ package org.apache.solr.uima.processor;
  */
 
 /**
- * Exception thrown when an error happening while mapping UIMA CAS model to Solt fields
+ * Exception thrown when an error happening while mapping UIMA CAS model to Solr fields
  */
 public class FieldMappingException extends Exception {
 
   public FieldMappingException(Exception e) {
+    super(e);
   }
 }

View File

@@ -26,19 +26,19 @@ import java.util.Map;
  */
 public class SolrUIMAConfiguration {
 
-  private String[] fieldsToAnalyze;
+  private final String[] fieldsToAnalyze;
 
-  private boolean fieldsMerging;
+  private final boolean fieldsMerging;
 
-  private Map<String, Map<String, MapField>> typesFeaturesFieldsMapping;
+  private final Map<String, Map<String, MapField>> typesFeaturesFieldsMapping;
 
-  private String aePath;
+  private final String aePath;
 
-  private Map<String, Object> runtimeParameters;
+  private final Map<String, Object> runtimeParameters;
 
-  private boolean ignoreErrors;
+  private final boolean ignoreErrors;
 
-  private String logField;
+  private final String logField;
 
   SolrUIMAConfiguration(String aePath, String[] fieldsToAnalyze, boolean fieldsMerging,
       Map<String, Map<String, MapField>> typesFeaturesFieldsMapping,
@@ -82,7 +82,8 @@ public class SolrUIMAConfiguration {
   static final class MapField {
 
-    private String fieldName, fieldNameFeature;
+    private String fieldName;
+    private final String fieldNameFeature;
     private boolean prefix; // valid if dynamicField == true
     // false: *_s, true: s_*

View File

@@ -32,7 +32,7 @@ import org.apache.solr.uima.processor.SolrUIMAConfiguration.MapField;
  */
 public class SolrUIMAConfigurationReader {
 
-  private NamedList<Object> args;
+  private final NamedList<Object> args;
 
   public SolrUIMAConfigurationReader(NamedList<Object> args) {
     this.args = args;

View File

@@ -38,9 +38,9 @@ public class UIMAToSolrMapper {
   private final Logger log = LoggerFactory.getLogger(UIMAToSolrMapper.class);
 
-  private SolrInputDocument document;
+  private final SolrInputDocument document;
 
-  private JCas cas;
+  private final JCas cas;
 
   public UIMAToSolrMapper(SolrInputDocument document, JCas cas) {
     this.document = document;
@@ -64,15 +64,15 @@ public class UIMAToSolrMapper {
         String fieldNameFeatureValue = fieldNameFeature == null ? null :
             fs.getFeatureValueAsString(type.getFeatureByBaseName(fieldNameFeature));
         String fieldName = mapField.getFieldName(fieldNameFeatureValue);
-        log.info(new StringBuffer("mapping ").append(typeName).append("@").append(featureName)
+        log.info(new StringBuilder("mapping ").append(typeName).append("@").append(featureName)
             .append(" to ").append(fieldName).toString());
-        String featureValue = null;
+        String featureValue;
         if (fs instanceof Annotation && "coveredText".equals(featureName)) {
           featureValue = ((Annotation) fs).getCoveredText();
         } else {
           featureValue = fs.getFeatureValueAsString(type.getFeatureByBaseName(featureName));
         }
-        log.info(new StringBuffer("writing ").append(featureValue).append(" in ").append(
+        log.info(new StringBuilder("writing ").append(featureValue).append(" in ").append(
             fieldName).toString());
         document.addField(fieldName, featureValue, 1.0f);
       }
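
The StringBuffer-to-StringBuilder swaps above remove pointless synchronization on throwaway objects. Since log is an SLF4J logger, a further cleanup in the same spirit (hypothetical, not part of this commit) would be SLF4J's parameterized logging, which defers message construction until the INFO level is known to be enabled:

  // Same messages as the StringBuilder chains above, formatted lazily by SLF4J:
  log.info("mapping {}@{} to {}", typeName, featureName, fieldName);
  log.info("writing {} in {}", featureValue, fieldName);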

View File

@@ -73,9 +73,9 @@ public class UIMAUpdateRequestProcessor extends UpdateRequestProcessor {
       /* get the fields to analyze */
       String[] texts = getTextsToAnalyze(solrInputDocument);
-      for (int i = 0; i < texts.length; i++) {
-        text = texts[i];
-        if (text != null && text.length()>0) {
+      for (String currentText : texts) {
+        text = currentText;
+        if (text != null && text.length() > 0) {
           /* process the text value */
           JCas jcas = processText(text);
@@ -133,8 +133,8 @@ public class UIMAUpdateRequestProcessor extends UpdateRequestProcessor {
     String[] textVals;
     if (merge) {
       StringBuilder unifiedText = new StringBuilder("");
-      for (int i = 0; i < fieldsToAnalyze.length; i++) {
-        unifiedText.append(String.valueOf(solrInputDocument.getFieldValue(fieldsToAnalyze[i])));
+      for (String aFieldsToAnalyze : fieldsToAnalyze) {
+        unifiedText.append(String.valueOf(solrInputDocument.getFieldValue(aFieldsToAnalyze)));
       }
       textVals = new String[1];
       textVals[0] = unifiedText.toString();
@@ -150,7 +150,7 @@ public class UIMAUpdateRequestProcessor extends UpdateRequestProcessor {
   /* process a field value executing UIMA the CAS containing it as document text */
   private JCas processText(String textFieldValue) throws ResourceInitializationException,
       AnalysisEngineProcessException {
-    log.info(new StringBuffer("Analyzing text").toString());
+    log.info(new StringBuilder("Analyzing text").toString());
     /* get the UIMA analysis engine */
     AnalysisEngine ae = aeProvider.getAE();
@@ -160,7 +160,7 @@ public class UIMAUpdateRequestProcessor extends UpdateRequestProcessor {
     /* perform analysis on text field */
     ae.process(jcas);
-    log.info(new StringBuilder("Text processing completed").toString());
+    log.info("Text processing completed");
     return jcas;
   }

View File

@@ -1,49 +0,0 @@
-package org.apache.solr.uima.analysis;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.request.SolrQueryRequest;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- */
-public class UIMAAnnotationsTokenizerFactoryTest extends SolrTestCaseJ4 {
-
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    initCore("uima/uima-tokenizers-solrconfig.xml", "uima/uima-tokenizers-schema.xml");
-  }
-
-  @Test
-  public void testInitialization() throws Exception {
-    assertNotNull(h.getCore().getSchema().getField("sentences"));
-    assertNotNull(h.getCore().getSchema().getFieldType("sentences"));
-  }
-
-  @Test
-  public void testIndexAndQuery() throws Exception {
-    assertU("<add><doc><field name=\"id\">123</field><field name=\"text\">One and 1 is two. Instead One or 1 is 0.</field></doc></add>");
-    assertU(commit());
-    SolrQueryRequest req = req("qt", "/terms", "terms.fl", "sentences");
-    assertQ(req, "//lst[@name='sentences']/int[@name='One and 1 is two.']");
-    assertQ(req, "//lst[@name='sentences']/int[@name=' Instead One or 1 is 0.']");
-    req.close();
-  }
-}

View File

@@ -23,8 +23,9 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
+ * Integration test which uses {@link org.apache.lucene.analysis.uima.UIMAAnnotationsTokenizerFactory} in Solr schema
  */
-public class UIMATypeAwareAnnotationsTokenizerFactoryTest extends SolrTestCaseJ4 {
+public class UIMATokenizersSolrIntegrationTest extends SolrTestCaseJ4 {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
@@ -33,12 +34,24 @@ public class UIMATypeAwareAnnotationsTokenizerFactoryTest extends SolrTestCaseJ4
 
   @Test
   public void testInitialization() throws Exception {
+    assertNotNull(h.getCore().getSchema().getField("sentences"));
+    assertNotNull(h.getCore().getSchema().getFieldType("sentences"));
     assertNotNull(h.getCore().getSchema().getField("nouns"));
     assertNotNull(h.getCore().getSchema().getFieldType("nouns"));
   }
 
   @Test
-  public void testIndexAndQuery() throws Exception {
+  public void testUIMATokenizerIndexAndQuery() throws Exception {
+    assertU("<add><doc><field name=\"id\">123</field><field name=\"text\">One and 1 is two. Instead One or 1 is 0.</field></doc></add>");
+    assertU(commit());
+    SolrQueryRequest req = req("qt", "/terms", "terms.fl", "sentences");
+    assertQ(req, "//lst[@name='sentences']/int[@name='One and 1 is two.']");
+    assertQ(req, "//lst[@name='sentences']/int[@name=' Instead One or 1 is 0.']");
+    req.close();
+  }
+
+  @Test
+  public void testUIMATypeAwareTokenizerIndexAndQuery() throws Exception {
     assertU("<add><doc><field name=\"id\">123</field><field name=\"text\">The counter counts the beans: 1 and 2 and three.</field></doc></add>");
     assertU(commit());
     SolrQueryRequest req = req("qt", "/terms", "terms.fl", "nouns");

View File

@@ -324,7 +324,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
       SolrException.log(log, "Error trying to start recovery", t);
     }
 
-    leaderElector.joinElection(this);
+    leaderElector.joinElection(this, true);
   }
 
   private boolean shouldIBeLeader(ZkNodeProps leaderProps, SolrCore core) {

View File

@@ -18,7 +18,6 @@ package org.apache.solr.cloud;
  */
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -43,7 +42,7 @@ import org.slf4j.LoggerFactory;
 * Leader Election process. This class contains the logic by which a
 * leader is chosen. First call * {@link #setup(ElectionContext)} to ensure
 * the election process is init'd. Next call
- * {@link #joinElection(ElectionContext)} to start the leader election.
+ * {@link #joinElection(ElectionContext, boolean)} to start the leader election.
 *
 * The implementation follows the classic ZooKeeper recipe of creating an
 * ephemeral, sequential node for each candidate and then looking at the set
@@ -203,7 +202,7 @@ public class LeaderElector {
   *
   * @return sequential node number
   */
-  public int joinElection(ElectionContext context) throws KeeperException, InterruptedException, IOException {
+  public int joinElection(ElectionContext context, boolean replacement) throws KeeperException, InterruptedException, IOException {
     final String shardsElectZkPath = context.electionPath + LeaderElector.ELECTION_NODE;
 
     long sessionId = zkClient.getSolrZooKeeper().getSessionId();
@@ -259,7 +258,7 @@ public class LeaderElector {
       }
     }
     int seq = getSeq(leaderSeqPath);
-    checkIfIamLeader(seq, context, false);
+    checkIfIamLeader(seq, context, replacement);
 
     return seq;
   }
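
Reading across the call sites in this commit, the contract of the new boolean is roughly the following (an inference from the patch, not javadoc it adds):

  // joinElection(context, replacement):
  //  - replacement == false: first registration of this candidate in the
  //    election (core registration and overseer startup in ZkController
  //    below, and the tests further down).
  //  - replacement == true: re-joining an election whose leader has gone
  //    away (the recovery path in ShardLeaderElectionContext above), so that
  //    checkIfIamLeader takes the fail-over path rather than the
  //    fresh-cluster-start path; this is the SOLR-3940 fix noted in
  //    CHANGES.txt.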

View File

@@ -37,6 +37,7 @@ import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.RequestHandlers.LazyRequestHandlerWrapper;
@@ -177,6 +178,7 @@ public class RecoveryStrategy extends Thread implements ClosableThread {
       UpdateRequest ureq = new UpdateRequest();
       ureq.setParams(new ModifiableSolrParams());
       ureq.getParams().set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      ureq.getParams().set(UpdateParams.OPEN_SEARCHER, false);
       ureq.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, true).process(
           server);
       server.shutdown();

View File

@@ -191,7 +191,7 @@ public final class ZkController {
           ZkController.this.overseer = new Overseer(shardHandler, adminPath, zkStateReader);
           ElectionContext context = new OverseerElectionContext(zkClient, overseer, getNodeName());
-          overseerElector.joinElection(context);
+          overseerElector.joinElection(context, true);
           zkStateReader.createClusterStateWatchersAndUpdate();
 
           // cc.newCmdDistribExecutor();
@@ -422,7 +422,7 @@ public final class ZkController {
       this.overseer = new Overseer(shardHandler, adminPath, zkStateReader);
       ElectionContext context = new OverseerElectionContext(zkClient, overseer, getNodeName());
       overseerElector.setup(context);
-      overseerElector.joinElection(context);
+      overseerElector.joinElection(context, false);
       zkStateReader.createClusterStateWatchersAndUpdate();
     } catch (IOException e) {
@@ -730,7 +730,7 @@ public final class ZkController {
 
     leaderElector.setup(context);
     electionContexts.put(coreZkNodeName, context);
-    leaderElector.joinElection(context);
+    leaderElector.joinElection(context, false);
   }

View File

@@ -312,6 +312,11 @@ public class PeerSync {
       log.warn(msg() + " got a 503 from " + srsp.getShardAddress() + ", counting as success");
       return true;
     }
+
+    if (cantReachIsSuccess && sreq.purpose == 1 && srsp.getException() instanceof SolrException && ((SolrException) srsp.getException()).code() == 404) {
+      log.warn(msg() + " got a 404 from " + srsp.getShardAddress() + ", counting as success");
+      return true;
+    }
 
     // TODO: at least log???
     // srsp.getException().printStackTrace(System.out);

View File

@@ -44,6 +44,7 @@ public class SolrIndexConfig {
   public final boolean useCompoundFile;
   public final int maxBufferedDocs;
   public final int maxMergeDocs;
+  public final int maxIndexingThreads;
   public final int mergeFactor;
 
   public final double ramBufferSizeMB;
@@ -71,6 +72,7 @@ public class SolrIndexConfig {
     useCompoundFile = false;
     maxBufferedDocs = -1;
     maxMergeDocs = -1;
+    maxIndexingThreads = IndexWriterConfig.DEFAULT_MAX_THREAD_STATES;
     mergeFactor = -1;
     ramBufferSizeMB = 32;
     writeLockTimeout = -1;
@@ -116,6 +118,7 @@ public class SolrIndexConfig {
     useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
     maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
     maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
+    maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
     mergeFactor=solrConfig.getInt(prefix+"/mergeFactor",def.mergeFactor);
     ramBufferSizeMB = solrConfig.getDouble(prefix+"/ramBufferSizeMB", def.ramBufferSizeMB);
@@ -176,6 +179,10 @@ public class SolrIndexConfig {
     iwc.setMergePolicy(buildMergePolicy(schema));
     iwc.setMergeScheduler(buildMergeScheduler(schema));
 
+    if (maxIndexingThreads != -1) {
+      iwc.setMaxThreadStates(maxIndexingThreads);
+    }
+
     return iwc;
   }

View File

@@ -0,0 +1,27 @@
+<?xml version="1.0" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<config>
+  <dataDir>${solr.data.dir:}</dataDir>
+
+  <luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
+
+  <indexConfig>
+    <maxIndexingThreads>123</maxIndexingThreads>
+  </indexConfig>
+</config>

View File

@@ -49,6 +49,7 @@ import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
 import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.Create;
+import org.apache.solr.client.solrj.request.CoreAdminRequest.Unload;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.CoreAdminResponse;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -742,10 +743,10 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
         0,
         ((HttpSolrServer) client).getBaseURL().length()
             - DEFAULT_COLLECTION.length() - 1);
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 1, "slice1");
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 2, "slice2");
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 3, "slice2");
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 4, "slice1");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 1, "slice1");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 2, "slice2");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 3, "slice2");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 4, "slice1");
 
     while (pending != null && pending.size() > 0) {
@@ -764,7 +765,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
     assertAllActive(oneInstanceCollection2, solrj.getZkStateReader());
 
-    printLayout();
+    //printLayout();
 
     // TODO: enable when we don't falsely get slice1...
     // solrj.getZkStateReader().getLeaderUrl(oneInstanceCollection2, "slice1", 30000);
@@ -803,6 +804,27 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
     assertNotNull(slices);
     String roles = slices.get("slice1").getReplicasMap().values().iterator().next().getStr(ZkStateReader.ROLES_PROP);
     assertEquals("none", roles);
+
+    ZkCoreNodeProps props = new ZkCoreNodeProps(solrj.getZkStateReader().getClusterState().getLeader(oneInstanceCollection2, "slice1"));
+
+    // now test that unloading a core gets us a new leader
+    HttpSolrServer server = new HttpSolrServer(baseUrl);
+    Unload unloadCmd = new Unload(true);
+    unloadCmd.setCoreName(props.getCoreName());
+
+    String leader = props.getCoreUrl();
+
+    server.request(unloadCmd);
+
+    int tries = 50;
+    while (leader.equals(zkStateReader.getLeaderUrl(oneInstanceCollection2, "slice1", 10000))) {
+      Thread.sleep(100);
+      if (tries-- == 0) {
+        fail("Leader never changed");
+      }
+    }
   }
 
   private void testSearchByCollectionName() throws SolrServerException {
@@ -875,10 +897,10 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
   private void createCollection(String collection,
       List<SolrServer> collectionClients, String baseUrl, int num) {
-    createCollection(collection, collectionClients, baseUrl, num, null);
+    createSolrCore(collection, collectionClients, baseUrl, num, null);
   }
 
-  private void createCollection(final String collection,
+  private void createSolrCore(final String collection,
       List<SolrServer> collectionClients, final String baseUrl, final int num,
       final String shardId) {
     Callable call = new Callable() {

View File

@@ -40,7 +40,6 @@ import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 @Slow
@@ -114,7 +113,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 {
             elector, "shard1", "collection1", Integer.toString(nodeNumber),
             props, zkStateReader);
         elector.setup(context);
-        seq = elector.joinElection(context);
+        seq = elector.joinElection(context, false);
         electionDone = true;
         seqToThread.put(seq, this);
       }
@@ -175,7 +174,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 {
     ElectionContext context = new ShardLeaderElectionContextBase(elector,
         "shard2", "collection1", "dummynode1", props, zkStateReader);
     elector.setup(context);
-    elector.joinElection(context);
+    elector.joinElection(context, false);
     assertEquals("http://127.0.0.1/solr/",
         getLeaderUrl("collection1", "shard2"));
   }
@@ -188,7 +187,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 {
     ElectionContext firstContext = new ShardLeaderElectionContextBase(first,
         "slice1", "collection2", "dummynode1", props, zkStateReader);
     first.setup(firstContext);
-    first.joinElection(firstContext);
+    first.joinElection(firstContext, false);
 
     Thread.sleep(1000);
     assertEquals("original leader was not registered", "http://127.0.0.1/solr/1/", getLeaderUrl("collection2", "slice1"));
@@ -199,7 +198,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 {
     ElectionContext context = new ShardLeaderElectionContextBase(second,
         "slice1", "collection2", "dummynode1", props, zkStateReader);
     second.setup(context);
-    second.joinElection(context);
+    second.joinElection(context, false);
     Thread.sleep(1000);
     assertEquals("original leader should have stayed leader", "http://127.0.0.1/solr/1/", getLeaderUrl("collection2", "slice1"));
 
     firstContext.cancelElection();

View File

@@ -139,7 +139,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
           ShardLeaderElectionContextBase ctx = new ShardLeaderElectionContextBase(
               elector, shardId, collection, nodeName + "_" + coreName, props,
               zkStateReader);
-          elector.joinElection(ctx);
+          elector.joinElection(ctx, false);
           return shardId;
         }
         Thread.sleep(500);
@@ -876,7 +876,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
         new HttpShardHandlerFactory().getShardHandler(), "/admin/cores", reader);
     ElectionContext ec = new OverseerElectionContext(zkClient, overseer, address.replaceAll("/", "_"));
     overseerElector.setup(ec);
-    overseerElector.joinElection(ec);
+    overseerElector.joinElection(ec, false);
     return zkClient;
   }

View File

@@ -0,0 +1,36 @@
+package org.apache.solr.core;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.solr.SolrTestCaseJ4;
+import org.junit.BeforeClass;
+
+public class TestSolrIndexConfig extends SolrTestCaseJ4 {
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig-indexconfig.xml","schema.xml");
+  }
+
+  public void testIndexConfig() throws Exception {
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getSchema());
+
+    assertEquals(123, iwc.getMaxThreadStates());
+  }
+}

View File

@@ -136,6 +136,12 @@
     <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
     <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
 
+    <!-- The maximum number of simultaneous threads that may be
+         indexing documents at once in IndexWriter; if more than this
+         many threads arrive they will wait for others to finish.
+         Default in Solr/Lucene is 8. -->
+    <!-- <maxIndexingThreads>8</maxIndexingThreads>  -->
+
     <!-- Expert: Enabling compound file will use less files for the index,
          using fewer file descriptors on the expense of performance decrease.
          Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->