merge missing dirs

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene3069@1513364 13f79535-47bb-0310-9956-ffa450edef68
Author: Han Jiang
Date:   2013-08-13 06:33:29 +00:00
commit 134cfe0b29
284 changed files with 4906 additions and 1755 deletions

View File

@@ -423,4 +423,19 @@
     </antcall>
   </target>
 
+  <target name="regenerate" description="Runs all code regenerators">
+    <subant target="regenerate" inheritall="false" failonerror="false">
+      <fileset dir="lucene" includes="build.xml" />
+      <!-- todo:
+      <fileset dir="solr" includes="build.xml" />-->
+    </subant>
+  </target>
+
+  <target name="-check-after-regeneration">
+    <subant buildpath="." antfile="extra-targets.xml" target="-check-after-regeneration" inheritall="false" failonerror="true"/>
+  </target>
+
+  <!-- TODO: remove me when jenkins works -->
+  <target name="regenerateAndCheck" depends="regenerate,-check-after-regeneration"/>
+
 </project>

View File

@@ -25,5 +25,6 @@
     <orderEntry type="module" module-name="analysis-common" />
     <orderEntry type="module" module-name="lucene-core" />
     <orderEntry type="module" module-name="queryparser" />
+    <orderEntry type="module" module-name="join" />
   </component>
 </module>

View File

@@ -27,5 +27,6 @@
     <orderEntry type="module" scope="TEST" module-name="suggest" />
     <orderEntry type="module" scope="TEST" module-name="spatial" />
     <orderEntry type="module" scope="TEST" module-name="misc" />
+    <orderEntry type="module" scope="TEST" module-name="join" />
   </component>
 </module>

View File

@@ -407,12 +407,12 @@
     <dependency>
       <groupId>org.carrot2</groupId>
       <artifactId>carrot2-mini</artifactId>
-      <version>3.6.2</version>
+      <version>3.8.0</version>
     </dependency>
     <dependency>
       <groupId>org.carrot2</groupId>
       <artifactId>morfologik-polish</artifactId>
-      <version>1.6.0</version>
+      <version>1.7.1</version>
     </dependency>
     <dependency>
       <groupId>org.codehaus.woodstox</groupId>

View File

@@ -93,6 +93,11 @@
       <artifactId>lucene-misc</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-join</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.lucene</groupId>
       <artifactId>lucene-queryparser</artifactId>

View File

@@ -198,6 +198,12 @@
       <artifactId>jetty-util</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>dom4j</groupId>
+      <artifactId>dom4j</artifactId>
+      <version>1.6.1</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <build>
     <sourceDirectory/>

View File

@@ -115,7 +115,7 @@ def prepare(root, version, gpgKeyID, gpgPassword, doTest):
   print('  lucene prepare-release')
   os.chdir('lucene')
-  cmd = 'ant -Dversion=%s -Dspecversion=%s' % (version, version)
+  cmd = 'ant -Dversion=%s' % version
   if gpgKeyID is not None:
     cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
   else:
@@ -128,7 +128,7 @@ def prepare(root, version, gpgKeyID, gpgPassword, doTest):
   print('  solr prepare-release')
   os.chdir('../solr')
-  cmd = 'ant -Dversion=%s -Dspecversion=%s' % (version, version)
+  cmd = 'ant -Dversion=%s' % version
   if gpgKeyID is not None:
     cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
   else:

View File

@@ -50,7 +50,7 @@ while True:
   elif l.endswith('\n'):
     l = l[:-1]
   if l.startswith('diff ') or l.startswith('Binary files '):
-    keep = l.lower().find('/build/') == -1 and (l.lower().startswith('Only in') or ((l.lower().endswith('.java') or l.lower().endswith('.txt') or l.lower().endswith('.xml') or l.lower().endswith('.iml')) and l.find('/.svn/') == -1))
+    keep = not l.endswith('timehints.txt') and l.lower().find('/build/') == -1 and (l.lower().startswith('Only in') or ((l.lower().endswith('.java') or l.lower().endswith('.txt') or l.lower().endswith('.xml') or l.lower().endswith('.iml')) and l.find('/.svn/') == -1))
     if keep:
       print
       print

View File

@@ -62,9 +62,23 @@
     </mvn>
   </target>
 
-  <target xmlns:ivy="antlib:org.apache.ivy.ant" name="-check-svn-working-copy" depends="ivy-availability-check,ivy-fail,ivy-configure,resolve-groovy">
+  <target name="-check-svn-working-copy" depends="ivy-availability-check,ivy-fail,ivy-configure,resolve-groovy">
+    <svn-checker failonmodifications="false"/>
+  </target>
+
+  <!-- should only be called by jenkins, not precommit! -->
+  <target name="-check-after-regeneration" depends="ivy-availability-check,ivy-fail,ivy-configure,resolve-groovy">
+    <svn-checker failonmodifications="true"/>
+  </target>
+
+  <macrodef xmlns:ivy="antlib:org.apache.ivy.ant" name="svn-checker">
+    <attribute name="failonmodifications" default="true"/> <!-- false if file modifications are allowed -->
+    <sequential>
       <ivy:cachepath organisation="org.tmatesoft.svnkit" module="svnkit" revision="1.7.8"
         inline="true" conf="default" transitive="true" pathid="svnkit.classpath"/>
+      <local name="svn.checkprops.failed"/>
+      <local name="svn.unversioned.failed"/>
+      <local name="svn.changed.failed"/>
       <script language="groovy" taskname="svn">
         <classpath>
           <path refid="groovy.classpath"/>
@@ -80,10 +94,10 @@
         File basedir = new File(project.getProperty('basedir')).getAbsoluteFile();
         int baseLen = basedir.toString().length();
         def convertRelative = {
-          file -> file.getAbsolutePath().substring(baseLen + 1).replace(File.separatorChar, (char)'/');
+          file -> '.' + file.getAbsolutePath().substring(baseLen).replace(File.separatorChar, (char)'/');
         }
-        Set missingProps = new TreeSet(), unversioned = new TreeSet();
+        Set missingProps = new TreeSet(), unversioned = new TreeSet(), changed = new TreeSet();
 
         self.log('Getting all versioned and unversioned files...');
         statusClient.doStatus(basedir, SVNRevision.WORKING, SVNDepth.fromRecurse(true), false, true, false, false, {
@@ -94,6 +108,10 @@
           } else if (status.getKind() == SVNNodeKind.FILE && nodeStatus != SVNStatusType.STATUS_DELETED) {
            missingProps.add(convertRelative(status.getFile()));
          }
+         if (nodeStatus == SVNStatusType.STATUS_MODIFIED || nodeStatus == SVNStatusType.STATUS_REPLACED ||
+             nodeStatus == SVNStatusType.STATUS_DELETED || nodeStatus == SVNStatusType.STATUS_ADDED) {
+           changed.add(convertRelative(status.getFile()));
+         }
        } as ISVNStatusHandler, null);
 
        self.log('Filtering files with existing svn:eol-style...');
@@ -115,10 +133,20 @@
        };
        project.setProperty('svn.checkprops.failed', convertSet2String(missingProps));
        project.setProperty('svn.unversioned.failed', convertSet2String(unversioned));
+       project.setProperty('svn.changed.failed', convertSet2String(changed));
      ]]></script>
      <fail if="svn.checkprops.failed"
        message="The following files are missing svn:eol-style (or binary svn:mime-type):${line.separator}${svn.checkprops.failed}"/>
      <fail if="svn.unversioned.failed"
        message="Source checkout is dirty after running tests!!! Offending files:${line.separator}${svn.unversioned.failed}"/>
-  </target>
+      <fail message="Source checkout is modified !!! Offending files:${line.separator}${svn.changed.failed}">
+        <condition>
+          <and>
+            <istrue value="@{failonmodifications}"/>
+            <isset property="svn.changed.failed"/>
+          </and>
+        </condition>
+      </fail>
+    </sequential>
+  </macrodef>
 </project>

View File

@@ -25,7 +25,7 @@ $Id$
 Versions of Major Components
 ---------------------
 Apache Tika 1.4
-Carrot2 3.6.2
+Carrot2 3.8.0
 Velocity 1.7 and Velocity Tools 2.0
 Apache UIMA 2.3.1
 Apache ZooKeeper 3.4.5
@@ -53,7 +53,7 @@ Other Changes
 Versions of Major Components
 ---------------------
 Apache Tika 1.4
-Carrot2 3.6.2
+Carrot2 3.8.0
 Velocity 1.7 and Velocity Tools 2.0
 Apache UIMA 2.3.1
 Apache ZooKeeper 3.4.5
@@ -61,15 +61,89 @@ Apache ZooKeeper 3.4.5
 Upgrading from Solr 4.4.0
 ----------------------
 
+* XML configuration parsing is now more strict about situations where a single
+  setting is allowed but multiple values are found. In the past, one value
+  would be chosen arbitrarily and silently. Starting with 4.5, configuration
+  parsing will fail with an error in situations like this. If you see error
+  messages such as "solrconfig.xml contains more than one value for config path:
+  indexConfig/infoStream" check your solrconfig.xml file for multiple occurrences
+  of "infoStream" and delete the one that you do not wish to use. See SOLR-4953
+  for more details.
+
 Detailed Change List
 ----------------------
 
+New Features
+----------------------
+
+* SOLR-5126: Update Carrot2 clustering to version 3.8.0, update Morfologik
+  to version 1.7.1 (Dawid Weiss)
+
+* SOLR-2345: Enhanced geodist() to work with an RPT field, provided that the
+  field is referenced via 'sfield' and the query point is constant.
+  (David Smiley)
+
+* SOLR-5082: The encoding of URL-encoded query parameters can be changed with
+  the "ie" (input encoding) parameter, e.g. "select?q=m%FCller&ie=ISO-8859-1".
+  The default is UTF-8. To change the encoding of POSTed content, use the
+  "Content-Type" HTTP header. (Uwe Schindler, David Smiley)
+
+* SOLR-4221: Custom sharding (Noble Paul)
+
+* SOLR-4808: Persist and use router, replicationFactor and maxShardsPerNode
+  at Collection and Shard level (Noble Paul, Shalin Mangar)
+
+* SOLR-5006: CREATESHARD command for 'implicit' shards (Noble Paul)
+
+* SOLR-5017: Allow sharding based on the value of a field (Noble Paul)
+
+* SOLR-4222: Create custom sharded collection via collections API (Noble Paul)
+
+Bug Fixes
+----------------------
+
+* SOLR-3633: web UI reports an error if CoreAdminHandler says there are no
+  SolrCores (steffkes)
+
+* SOLR-4489: SpellCheckComponent can throw StringIndexOutOfBoundsException
+  when generating collations involving multiple word-break corrections.
+  (James Dyer)
+
+* SOLR-5107: Fixed NPE when using numTerms=0 in LukeRequestHandler
+  (Ahmet Arslan, hossman)
+
+* SOLR-4679, SOLR-4908, SOLR-5124: Text extracted from HTML or PDF files
+  using Solr Cell was missing ignorable whitespace, which is inserted by
+  TIKA for convenience to support plain text extraction without using the
+  HTML elements. This bug resulted in glued words. (hossman, Uwe Schindler)
+
+* SOLR-5121: zkcli usage help for makepath doesn't match actual command.
+  (Daniel Collins via Mark Miller)
+
+* SOLR-5119: Managed schema problems after adding fields via Schema Rest API.
+  (Nils Kübler, Steve Rowe)
+
+Optimizations
+----------------------
+
+* SOLR-5044: Admin UI - Note on Core-Admin about directories while creating
+  core (steffkes)
+
 Other Changes
 ----------------------
 
+* SOLR-4708: Enable ClusteringComponent by default in collection1 example.
+  The solr.clustering.enabled system property is set to 'true' by default.
+  (ehatcher, Dawid Weiss)
+
 * SOLR-4914: Factor out core list persistence and discovery into a
   new CoresLocator interface. (Alan Woodward)
 
+* SOLR-5056: Improve type safety of ConfigSolr class. (Alan Woodward)
+
+* SOLR-4951: Better randomization of MergePolicy in Solr tests (hossman)
+
+* SOLR-4953: Make XML Configuration parsing fail if an xpath matches multiple
+  nodes when only a single value is expected. (hossman)
+
+* The routing parameter "shard.keys" is deprecated as part of SOLR-5017. The
+  new parameter name is '_route_'. The old parameter should continue to work
+  for another release. (Noble Paul)
+
 ================== 4.4.0 ==================
 
 Versions of Major Components
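
A minimal sketch of the configuration the new SOLR-4953 check (described in the upgrade note above) now rejects; the indexConfig/infoStream path comes from the quoted error message, and the values are illustrative only:

    <indexConfig>
      <!-- Two values for the single-valued config path indexConfig/infoStream:
           4.4 silently picked one, 4.5 fails with "solrconfig.xml contains more
           than one value for config path: indexConfig/infoStream".
           Delete whichever entry you do not want. -->
      <infoStream>true</infoStream>
      <infoStream>false</infoStream>
    </indexConfig>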
@@ -108,6 +182,7 @@ Upgrading from Solr 4.3.0
   in 5.0. If you are still using these field types, you should migrate your
   fields to TrieIntField.
 
+
 Detailed Change List
 ----------------------
@@ -187,6 +262,16 @@ New Features
 * SOLR-4943: Add a new system wide info admin handler that exposes the system info
   that could previously only be retrieved using a SolrCore. (Mark Miller)
 
+* SOLR-3076: Block joins. Documents and their sub-documents must be indexed
+  as a block.
+  {!parent which=<allParents>}<someChildren> takes in a query that matches child
+  documents and results in matches on their parents.
+  {!child of=<allParents>}<someParents> takes in a query that matches some parent
+  documents and results in matches on their children.
+  (Mikhail Khludnev, Vadim Kirilchuk, Alan Woodward, Tom Burton-West, Mike McCandless,
+  hossman, yonik)
+
 Bug Fixes
 ----------------------
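
To make the SOLR-3076 syntax above concrete, a hypothetical pair of requests (the field names doc_type, comment_text and title are invented for illustration, assuming parents and their children were indexed together as one block):

    q={!parent which="doc_type:parent"}comment_text:lucene
        returns the parents whose child documents match comment_text:lucene
    q={!child of="doc_type:parent"}title:solr
        returns the children of parents that match title:solr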
@@ -312,6 +397,9 @@ Bug Fixes
 * SOLR-5039: Admin/Schema Browser displays -1 for term counts for multiValued fields.
 
+* SOLR-5037: The CSV loader now accepts field names that are not in the schema.
+  (gsingers, ehatcher, Steve Rowe)
+
 Optimizations
 ----------------------

View File

@@ -226,7 +226,21 @@
   </target>
 
   <!-- Validation (license/notice/api checks). -->
-  <target name="validate" depends="check-licenses,rat-sources,check-forbidden-apis" description="Validate stuff." />
+  <target name="validate" depends="check-example-lucene-match-version,check-licenses,rat-sources,check-forbidden-apis" description="Validate stuff." />
+
+  <target name="check-example-lucene-match-version">
+    <fail message="Some example solrconfig.xml files do not refer to the correct luceneMatchVersion: ${tests.luceneMatchVersion}">
+      <condition>
+        <resourcecount when="greater" count="0">
+          <fileset dir="${example}" includes="**/solrconfig.xml">
+            <not>
+              <contains text="&lt;luceneMatchVersion&gt;${tests.luceneMatchVersion}&lt;" casesensitive="no"/>
+            </not>
+          </fileset>
+        </resourcecount>
+      </condition>
+    </fail>
+  </target>
 
   <target name="check-licenses" depends="compile-tools,resolve,load-custom-tasks" description="Validate license stuff.">
     <license-check-macro dir="${basedir}" licensedir="${common-solr.dir}/licenses">

View File

@@ -82,6 +82,7 @@
     <pathelement location="${grouping.jar}"/>
     <pathelement location="${queries.jar}"/>
     <pathelement location="${queryparser.jar}"/>
+    <pathelement location="${join.jar}"/>
   </path>
 
   <path id="solr.base.classpath">
@@ -141,7 +142,7 @@
   <target name="prep-lucene-jars"
           depends="jar-lucene-core, jar-analyzers-phonetic, jar-analyzers-kuromoji, jar-codecs, jar-suggest, jar-highlighter, jar-memory,
-                   jar-misc, jar-spatial, jar-grouping, jar-queries, jar-queryparser">
+                   jar-misc, jar-spatial, jar-grouping, jar-queries, jar-queryparser, jar-join">
     <property name="solr.deps.compiled" value="true"/>
   </target>

View File

@@ -20,9 +20,9 @@
   <info organisation="org.apache.solr" module="analysis-extras"/>
   <dependencies>
     <dependency org="com.ibm.icu" name="icu4j" rev="49.1" transitive="false"/>
-    <dependency org="org.carrot2" name="morfologik-polish" rev="1.6.0" transitive="false"/>
-    <dependency org="org.carrot2" name="morfologik-fsa" rev="1.6.0" transitive="false"/>
-    <dependency org="org.carrot2" name="morfologik-stemming" rev="1.6.0" transitive="false"/>
+    <dependency org="org.carrot2" name="morfologik-polish" rev="1.7.1" transitive="false"/>
+    <dependency org="org.carrot2" name="morfologik-fsa" rev="1.7.1" transitive="false"/>
+    <dependency org="org.carrot2" name="morfologik-stemming" rev="1.7.1" transitive="false"/>
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
   </dependencies>
 </ivy-module>

View File

@@ -19,14 +19,25 @@
 <ivy-module version="2.0">
   <info organisation="org.apache.solr" module="clustering"/>
   <dependencies>
-    <dependency org="org.carrot2" name="carrot2-mini" rev="3.6.2" transitive="false"/>
-    <dependency org="org.carrot2.attributes" name="attributes-binder" rev="1.0.1" transitive="false"/>
-    <dependency org="com.carrotsearch" name="hppc" rev="0.4.1" transitive="false"/>
+    <dependency org="org.carrot2" name="carrot2-mini" rev="3.8.0" transitive="false"/>
+    <dependency org="com.carrotsearch" name="hppc" rev="0.5.2" transitive="false"/>
+    <dependency org="org.carrot2.attributes" name="attributes-binder" rev="1.2.0" transitive="false"/>
+    <dependency org="org.simpleframework" name="simple-xml" rev="2.7" transitive="false"/>
+    <dependency org="org.apache.mahout" name="mahout-math" rev="0.6" transitive="false"/>
+    <dependency org="org.apache.mahout" name="mahout-collections" rev="1.0" transitive="false"/>
     <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="1.7.4" transitive="false"/>
     <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="1.7.4" transitive="false"/>
-    <dependency org="org.apache.mahout" name="mahout-collections" rev="1.0" transitive="false"/>
-    <dependency org="org.apache.mahout" name="mahout-math" rev="0.6" transitive="false"/>
-    <dependency org="org.simpleframework" name="simple-xml" rev="2.6.4" transitive="false"/>
+
+    <!--
+      Included as part of Solr's environment.
+      com.google.guava:guava:jar:14.0.1:compile
+      commons-lang:commons-lang:jar:2.6:compile
+    -->
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
   </dependencies>
 </ivy-module>

View File

@@ -37,8 +37,7 @@ public abstract class DocumentClusteringEngine extends ClusteringEngine {
   public abstract NamedList cluster(SolrParams solrParams);
 
   /**
    * Experimental. Subject to change before the next release
-   *
    *
    * Cluster the set of docs. Clustering of documents is often an expensive task that can take a long time.
    * @param docs The docs to cluster. If null, cluster all docs as in {@link #cluster(org.apache.solr.common.params.SolrParams)}

View File

@@ -77,6 +77,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.common.io.Closeables;
+import com.google.common.io.Closer;
 
 /**
  * Search results clustering engine based on Carrot2 clustering algorithms.
@@ -140,7 +141,13 @@ public class CarrotClusteringEngine extends SearchClusteringEngine {
           + ". Using the default " + resource + " from Carrot JAR.");
       return new IResource[] {};
     } finally {
-      if (resourceStream != null) Closeables.closeQuietly(resourceStream);
+      if (resourceStream != null) {
+        try {
+          resourceStream.close();
+        } catch (IOException e) {
+          // ignore.
+        }
+      }
     }
     log.info("Loaded Solr resource: " + resourceName);

View File

@@ -52,7 +52,7 @@ public class ClusteringComponentTest extends AbstractClusteringTestCase {
     SolrRequestHandler handler = core.getRequestHandler("standard");
     SolrQueryResponse rsp;
     rsp = new SolrQueryResponse();
-    rsp.add("responseHeader", new SimpleOrderedMap());
+    rsp.add("responseHeader", new SimpleOrderedMap<Object>());
     SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
     handler.handleRequest(req, rsp);
     NamedList values = rsp.getValues();
@@ -70,7 +70,7 @@ public class ClusteringComponentTest extends AbstractClusteringTestCase {
     handler = core.getRequestHandler("docClustering");
     rsp = new SolrQueryResponse();
-    rsp.add("responseHeader", new SimpleOrderedMap());
+    rsp.add("responseHeader", new SimpleOrderedMap<Object>());
     req = new LocalSolrQueryRequest(core, params);
     handler.handleRequest(req, rsp);
     values = rsp.getValues();

View File

@@ -15,7 +15,6 @@ package org.apache.solr.handler.clustering.carrot2;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import java.util.Collections;
 import java.util.List;
 
 import org.carrot2.core.Cluster;

View File

@@ -303,6 +303,13 @@ public class SolrContentHandler extends DefaultHandler implements ExtractingPara
     bldrStack.getLast().append(chars, offset, length);
   }
 
+  /**
+   * Treat the same as any other characters
+   */
+  @Override
+  public void ignorableWhitespace(char[] chars, int offset, int length) throws SAXException {
+    characters(chars, offset, length);
+  }
 
   /**
    * Can be used to transform input values based on their {@link org.apache.solr.schema.SchemaField}

View File

@@ -6,6 +6,7 @@
   <p>
     Here is some text
   </p>
+  <p>distinct<br/>words</p>
   <div>Here is some text in a div</div>
   <div>This has a <a href="http://www.apache.org">link</a>.</div>
 </body>

View File

@@ -88,6 +88,10 @@ public class ExtractingRequestHandlerTest extends SolrTestCaseJ4 {
     assertU(commit());
     assertQ(req("title:Welcome"), "//*[@numFound='1']");
 
+    assertQ(req("extractedContent:distinctwords"), "//*[@numFound='0']");
+    assertQ(req("extractedContent:distinct"), "//*[@numFound='1']");
+    assertQ(req("extractedContent:words"), "//*[@numFound='2']");
+    assertQ(req("extractedContent:\"distinct words\""), "//*[@numFound='1']");
 
     loadLocal("extraction/simple.html",
       "literal.id","simple2",

View File

@@ -48,13 +48,10 @@
        ends) will be included.
   -->
   <lib dir="../../dist/" regex="solr-cell-\d.*\.jar" />
-  <lib dir="../../dist/" regex="solr-clustering-\d.*\.jar" />
   <!--
     If a dir option (with or without a regex) is used and nothing is
     found that matches, it will be ignored
   -->
-  <lib dir="../../contrib/clustering/lib/downloads/" />
-  <lib dir="../../contrib/clustering/lib/" />
   <lib dir="/total/crap/dir/ignored" />
   <!--
     an exact path can be used to specify a specific file. This will
@@ -596,68 +593,6 @@
     </arr>
   </requestHandler>
 
-  <!--
-    Clustering Component http://wiki.apache.org/solr/ClusteringComponent
-    This relies on third party jars which are not included in the
-    release. To use this component (and the "/clustering" handler) Those
-    jars will need to be downloaded, and you'll need to set the
-    solr.cluster.enabled system property when running solr... java
-    -Dsolr.clustering.enabled=true -jar start.jar
-  -->
-  <searchComponent name="clusteringComponent"
-                   enable="${solr.clustering.enabled:false}" class="org.apache.solr.handler.clustering.ClusteringComponent">
-    <!-- Declare an engine -->
-    <lst name="engine">
-      <!-- The name, only one can be named "default" -->
-      <str name="name">default</str>
-      <!--
-        Class name of Carrot2 clustering algorithm. Currently available
-        algorithms are: *
-        org.carrot2.clustering.lingo.LingoClusteringAlgorithm *
-        org.carrot2.clustering.stc.STCClusteringAlgorithm See
-        http://project.carrot2.org/algorithms.html for the algorithm's
-        characteristics.
-      -->
-      <str name="carrot.algorithm">org.carrot2.clustering.lingo.LingoClusteringAlgorithm</str>
-      <!--
-        Overriding values for Carrot2 default algorithm attributes. For
-        a description of all available attributes, see:
-        http://download.carrot2.org/stable/manual/#chapter.components.
-        Use attribute key as name attribute of str elements below. These
-        can be further overridden for individual requests by specifying
-        attribute key as request parameter name and attribute value as
-        parameter value.
-      -->
-      <str name="LingoClusteringAlgorithm.desiredClusterCountBase">20</str>
-    </lst>
-    <lst name="engine">
-      <str name="name">stc</str>
-      <str name="carrot.algorithm">org.carrot2.clustering.stc.STCClusteringAlgorithm</str>
-    </lst>
-  </searchComponent>
-  <requestHandler name="/clustering" enable="${solr.clustering.enabled:false}"
-                  class="solr.SearchHandler">
-    <lst name="defaults">
-      <bool name="clustering">true</bool>
-      <str name="clustering.engine">default</str>
-      <bool name="clustering.results">true</bool>
-      <!-- The title field -->
-      <str name="carrot.title">name</str>
-      <str name="carrot.url">id</str>
-      <!-- The field to cluster on -->
-      <str name="carrot.snippet">features</str>
-      <!-- produce summaries -->
-      <bool name="carrot.produceSummary">true</bool>
-      <!-- the maximum number of labels per cluster -->
-      <!--<int name="carrot.numDescriptions">5</int>-->
-      <!-- produce sub clusters -->
-      <bool name="carrot.outputSubClusters">false</bool>
-    </lst>
-    <arr name="last-components">
-      <str>clusteringComponent</str>
-    </arr>
-  </requestHandler>
-
   <!-- Solr Cell: http://wiki.apache.org/solr/ExtractingRequestHandler -->
   <requestHandler name="/update/extract"
                   class="org.apache.solr.handler.extraction.ExtractingRequestHandler"

View File

@@ -48,13 +48,10 @@
        ends) will be included.
   -->
   <lib dir="../../dist/" regex="solr-cell-\d.*\.jar" />
-  <lib dir="../../dist/" regex="solr-clustering-\d.*\.jar" />
   <!--
     If a dir option (with or without a regex) is used and nothing is
     found that matches, it will be ignored
   -->
-  <lib dir="../../contrib/clustering/lib/downloads/" />
-  <lib dir="../../contrib/clustering/lib/" />
   <lib dir="/total/crap/dir/ignored" />
   <!--
     an exact path can be used to specify a specific file. This will
@@ -595,68 +592,6 @@
     </arr>
   </requestHandler>
 
-  <!--
-    Clustering Component http://wiki.apache.org/solr/ClusteringComponent
-    This relies on third party jars which are not included in the
-    release. To use this component (and the "/clustering" handler) Those
-    jars will need to be downloaded, and you'll need to set the
-    solr.cluster.enabled system property when running solr... java
-    -Dsolr.clustering.enabled=true -jar start.jar
-  -->
-  <searchComponent name="clusteringComponent"
-                   enable="${solr.clustering.enabled:false}" class="org.apache.solr.handler.clustering.ClusteringComponent">
-    <!-- Declare an engine -->
-    <lst name="engine">
-      <!-- The name, only one can be named "default" -->
-      <str name="name">default</str>
-      <!--
-        Class name of Carrot2 clustering algorithm. Currently available
-        algorithms are: *
-        org.carrot2.clustering.lingo.LingoClusteringAlgorithm *
-        org.carrot2.clustering.stc.STCClusteringAlgorithm See
-        http://project.carrot2.org/algorithms.html for the algorithm's
-        characteristics.
-      -->
-      <str name="carrot.algorithm">org.carrot2.clustering.lingo.LingoClusteringAlgorithm</str>
-      <!--
-        Overriding values for Carrot2 default algorithm attributes. For
-        a description of all available attributes, see:
-        http://download.carrot2.org/stable/manual/#chapter.components.
-        Use attribute key as name attribute of str elements below. These
-        can be further overridden for individual requests by specifying
-        attribute key as request parameter name and attribute value as
-        parameter value.
-      -->
-      <str name="LingoClusteringAlgorithm.desiredClusterCountBase">20</str>
-    </lst>
-    <lst name="engine">
-      <str name="name">stc</str>
-      <str name="carrot.algorithm">org.carrot2.clustering.stc.STCClusteringAlgorithm</str>
-    </lst>
-  </searchComponent>
-  <requestHandler name="/clustering" enable="${solr.clustering.enabled:false}"
-                  class="solr.SearchHandler">
-    <lst name="defaults">
-      <bool name="clustering">true</bool>
-      <str name="clustering.engine">default</str>
-      <bool name="clustering.results">true</bool>
-      <!-- The title field -->
-      <str name="carrot.title">name</str>
-      <str name="carrot.url">id</str>
-      <!-- The field to cluster on -->
-      <str name="carrot.snippet">features</str>
-      <!-- produce summaries -->
-      <bool name="carrot.produceSummary">true</bool>
-      <!-- the maximum number of labels per cluster -->
-      <!--<int name="carrot.numDescriptions">5</int>-->
-      <!-- produce sub clusters -->
-      <bool name="carrot.outputSubClusters">false</bool>
-    </lst>
-    <arr name="last-components">
-      <str>clusteringComponent</str>
-    </arr>
-  </requestHandler>
-
   <!-- Solr Cell: http://wiki.apache.org/solr/ExtractingRequestHandler -->
   <requestHandler name="/update/extract"
                   class="org.apache.solr.handler.extraction.ExtractingRequestHandler"

View File

@@ -41,6 +41,7 @@
   <dependency org="org.restlet.jee" name="org.restlet" rev="2.1.1" conf="compile->*"/>
   <dependency org="org.restlet.jee" name="org.restlet.ext.servlet" rev="2.1.1" conf="compile->*"/>
   <dependency org="joda-time" name="joda-time" rev="2.2" conf="compile->*"/>
+  <dependency org="dom4j" name="dom4j" rev="1.6.1" transitive="false"/>
   <dependency org="javax.servlet" name="javax.servlet-api" rev="3.0.1" conf="test->*"/>
   <dependency org="org.easymock" name="easymock" rev="3.0" conf="test->*"/>

View File

@@ -17,22 +17,36 @@ package org.apache.solr.cloud;
  * the License.
  */
 
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.util.StrUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATE_NODE_SET;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR;
 
 public class Assign {
   private static Pattern COUNT = Pattern.compile("core_node(\\d+)");
+  private static Logger log = LoggerFactory
+      .getLogger(Assign.class);
 
   public static String assignNode(String collection, ClusterState state) {
     Map<String, Slice> sliceMap = state.getSlicesMap(collection);
@@ -100,4 +114,91 @@ public class Assign {
       returnShardId = shardIdNames.get(0);
     return returnShardId;
   }
+
+  static class Node {
+    public final String nodeName;
+    public int thisCollectionNodes = 0;
+    public int totalNodes = 0;
+
+    Node(String nodeName) {
+      this.nodeName = nodeName;
+    }
+
+    public int weight() {
+      return (thisCollectionNodes * 100) + totalNodes;
+    }
+  }
+
+  public static ArrayList<Node> getNodesForNewShard(ClusterState clusterState, String collectionName, int numSlices, int maxShardsPerNode, int repFactor, String createNodeSetStr) {
+    List<String> createNodeList = createNodeSetStr == null ? null : StrUtils.splitSmart(createNodeSetStr, ",", true);
+
+    Set<String> nodes = clusterState.getLiveNodes();
+
+    List<String> nodeList = new ArrayList<String>(nodes.size());
+    nodeList.addAll(nodes);
+    if (createNodeList != null) nodeList.retainAll(createNodeList);
+
+    HashMap<String,Node> nodeNameVsShardCount = new HashMap<String,Node>();
+    for (String s : nodeList) nodeNameVsShardCount.put(s, new Node(s));
+    for (String s : clusterState.getCollections()) {
+      DocCollection c = clusterState.getCollection(s);
+      // identify suitable nodes by checking the no:of cores in each of them
+      for (Slice slice : c.getSlices()) {
+        Collection<Replica> replicas = slice.getReplicas();
+        for (Replica replica : replicas) {
+          Node count = nodeNameVsShardCount.get(replica.getNodeName());
+          if (count != null) {
+            count.totalNodes++;
+            if (s.equals(collectionName)) {
+              count.thisCollectionNodes++;
+              if (count.thisCollectionNodes >= maxShardsPerNode) nodeNameVsShardCount.remove(replica.getNodeName());
+            }
+          }
+        }
+      }
+    }
+
+    if (nodeNameVsShardCount.size() <= 0) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot create collection " + collectionName
+          + ". No live Solr-instances" + ((createNodeList != null) ? " among Solr-instances specified in " + CREATE_NODE_SET + ":" + createNodeSetStr : ""));
+    }
+
+    if (repFactor > nodeNameVsShardCount.size()) {
+      log.warn("Specified "
+          + REPLICATION_FACTOR
+          + " of "
+          + repFactor
+          + " on collection "
+          + collectionName
+          + " is higher than or equal to the number of Solr instances currently live or part of your " + CREATE_NODE_SET + "("
+          + nodeList.size()
+          + "). It's unusual to run two replica of the same slice on the same Solr-instance.");
+    }
+
+    int maxCoresAllowedToCreate = maxShardsPerNode * nodeList.size();
+    int requestedCoresToCreate = numSlices * repFactor;
+    int minCoresToCreate = requestedCoresToCreate;
+    if (maxCoresAllowedToCreate < minCoresToCreate) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot create shards " + collectionName + ". Value of "
+          + MAX_SHARDS_PER_NODE + " is " + maxShardsPerNode
+          + ", and the number of live nodes is " + nodeList.size()
+          + ". This allows a maximum of " + maxCoresAllowedToCreate
+          + " to be created. Value of " + NUM_SLICES + " is " + numSlices
+          + " and value of " + REPLICATION_FACTOR + " is " + repFactor
+          + ". This requires " + requestedCoresToCreate
+          + " shards to be created (higher than the allowed number)");
+    }
+
+    ArrayList<Node> sortedNodeList = new ArrayList<>(nodeNameVsShardCount.values());
+    Collections.sort(sortedNodeList, new Comparator<Node>() {
+      @Override
+      public int compare(Node x, Node y) {
+        return (x.weight() < y.weight()) ? -1 : ((x.weight() == y.weight()) ? 0 : 1);
+      }
+    });
+    return sortedNodeList;
+  }
 }
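
A worked example of the weight() ordering above, with invented counts: a node hosting 1 replica of this collection and 3 replicas overall weighs 1*100 + 3 = 103, while a node hosting 0 replicas of this collection and 10 overall weighs 10. The second node sorts first, so replicas of the same collection dominate the placement decision over total load by a factor of 100.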

View File

@@ -1,8 +1,5 @@
 package org.apache.solr.cloud;
 
-import java.io.IOException;
-import java.util.Map;
-
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClusterState;
@@ -21,6 +18,9 @@ import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.Map;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -148,8 +148,8 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         collection);
     Overseer.getInQueue(zkClient).offer(ZkStateReader.toJSON(m));
 
-    String leaderVoteWait = cc.getZkController().getLeaderVoteWait();
-    if (!weAreReplacement && leaderVoteWait != null) {
+    int leaderVoteWait = cc.getZkController().getLeaderVoteWait();
+    if (!weAreReplacement) {
       waitForReplicasToComeUp(weAreReplacement, leaderVoteWait);
     }
 
@@ -309,8 +309,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
   }
 
   private void waitForReplicasToComeUp(boolean weAreReplacement,
-      String leaderVoteWait) throws InterruptedException {
-    int timeout = Integer.parseInt(leaderVoteWait);
+      int timeout) throws InterruptedException {
     long timeoutAt = System.currentTimeMillis() + timeout;
     final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE;

View File

@@ -17,16 +17,6 @@ package org.apache.solr.cloud;
  * the License.
  */
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClosableThread;
@@ -46,6 +36,16 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
 /**
  * Cluster leader. Responsible node assignments, cluster state file?
  */
@@ -203,6 +203,8 @@ public class Overseer {
           clusterState = createShard(clusterState, message);
         } else if ("updateshardstate".equals(operation)) {
           clusterState = updateShardState(clusterState, message);
+        } else if (OverseerCollectionProcessor.CREATECOLLECTION.equals(operation)) {
+          clusterState = buildCollection(clusterState, message);
         } else {
           throw new RuntimeException("unknown operation:" + operation
               + " contents:" + message.getProperties());
@@ -210,6 +212,27 @@ public class Overseer {
       return clusterState;
     }
 
+    private ClusterState buildCollection(ClusterState clusterState, ZkNodeProps message) {
+      String collection = message.getStr("name");
+      log.info("building a new collection: " + collection);
+      if (clusterState.getCollections().contains(collection)) {
+        log.warn("Collection {} already exists. exit", collection);
+        return clusterState;
+      }
+
+      ArrayList<String> shardNames = new ArrayList<String>();
+
+      if (ImplicitDocRouter.NAME.equals(message.getStr("router", DocRouter.DEFAULT_NAME))) {
+        getShardNames(shardNames, message.getStr("shards", DocRouter.DEFAULT_NAME));
+      } else {
+        int numShards = message.getInt(ZkStateReader.NUM_SHARDS_PROP, -1);
+        if (numShards < 1) throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "numShards is a required parameter for 'compositeId' router");
+        getShardNames(numShards, shardNames);
+      }
+
+      return createCollection(clusterState, collection, shardNames, message);
+    }
+
     private ClusterState updateShardState(ClusterState clusterState, ZkNodeProps message) {
       String collection = message.getStr(ZkStateReader.COLLECTION_PROP);
       log.info("Update shard state invoked for collection: " + collection);
@@ -294,12 +317,22 @@ public class Overseer {
         }
         message.getProperties().put(ZkStateReader.CORE_NODE_NAME_PROP, coreNodeName);
       }
-      Integer numShards = message.getStr(ZkStateReader.NUM_SHARDS_PROP) != null ? Integer.parseInt(message.getStr(ZkStateReader.NUM_SHARDS_PROP)) : null;
+      Integer numShards = message.getInt(ZkStateReader.NUM_SHARDS_PROP, null);
       log.info("Update state numShards={} message={}", numShards, message);
+
+      String router = message.getStr(OverseerCollectionProcessor.ROUTER, DocRouter.DEFAULT_NAME);
+      List<String> shardNames = new ArrayList<String>();
+
       //collection does not yet exist, create placeholders if num shards is specified
       boolean collectionExists = state.getCollections().contains(collection);
       if (!collectionExists && numShards != null) {
-        state = createCollection(state, collection, numShards);
+        if (ImplicitDocRouter.NAME.equals(router)) {
+          getShardNames(shardNames, message.getStr("shards", null));
+          numShards = shardNames.size();
+        } else {
+          getShardNames(numShards, shardNames);
+        }
+        state = createCollection(state, collection, shardNames, message);
       }
 
       // use the provided non null shardId
@@ -391,34 +424,42 @@ public class Overseer {
       return newClusterState;
     }
 
-    private Map<String,Object> defaultCollectionProps() {
-      HashMap<String,Object> props = new HashMap<String, Object>(2);
-      props.put(DocCollection.DOC_ROUTER, DocRouter.DEFAULT_NAME);
-      return props;
-    }
-
-    private ClusterState createCollection(ClusterState state, String collectionName, int numShards) {
-      log.info("Create collection {} with numShards {}", collectionName, numShards);
-      DocRouter router = DocRouter.DEFAULT;
-      List<DocRouter.Range> ranges = router.partitionRange(numShards, router.fullRange());
+    private ClusterState createCollection(ClusterState state, String collectionName, List<String> shards, ZkNodeProps message) {
+      log.info("Create collection {} with shards {}", collectionName, shards);
+
+      String routerName = message.getStr(OverseerCollectionProcessor.ROUTER, DocRouter.DEFAULT_NAME);
+      DocRouter router = DocRouter.getDocRouter(routerName);
+      List<DocRouter.Range> ranges = router.partitionRange(shards.size(), router.fullRange());
 
       Map<String, DocCollection> newCollections = new LinkedHashMap<String,DocCollection>();
       Map<String, Slice> newSlices = new LinkedHashMap<String,Slice>();
       newCollections.putAll(state.getCollectionStates());
+      for (int i = 0; i < shards.size(); i++) {
+        String sliceName = shards.get(i);
+      /*}
       for (int i = 0; i < numShards; i++) {
-        final String sliceName = "shard" + (i+1);
+        final String sliceName = "shard" + (i+1);*/
 
-        Map<String,Object> sliceProps = new LinkedHashMap<String,Object>(1);
-        sliceProps.put(Slice.RANGE, ranges.get(i));
+        Map<String, Object> sliceProps = new LinkedHashMap<String, Object>(1);
+        sliceProps.put(Slice.RANGE, ranges == null ? null : ranges.get(i));
 
         newSlices.put(sliceName, new Slice(sliceName, null, sliceProps));
       }
 
       // TODO: fill in with collection properties read from the /collections/<collectionName> node
-      Map<String,Object> collectionProps = defaultCollectionProps();
+      Map<String,Object> collectionProps = new HashMap<String,Object>();
+      for (Entry<String, Object> e : OverseerCollectionProcessor.COLL_PROPS.entrySet()) {
+        Object val = message.get(e.getKey());
+        if (val == null) {
+          val = OverseerCollectionProcessor.COLL_PROPS.get(e.getKey());
+        }
+        if (val != null) collectionProps.put(e.getKey(), val);
+      }
+      collectionProps.put(DocCollection.DOC_ROUTER, routerName);
 
       DocCollection newCollection = new DocCollection(collectionName, newSlices, collectionProps, router);
@@ -466,7 +507,6 @@ public class Overseer {
     private ClusterState updateSlice(ClusterState state, String collectionName, Slice slice) {
       // System.out.println("###!!!### OLD CLUSTERSTATE: " + JSONUtil.toJSON(state.getCollectionStates()));
       // System.out.println("Updating slice:" + slice);
-
       Map<String, DocCollection> newCollections = new LinkedHashMap<String,DocCollection>(state.getCollectionStates()); // make a shallow copy
       DocCollection coll = newCollections.get(collectionName);
       Map<String,Slice> slices;
@@ -681,6 +721,28 @@ public class Overseer {
     }
 
+    static void getShardNames(Integer numShards, List<String> shardNames) {
+      if (numShards == null)
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "numShards" + " is a required param");
+      for (int i = 0; i < numShards; i++) {
+        final String sliceName = "shard" + (i + 1);
+        shardNames.add(sliceName);
+      }
+    }
+
+    static void getShardNames(List<String> shardNames, String shards) {
+      if (shards == null)
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "shards" + " is a required param");
+      for (String s : shards.split(",")) {
+        if (s == null || s.trim().isEmpty()) continue;
+        shardNames.add(s.trim());
+      }
+      if (shardNames.isEmpty())
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "shards" + " is a required param");
+    }
+
   class OverseerThread extends Thread implements ClosableThread {
 
     private volatile boolean isClosed;

View File

@@ -17,14 +17,6 @@ package org.apache.solr.cloud;
  * limitations under the License.
  */
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
@@ -40,6 +32,7 @@ import org.apache.solr.common.cloud.ClosableThread;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.DocRouter;
+import org.apache.solr.common.cloud.ImplicitDocRouter;
 import org.apache.solr.common.cloud.PlainIdRouter;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
@@ -61,6 +54,21 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.solr.cloud.Assign.Node;
+import static org.apache.solr.cloud.Assign.getNodesForNewShard;
+import static org.apache.solr.common.cloud.DocRouter.ROUTE_FIELD;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+
 public class OverseerCollectionProcessor implements Runnable, ClosableThread {
 
   public static final String NUM_SLICES = "numShards";
@@ -85,6 +93,22 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
 
   public static final String DELETESHARD = "deleteshard";
 
+  public static final String ROUTER = "router";
+
+  public static final String SHARDS_PROP = "shards";
+
+  public static final String CREATESHARD = "createshard";
+
+  public static final String COLL_CONF = "collection.configName";
+
+  public static final Map<String,Object> COLL_PROPS = asMap(
+      ROUTER, DocRouter.DEFAULT_NAME,
+      REPLICATION_FACTOR, "1",
+      MAX_SHARDS_PER_NODE, "1",
+      ROUTE_FIELD, null);
+
   // TODO: use from Overseer?
   private static final String QUEUE_OPERATION = "operation";
@@ -168,6 +192,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
 
   protected SolrResponse processMessage(ZkNodeProps message, String operation) {
+    log.warn("OverseerCollectionProcessor.processMessage : " + operation + " , " + message.toString());
 
     NamedList results = new NamedList();
     try {
@@ -185,6 +210,8 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
         deleteAlias(zkStateReader.getAliases(), message);
       } else if (SPLITSHARD.equals(operation)) {
         splitShard(zkStateReader.getClusterState(), message, results);
+      } else if (CREATESHARD.equals(operation)) {
+        createShard(zkStateReader.getClusterState(), message, results);
       } else if (DELETESHARD.equals(operation)) {
         deleteShard(zkStateReader.getClusterState(), message, results);
       } else {
@@ -334,6 +361,83 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
   }
 
+  private boolean createShard(ClusterState clusterState, ZkNodeProps message, NamedList results) throws KeeperException, InterruptedException {
+    log.info("create shard invoked");
+    String collectionName = message.getStr(COLLECTION_PROP);
+    String shard = message.getStr(SHARD_ID_PROP);
+    if (collectionName == null || shard == null)
+      throw new SolrException(ErrorCode.BAD_REQUEST, "'collection' and 'shard' are required parameters");
+    int numSlices = 1;
+
+    DocCollection collection = clusterState.getCollection(collectionName);
+    int maxShardsPerNode = collection.getInt(MAX_SHARDS_PER_NODE, 1);
+    int repFactor = message.getInt(REPLICATION_FACTOR, collection.getInt(MAX_SHARDS_PER_NODE, 1));
+//    int minReplicas = message.getInt("minReplicas", repFactor);
+    String createNodeSetStr = message.getStr(CREATE_NODE_SET);
+
+    ArrayList<Node> sortedNodeList = getNodesForNewShard(clusterState, collectionName, numSlices, maxShardsPerNode, repFactor, createNodeSetStr);
+
+    Overseer.getInQueue(zkStateReader.getZkClient()).offer(ZkStateReader.toJSON(message));
+    // wait for a while until we don't see the collection
+    long waitUntil = System.currentTimeMillis() + 30000;
+    boolean created = false;
+    while (System.currentTimeMillis() < waitUntil) {
+      Thread.sleep(100);
+      created = zkStateReader.getClusterState().getCollection(collectionName).getSlice(shard) != null;
+      if (created) break;
+    }
+    if (!created)
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Could not fully create shard: " + message.getStr("name"));
+
+    String configName = message.getStr(COLL_CONF);
+    String sliceName = shard;
+    for (int j = 1; j <= repFactor; j++) {
+      String nodeName = sortedNodeList.get(((j - 1)) % sortedNodeList.size()).nodeName;
+      String shardName = collectionName + "_" + sliceName + "_replica" + j;
+      log.info("Creating shard " + shardName + " as part of slice "
+          + sliceName + " of collection " + collectionName + " on "
+          + nodeName);
+
+      // Need to create new params for each request
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set(CoreAdminParams.ACTION, CoreAdminAction.CREATE.toString());
+
+      params.set(CoreAdminParams.NAME, shardName);
+      params.set(COLL_CONF, configName);
+      params.set(CoreAdminParams.COLLECTION, collectionName);
+      params.set(CoreAdminParams.SHARD, sliceName);
+      params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices);
+
+      ShardRequest sreq = new ShardRequest();
+      params.set("qt", adminPath);
+      sreq.purpose = 1;
+      String replica = zkStateReader.getZkClient()
+          .getBaseUrlForNodeName(nodeName);
+      if (replica.startsWith("http://")) replica = replica.substring(7);
+      sreq.shards = new String[]{replica};
+      sreq.actualShards = sreq.shards;
+      sreq.params = params;
+
+      shardHandler.submit(sreq, replica, sreq.params);
+    }
+
+    ShardResponse srsp;
+    do {
+      srsp = shardHandler.takeCompletedOrError();
+      if (srsp != null) {
+        processResponse(results, srsp);
+      }
+    } while (srsp != null);
+
+    log.info("Finished create command on all shards for collection: "
+        + collectionName);
+
+    return true;
+  }
+
   private boolean splitShard(ClusterState clusterState, ZkNodeProps message, NamedList results) {
     log.info("Split shard invoked");
     String collectionName = message.getStr("collection");
@ -732,7 +836,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
shardHandler.submit(sreq, replica, sreq.params); shardHandler.submit(sreq, replica, sreq.params);
} }
private void createCollection(ClusterState clusterState, ZkNodeProps message, NamedList results) { private void createCollection(ClusterState clusterState, ZkNodeProps message, NamedList results) throws KeeperException, InterruptedException {
String collectionName = message.getStr("name"); String collectionName = message.getStr("name");
if (clusterState.getCollections().contains(collectionName)) { if (clusterState.getCollections().contains(collectionName)) {
throw new SolrException(ErrorCode.BAD_REQUEST, "collection already exists: " + collectionName); throw new SolrException(ErrorCode.BAD_REQUEST, "collection already exists: " + collectionName);
@ -742,14 +846,22 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
// look at the replication factor and see if it matches reality // look at the replication factor and see if it matches reality
// if it does not, find best nodes to create more cores // if it does not, find best nodes to create more cores
int repFactor = msgStrToInt(message, REPLICATION_FACTOR, 1); int repFactor = message.getInt( REPLICATION_FACTOR, 1);
Integer numSlices = msgStrToInt(message, NUM_SLICES, null); Integer numSlices = message.getInt(NUM_SLICES, null);
String router = message.getStr(ROUTER, DocRouter.DEFAULT_NAME);
List<String> shardNames = new ArrayList<>();
if(ImplicitDocRouter.NAME.equals(router)){
Overseer.getShardNames(shardNames, message.getStr("shards",null));
numSlices = shardNames.size();
} else {
Overseer.getShardNames(numSlices,shardNames);
}
if (numSlices == null) { if (numSlices == null ) {
throw new SolrException(ErrorCode.BAD_REQUEST, NUM_SLICES + " is a required param"); throw new SolrException(ErrorCode.BAD_REQUEST, NUM_SLICES + " is a required param");
} }
int maxShardsPerNode = msgStrToInt(message, MAX_SHARDS_PER_NODE, 1); int maxShardsPerNode = message.getInt(MAX_SHARDS_PER_NODE, 1);
String createNodeSetStr; String createNodeSetStr;
List<String> createNodeList = ((createNodeSetStr = message.getStr(CREATE_NODE_SET)) == null)?null:StrUtils.splitSmart(createNodeSetStr, ",", true); List<String> createNodeList = ((createNodeSetStr = message.getStr(CREATE_NODE_SET)) == null)?null:StrUtils.splitSmart(createNodeSetStr, ",", true);
@ -761,8 +873,6 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
throw new SolrException(ErrorCode.BAD_REQUEST, NUM_SLICES + " must be > 0"); throw new SolrException(ErrorCode.BAD_REQUEST, NUM_SLICES + " must be > 0");
} }
String configName = message.getStr("collection.configName");
// we need to look at every node and see how many cores it serves // we need to look at every node and see how many cores it serves
// add our new cores to existing nodes serving the least number of cores // add our new cores to existing nodes serving the least number of cores
// but (for now) require that each core goes on a distinct node. // but (for now) require that each core goes on a distinct node.
@ -806,10 +916,28 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
+ " shards to be created (higher than the allowed number)"); + " shards to be created (higher than the allowed number)");
} }
for (int i = 1; i <= numSlices; i++) { // ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION,
// Overseer.CREATECOLLECTION, "name", message.getStr("name"));
Overseer.getInQueue(zkStateReader.getZkClient()).offer(ZkStateReader.toJSON(message));
// wait for a while until we don't see the collection
long waitUntil = System.currentTimeMillis() + 30000;
boolean created = false;
while (System.currentTimeMillis() < waitUntil) {
Thread.sleep(100);
created = zkStateReader.getClusterState().getCollections().contains(message.getStr("name"));
if(created) break;
}
if (!created)
throw new SolrException(ErrorCode.SERVER_ERROR, "Could not fully createcollection: " + message.getStr("name"));
String configName = message.getStr(COLL_CONF);
log.info("going to create cores replicas shardNames {} , repFactor : {}", shardNames, repFactor);
for (int i = 1; i <= shardNames.size(); i++) {
String sliceName = shardNames.get(i-1);
for (int j = 1; j <= repFactor; j++) { for (int j = 1; j <= repFactor; j++) {
String nodeName = nodeList.get((repFactor * (i - 1) + (j - 1)) % nodeList.size()); String nodeName = nodeList.get((repFactor * (i - 1) + (j - 1)) % nodeList.size());
String sliceName = "shard" + i;
String shardName = collectionName + "_" + sliceName + "_replica" + j; String shardName = collectionName + "_" + sliceName + "_replica" + j;
log.info("Creating shard " + shardName + " as part of slice " log.info("Creating shard " + shardName + " as part of slice "
+ sliceName + " of collection " + collectionName + " on " + sliceName + " of collection " + collectionName + " on "
@ -820,7 +948,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
params.set(CoreAdminParams.ACTION, CoreAdminAction.CREATE.toString()); params.set(CoreAdminParams.ACTION, CoreAdminAction.CREATE.toString());
params.set(CoreAdminParams.NAME, shardName); params.set(CoreAdminParams.NAME, shardName);
params.set("collection.configName", configName); params.set(COLL_CONF, configName);
params.set(CoreAdminParams.COLLECTION, collectionName); params.set(CoreAdminParams.COLLECTION, collectionName);
params.set(CoreAdminParams.SHARD, sliceName); params.set(CoreAdminParams.SHARD, sliceName);
params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices); params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices);
@ -947,19 +1075,16 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
} }
} }
private Integer msgStrToInt(ZkNodeProps message, String key, Integer def)
throws Exception {
String str = message.getStr(key);
try {
return str == null ? def : Integer.valueOf(str);
} catch (Exception ex) {
SolrException.log(log, "Could not parse " + key, ex);
throw ex;
}
}
@Override @Override
public boolean isClosed() { public boolean isClosed() {
return isClosed; return isClosed;
} }
public static Map<String, Object> asMap(Object... vals) {
HashMap<String, Object> m = new HashMap<String, Object>();
for(int i=0; i<vals.length; i+=2) {
m.put(String.valueOf(vals[i]), vals[i+1]);
}
return m;
}
} }
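The asMap helper added at the end of this class is a plain varargs key/value builder: alternating arguments, keys coerced to String. A minimal standalone sketch of the same pattern (the class name and the property set here are hypothetical, not part of the commit):

    import java.util.HashMap;
    import java.util.Map;

    public class AsMapDemo {
      // Same varargs shape as the new asMap helper above.
      public static Map<String, Object> asMap(Object... vals) {
        HashMap<String, Object> m = new HashMap<String, Object>();
        for (int i = 0; i < vals.length; i += 2) {
          m.put(String.valueOf(vals[i]), vals[i + 1]);
        }
        return m;
      }

      public static void main(String[] args) {
        Map<String, Object> props = asMap(
            "router", "compositeId",
            "replicationFactor", "1",
            "maxShardsPerNode", "1");
        System.out.println(props); // three entries; HashMap iteration order is unspecified
      }
    }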
@@ -67,7 +67,7 @@ public class SyncStrategy {
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(HttpClientUtil.PROP_MAX_CONNECTIONS, 10000);
     params.set(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST, 20);
-    params.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, 30000);
+    params.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, 15000);
     params.set(HttpClientUtil.PROP_SO_TIMEOUT, 30000);
     params.set(HttpClientUtil.PROP_USE_RETRY, false);
     client = HttpClientUtil.createClient(params);
@@ -285,8 +285,8 @@ public class SyncStrategy {
     recoverRequestCmd.setCoreName(coreName);

     HttpSolrServer server = new HttpSolrServer(baseUrl, client);
-    server.setConnectionTimeout(45000);
-    server.setSoTimeout(45000);
+    server.setConnectionTimeout(15000);
+    server.setSoTimeout(30000);
     try {
       server.request(recoverRequestCmd);
     } catch (Throwable t) {
@@ -129,7 +129,7 @@ public class ZkCLI {
       System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + UPCONFIG + " -" + CONFDIR + " /opt/solr/collection1/conf" + " -" + CONFNAME + " myconf");
       System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + DOWNCONFIG + " -" + CONFDIR + " /opt/solr/collection1/conf" + " -" + CONFNAME + " myconf");
       System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + LINKCONFIG + " -" + COLLECTION + " collection1" + " -" + CONFNAME + " myconf");
-      System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + MAKEPATH + " /apache/solr/data.txt 'config data'");
+      System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + MAKEPATH + " /apache/solr");
       System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + PUT + " /solr.conf 'conf data'");
       System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + CLEAR + " /solr");
       System.out.println("zkcli.sh -zkhost localhost:9983 -cmd " + LIST);
@@ -130,7 +130,7 @@ public final class ZkController {

   protected volatile Overseer overseer;

-  private String leaderVoteWait;
+  private int leaderVoteWait;

   private boolean genericCoreNodeNames;

@@ -141,7 +141,7 @@ public final class ZkController {
   private UpdateShardHandler updateShardHandler;

   public ZkController(final CoreContainer cc, String zkServerAddress, int zkClientTimeout, int zkClientConnectTimeout, String localHost, String locaHostPort,
-      String localHostContext, String leaderVoteWait, boolean genericCoreNodeNames, int distribUpdateConnTimeout, int distribUpdateSoTimeout, final CurrentCoreDescriptorProvider registerOnReconnect) throws InterruptedException,
+      String localHostContext, int leaderVoteWait, boolean genericCoreNodeNames, int distribUpdateConnTimeout, int distribUpdateSoTimeout, final CurrentCoreDescriptorProvider registerOnReconnect) throws InterruptedException,
       TimeoutException, IOException {
     if (cc == null) throw new IllegalArgumentException("CoreContainer cannot be null.");
     this.cc = cc;
@@ -242,7 +242,7 @@ public final class ZkController {
     init(registerOnReconnect);
   }

-  public String getLeaderVoteWait() {
+  public int getLeaderVoteWait() {
     return leaderVoteWait;
   }
@@ -769,7 +769,7 @@ public final class ZkController {
       // in this case, we want to wait for the leader as long as the leader might
       // wait for a vote, at least - but also long enough that a large cluster has
       // time to get its act together
-      String leaderUrl = getLeader(cloudDesc, Integer.parseInt(leaderVoteWait) + 600000);
+      String leaderUrl = getLeader(cloudDesc, leaderVoteWait + 600000);

       String ourUrl = ZkCoreNodeProps.getCoreUrl(baseUrl, coreName);
       log.info("We are " + ourUrl + " and leader is " + leaderUrl);
@@ -233,14 +233,13 @@ public class Config {
   }

   public Node getNode(String path, Document doc, boolean errIfMissing) {
     XPath xpath = xpathFactory.newXPath();
-    Node nd = null;
     String xstr = normalize(path);

     try {
-      nd = (Node)xpath.evaluate(xstr, doc, XPathConstants.NODE);
-
-      if (nd==null) {
+      NodeList nodes = (NodeList)xpath.evaluate(xstr, doc,
+          XPathConstants.NODESET);
+      if (nodes==null || 0 == nodes.getLength() ) {
         if (errIfMissing) {
           throw new RuntimeException(name + " missing "+path);
         } else {
@@ -248,7 +247,11 @@ public class Config {
           return null;
         }
       }
+      if ( 1 < nodes.getLength() ) {
+        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
+            name + " contains more than one value for config path: " + path);
+      }
+      Node nd = nodes.item(0);
       log.trace(name + ":" + path + "=" + nd);
       return nd;
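The switch above from evaluating the XPath as a single NODE to a NODESET is what lets Config reject a config path that is accidentally defined twice. A standalone sketch of the same check against plain JAXP (the XML snippet and class name are hypothetical):

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.xpath.XPath;
    import javax.xml.xpath.XPathConstants;
    import javax.xml.xpath.XPathFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Node;
    import org.w3c.dom.NodeList;

    public class UniqueNodeDemo {
      public static void main(String[] args) throws Exception {
        String xml = "<config><dataDir>/a</dataDir><dataDir>/b</dataDir></config>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
            .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        XPath xpath = XPathFactory.newInstance().newXPath();
        // NODESET returns every match, so a duplicate definition is detectable.
        NodeList nodes = (NodeList) xpath.evaluate("config/dataDir", doc, XPathConstants.NODESET);
        if (nodes.getLength() > 1) {
          // Reported as a SERVER_ERROR by the new getNode().
          throw new RuntimeException("more than one value for config path: config/dataDir");
        }
        Node nd = nodes.item(0);
        System.out.println(nd.getTextContent());
      }
    }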
@@ -21,6 +21,7 @@ import com.google.common.base.Charsets;
 import com.google.common.io.ByteStreams;
 import org.apache.commons.io.IOUtils;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.logging.LogWatcherConfig;
 import org.apache.solr.util.DOMUtil;
 import org.apache.solr.util.PropertiesUtil;
 import org.slf4j.Logger;
@@ -100,20 +101,106 @@ public abstract class ConfigSolr {

   public abstract CoresLocator getCoresLocator();

   public PluginInfo getShardHandlerFactoryPluginInfo() {
     Node node = config.getNode(getShardHandlerFactoryConfigPath(), false);
     return (node == null) ? null : new PluginInfo(node, "shardHandlerFactory", false, true);
   }

+  public Node getUnsubsititutedShardHandlerFactoryPluginNode() {
+    return config.getUnsubstitutedNode(getShardHandlerFactoryConfigPath(), false);
+  }
+
   protected abstract String getShardHandlerFactoryConfigPath();

+  public String getZkHost() {
+    String sysZkHost = System.getProperty("zkHost");
+    if (sysZkHost != null)
+      return sysZkHost;
+    return get(CfgProp.SOLR_ZKHOST, null);
+  }
+
+  public int getZkClientTimeout() {
+    String sysProp = System.getProperty("zkClientTimeout");
+    if (sysProp != null)
+      return Integer.parseInt(sysProp);
+    return getInt(CfgProp.SOLR_ZKCLIENTTIMEOUT, DEFAULT_ZK_CLIENT_TIMEOUT);
+  }
+
+  private static final int DEFAULT_ZK_CLIENT_TIMEOUT = 15000;
+  private static final int DEFAULT_LEADER_VOTE_WAIT = 180000;  // 3 minutes
+  private static final int DEFAULT_CORE_LOAD_THREADS = 3;
+
+  protected static final String DEFAULT_CORE_ADMIN_PATH = "/admin/cores";
+
+  public String getZkHostPort() {
+    return get(CfgProp.SOLR_HOSTPORT, null);
+  }
+
+  public String getZkHostContext() {
+    return get(CfgProp.SOLR_HOSTCONTEXT, null);
+  }
+
+  public String getHost() {
+    return get(CfgProp.SOLR_HOST, null);
+  }
+
+  public int getLeaderVoteWait() {
+    return getInt(CfgProp.SOLR_LEADERVOTEWAIT, DEFAULT_LEADER_VOTE_WAIT);
+  }
+
+  public boolean getGenericCoreNodeNames() {
+    return getBool(CfgProp.SOLR_GENERICCORENODENAMES, false);
+  }
+
+  public int getDistributedConnectionTimeout() {
+    return getInt(CfgProp.SOLR_DISTRIBUPDATECONNTIMEOUT, 0);
+  }
+
+  public int getDistributedSocketTimeout() {
+    return getInt(CfgProp.SOLR_DISTRIBUPDATESOTIMEOUT, 0);
+  }
+
+  public int getCoreLoadThreadCount() {
+    return getInt(ConfigSolr.CfgProp.SOLR_CORELOADTHREADS, DEFAULT_CORE_LOAD_THREADS);
+  }
+
+  public String getSharedLibDirectory() {
+    return get(ConfigSolr.CfgProp.SOLR_SHAREDLIB, null);
+  }
+
+  public String getDefaultCoreName() {
+    return get(CfgProp.SOLR_CORES_DEFAULT_CORE_NAME, null);
+  }
+
+  public abstract boolean isPersistent();
+
+  public String getAdminPath() {
+    return get(CfgProp.SOLR_ADMINPATH, DEFAULT_CORE_ADMIN_PATH);
+  }
+
+  public String getCoreAdminHandlerClass() {
+    return get(CfgProp.SOLR_ADMINHANDLER, "org.apache.solr.handler.admin.CoreAdminHandler");
+  }
+
+  public boolean hasSchemaCache() {
+    return getBool(ConfigSolr.CfgProp.SOLR_SHARESCHEMA, false);
+  }
+
+  public String getManagementPath() {
+    return get(CfgProp.SOLR_MANAGEMENTPATH, null);
+  }
+
+  public LogWatcherConfig getLogWatcherConfig() {
+    return new LogWatcherConfig(
+        getBool(CfgProp.SOLR_LOGGING_ENABLED, false),
+        get(CfgProp.SOLR_LOGGING_CLASS, null),
+        get(CfgProp.SOLR_LOGGING_WATCHER_THRESHOLD, null),
+        getInt(CfgProp.SOLR_LOGGING_WATCHER_SIZE, 50)
+    );
+  }
+
+  public int getTransientCacheSize() {
+    return getInt(CfgProp.SOLR_TRANSIENTCACHESIZE, Integer.MAX_VALUE);
+  }
+
   // Ugly for now, but we'll at least be able to centralize all of the differences between 4x and 5x.
-  public static enum CfgProp {
+  protected static enum CfgProp {
     SOLR_ADMINHANDLER,
     SOLR_CORELOADTHREADS,
     SOLR_COREROOTDIRECTORY,
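Several of the new accessors (getZkHost, getZkClientTimeout) share one precedence rule: a JVM system property, when set, wins over the value parsed from solr.xml, which in turn falls back to a hardcoded default. A minimal sketch of that lookup chain, with hypothetical names:

    public class SysPropOverrideDemo {
      private static final int DEFAULT_TIMEOUT = 15000;

      // Stand-in for a value parsed from configuration; null means "not configured".
      private static Integer configuredTimeout = null;

      static int readTimeout() {
        String sysProp = System.getProperty("demo.timeout");
        if (sysProp != null)
          return Integer.parseInt(sysProp);  // -Ddemo.timeout=30000 takes precedence
        return configuredTimeout != null ? configuredTimeout : DEFAULT_TIMEOUT;
      }

      public static void main(String[] args) {
        System.out.println(readTimeout()); // 15000 unless -Ddemo.timeout is set
      }
    }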
@@ -125,11 +125,26 @@ public class ConfigSolrXml extends ConfigSolr {
     propMap.put(CfgProp.SOLR_LOGGING_WATCHER_THRESHOLD, doSub("solr/logging/watcher/int[@name='threshold']"));
   }

+  @Override
+  public String getDefaultCoreName() {
+    return "collection1";
+  }
+
+  @Override
+  public boolean isPersistent() {
+    return true;
+  }
+
   @Override
   protected String getShardHandlerFactoryConfigPath() {
     return "solr/shardHandlerFactory";
   }

+  @Override
+  public String getAdminPath() {
+    return DEFAULT_CORE_ADMIN_PATH;
+  }
+
   @Override
   public CoresLocator getCoresLocator() {
     return coresLocator;
@@ -110,10 +110,16 @@ public class ConfigSolrXmlOld extends ConfigSolr {
     }
   }

+  @Override
   public boolean isPersistent() {
     return config.getBool("solr/@persistent", false);
   }

+  @Override
+  public String getDefaultCoreName() {
+    return get(CfgProp.SOLR_CORES_DEFAULT_CORE_NAME, DEFAULT_DEFAULT_CORE_NAME);
+  }
+
   private void fillPropMap() {
     propMap.put(CfgProp.SOLR_CORELOADTHREADS,
@@ -268,15 +274,17 @@ public class ConfigSolrXmlOld extends ConfigSolr {
     return new Properties();
   }

+  public static final String DEFAULT_DEFAULT_CORE_NAME = "collection1";
+
   public static final String DEF_SOLR_XML = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n"
       + "<solr persistent=\"false\">\n"
       + "  <cores adminPath=\"/admin/cores\" defaultCoreName=\""
-      + CoreContainer.DEFAULT_DEFAULT_CORE_NAME
+      + DEFAULT_DEFAULT_CORE_NAME
       + "\""
       + " host=\"${host:}\" hostPort=\"${hostPort:}\" hostContext=\"${hostContext:}\" zkClientTimeout=\"${zkClientTimeout:15000}\""
       + ">\n"
       + "  <core name=\""
-      + CoreContainer.DEFAULT_DEFAULT_CORE_NAME
+      + DEFAULT_DEFAULT_CORE_NAME
       + "\" shard=\"${shard:}\" collection=\"${collection:collection1}\" instanceDir=\"collection1\" />\n"
       + "  </cores>\n" + "</solr>";
@@ -55,10 +55,9 @@ import java.util.concurrent.CompletionService;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;

 import static com.google.common.base.Preconditions.checkNotNull;
@@ -67,55 +66,34 @@ import static com.google.common.base.Preconditions.checkNotNull;
  *
  * @since solr 1.3
  */
-public class CoreContainer
-{
+public class CoreContainer {

-  private static final String LEADER_VOTE_WAIT = "180000";  // 3 minutes
-  private static final int CORE_LOAD_THREADS = 3;
-  private static final int DEFAULT_ZK_CLIENT_TIMEOUT = 15000;
-  public static final String DEFAULT_DEFAULT_CORE_NAME = "collection1";
-  private static final boolean DEFAULT_SHARE_SCHEMA = false;
-
-  protected static Logger log = LoggerFactory.getLogger(CoreContainer.class);
+  protected static final Logger log = LoggerFactory.getLogger(CoreContainer.class);

   private final SolrCores solrCores = new SolrCores(this);

   protected final Map<String,Exception> coreInitFailures =
     Collections.synchronizedMap(new LinkedHashMap<String,Exception>());

-  protected boolean persistent = false;
-  protected String adminPath = null;
-  protected volatile String managementPath = null;
-
   protected CoreAdminHandler coreAdminHandler = null;
   protected CollectionsHandler collectionsHandler = null;
-  protected String libDir = null;
+  private InfoHandler infoHandler;

   protected Properties containerProperties;

   protected Map<String ,IndexSchema> indexSchemaCache;
-  protected String adminHandler;
   protected boolean shareSchema;
-  protected Integer zkClientTimeout;
-  protected String defaultCoreName = null;
-
-  protected int distribUpdateConnTimeout = 0;
-  protected int distribUpdateSoTimeout = 0;

   protected ZkContainer zkSys = new ZkContainer();
   private ShardHandlerFactory shardHandlerFactory;

   protected LogWatcher logging = null;
-  private String zkHost;
-  private int transientCacheSize = Integer.MAX_VALUE;
-
-  private int coreLoadThreads;
   private CloserThread backgroundCloser = null;

   protected final ConfigSolr cfg;
   protected final SolrResourceLoader loader;
-  protected final String solrHome;

-  private InfoHandler infoHandler;
+  protected final String solrHome;

   protected final CoresLocator coresLocator;
@@ -202,10 +180,8 @@ public class CoreContainer
     log.info("Loading cores into CoreContainer [instanceDir={}]", loader.getInstanceDir());

-    ThreadPoolExecutor coreLoadExecutor = null;
-
     // add the sharedLib to the shared resource loader before initializing cfg based plugins
-    libDir = cfg.get(ConfigSolr.CfgProp.SOLR_SHAREDLIB , null);
+    String libDir = cfg.getSharedLibDirectory();
     if (libDir != null) {
       File f = FileUtils.resolvePath(new File(solrHome), libDir);
       log.info("loading shared library: " + f.getAbsolutePath());
@@ -215,79 +191,32 @@ public class CoreContainer
     shardHandlerFactory = ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);

-    solrCores.allocateLazyCores(cfg, loader);
+    solrCores.allocateLazyCores(cfg.getTransientCacheSize(), loader);

-    logging = JulWatcher.newRegisteredLogWatcher(cfg, loader);
+    logging = JulWatcher.newRegisteredLogWatcher(cfg.getLogWatcherConfig(), loader);

-    if (cfg instanceof ConfigSolrXmlOld) { //TODO: Remove for 5.0
-      String dcoreName = cfg.get(ConfigSolr.CfgProp.SOLR_CORES_DEFAULT_CORE_NAME, null);
-      if (dcoreName != null && !dcoreName.isEmpty()) {
-        defaultCoreName = dcoreName;
-      }
-      persistent = cfg.getBool(ConfigSolr.CfgProp.SOLR_PERSISTENT, false);
-      adminPath = cfg.get(ConfigSolr.CfgProp.SOLR_ADMINPATH, "/admin/cores");
-    } else {
-      adminPath = "/admin/cores";
-      defaultCoreName = DEFAULT_DEFAULT_CORE_NAME;
-    }
-
-    zkHost = cfg.get(ConfigSolr.CfgProp.SOLR_ZKHOST, null);
-    coreLoadThreads = cfg.getInt(ConfigSolr.CfgProp.SOLR_CORELOADTHREADS, CORE_LOAD_THREADS);
-
-    shareSchema = cfg.getBool(ConfigSolr.CfgProp.SOLR_SHARESCHEMA, DEFAULT_SHARE_SCHEMA);
-    zkClientTimeout = cfg.getInt(ConfigSolr.CfgProp.SOLR_ZKCLIENTTIMEOUT, DEFAULT_ZK_CLIENT_TIMEOUT);
-
-    distribUpdateConnTimeout = cfg.getInt(ConfigSolr.CfgProp.SOLR_DISTRIBUPDATECONNTIMEOUT, 0);
-    distribUpdateSoTimeout = cfg.getInt(ConfigSolr.CfgProp.SOLR_DISTRIBUPDATESOTIMEOUT, 0);
-
-    // Note: initZooKeeper will apply hardcoded default if cloud mode
-    String hostPort = cfg.get(ConfigSolr.CfgProp.SOLR_HOSTPORT, null);
-    // Note: initZooKeeper will apply hardcoded default if cloud mode
-    String hostContext = cfg.get(ConfigSolr.CfgProp.SOLR_HOSTCONTEXT, null);
-
-    String host = cfg.get(ConfigSolr.CfgProp.SOLR_HOST, null);
-
-    String leaderVoteWait = cfg.get(ConfigSolr.CfgProp.SOLR_LEADERVOTEWAIT, LEADER_VOTE_WAIT);
-
-    adminHandler = cfg.get(ConfigSolr.CfgProp.SOLR_ADMINHANDLER, null);
-    managementPath = cfg.get(ConfigSolr.CfgProp.SOLR_MANAGEMENTPATH, null);
-
-    transientCacheSize = cfg.getInt(ConfigSolr.CfgProp.SOLR_TRANSIENTCACHESIZE, Integer.MAX_VALUE);
-
-    boolean genericCoreNodeNames = cfg.getBool(ConfigSolr.CfgProp.SOLR_GENERICCORENODENAMES, false);
+    shareSchema = cfg.hasSchemaCache();

     if (shareSchema) {
       indexSchemaCache = new ConcurrentHashMap<String,IndexSchema>();
     }

-    zkClientTimeout = Integer.parseInt(System.getProperty("zkClientTimeout",
-        Integer.toString(zkClientTimeout)));
-    zkSys.initZooKeeper(this, solrHome, zkHost, zkClientTimeout, hostPort, hostContext, host, leaderVoteWait, genericCoreNodeNames, distribUpdateConnTimeout, distribUpdateSoTimeout);
-
-    if (isZooKeeperAware() && coreLoadThreads <= 1) {
-      throw new SolrException(ErrorCode.SERVER_ERROR,
-          "SolrCloud requires a value of at least 2 in solr.xml for coreLoadThreads");
-    }
-
-    if (adminPath != null) {
-      if (adminHandler == null) {
-        coreAdminHandler = new CoreAdminHandler(this);
-      } else {
-        coreAdminHandler = this.createMultiCoreHandler(adminHandler);
-      }
-    }
+    zkSys.initZooKeeper(this, solrHome, cfg);

     collectionsHandler = new CollectionsHandler(this);
     infoHandler = new InfoHandler(this);
+    coreAdminHandler = createMultiCoreHandler(cfg.getCoreAdminHandlerClass());

     containerProperties = cfg.getSolrProperties("solr");

     // setup executor to load cores in parallel
-    coreLoadExecutor = new ThreadPoolExecutor(coreLoadThreads, coreLoadThreads, 1,
-        TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(),
+    ExecutorService coreLoadExecutor = Executors.newFixedThreadPool(cfg.getCoreLoadThreadCount(),
         new DefaultSolrThreadFactory("coreLoadExecutor"));
     try {
       CompletionService<SolrCore> completionService = new ExecutorCompletionService<SolrCore>(
           coreLoadExecutor);
       Set<Future<SolrCore>> pending = new HashSet<Future<SolrCore>>();

       List<CoreDescriptor> cds = coresLocator.discover(this);
@@ -759,7 +688,7 @@ public class CoreContainer
   //5.0 remove all checkDefaults?
   private String checkDefault(String name) {
-    return (null == name || name.isEmpty()) ? defaultCoreName : name;
+    return (null == name || name.isEmpty()) ? getDefaultCoreName() : name;
   }

   /**
@@ -918,46 +847,34 @@ public class CoreContainer
    * the default core name, or null if there is no default core name
    */
   public String getDefaultCoreName() {
-    return defaultCoreName;
+    return cfg.getDefaultCoreName();
   }

   // all of the following properties aren't synchronized
   // but this should be OK since they normally won't be changed rapidly
   @Deprecated
   public boolean isPersistent() {
-    return persistent;
-  }
-
-  @Deprecated
-  public void setPersistent(boolean persistent) {
-    this.persistent = persistent;
+    return cfg.isPersistent();
   }

   public String getAdminPath() {
-    return adminPath;
-  }
-
-  public String getManagementPath() {
-    return managementPath;
+    return cfg.getAdminPath();
   }

   /**
-   * Sets the alternate path for multicore handling:
+   * Gets the alternate path for multicore handling:
    * This is used in case there is a registered unnamed core (aka name is "") to
    * declare an alternate way of accessing named cores.
    * This can also be used in a pseudo single-core environment so admins can prepare
    * a new version before swapping.
    */
-  public void setManagementPath(String path) {
-    this.managementPath = path;
+  public String getManagementPath() {
+    return cfg.getManagementPath();
   }

   public LogWatcher getLogging() {
     return logging;
   }
-
-  public void setLogging(LogWatcher v) {
-    logging = v;
-  }

   /**
    * Determines whether the core is already loaded or not but does NOT load the core
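The core-loading rewrite above keeps the ExecutorCompletionService pattern but now sizes the pool from ConfigSolr. A self-contained sketch of that load-in-parallel, consume-as-completed idiom (the task payloads are hypothetical stand-ins for SolrCore creation):

    import java.util.HashSet;
    import java.util.Set;
    import java.util.concurrent.*;

    public class ParallelLoadDemo {
      public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(3);
        CompletionService<String> cs = new ExecutorCompletionService<String>(pool);
        Set<Future<String>> pending = new HashSet<Future<String>>();
        for (final String name : new String[] {"core1", "core2", "core3"}) {
          pending.add(cs.submit(new Callable<String>() {
            public String call() throws Exception {
              return name + " loaded";   // stand-in for creating a core
            }
          }));
        }
        while (!pending.isEmpty()) {
          Future<String> done = cs.take();  // blocks until any task finishes
          pending.remove(done);
          System.out.println(done.get());
        }
        pool.shutdown();
      }
    }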
@@ -27,12 +27,8 @@ import org.apache.solr.util.plugin.NamedListInitializedPlugin;
  * Factory used to build a new IndexReader instance.
  */
 public abstract class IndexReaderFactory implements NamedListInitializedPlugin {
-  protected int termInfosIndexDivisor = 1;//IndexReader.DEFAULT_TERMS_INDEX_DIVISOR;  Set this once Lucene makes this public.
   /**
-   * Potentially initializes {@link #termInfosIndexDivisor}. Overriding classes should call super.init() in order
-   * to make sure termInfosIndexDivisor is set.
-   * <p>
-   * <code>init</code> will be called just once, immediately after creation.
+   * init will be called just once, immediately after creation.
    * <p>
    * The args are user-level initialization parameters that may be specified
    * when declaring an indexReaderFactory in solrconfig.xml
@@ -40,18 +36,10 @@ public abstract class IndexReaderFactory implements NamedListInitializedPlugin {
    */
   @Override
   public void init(NamedList args) {
-    Integer v = (Integer)args.get("setTermIndexDivisor");
+    Object v = args.get("setTermIndexDivisor");
     if (v != null) {
-      termInfosIndexDivisor = v.intValue();
+      throw new IllegalArgumentException("Illegal parameter 'setTermIndexDivisor'");
     }
-  }
-
-  /**
-   *
-   * @return The setting of {@link #termInfosIndexDivisor}
-   */
-  public int getTermInfosIndexDivisor() {
-    return termInfosIndexDivisor;
   }

   /**
@@ -190,6 +190,11 @@ public class JmxMonitoredMap<K, V> extends
     return ObjectName.getInstance(jmxRootName, map);
   }

+  /** For test verification */
+  public MBeanServer getServer() {
+    return server;
+  }
+
   /**
    * DynamicMBean is used to dynamically expose all SolrInfoMBean
    * getStatistics() NameList keys as String getters.
@@ -124,8 +124,8 @@ public class SolrConfig extends Config {

     // Old indexDefaults and mainIndex sections are deprecated and fails fast for luceneMatchVersion=>LUCENE_40.
     // For older solrconfig.xml's we allow the old sections, but never mixed with the new <indexConfig>
-    boolean hasDeprecatedIndexConfig = get("indexDefaults/text()", null) != null || get("mainIndex/text()", null) != null;
-    boolean hasNewIndexConfig = get("indexConfig/text()", null) != null;
+    boolean hasDeprecatedIndexConfig = (getNode("indexDefaults", false) != null) || (getNode("mainIndex", false) != null);
+    boolean hasNewIndexConfig = getNode("indexConfig", false) != null;
     if(hasDeprecatedIndexConfig){
       if(luceneMatchVersion.onOrAfter(Version.LUCENE_40)) {
         throw new SolrException(ErrorCode.FORBIDDEN, "<indexDefaults> and <mainIndex> configuration sections are discontinued. Use <indexConfig> instead.");
@@ -65,14 +65,13 @@ class SolrCores {

   // Trivial helper method for load, note it implements LRU on transient cores. Also note, if
   // there is no setting for max size, nothing is done and all cores go in the regular "cores" list
-  protected void allocateLazyCores(final ConfigSolr cfg, final SolrResourceLoader loader) {
-    final int transientCacheSize = cfg.getInt(ConfigSolr.CfgProp.SOLR_TRANSIENTCACHESIZE, Integer.MAX_VALUE);
-    if (transientCacheSize != Integer.MAX_VALUE) {
-      CoreContainer.log.info("Allocating transient cache for {} transient cores", transientCacheSize);
-      transientCores = new LinkedHashMap<String, SolrCore>(transientCacheSize, 0.75f, true) {
+  protected void allocateLazyCores(final int cacheSize, final SolrResourceLoader loader) {
+    if (cacheSize != Integer.MAX_VALUE) {
+      CoreContainer.log.info("Allocating transient cache for {} transient cores", cacheSize);
+      transientCores = new LinkedHashMap<String, SolrCore>(cacheSize, 0.75f, true) {
         @Override
         protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
-          if (size() > transientCacheSize) {
+          if (size() > cacheSize) {
             synchronized (modifyLock) {
               SolrCore coreToClose = eldest.getValue();
               logger.info("Closing transient core [{}]", coreToClose.getName());
@@ -31,6 +31,6 @@ public class StandardIndexReaderFactory extends IndexReaderFactory {

   @Override
   public DirectoryReader newReader(Directory indexDir, SolrCore core) throws IOException {
-    return DirectoryReader.open(indexDir, termInfosIndexDivisor);
+    return DirectoryReader.open(indexDir);
   }
 }
@@ -17,14 +17,6 @@ package org.apache.solr.core;
  * limitations under the License.
  */

-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.TimeoutException;
-
 import org.apache.solr.cloud.CurrentCoreDescriptorProvider;
 import org.apache.solr.cloud.SolrZkServer;
 import org.apache.solr.cloud.ZkController;
@@ -40,6 +32,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.InputSource;

+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.TimeoutException;
+
 public class ZkContainer {
   protected static Logger log = LoggerFactory.getLogger(ZkContainer.class);
@@ -49,53 +49,31 @@ public class ZkContainer {
   private String hostPort;
   private String hostContext;
   private String host;
-  private String leaderVoteWait;
+  private int leaderVoteWait;
   private Boolean genericCoreNodeNames;
   private int distribUpdateConnTimeout;
-
-  public SolrZkServer getZkServer() {
-    return zkServer;
-  }
-
-  public int getZkClientTimeout() {
-    return zkClientTimeout;
-  }
-
-  public String getHostPort() {
-    return hostPort;
-  }
-
-  public String getHostContext() {
-    return hostContext;
-  }
-
-  public String getHost() {
-    return host;
-  }
-
-  public String getLeaderVoteWait() {
-    return leaderVoteWait;
-  }
-
-  public boolean getGenericCoreNodeNames() {
-    return genericCoreNodeNames;
-  }
-
-  public int getDistribUpdateConnTimeout() {
-    return distribUpdateConnTimeout;
-  }
-
-  public int getDistribUpdateSoTimeout() {
-    return distribUpdateSoTimeout;
-  }
-
   private int distribUpdateSoTimeout;

   public ZkContainer() {

   }

-  public void initZooKeeper(final CoreContainer cc, String solrHome, String zkHost, int zkClientTimeout, String hostPort, String hostContext, String host, String leaderVoteWait, boolean genericCoreNodeNames, int distribUpdateConnTimeout, int distribUpdateSoTimeout) {
+  public void initZooKeeper(final CoreContainer cc, String solrHome, ConfigSolr config) {
+
+    if (config.getCoreLoadThreadCount() <= 1) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "SolrCloud requires a value of at least 2 in solr.xml for coreLoadThreads");
+    }
+
+    initZooKeeper(cc, solrHome,
+        config.getZkHost(), config.getZkClientTimeout(), config.getZkHostPort(), config.getZkHostContext(),
+        config.getHost(), config.getLeaderVoteWait(), config.getGenericCoreNodeNames(),
+        config.getDistributedConnectionTimeout(), config.getDistributedSocketTimeout());
+  }
+
+  public void initZooKeeper(final CoreContainer cc, String solrHome, String zkHost, int zkClientTimeout, String hostPort,
+      String hostContext, String host, int leaderVoteWait, boolean genericCoreNodeNames,
+      int distribUpdateConnTimeout, int distribUpdateSoTimeout) {

     ZkController zkController = null;

     // if zkHost sys property is not set, we are not using ZooKeeper
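The new three-argument initZooKeeper is a thin validating overload: it checks the parsed ConfigSolr, then expands it into the existing long parameter list. A minimal sketch of that delegating-overload shape, with hypothetical names throughout:

    public class OverloadDemo {
      static class Config {
        int coreLoadThreads() { return 3; }
        String zkHost() { return "localhost:9983"; }
        int zkClientTimeout() { return 15000; }
      }

      // Short form: validate, then delegate to the detailed form.
      static void init(Config config) {
        if (config.coreLoadThreads() <= 1) {
          throw new IllegalStateException("need at least 2 core load threads");
        }
        init(config.zkHost(), config.zkClientTimeout());
      }

      static void init(String zkHost, int zkClientTimeout) {
        System.out.println("connecting to " + zkHost + " (timeout " + zkClientTimeout + "ms)");
      }

      public static void main(String[] args) {
        init(new Config());
      }
    }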
@@ -17,10 +17,6 @@ package org.apache.solr.handler.admin;
  * limitations under the License.
  */

-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
@@ -32,6 +28,7 @@ import org.apache.solr.cloud.OverseerCollectionProcessor;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.ImplicitDocRouter;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
@@ -49,6 +46,22 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.COLL_CONF;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATESHARD;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATE_NODE_SET;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
+import static org.apache.solr.common.cloud.DocRouter.ROUTE_FIELD;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+
 public class CollectionsHandler extends RequestHandlerBase {
   protected static Logger log = LoggerFactory.getLogger(CollectionsHandler.class);
@@ -139,6 +152,9 @@ public class CollectionsHandler extends RequestHandlerBase {
       case DELETESHARD: {
         this.handleDeleteShardAction(req, rsp);
         break;
+      } case CREATESHARD: {
+        this.handleCreateShard(req, rsp);
+        break;
       }

       default: {
@@ -260,13 +276,7 @@ public class CollectionsHandler extends RequestHandlerBase {
   private void handleCreateAction(SolrQueryRequest req,
       SolrQueryResponse rsp) throws InterruptedException, KeeperException {
     log.info("Creating Collection : " + req.getParamString());
-    Integer numReplicas = req.getParams().getInt(OverseerCollectionProcessor.REPLICATION_FACTOR, 1);
     String name = req.getParams().required().get("name");
-    String configName = req.getParams().get("collection.configName");
-    String numShards = req.getParams().get(OverseerCollectionProcessor.NUM_SLICES);
-    String maxShardsPerNode = req.getParams().get(OverseerCollectionProcessor.MAX_SHARDS_PER_NODE);
-    String createNodeSetStr = req.getParams().get(OverseerCollectionProcessor.CREATE_NODE_SET);
-
     if (name == null) {
       log.error("Collection name is required to create a new collection");
       throw new SolrException(ErrorCode.BAD_REQUEST,
@@ -276,20 +286,46 @@ public class CollectionsHandler extends RequestHandlerBase {
     Map<String,Object> props = new HashMap<String,Object>();
     props.put(Overseer.QUEUE_OPERATION,
         OverseerCollectionProcessor.CREATECOLLECTION);
-    props.put(OverseerCollectionProcessor.REPLICATION_FACTOR, numReplicas.toString());
-    props.put("name", name);
-    if (configName != null) {
-      props.put("collection.configName", configName);
-    }
-    props.put(OverseerCollectionProcessor.NUM_SLICES, numShards);
-    props.put(OverseerCollectionProcessor.MAX_SHARDS_PER_NODE, maxShardsPerNode);
-    props.put(OverseerCollectionProcessor.CREATE_NODE_SET, createNodeSetStr);
+    copyIfNotNull(req.getParams(), props,
+        "name",
+        REPLICATION_FACTOR,
+        COLL_CONF,
+        NUM_SLICES,
+        MAX_SHARDS_PER_NODE,
+        CREATE_NODE_SET,
+        ROUTER,
+        SHARDS_PROP,
+        ROUTE_FIELD);

     ZkNodeProps m = new ZkNodeProps(props);
     handleResponse(OverseerCollectionProcessor.CREATECOLLECTION, m, rsp);
   }

+  private void handleCreateShard(SolrQueryRequest req, SolrQueryResponse rsp) throws KeeperException, InterruptedException {
+    log.info("Create shard: " + req.getParamString());
+    req.getParams().required().check(COLLECTION_PROP, SHARD_ID_PROP);
+
+    ClusterState clusterState = coreContainer.getZkController().getClusterState();
+    if (!ImplicitDocRouter.NAME.equals(clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).getStr(ROUTER)))
+      throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections");
+
+    Map<String, Object> map = OverseerCollectionProcessor.asMap(QUEUE_OPERATION, CREATESHARD);
+    copyIfNotNull(req.getParams(), map, COLLECTION_PROP, SHARD_ID_PROP, REPLICATION_FACTOR);
+    ZkNodeProps m = new ZkNodeProps(map);
+    handleResponse(CREATESHARD, m, rsp);
+  }
+
+  private static void copyIfNotNull(SolrParams params, Map<String, Object> props, String... keys) {
+    if (keys != null) {
+      for (String key : keys) {
+        String v = params.get(key);
+        if (v != null) props.put(key, v);
+      }
+    }
+  }
+
   private void handleDeleteShardAction(SolrQueryRequest req,
       SolrQueryResponse rsp) throws InterruptedException, KeeperException {
     log.info("Deleting Shard : " + req.getParamString());
package org.apache.solr.handler.admin; package org.apache.solr.handler.admin;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
@ -295,17 +296,17 @@ public class CoreAdminHandler extends RequestHandlerBase {
} }
protected void handleMergeAction(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { protected void handleMergeAction(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams(); SolrParams params = req.getParams();
String cname = params.required().get(CoreAdminParams.CORE); String cname = params.required().get(CoreAdminParams.CORE);
SolrCore core = coreContainer.getCore(cname); SolrCore core = coreContainer.getCore(cname);
SolrQueryRequest wrappedReq = null; SolrQueryRequest wrappedReq = null;
SolrCore[] sourceCores = null; List<SolrCore> sourceCores = Lists.newArrayList();
RefCounted<SolrIndexSearcher>[] searchers = null; List<RefCounted<SolrIndexSearcher>> searchers = Lists.newArrayList();
// stores readers created from indexDir param values // stores readers created from indexDir param values
DirectoryReader[] readersToBeClosed = null; List<DirectoryReader> readersToBeClosed = Lists.newArrayList();
Directory[] dirsToBeReleased = null; List<Directory> dirsToBeReleased = Lists.newArrayList();
if (core != null) { if (core != null) {
try { try {
String[] dirNames = params.getParams(CoreAdminParams.INDEX_DIR); String[] dirNames = params.getParams(CoreAdminParams.INDEX_DIR);
@ -315,38 +316,34 @@ public class CoreAdminHandler extends RequestHandlerBase {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
"At least one indexDir or srcCore must be specified"); "At least one indexDir or srcCore must be specified");
sourceCores = new SolrCore[sources.length];
for (int i = 0; i < sources.length; i++) { for (int i = 0; i < sources.length; i++) {
String source = sources[i]; String source = sources[i];
SolrCore srcCore = coreContainer.getCore(source); SolrCore srcCore = coreContainer.getCore(source);
if (srcCore == null) if (srcCore == null)
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Core: " + source + " does not exist"); "Core: " + source + " does not exist");
sourceCores[i] = srcCore; sourceCores.add(srcCore);
} }
} else { } else {
readersToBeClosed = new DirectoryReader[dirNames.length];
dirsToBeReleased = new Directory[dirNames.length];
DirectoryFactory dirFactory = core.getDirectoryFactory(); DirectoryFactory dirFactory = core.getDirectoryFactory();
for (int i = 0; i < dirNames.length; i++) { for (int i = 0; i < dirNames.length; i++) {
Directory dir = dirFactory.get(dirNames[i], DirContext.DEFAULT, core.getSolrConfig().indexConfig.lockType); Directory dir = dirFactory.get(dirNames[i], DirContext.DEFAULT, core.getSolrConfig().indexConfig.lockType);
dirsToBeReleased[i] = dir; dirsToBeReleased.add(dir);
// TODO: why doesn't this use the IR factory? what is going on here? // TODO: why doesn't this use the IR factory? what is going on here?
readersToBeClosed[i] = DirectoryReader.open(dir); readersToBeClosed.add(DirectoryReader.open(dir));
} }
} }
DirectoryReader[] readers = null; List<DirectoryReader> readers = null;
if (readersToBeClosed != null) { if (readersToBeClosed.size() > 0) {
readers = readersToBeClosed; readers = readersToBeClosed;
} else { } else {
readers = new DirectoryReader[sourceCores.length]; readers = Lists.newArrayList();
searchers = new RefCounted[sourceCores.length]; for (SolrCore solrCore: sourceCores) {
for (int i = 0; i < sourceCores.length; i++) {
SolrCore solrCore = sourceCores[i];
// record the searchers so that we can decref // record the searchers so that we can decref
searchers[i] = solrCore.getSearcher(); RefCounted<SolrIndexSearcher> searcher = solrCore.getSearcher();
readers[i] = searchers[i].get().getIndexReader(); searchers.add(searcher);
readers.add(searcher.get().getIndexReader());
} }
} }
@ -356,23 +353,21 @@ public class CoreAdminHandler extends RequestHandlerBase {
UpdateRequestProcessor processor = UpdateRequestProcessor processor =
processorChain.createProcessor(wrappedReq, rsp); processorChain.createProcessor(wrappedReq, rsp);
processor.processMergeIndexes(new MergeIndexesCommand(readers, req)); processor.processMergeIndexes(new MergeIndexesCommand(readers, req));
} catch (Exception e) {
// log and rethrow so that if the finally fails we don't lose the original problem
log.error("ERROR executing merge:", e);
throw e;
} finally { } finally {
if (searchers != null) { for (RefCounted<SolrIndexSearcher> searcher : searchers) {
for (RefCounted<SolrIndexSearcher> searcher : searchers) { if (searcher != null) searcher.decref();
if (searcher != null) searcher.decref();
}
} }
if (sourceCores != null) { for (SolrCore solrCore : sourceCores) {
for (SolrCore solrCore : sourceCores) { if (solrCore != null) solrCore.close();
if (solrCore != null) solrCore.close();
}
} }
if (readersToBeClosed != null) IOUtils.closeWhileHandlingException(readersToBeClosed); IOUtils.closeWhileHandlingException(readersToBeClosed);
if (dirsToBeReleased != null) { for (Directory dir : dirsToBeReleased) {
for (Directory dir : dirsToBeReleased) { DirectoryFactory dirFactory = core.getDirectoryFactory();
DirectoryFactory dirFactory = core.getDirectoryFactory(); dirFactory.release(dir);
dirFactory.release(dir);
}
} }
if (wrappedReq != null) wrappedReq.close(); if (wrappedReq != null) wrappedReq.close();
core.close(); core.close();
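The catch block added above logs and rethrows before the finally runs, so a secondary failure during cleanup cannot mask the original merge error. A minimal sketch of that log-and-rethrow-then-cleanup shape (all names hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    public class LogAndRethrowDemo {
      public static void main(String[] args) {
        List<AutoCloseable> resources = new ArrayList<AutoCloseable>();
        try {
          // ... acquire resources, do the merge ...
          throw new RuntimeException("merge failed");
        } catch (Exception e) {
          System.err.println("ERROR executing merge: " + e);  // original problem recorded first
          throw new RuntimeException(e);
        } finally {
          for (AutoCloseable r : resources) {
            try { r.close(); } catch (Exception ignored) {}   // cleanup failures can't mask it
          }
        }
      }
    }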
@@ -576,7 +576,7 @@ public class LukeRequestHandler extends RequestHandlerBase
       throws IOException {

     SolrParams params = req.getParams();
-    int numTerms = params.getInt( NUMTERMS, DEFAULT_COUNT );
+    final int numTerms = params.getInt( NUMTERMS, DEFAULT_COUNT );

     TopTermQueue tiq = new TopTermQueue(numTerms + 1); // Something to collect the top N terms in.
@@ -600,7 +600,7 @@ public class LukeRequestHandler extends RequestHandlerBase
           int freq = termsEnum.docFreq(); // This calculation seems odd, but it gives the same results as it used to.
           int slot = 32 - Integer.numberOfLeadingZeros(Math.max(0, freq - 1));
           buckets[slot] = buckets[slot] + 1;
-          if (freq > tiq.minFreq) {
+          if (numTerms > 0 && freq > tiq.minFreq) {
             UnicodeUtil.UTF8toUTF16(text, spare);
             String t = spare.toString();
@@ -16,18 +16,6 @@ package org.apache.solr.handler.component;
  * limitations under the License.
  */

-import java.net.ConnectException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CompletionService;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
 import org.apache.http.client.HttpClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrResponse;
@@ -44,7 +32,6 @@ import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -55,6 +42,18 @@ import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.request.SolrQueryRequest;

+import java.net.ConnectException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CompletionService;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+
 public class QParserPlugin
 public class HttpShardHandler extends ShardHandler {

   private HttpShardHandlerFactory httpShardHandlerFactory;
@@ -277,7 +276,8 @@ public class HttpShardHandler extends ShardHandler {
       // we weren't provided with an explicit list of slices to query via "shards", so use the cluster state
       clusterState = zkController.getClusterState();
-      String shardKeys = params.get(ShardParams.SHARD_KEYS);
+      String shardKeys = params.get(ShardParams._ROUTE_);
+      if (shardKeys == null) shardKeys = params.get(ShardParams.SHARD_KEYS); // deprecated
       // This will be the complete list of slices we need to query for this request.
       slices = new HashMap<String,Slice>();
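The lookup above now prefers the newer _route_ request parameter and falls back to the deprecated shard.keys. A hedged client-side sketch of supplying the route (the composite-id value is hypothetical):

    import org.apache.solr.common.params.ModifiableSolrParams;

    public class RouteParamSketch {
      public static void main(String[] args) {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("q", "*:*");
        // new parameter name; shard.keys still works but is deprecated
        params.set("_route_", "customerA!");
        System.out.println(params);
      }
    }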

@@ -562,7 +562,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
       for (String id : elevations.ids) {
         term.copyChars(id);
-        if (seen.contains(id) == false && termsEnum.seekExact(term, false)) {
+        if (seen.contains(id) == false && termsEnum.seekExact(term)) {
           docsEnum = termsEnum.docs(liveDocs, docsEnum, DocsEnum.FLAG_NONE);
           if (docsEnum != null) {
             int docId = docsEnum.nextDoc();
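The one-argument seekExact above reflects the Lucene TermsEnum API cleanup in this merge: seekExact(BytesRef) and seekCeil(BytesRef) no longer take the useCache boolean, which is why the same mechanical rewrite recurs in TermsComponent, SimpleFacets, UnInvertedField, JoinQuery, SolrIndexSearcher, and FileFloatSource below. A minimal sketch of the updated call pattern (the lookup helper and its arguments are illustrative, not from this commit):

    import java.io.IOException;

    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    public class SeekSketch {
      // Returns true if 'target' (or, failing that, some term at or after it) exists.
      static boolean lookup(Terms terms, BytesRef target) throws IOException {
        TermsEnum te = terms.iterator(null);
        if (te.seekExact(target)) {   // was: te.seekExact(target, useCache)
          return true;
        }
        return te.seekCeil(target) != TermsEnum.SeekStatus.END;  // was: te.seekCeil(target, useCache)
      }
    }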

@@ -163,7 +163,7 @@ public class TermsComponent extends SearchComponent {
         BytesRef term = null;
         if (lowerBytes != null) {
-          if (termsEnum.seekCeil(lowerBytes, true) == TermsEnum.SeekStatus.END) {
+          if (termsEnum.seekCeil(lowerBytes) == TermsEnum.SeekStatus.END) {
             termsEnum = null;
           } else {
             term = termsEnum.term();

@@ -25,8 +25,6 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.ContentStream;
-import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.schema.SchemaField;
 import org.apache.solr.update.*;
 import org.apache.solr.update.processor.UpdateRequestProcessor;
 import org.apache.solr.internal.csv.CSVStrategy;
@@ -63,15 +61,13 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
   public static Logger log = LoggerFactory.getLogger(CSVLoaderBase.class);

-  final IndexSchema schema;
   final SolrParams params;
   final CSVStrategy strategy;
   final UpdateRequestProcessor processor;
   // hashmap to save any literal fields and their values
-  HashMap <SchemaField, String> literals;
+  HashMap <String, String> literals;
   String[] fieldnames;
-  SchemaField[] fields;
   CSVLoaderBase.FieldAdder[] adders;
   String rowId = null;// if not null, add a special field by the name given with the line number/row id as the value
@@ -92,7 +88,7 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
   private class FieldAdder {
     void add(SolrInputDocument doc, int line, int column, String val) {
       if (val.length() > 0) {
-        doc.addField(fields[column].getName(),val,1.0f);
+        doc.addField(fieldnames[column],val,1.0f);
       }
     }
   }
@@ -101,7 +97,7 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
   private class FieldAdderEmpty extends CSVLoaderBase.FieldAdder {
     @Override
     void add(SolrInputDocument doc, int line, int column, String val) {
-      doc.addField(fields[column].getName(),val,1.0f);
+      doc.addField(fieldnames[column],val,1.0f);
     }
   }
@@ -168,8 +164,7 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
   CSVLoaderBase(SolrQueryRequest req, UpdateRequestProcessor processor) {
     this.processor = processor;
     this.params = req.getParams();
-    schema = req.getSchema();
-    this.literals = new HashMap<SchemaField, String>();
+    this.literals = new HashMap<String, String>();

     templateAdd = new AddUpdateCommand(req);
     templateAdd.overwrite=params.getBool(OVERWRITE,true);
@@ -243,7 +238,6 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
     // from a POST, one could cache all of this setup info based on the params.
     // The link from FieldAdder to this would need to be severed for that to happen.

-    fields = new SchemaField[fieldnames.length];
     adders = new CSVLoaderBase.FieldAdder[fieldnames.length];
     String skipStr = params.get(SKIP);
     List<String> skipFields = skipStr==null ? null : StrUtils.splitSmart(skipStr,',');
@@ -251,12 +245,11 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
     CSVLoaderBase.FieldAdder adder = new CSVLoaderBase.FieldAdder();
     CSVLoaderBase.FieldAdder adderKeepEmpty = new CSVLoaderBase.FieldAdderEmpty();
-    for (int i=0; i<fields.length; i++) {
+    for (int i=0; i<fieldnames.length; i++) {
       String fname = fieldnames[i];
       // to skip a field, leave the entries in fields and addrs null
       if (fname.length()==0 || (skipFields!=null && skipFields.contains(fname))) continue;
-      fields[i] = schema.getField(fname);
       boolean keepEmpty = params.getFieldBool(fname,EMPTY,false);
       adders[i] = keepEmpty ? adderKeepEmpty : adder;
@@ -297,11 +290,7 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
       if (!pname.startsWith(LITERALS_PREFIX)) continue;

       String name = pname.substring(LITERALS_PREFIX.length());
-      //TODO: need to look at this in light of schemaless
-      SchemaField sf = schema.getFieldOrNull(name);
-      if(sf == null)
-        throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid field name for literal:'"+ name +"'");
-      literals.put(sf, params.get(pname));
+      literals.put(name, params.get(pname));
     }
   }
@@ -368,8 +357,8 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
       }
       if (vals==null) break;

-      if (vals.length != fields.length) {
-        input_err("expected "+fields.length+" values but got "+vals.length, vals, line);
+      if (vals.length != fieldnames.length) {
+        input_err("expected "+fieldnames.length+" values but got "+vals.length, vals, line);
       }
       addDoc(line,vals);
@@ -389,16 +378,15 @@
     // the line number is passed for error reporting in MT mode as well as for optional rowId.
     // first, create the lucene document
     for (int i=0; i<vals.length; i++) {
-      if (fields[i]==null) continue;  // ignore this field
+      if (adders[i]==null) continue;  // skip this field
       String val = vals[i];
       adders[i].add(doc, line, i, val);
     }

     // add any literals
-    for (SchemaField sf : literals.keySet()) {
-      String fn = sf.getName();
-      String val = literals.get(sf);
-      doc.addField(fn, val);
+    for (String fname : literals.keySet()) {
+      String val = literals.get(fname);
+      doc.addField(fname, val);
     }
     if (rowId != null){
       doc.addField(rowId, line + rowIdOffset);
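With the SchemaField lookups removed, CSVLoaderBase no longer validates column or literal names against the IndexSchema up front, which is what schemaless operation needs. A hedged SolrJ sketch of the literal.* feature this preserves (URL, file, and field name are assumptions):

    import java.io.File;

    import org.apache.solr.client.solrj.SolrServer;
    import org.apache.solr.client.solrj.impl.HttpSolrServer;
    import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

    public class CsvLiteralSketch {
      public static void main(String[] args) throws Exception {
        SolrServer server = new HttpSolrServer("http://localhost:8983/solr");
        ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/csv");
        req.addFile(new File("books.csv"), "text/csv");
        // literal.<field> now only needs a field name, not a declared SchemaField
        req.setParam("literal.datasource", "import-2013-08");
        req.setParam("commit", "true");
        server.request(req);
      }
    }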

@@ -16,50 +16,51 @@ package org.apache.solr.handler.loader;
  * limitations under the License.
  */

+import com.google.common.collect.Lists;
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.update.processor.UpdateRequestProcessor;
-import org.apache.solr.update.AddUpdateCommand;
-import org.apache.solr.update.CommitUpdateCommand;
-import org.apache.solr.update.RollbackUpdateCommand;
-import org.apache.solr.update.DeleteUpdateCommand;
-import org.apache.solr.util.xslt.TransformerProvider;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.ContentStreamBase;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.XMLErrorLogger;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.handler.RequestHandlerUtils;
 import org.apache.solr.handler.UpdateRequestHandler;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.update.CommitUpdateCommand;
+import org.apache.solr.update.DeleteUpdateCommand;
+import org.apache.solr.update.RollbackUpdateCommand;
+import org.apache.solr.update.processor.UpdateRequestProcessor;
 import org.apache.solr.util.EmptyEntityResolver;
-import org.apache.commons.io.IOUtils;
+import org.apache.solr.util.xslt.TransformerProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.InputSource;
 import org.xml.sax.XMLReader;

-import javax.xml.stream.XMLStreamReader;
-import javax.xml.stream.XMLStreamException;
+import javax.xml.parsers.SAXParserFactory;
 import javax.xml.stream.FactoryConfigurationError;
-import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamReader;
 import javax.xml.transform.Transformer;
 import javax.xml.transform.TransformerException;
 import javax.xml.transform.dom.DOMResult;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.sax.SAXSource;
-import javax.xml.parsers.SAXParserFactory;

 import java.io.ByteArrayInputStream;
-import java.io.InputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -381,6 +382,7 @@ public class XMLLoader extends ContentStreamLoader {
     float boost = 1.0f;
     boolean isNull = false;
     String update = null;
+    Collection<SolrInputDocument> subDocs = null;
     Map<String, Map<String, Object>> updateMap = null;
     boolean complete = false;
     while (!complete) {
@@ -395,9 +397,14 @@
         case XMLStreamConstants.END_ELEMENT:
           if ("doc".equals(parser.getLocalName())) {
+            if (subDocs != null && !subDocs.isEmpty()) {
+              doc.addChildDocuments(subDocs);
+              subDocs = null;
+            }
             complete = true;
             break;
           } else if ("field".equals(parser.getLocalName())) {
+            // should we warn if some text has been found too?
             Object v = isNull ? null : text.toString();
             if (update != null) {
               if (updateMap == null) updateMap = new HashMap<String, Map<String, Object>>();
@@ -425,34 +432,43 @@
             }
             doc.addField(name, v, boost);
             boost = 1.0f;
+            // field is over
+            name = null;
           }
           break;

         case XMLStreamConstants.START_ELEMENT:
           text.setLength(0);
           String localName = parser.getLocalName();
-          if (!"field".equals(localName)) {
-            log.warn("unexpected XML tag doc/" + localName);
-            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                "unexpected XML tag doc/" + localName);
+          if ("doc".equals(localName)) {
+            if (subDocs == null)
+              subDocs = Lists.newArrayList();
+            subDocs.add(readDoc(parser));
           }
-          boost = 1.0f;
-          update = null;
-          isNull = false;
-          String attrVal = "";
-          for (int i = 0; i < parser.getAttributeCount(); i++) {
-            attrName = parser.getAttributeLocalName(i);
-            attrVal = parser.getAttributeValue(i);
-            if ("name".equals(attrName)) {
-              name = attrVal;
-            } else if ("boost".equals(attrName)) {
-              boost = Float.parseFloat(attrVal);
-            } else if ("null".equals(attrName)) {
-              isNull = StrUtils.parseBoolean(attrVal);
-            } else if ("update".equals(attrName)) {
-              update = attrVal;
-            } else {
-              log.warn("Unknown attribute doc/field/@" + attrName);
+          else {
+            if (!"field".equals(localName)) {
+              log.warn("unexpected XML tag doc/" + localName);
+              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                  "unexpected XML tag doc/" + localName);
+            }
+            boost = 1.0f;
+            update = null;
+            isNull = false;
+            String attrVal = "";
+            for (int i = 0; i < parser.getAttributeCount(); i++) {
+              attrName = parser.getAttributeLocalName(i);
+              attrVal = parser.getAttributeValue(i);
+              if ("name".equals(attrName)) {
+                name = attrVal;
+              } else if ("boost".equals(attrName)) {
+                boost = Float.parseFloat(attrVal);
+              } else if ("null".equals(attrName)) {
+                isNull = StrUtils.parseBoolean(attrVal);
+              } else if ("update".equals(attrName)) {
+                update = attrVal;
+              } else {
+                log.warn("Unknown attribute doc/field/@" + attrName);
+              }
             }
           }
           break;
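The START_ELEMENT branch above lets a <doc> element nest further <doc> elements, which readDoc collects and attaches via addChildDocuments. A hedged SolrJ sketch of building the equivalent structure programmatically (field names are illustrative):

    import org.apache.solr.common.SolrInputDocument;

    public class ChildDocSketch {
      public static void main(String[] args) {
        SolrInputDocument parent = new SolrInputDocument();
        parent.addField("id", "parent-1");

        SolrInputDocument child = new SolrInputDocument();
        child.addField("id", "child-1");

        // mirrors doc.addChildDocuments(subDocs) in the loader above
        parent.addChildDocument(child);
        System.out.println(parent);
      }
    }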

@@ -113,7 +113,7 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
     formatters.put("", fmt);
     formatters.put(null, fmt);

-    // Load the formatters
+    // Load the encoders
     SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders,SolrEncoder.class,null);
     if (enc == null) enc = new DefaultEncoder();
     encoders.put("", enc);

@@ -19,8 +19,18 @@ package org.apache.solr.logging;
  */

 public class ListenerConfig {
-  public int size = 50;
-  public String threshold = null;
+
+  public final int size;
+  public final String threshold;
+
+  public ListenerConfig(int size, String threshold) {
+    this.size = size;
+    this.threshold = threshold;
+  }
+
+  public ListenerConfig() {
+    this(50, null);
+  }

   // Down the line, settings for solr URL/core to store logging
 }

@@ -19,7 +19,6 @@ package org.apache.solr.logging;

 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.core.ConfigSolr;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.logging.jul.JulWatcher;
 import org.apache.solr.logging.log4j.Log4jWatcher;
@@ -119,35 +118,32 @@ public abstract class LogWatcher<E> {
    * JUL and Log4j watchers are supported out-of-the-box.  You can register your own
    * LogWatcher implementation via the plugins architecture
    *
-   * @param config the CoreContainer's config, with logging configuration details
+   * @param config a LogWatcherConfig object, containing the configuration for this LogWatcher.
    * @param loader a SolrResourceLoader, to be used to load plugin LogWatcher implementations.
    *               Can be null if
    *
    * @return a LogWatcher configured for the container's logging framework
    */
-  public static LogWatcher newRegisteredLogWatcher(ConfigSolr config, SolrResourceLoader loader) {
+  public static LogWatcher newRegisteredLogWatcher(LogWatcherConfig config, SolrResourceLoader loader) {

-    if (!config.getBool(ConfigSolr.CfgProp.SOLR_LOGGING_ENABLED, true))
+    if (!config.isEnabled())
       return null;

     LogWatcher logWatcher = createWatcher(config, loader);

     if (logWatcher != null) {
-      ListenerConfig v = new ListenerConfig();
-      v.size = config.getInt(ConfigSolr.CfgProp.SOLR_LOGGING_WATCHER_SIZE, 50);
-      v.threshold = config.get(ConfigSolr.CfgProp.SOLR_LOGGING_WATCHER_THRESHOLD, null);
-      if (v.size > 0) {
+      if (config.getWatcherSize() > 0) {
         log.info("Registering Log Listener [{}]", logWatcher.getName());
-        logWatcher.registerListener(v);
+        logWatcher.registerListener(config.asListenerConfig());
       }
     }

     return logWatcher;
   }

-  private static LogWatcher createWatcher(ConfigSolr config, SolrResourceLoader loader) {
+  private static LogWatcher createWatcher(LogWatcherConfig config, SolrResourceLoader loader) {

-    String fname = config.get(ConfigSolr.CfgProp.SOLR_LOGGING_CLASS, null);
+    String fname = config.getLoggingClass();
     String slf4jImpl;

     try {

@@ -0,0 +1,74 @@
+package org.apache.solr.logging;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Defines the configuration of a {@link LogWatcher}
+ */
+public class LogWatcherConfig {
+
+  private final boolean enabled;
+  private final String loggingClass;
+  private final int watcherSize;
+  private final String watcherThreshold;
+
+  public LogWatcherConfig(boolean enabled, String loggingClass, String watcherThreshold, int watcherSize) {
+    this.enabled = enabled;
+    this.loggingClass = loggingClass;
+    this.watcherThreshold = watcherThreshold;
+    this.watcherSize = watcherSize;
+  }
+
+  /**
+   * @return true if the LogWatcher is enabled
+   */
+  public boolean isEnabled() {
+    return enabled;
+  }
+
+  /**
+   * Get the implementation of the LogWatcher to use.  May be "JUL" or "log4j" for the default
+   * java.util.logging or log4j implementations, or the fully-qualified name of a class extending
+   * {@link LogWatcher}.
+   * @return the LogWatcher class to use
+   */
+  public String getLoggingClass() {
+    return loggingClass;
+  }
+
+  /**
+   * @return the size of the LogWatcher queue
+   */
+  public int getWatcherSize() {
+    return watcherSize;
+  }
+
+  /**
+   * @return the threshold above which logging events will be recorded
+   */
+  public String getWatcherThreshold() {
+    return watcherThreshold;
+  }
+
+  /**
+   * @return a {@link ListenerConfig} object using this config's settings.
+   */
+  public ListenerConfig asListenerConfig() {
+    return new ListenerConfig(watcherSize, watcherThreshold);
+  }
+}
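Since the new class carries everything newRegisteredLogWatcher needs, wiring a watcher up no longer touches ConfigSolr. A minimal sketch using only the API added above (the threshold and size values are arbitrary):

    import org.apache.solr.logging.LogWatcher;
    import org.apache.solr.logging.LogWatcherConfig;

    public class LogWatcherSketch {
      public static void main(String[] args) {
        // enabled, auto-detect framework (null class), WARN threshold, 50-event buffer
        LogWatcherConfig config = new LogWatcherConfig(true, null, "WARN", 50);
        LogWatcher watcher = LogWatcher.newRegisteredLogWatcher(config, null);
        if (watcher != null) {
          System.out.println("Registered watcher: " + watcher.getName());
        }
      }
    }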

@@ -768,7 +768,7 @@ public class SimpleFacets {
       // facet.offset when sorting by index order.

       if (startTermBytes != null) {
-        if (termsEnum.seekCeil(startTermBytes, true) == TermsEnum.SeekStatus.END) {
+        if (termsEnum.seekCeil(startTermBytes) == TermsEnum.SeekStatus.END) {
           termsEnum = null;
         } else {
           term = termsEnum.term();

@@ -231,13 +231,13 @@ public class UnInvertedField extends DocTermOrds {
     TermsEnum te = getOrdTermsEnum(searcher.getAtomicReader());
     if (te != null && prefix != null && prefix.length() > 0) {
       final BytesRef prefixBr = new BytesRef(prefix);
-      if (te.seekCeil(prefixBr, true) == TermsEnum.SeekStatus.END) {
+      if (te.seekCeil(prefixBr) == TermsEnum.SeekStatus.END) {
         startTerm = numTermsInField;
       } else {
         startTerm = (int) te.ord();
       }
       prefixBr.append(UnicodeUtil.BIG_TERM);
-      if (te.seekCeil(prefixBr, true) == TermsEnum.SeekStatus.END) {
+      if (te.seekCeil(prefixBr) == TermsEnum.SeekStatus.END) {
         endTerm = numTermsInField;
       } else {
         endTerm = (int) te.ord();

@@ -240,12 +240,14 @@ public abstract class AbstractSpatialFieldType<T extends SpatialStrategy> extend
     //We get the valueSource for the score then the filter and combine them.
     ValueSource valueSource;
-    if ("distance".equals(score))
-      valueSource = strategy.makeDistanceValueSource(spatialArgs.getShape().getCenter());
-    else if ("recipDistance".equals(score))
+    if ("distance".equals(score)) {
+      double multiplier = 1.0;//TODO support units=kilometers
+      valueSource = strategy.makeDistanceValueSource(spatialArgs.getShape().getCenter(), multiplier);
+    } else if ("recipDistance".equals(score)) {
       valueSource = strategy.makeRecipDistanceValueSource(spatialArgs.getShape());
-    else
+    } else {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'score' local-param must be one of 'none', 'distance', or 'recipDistance'");
+    }

     FunctionQuery functionQuery = new FunctionQuery(valueSource);
     if (localParams != null && !localParams.getBool(FILTER_PARAM, true))
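makeDistanceValueSource now takes a multiplier so callers can choose units; the hunk passes 1.0 until units= is supported. A sketch of the degree-to-kilometer factor that multiplier would carry, using the same DistanceUtils call as the geodist change later in this diff:

    import com.spatial4j.core.distance.DistanceUtils;

    public class DistanceMultiplierSketch {
      public static void main(String[] args) {
        // one degree of arc on the mean-radius earth, in kilometers (~111.2)
        double degToKm = DistanceUtils.degrees2Dist(1, DistanceUtils.EARTH_MEAN_RADIUS_KM);
        System.out.println(degToKm);
      }
    }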

@@ -397,10 +397,6 @@ public class IndexSchema {
       return analyzer != null ? analyzer : getDynamicFieldType(fieldName).getAnalyzer();
     }

-    @Override
-    protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
-      return components;
-    }
   }

   private class SolrQueryAnalyzer extends SolrIndexAnalyzer {

@@ -390,6 +390,9 @@ public final class ManagedIndexSchema extends IndexSchema {
     newSchema.uniqueKeyFieldName = uniqueKeyFieldName;
     newSchema.uniqueKeyFieldType = uniqueKeyFieldType;

+    // After the schema is persisted, resourceName is the same as managedSchemaResourceName
+    newSchema.resourceName = managedSchemaResourceName;
+
     if (includeFieldDataStructures) {
       // These need new collections, since addFields() can add members to them
       newSchema.fields.putAll(fields);

@@ -309,7 +309,7 @@ class JoinQuery extends Query {
       if (prefix == null) {
         term = termsEnum.next();
       } else {
-        if (termsEnum.seekCeil(prefix, true) != TermsEnum.SeekStatus.END) {
+        if (termsEnum.seekCeil(prefix) != TermsEnum.SeekStatus.END) {
           term = termsEnum.term();
         }
       }

solr/core/src/java/org/apache/solr/search/QParser.java: Executable file → Normal file (mode change only, no content changes)

@@ -20,6 +20,8 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrInfoMBean;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.search.join.BlockJoinChildQParserPlugin;
+import org.apache.solr.search.join.BlockJoinParentQParserPlugin;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;

 import java.net.URL;
@@ -47,7 +49,9 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI
     JoinQParserPlugin.NAME, JoinQParserPlugin.class,
     SurroundQParserPlugin.NAME, SurroundQParserPlugin.class,
     SwitchQParserPlugin.NAME, SwitchQParserPlugin.class,
-    MaxScoreQParserPlugin.NAME, MaxScoreQParserPlugin.class
+    MaxScoreQParserPlugin.NAME, MaxScoreQParserPlugin.class,
+    BlockJoinParentQParserPlugin.NAME, BlockJoinParentQParserPlugin.class,
+    BlockJoinChildQParserPlugin.NAME, BlockJoinChildQParserPlugin.class
   };

   /** return a {@link QParser} */
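Registering the two plugins makes the block-join parsers available under their NAME constants. A hedged sketch of the local-params syntax they enable; the names "parent" and "child" and the which=/of= parameters are assumptions about those constants, not shown in this hunk:

    public class BlockJoinQuerySketch {
      public static void main(String[] args) {
        String parentQuery = "{!parent which=\"content_type:parentDocument\"}comments:SolrCloud";
        String childQuery = "{!child of=\"content_type:parentDocument\"}title:Solr";
        System.out.println(parentQuery);
        System.out.println(childQuery);
      }
    }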

@@ -718,7 +718,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
     if (terms == null) return -1;
     BytesRef termBytes = t.bytes();
     final TermsEnum termsEnum = terms.iterator(null);
-    if (!termsEnum.seekExact(termBytes, false)) {
+    if (!termsEnum.seekExact(termBytes)) {
       return -1;
     }
     DocsEnum docs = termsEnum.docs(atomicReader.getLiveDocs(), null, DocsEnum.FLAG_NONE);
@@ -742,7 +742,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
       if (terms == null) continue;

       TermsEnum te = terms.iterator(null);
-      if (te.seekExact(idBytes, true)) {
+      if (te.seekExact(idBytes)) {
         DocsEnum docs = te.docs(reader.getLiveDocs(), null, DocsEnum.FLAG_NONE);
         int id = docs.nextDoc();
         if (id == DocIdSetIterator.NO_MORE_DOCS) continue;

@@ -297,7 +297,7 @@ public class FileFloatSource extends ValueSource {
         continue;  // go to next line in file.. leave values as default.
       }

-      if (!termsEnum.seekExact(internalKey, false)) {
+      if (!termsEnum.seekExact(internalKey)) {
         if (notFoundCount<10) {  // collect first 10 not found for logging
           notFound.add(key);
         }

@@ -20,28 +20,37 @@ package org.apache.solr.search.function.distance;

 import com.spatial4j.core.distance.DistanceUtils;
 import com.spatial4j.core.exception.InvalidShapeException;
 import com.spatial4j.core.io.ParseUtils;
+import com.spatial4j.core.shape.Point;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.ConstNumberSource;
 import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
 import org.apache.lucene.queries.function.valuesource.MultiValueSource;
 import org.apache.lucene.queries.function.valuesource.VectorValueSource;
+import org.apache.lucene.spatial.SpatialStrategy;
 import org.apache.solr.common.params.SpatialParams;
+import org.apache.solr.schema.AbstractSpatialFieldType;
+import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.FunctionQParser;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.search.ValueSourceParser;

 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;

 /**
- * Parses "geodist" creating {@link HaversineConstFunction} or {@link HaversineFunction}.
+ * Parses "geodist" creating {@link HaversineConstFunction} or {@link HaversineFunction}
+ * or calling {@link SpatialStrategy#makeDistanceValueSource(com.spatial4j.core.shape.Point,double)}.
  */
 public class GeoDistValueSourceParser extends ValueSourceParser {

   @Override
   public ValueSource parse(FunctionQParser fp) throws SyntaxError {
     // TODO: dispatch through SpatialQueryable in the future?
+
+    //note: parseValueSourceList can't handle a field reference to an AbstractSpatialFieldType,
+    // so those fields are expressly handled via sfield=
     List<ValueSource> sources = fp.parseValueSourceList();

     // "m" is a multi-value source, "x" is a single-value source
@@ -104,7 +113,7 @@ public class GeoDistValueSourceParser extends ValueSourceParser {
     }

     // We have all the parameters at this point, now check if one of the points is constant
-    double[] constants;
+    double[] constants;//latLon
     constants = getConstants(mv1);
     MultiValueSource other = mv2;
     if (constants == null) {
@@ -112,6 +121,24 @@ public class GeoDistValueSourceParser extends ValueSourceParser {
       other = mv1;
     }

+    // At this point we dispatch to one of:
+    // * SpatialStrategy.makeDistanceValueSource
+    // * HaversineConstFunction
+    // * HaversineFunction
+
+    // sfield can only be in mv2, according to the logic above
+    if (mv2 instanceof SpatialStrategyMultiValueSource) {
+      if (constants == null)
+        throw new SyntaxError("When using AbstractSpatialFieldType (e.g. RPT not LatLonType)," +
+            " the point must be supplied as constants");
+      // note: uses Haversine by default but can be changed via distCalc=...
+      SpatialStrategy strategy = ((SpatialStrategyMultiValueSource) mv2).strategy;
+      Point queryPoint = strategy.getSpatialContext().makePoint(constants[1], constants[0]);
+      //TODO Spatial4j 0.4 will have a direct constant
+      double multiplier = DistanceUtils.degrees2Dist(1, DistanceUtils.EARTH_MEAN_RADIUS_KM);
+      return strategy.makeDistanceValueSource(queryPoint, multiplier);
+    }
+
     if (constants != null && other instanceof VectorValueSource) {
       return new HaversineConstFunction(constants[0], constants[1], (VectorValueSource)other);
     }
@@ -155,11 +182,33 @@ public class GeoDistValueSourceParser extends ValueSourceParser {
     String sfield = fp.getParam(SpatialParams.FIELD);
     if (sfield == null) return null;
     SchemaField sf = fp.getReq().getSchema().getField(sfield);
-    ValueSource vs = sf.getType().getValueSource(sf, fp);
-    if (!(vs instanceof MultiValueSource)) {
-      throw new SyntaxError("Spatial field must implement MultiValueSource:" + sf);
+    FieldType type = sf.getType();
+    if (type instanceof AbstractSpatialFieldType) {
+      AbstractSpatialFieldType asft = (AbstractSpatialFieldType) type;
+      return new SpatialStrategyMultiValueSource(asft.getStrategy(sfield));
+    }
+    ValueSource vs = type.getValueSource(sf, fp);
+    if (vs instanceof MultiValueSource) {
+      return (MultiValueSource)vs;
+    }
+    throw new SyntaxError("Spatial field must implement MultiValueSource or extend AbstractSpatialFieldType:" + sf);
+  }
+
+  /** An unfortunate hack to use a {@link SpatialStrategy} instead of
+   * a ValueSource. */
+  private static class SpatialStrategyMultiValueSource extends VectorValueSource {
+
+    final SpatialStrategy strategy;
+
+    public SpatialStrategyMultiValueSource(SpatialStrategy strategy) {
+      super(Collections.EMPTY_LIST);
+      this.strategy = strategy;
+    }
+
+    @Override
+    public List<ValueSource> getSources() {
+      throw new IllegalStateException();
     }
-    return (MultiValueSource)vs;
   }
 }
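With the dispatch above, geodist over an AbstractSpatialFieldType (e.g. RPT) requires the point as constants, typically via pt= and sfield=, and the returned distance is in kilometers thanks to the degrees2Dist multiplier. A hedged request sketch (field name and coordinates are illustrative):

    import org.apache.solr.common.params.ModifiableSolrParams;

    public class GeodistRequestSketch {
      public static void main(String[] args) {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("q", "*:*");
        params.set("sfield", "geo");          // hypothetical RPT field
        params.set("pt", "45.15,-93.85");     // constant lat,lon
        params.set("sort", "geodist() asc");  // distances come back in km
        System.out.println(params);
      }
    }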

Some files were not shown because too many files have changed in this diff.