mirror of https://github.com/apache/lucene.git
Merged /lucene/dev/trunk:r1419558-1430123
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1430130 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
commit 61e368a46f
@@ -5,6 +5,7 @@
/eclipse-build
/classes
/build
/idea-build
/dist
/*~
/velocity.log
build.xml

@@ -114,7 +114,7 @@
<property name="maven-version" value="2.2.1"/>

<target name="get-maven-poms"
description="Copy Maven POMs from dev-tools/maven/ to the working copy root">
description="Copy Maven POMs from dev-tools/maven/ to maven-build/">
<copy todir="${maven-build-dir}" overwrite="true">
<fileset dir="${basedir}/dev-tools/maven"/>
<filterset begintoken="@" endtoken="@">

@@ -154,6 +154,7 @@
<subant buildpath="." antfile="extra-targets.xml" target="-run-maven-build" inheritall="false" failonerror="true">
<propertyset>
<propertyref prefix="maven-"/>
<propertyref builtin="commandline"/>
</propertyset>
</subant>
</target>

@@ -170,11 +171,28 @@
<target name="eclipse" depends="resolve" description="Setup Eclipse configuration">
<copy file="dev-tools/eclipse/dot.project" tofile=".project" overwrite="false"/>
<copy file="dev-tools/eclipse/dot.classpath" tofile=".classpath" overwrite="true"/>
<mkdir dir=".settings"/>
<copy todir=".settings/" overwrite="true">
<fileset dir="dev-tools/eclipse/dot.settings" includes="*.prefs" />
</copy>

<pathconvert property="eclipse.fileset.sourcefolders" pathsep="|" dirsep="/">
<dirset dir="${basedir}/lucene" includes="**/src/java, **/src/resources, **/src/test, **/src/test-files, **/src/examples" excludes="tools/**, build/**, backwards/**" />
<dirset dir="${basedir}/solr" includes="**/src/java, **/src/resources, **/src/test, **/src/test-files, **/src/examples" excludes="build/**" />
<map from="${basedir}/" to=""/>
</pathconvert>
<!-- TODO: find a better way to exclude duplicate JAR files & fix the servlet-api mess! -->
<pathconvert property="eclipse.fileset.libs" pathsep="|" dirsep="/">
<fileset dir="${basedir}/lucene" includes="**/lib/*.jar" excludes="**/*servlet-api*.jar, analysis/uima/**, tools/**, build/**"/>
<fileset dir="${basedir}/solr" includes="**/lib/*.jar" excludes="core/lib/*servlet-api*.jar, contrib/analysis-extras/**, test-framework/**, build/**, dist/**, package/**" />
<map from="${basedir}/" to=""/>
</pathconvert>
<xslt in="${ant.file}" out=".classpath" style="dev-tools/eclipse/dot.classpath.xsl" force="true">
<outputproperty name="indent" value="yes"/>
<param name="eclipse.fileset.libs" expression="${eclipse.fileset.libs}"/>
<param name="eclipse.fileset.sourcefolders" expression="${eclipse.fileset.sourcefolders}"/>
</xslt>

<echo>
SUCCESS: You must right-click your project and choose Refresh.
Your project must use a Java 6 JRE.

@@ -294,7 +312,7 @@
<subant buildpath="." antfile="extra-targets.xml" target="-check-svn-working-copy" inheritall="false" failonerror="true"/>
</target>

<target name="run-clover" description="Runs all tests to measure coverage and generates report (pass "ANT_ARGS=-Xmx1536M" as environment)" depends="clean">
<target name="run-clover" description="Runs all tests to measure coverage and generates report (pass "ANT_OPTS=-Xmx1536M" as environment)" depends="clean">
<antcall>
<param name="run.clover" value="true"/>
<!-- must be 1, as clover does not like parallel test runs: -->

@@ -351,7 +369,26 @@
<!-- Jenkins tasks -->
<target name="jenkins-hourly" depends="clean,test-with-heapdumps,validate,documentation-lint,jar-checksums,check-svn-working-copy"/>

<target name="jenkins-maven-nightly" depends="clean,remove-maven-artifacts,run-maven-build,generate-maven-artifacts,validate-maven-dependencies"/>
<target name="jenkins-nightly">
<antcall>
<param name="tests.nightly" value="true"/>
<target name="jenkins-hourly"/>
</antcall>
</target>

<target name="jenkins-maven-nightly" depends="clean,clean-maven-build">
<!-- step 1: build, install, deploy, and validate ANT-generated maven artifacts: -->
<antcall>
<target name="remove-maven-artifacts"/>
<!-- this implicitely publishes the maven artifacts: -->
<target name="validate-maven-dependencies"/>
</antcall>
<!-- step 2: run the maven build to check that the pom templates also work to drive "mvn": -->
<antcall>
<target name="remove-maven-artifacts"/>
<target name="run-maven-build"/>
</antcall>
</target>

<target name="jenkins-clover" depends="run-clover"/>
</project>
@@ -1,195 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="lucene/core/src/java"/>
<classpathentry kind="src" path="lucene/core/src/resources"/>
<classpathentry kind="src" path="lucene/core/src/test"/>
<classpathentry kind="src" path="lucene/codecs/src/java"/>
<classpathentry kind="src" output="eclipse-build/codecs" path="lucene/codecs/src/resources"/>
<classpathentry kind="src" path="lucene/codecs/src/test"/>
<classpathentry kind="src" path="lucene/demo/src/java"/>
<classpathentry kind="src" path="lucene/demo/src/resources"/>
<classpathentry kind="src" path="lucene/demo/src/test"/>
<classpathentry kind="src" path="lucene/highlighter/src/java"/>
<classpathentry kind="src" path="lucene/highlighter/src/test"/>
<classpathentry kind="src" path="lucene/memory/src/java"/>
<classpathentry kind="src" path="lucene/memory/src/test"/>
<classpathentry kind="src" path="lucene/misc/src/java"/>
<classpathentry kind="src" path="lucene/misc/src/test"/>
<classpathentry kind="src" path="lucene/sandbox/src/java"/>
<classpathentry kind="src" path="lucene/sandbox/src/test"/>
<classpathentry kind="src" path="lucene/test-framework/src/java"/>
<classpathentry kind="src" output="eclipse-build/tests-framework" path="lucene/test-framework/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/common/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-common" path="lucene/analysis/common/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/common/src/test"/>
<classpathentry kind="src" path="lucene/analysis/icu/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-icu" path="lucene/analysis/icu/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/icu/src/test"/>
<classpathentry kind="src" path="lucene/analysis/kuromoji/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-kuromoji" path="lucene/analysis/kuromoji/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/kuromoji/src/test"/>
<classpathentry kind="src" path="lucene/analysis/phonetic/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-phonetic" path="lucene/analysis/phonetic/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/phonetic/src/test"/>
<classpathentry kind="src" path="lucene/analysis/smartcn/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-smartcn" path="lucene/analysis/smartcn/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/smartcn/src/test"/>
<classpathentry kind="src" path="lucene/analysis/stempel/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-stempel" path="lucene/analysis/stempel/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/stempel/src/test"/>
<classpathentry kind="src" path="lucene/analysis/morfologik/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-morfologik" path="lucene/analysis/morfologik/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/morfologik/src/test"/>
<classpathentry kind="src" path="lucene/analysis/uima/src/java"/>
<classpathentry kind="src" output="eclipse-build/analysis-uima" path="lucene/analysis/uima/src/resources"/>
<classpathentry kind="src" path="lucene/analysis/uima/src/test"/>
<classpathentry kind="src" path="lucene/benchmark/src/java"/>
<classpathentry kind="src" path="lucene/benchmark/src/test"/>
<classpathentry excluding="src" including="conf/**" kind="src" path="lucene/benchmark"/>
<classpathentry kind="src" path="lucene/classification/src/java"/>
<classpathentry kind="src" path="lucene/classification/src/test"/>
<classpathentry kind="src" path="lucene/facet/src/java"/>
<classpathentry kind="src" path="lucene/facet/src/examples"/>
<classpathentry kind="src" path="lucene/facet/src/test"/>
<classpathentry kind="src" path="lucene/grouping/src/java"/>
<classpathentry kind="src" path="lucene/grouping/src/test"/>
<classpathentry kind="src" path="lucene/join/src/java"/>
<classpathentry kind="src" path="lucene/join/src/test"/>
<classpathentry kind="src" path="lucene/queries/src/java"/>
<classpathentry kind="src" path="lucene/queries/src/test"/>
<classpathentry kind="src" path="lucene/queryparser/src/java"/>
<classpathentry kind="src" path="lucene/queryparser/src/resources"/>
<classpathentry kind="src" path="lucene/queryparser/src/test"/>
<classpathentry kind="src" path="lucene/suggest/src/java"/>
<classpathentry kind="src" path="lucene/suggest/src/test"/>
<classpathentry kind="src" path="lucene/spatial/src/java"/>
<classpathentry kind="src" path="lucene/spatial/src/test"/>
<classpathentry kind="src" path="lucene/spatial/src/test-files"/>
<classpathentry kind="lib" path="lucene/spatial/lib/spatial4j-0.3.jar"/>
<classpathentry kind="src" path="solr/core/src/java"/>
<classpathentry kind="src" path="solr/core/src/test"/>
<classpathentry kind="src" path="solr/core/src/test-files"/>
<classpathentry kind="src" path="solr/solrj/src/java"/>
<classpathentry kind="src" path="solr/solrj/src/test"/>
<classpathentry kind="src" path="solr/solrj/src/test-files"/>
<classpathentry kind="src" path="solr/test-framework/src/java"/>
<classpathentry kind="src" path="solr/contrib/analysis-extras/src/java"/>
<classpathentry kind="src" path="solr/contrib/analysis-extras/src/test"/>
<classpathentry kind="src" path="solr/contrib/analysis-extras/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/clustering/src/java"/>
<classpathentry kind="src" path="solr/contrib/clustering/src/test"/>
<classpathentry kind="src" path="solr/contrib/clustering/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/dataimporthandler/src/java"/>
<classpathentry kind="src" path="solr/contrib/dataimporthandler/src/test"/>
<classpathentry kind="src" path="solr/contrib/dataimporthandler/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/dataimporthandler-extras/src/java"/>
<classpathentry kind="src" path="solr/contrib/dataimporthandler-extras/src/test"/>
<classpathentry kind="src" path="solr/contrib/dataimporthandler-extras/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/extraction/src/java"/>
<classpathentry kind="src" path="solr/contrib/extraction/src/test"/>
<classpathentry kind="src" path="solr/contrib/extraction/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/langid/src/java"/>
<classpathentry kind="src" path="solr/contrib/langid/src/resources"/>
<classpathentry kind="src" path="solr/contrib/langid/src/test"/>
<classpathentry kind="src" path="solr/contrib/langid/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/uima/src/java"/>
<classpathentry kind="src" path="solr/contrib/uima/src/resources"/>
<classpathentry kind="src" path="solr/contrib/uima/src/test"/>
<classpathentry kind="src" path="solr/contrib/uima/src/test-files"/>
<classpathentry kind="src" path="solr/contrib/velocity/src/java"/>
<classpathentry kind="src" path="solr/contrib/velocity/src/test"/>
<classpathentry kind="src" path="solr/contrib/velocity/src/test-files"/>
<classpathentry kind="lib" path="lucene/test-framework/lib/ant-1.8.2.jar"/>
<classpathentry kind="lib" path="lucene/test-framework/lib/junit-4.10.jar"/>
<classpathentry kind="lib" path="lucene/sandbox/lib/jakarta-regexp-1.4.jar"/>
<classpathentry kind="lib" path="lucene/analysis/icu/lib/icu4j-49.1.jar"/>
<classpathentry kind="lib" path="lucene/analysis/phonetic/lib/commons-codec-1.7.jar"/>
<classpathentry kind="lib" path="lucene/analysis/morfologik/lib/morfologik-fsa-1.5.3.jar"/>
<classpathentry kind="lib" path="lucene/analysis/morfologik/lib/morfologik-polish-1.5.3.jar"/>
<classpathentry kind="lib" path="lucene/analysis/morfologik/lib/morfologik-stemming-1.5.3.jar"/>
<classpathentry kind="lib" path="lucene/benchmark/lib/commons-compress-1.4.1.jar"/>
<classpathentry kind="lib" path="lucene/benchmark/lib/xercesImpl-2.9.1.jar"/>
<classpathentry kind="lib" path="lucene/benchmark/lib/nekohtml-1.9.17.jar"/>
<classpathentry kind="lib" path="solr/core/lib/commons-fileupload-1.2.1.jar"/>
<classpathentry kind="lib" path="solr/core/lib/commons-cli-1.2.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/httpclient-4.1.3.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/httpcore-4.1.4.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/httpmime-4.1.3.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/commons-io-2.1.jar"/>
<classpathentry kind="lib" path="solr/core/lib/commons-lang-2.6.jar"/>
<classpathentry kind="lib" path="solr/core/lib/easymock-2.2.jar"/>
<classpathentry kind="lib" path="solr/core/lib/guava-13.0.1.jar"/>
<classpathentry kind="lib" path="solr/core/lib/metrics-core-2.1.2.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/jcl-over-slf4j-1.6.4.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/log4j-over-slf4j-1.6.4.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/slf4j-api-1.6.4.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/slf4j-jdk14-1.6.4.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/wstx-asl-3.2.7.jar"/>
<classpathentry kind="lib" path="solr/solrj/lib/zookeeper-3.4.5.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-continuation-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-deploy-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-http-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-io-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-jmx-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-security-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-server-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-servlet-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-util-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-webapp-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/jetty-xml-8.1.8.v20121106.jar"/>
<classpathentry kind="lib" path="solr/example/lib/servlet-api-3.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/carrot2-core-3.5.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/hppc-0.3.3.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/jackson-core-asl-1.7.4.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/jackson-mapper-asl-1.7.4.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/mahout-collections-0.3.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/mahout-math-0.3.jar"/>
<classpathentry kind="lib" path="solr/contrib/clustering/lib/simple-xml-2.4.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/dataimporthandler/lib/activation-1.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/dataimporthandler/lib/mail-1.4.1.jar"/>
<classpathentry kind="lib" path="solr/example/example-DIH/solr/db/lib/derby-10.9.1.0.jar"/>
<classpathentry kind="lib" path="solr/example/example-DIH/solr/db/lib/hsqldb-1.8.0.10.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/bcmail-jdk15-1.45.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/bcprov-jdk15-1.45.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/boilerpipe-1.1.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/dom4j-1.6.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/metadata-extractor-2.4.0-beta-1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/netcdf-4.2-min.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/rome-0.9.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/tagsoup-1.2.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/xmlbeans-2.3.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/langid/lib/langdetect-1.1-20120112.jar"/>
<classpathentry kind="lib" path="solr/contrib/langid/lib/jsonic-1.2.7.jar"/>
<classpathentry kind="lib" path="solr/contrib/uima/lib/commons-digester-2.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/uima/lib/AlchemyAPIAnnotator-2.3.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/uima/lib/OpenCalaisAnnotator-2.3.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/uima/lib/Tagger-2.3.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/uima/lib/WhitespaceTokenizer-2.3.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/uima/lib/uimaj-core-2.3.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/velocity/lib/velocity-1.6.4.jar"/>
<classpathentry kind="lib" path="solr/contrib/velocity/lib/velocity-tools-2.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/velocity/lib/commons-beanutils-1.7.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/velocity/lib/commons-collections-3.2.1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lucene/test-framework/lib/randomizedtesting-runner-2.0.5.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/apache-mime4j-core-0.7.2.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/apache-mime4j-dom-0.7.2.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/fontbox-1.7.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/icu4j-49.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/isoparser-1.0-RC-1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/jdom-1.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/jempbox-1.7.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/juniversalchardet-1.0.3.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/pdfbox-1.7.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/poi-3.8.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/poi-ooxml-3.8.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/poi-ooxml-schemas-3.8.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/poi-scratchpad-3.8.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/tika-core-1.2.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/tika-parsers-1.2.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/vorbis-java-core-0.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/vorbis-java-tika-0.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/xercesImpl-2.9.1.jar"/>
<classpathentry kind="lib" path="solr/contrib/extraction/lib/xz-1.0.jar"/>
<classpathentry kind="output" path="eclipse-build/other"/>
</classpath>
@@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:str="http://exslt.org/strings"
extension-element-prefixes="str"
>
<xsl:param name="eclipse.fileset.sourcefolders"/>
<xsl:param name="eclipse.fileset.libs"/>

<!--
NOTE: This template matches the root element of any given input XML document!
The XSL input file is ignored completely.
-->
<xsl:template match="/">
<classpath>
<xsl:for-each select="str:split($eclipse.fileset.sourcefolders,'|')">
<!-- hack to sort the list, starts-with() returns "true" which sorts before "false" if descending: -->
<xsl:sort select="string(starts-with(text(), 'lucene/core/'))" order="descending" lang="en"/>
<xsl:sort select="string(starts-with(text(), 'lucene/test-framework/'))" order="descending" lang="en"/>
<xsl:sort select="string(starts-with(text(), 'lucene/'))" order="descending" lang="en"/>
<xsl:sort select="string(starts-with(text(), 'solr/core/'))" order="descending" lang="en"/>
<xsl:sort select="string(starts-with(text(), 'solr/solrj/'))" order="descending" lang="en"/>
<xsl:sort select="string(starts-with(text(), 'solr/test-framework/'))" order="descending" lang="en"/>
<xsl:sort select="string(starts-with(text(), 'solr/'))" order="descending" lang="en"/>
<!-- all others in one group above are sorted by path name: -->
<xsl:sort select="text()" order="ascending" lang="en"/>

<classpathentry kind="src" path="{.}">
<!-- make Lucene's resource folders unique (for SPI), but leave the main SPI in default target folder: -->
<xsl:if test="starts-with(.,'lucene/') and not(starts-with(.,'lucene/core')) and contains(.,'/src/resources')">
<xsl:attribute name="output">
<xsl:text>eclipse-build/</xsl:text><xsl:value-of select="position()"/>
</xsl:attribute>
</xsl:if>
</classpathentry>
<!-- special case for benchmark, we add extra entry after the tests: -->
<xsl:if test="text()='lucene/benchmark/src/test'">
<classpathentry excluding="src" including="conf/**" kind="src" path="lucene/benchmark"/>
</xsl:if>
</xsl:for-each>

<!-- the main resources folder is here (see above), so it's listed after the test-framework resources, making preflex-override work: -->
<classpathentry kind="output" path="eclipse-build/main"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>

<xsl:for-each select="str:split($eclipse.fileset.libs,'|')">
<!-- sort the jars by path name: -->
<xsl:sort select="text()" order="ascending" lang="en"/>
<classpathentry kind="lib" path="{.}"/>
</xsl:for-each>
</classpath>
</xsl:template>

</xsl:stylesheet>
@@ -60,5 +60,14 @@
<arguments>1.0-projectRelativePath-matches-false-false-solr/dist</arguments>
</matcher>
</filter>
<filter>
<id>1353353379246</id>
<name></name>
<type>10</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-name-matches-false-false-.svn</arguments>
</matcher>
</filter>
</filteredResources>
</projectDescription>
@@ -2,7 +2,7 @@
<library name="JUnit">
<CLASSES>
<root url="jar://$PROJECT_DIR$/lucene/test-framework/lib/junit-4.10.jar!/" />
<root url="jar://$PROJECT_DIR$/lucene/test-framework/lib/randomizedtesting-runner-2.0.5.jar!/" />
<root url="jar://$PROJECT_DIR$/lucene/test-framework/lib/randomizedtesting-runner-2.0.8.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -149,11 +149,6 @@
<artifactId>guava</artifactId>
<version>13.0.1</version>
</dependency>
<dependency>
<groupId>com.yammer.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>2.1.2</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>

@@ -338,7 +333,7 @@
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity</artifactId>
<version>1.6.4</version>
<version>1.7</version>
</dependency>
<dependency>
<groupId>org.apache.velocity</groupId>

@@ -353,12 +348,12 @@
<dependency>
<groupId>org.carrot2</groupId>
<artifactId>carrot2-core</artifactId>
<version>3.5.0</version>
<version>3.6.2</version>
</dependency>
<dependency>
<groupId>org.carrot2</groupId>
<artifactId>morfologik-polish</artifactId>
<version>1.5.3</version>
<version>1.5.5</version>
</dependency>
<dependency>
<groupId>org.codehaus.woodstox</groupId>

@@ -368,7 +363,8 @@
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>2.2</version>
<version>3.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>

@@ -444,7 +440,7 @@
<dependency>
<groupId>com.carrotsearch.randomizedtesting</groupId>
<artifactId>randomizedtesting-runner</artifactId>
<version>2.0.5</version>
<version>2.0.8</version>
</dependency>
</dependencies>
</dependencyManagement>

@@ -550,7 +546,8 @@
<runOrder>random</runOrder>
<reportFormat>plain</reportFormat>
<workingDirectory>${project.build.directory}/test</workingDirectory>
<argLine>-Xmx512M</argLine>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<argLine>-Xmx512M -XX:MaxPermSize=256M</argLine>
<systemPropertyVariables>
<tempDir>.</tempDir>
<java.awt.headless>true</java.awt.headless>
@@ -35,6 +35,7 @@
<module-directory>solr/contrib/analysis-extras</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -98,5 +99,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -35,6 +35,7 @@
<module-directory>solr/contrib/clustering</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -106,6 +107,22 @@
<groupId>org.carrot2</groupId>
<artifactId>morfologik-stemming</artifactId>
</exclusion>
<exclusion>
<groupId>jfree</groupId>
<artifactId>jcommon</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math</artifactId>
</exclusion>
<exclusion>
<groupId>org.uncommons.maths</groupId>
<artifactId>uncommons-maths</artifactId>
</exclusion>
<exclusion>
<groupId>xpp3</groupId>
<artifactId>xpp3</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>

@@ -123,5 +140,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -35,6 +35,7 @@
<module-directory>solr/contrib/dataimporthandler-extras</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -101,5 +102,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -35,6 +35,7 @@
<module-directory>solr/contrib/dataimporthandler</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -104,6 +105,15 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -38,6 +38,7 @@
<module-directory>solr/contrib/extraction</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -98,5 +99,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -39,6 +39,7 @@
<module-directory>solr/contrib/langid</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -104,5 +105,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -35,6 +35,7 @@
<module-directory>solr/contrib/uima</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -123,5 +124,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -35,6 +35,7 @@
<module-directory>solr/contrib/velocity</module-directory>
<top-level>../../../..</top-level>
<module-path>${top-level}/${module-directory}</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<scm>
<connection>scm:svn:${vc-anonymous-base-url}/${module-directory}</connection>

@@ -139,5 +140,16 @@
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -196,10 +196,6 @@
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
</dependency>
<dependency>
<groupId>com.yammer.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
</dependencies>
<build>
<sourceDirectory>${module-path}</sourceDirectory>
@@ -34,6 +34,7 @@
<module-directory>solr/core</module-directory>
<top-level>../../../../..</top-level>
<module-path>${top-level}/${module-directory}/src/test</module-path>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<dependencies>
<dependency>

@@ -118,6 +119,15 @@
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -104,7 +104,7 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>../test-classes/maven.testlogging.properties</java.util.logging.config.file>
<tests.jettyConnector>${tests.jettyConnector}</tests.jettyConnector>
</systemPropertyVariables>
</configuration>
</plugin>
@@ -62,6 +62,10 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.jboss.netty</groupId>
<artifactId>netty</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
@@ -34,7 +34,7 @@
<module-directory>solr/solrj</module-directory>
<top-level>../../../../..</top-level>
<module-path>${top-level}/${module-directory}/src/test</module-path>
<maven.install.skip>true</maven.install.skip>
<surefire-solr-directory>${top-level}/../../solr</surefire-solr-directory>
</properties>
<dependencies>
<dependency>

@@ -108,6 +108,15 @@
<skip>true</skip>
</configuration>
</plugin>
</plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<java.util.logging.config.file>${surefire-solr-directory}/testlogging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -56,6 +56,9 @@
mavenVersion="${maven-version}" failonerror="true" fork="true">
<arg value="-fae"/>
<arg value="install"/>
<syspropertyset>
<propertyref builtin="commandline"/>
</syspropertyset>
</mvn>
</target>
@@ -80,7 +80,33 @@ Changes in backwards compatibility policy
can use OrdinalPolicy.NO_PARENTS to never write any parent category ordinal
to the fulltree posting payload (but note that you need a special
FacetsAccumulator - see javadocs). (Shai Erera)

* LUCENE-4594: Spatial PrefixTreeStrategy no longer indexes center points of
non-point shapes. If you want to call makeDistanceValueSource() based on
shape centers, you need to do this yourself in another spatial field.
(David Smiley)

* LUCENE-4615: Replace IntArrayAllocator and FloatArrayAllocator by ArraysPool.
FacetArrays no longer takes those allocators; if you need to reuse the arrays,
you should use ReusingFacetArrays. (Shai Erera, Gilad Barkai)

* LUCENE-4621: FacetIndexingParams is now a concrete class (instead of DefaultFIP).
Also, the entire IndexingParams chain is now immutable. If you need to override
a setting, you should extend the relevant class.
Additionally, FacetSearchParams is now immutable, and requires all FacetRequests
to be specified at initialization time. (Shai Erera)

* LUCENE-4647: CategoryDocumentBuilder and EnhancementsDocumentBuilder are replaced
by FacetFields and AssociationsFacetFields respectively. CategoryEnhancement and
AssociationEnhancement were removed in favor of a simplified CategoryAssociation
interface, with CategoryIntAssociation and CategoryFloatAssociation
implementations.
NOTE: indexes that contain category enhancements/associations are not supported
by the new code and should be recreated. (Shai Erera)

* LUCENE-4659: Massive cleanup to CategoryPath API. Additionally, CategoryPath is
now immutable, so you don't need to clone() it. (Shai Erera)

New Features

* LUCENE-4226: New experimental StoredFieldsFormat that compresses chunks of

@@ -138,7 +164,14 @@ New Features
Wikipedia category pages and non-category pages into separate line files.
extractWikipedia.alg was changed to use this task, so now it creates two
files. (Doron Cohen)

* LUCENE-4290: Added PostingsHighlighter to the highlighter module. It uses
offsets from the postings lists to highlight documents. (Robert Muir)
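For illustration only (this snippet is not part of the patch), a minimal sketch of calling the new highlighter described in the entry above; it assumes the "body" field was indexed with offsets stored in the postings and that a search has already produced topDocs:

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.postingshighlight.PostingsHighlighter;

class PostingsHighlighterSketch {
  // Returns one highlighted snippet per hit in topDocs; "body" must have been indexed
  // with offsets in the postings (DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS), since the
  // highlighter reads offsets from the index instead of re-analyzing the text.
  static String[] highlightBody(IndexSearcher searcher, Query query, TopDocs topDocs) throws Exception {
    PostingsHighlighter highlighter = new PostingsHighlighter();
    return highlighter.highlight("body", query, searcher, topDocs);
  }
}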
* LUCENE-4628: Added CommonTermsQuery that executes high-frequency terms
in an optional sub-query to prevent slow queries due to "common" terms
like stopwords. (Simon Willnauer)
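For illustration only (not part of the patch), a hypothetical sketch of building such a query; the constructor arguments (the occur for high- and low-frequency terms, and the maximum document frequency above which a term counts as "common") are assumptions based on the description above:

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.BooleanClause.Occur;

class CommonTermsQuerySketch {
  // Terms that occur in more than 0.1% of all documents are moved into an optional
  // (SHOULD) sub-query rather than being required, so stopword-like terms do not
  // dominate query cost.
  static CommonTermsQuery build() {
    CommonTermsQuery query = new CommonTermsQuery(Occur.SHOULD, Occur.SHOULD, 0.001f);
    query.add(new Term("body", "the"));
    query.add(new Term("body", "quick"));
    query.add(new Term("body", "fox"));
    return query;
  }
}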
API Changes

* LUCENE-4399: Deprecated AppendingCodec. Lucene's term dictionaries

@@ -165,6 +198,19 @@ API Changes
* LUCENE-4591: CompressingStoredFields{Writer,Reader} now accept a segment
suffix as a constructor parameter. (Renaud Delbru via Adrien Grand)

* LUCENE-4605: Added DocsEnum.FLAG_NONE which can be passed instead of 0 as
the flag to .docs() and .docsAndPositions(). (Shai Erera)
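For illustration only (not part of the patch), a minimal sketch of the flag described above, assuming a TermsEnum already positioned on a term:

import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.TermsEnum;

class FlagNoneSketch {
  // Enumerate document IDs only: pass DocsEnum.FLAG_NONE instead of a bare 0 when
  // neither frequencies nor other per-position data are needed.
  static DocsEnum docsOnly(TermsEnum termsEnum) throws IOException {
    return termsEnum.docs(null, null, DocsEnum.FLAG_NONE);
  }
}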
* LUCENE-4617: Remove FST.pack() method. Previously to make a packed FST,
you had to make a Builder with willPackFST=true (telling it you will later pack it),
create your fst with finish(), and then call pack() to get another FST.
Instead just pass true for doPackFST to Builder and finish() returns a packed FST.
(Robert Muir)

* LUCENE-4663: Deprecate IndexSearcher.document(int, Set). This was not intended
to be final, nor named document(). Use IndexSearcher.doc(int, Set) instead.
(Robert Muir)
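For illustration only (not part of the patch), a minimal sketch of the replacement call mentioned above, loading a single stored field for a hit:

import java.io.IOException;
import java.util.Collections;
import org.apache.lucene.document.Document;
import org.apache.lucene.search.IndexSearcher;

class LoadStoredFieldSketch {
  // Load only the "title" stored field via doc(int, Set), instead of the
  // deprecated document(int, Set).
  static Document loadTitle(IndexSearcher searcher, int docID) throws IOException {
    return searcher.doc(docID, Collections.singleton("title"));
  }
}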
Bug Fixes

* LUCENE-1822: BaseFragListBuilder hard-coded 6 char margin is too naive.

@@ -246,6 +292,39 @@ Bug Fixes
* LUCENE-4596: fix a concurrency bug in DirectoryTaxonomyWriter.
(Shai Erera)

* LUCENE-4594: Spatial PrefixTreeStrategy would index center-points in addition
to the shape to index if it was non-point, in the same field. But sometimes
the center-point isn't actually in the shape (consider a LineString), and for
highly precise shapes it could cause makeDistanceValueSource's cache to load
parts of the shape's boundary erroneously too. So center points aren't
indexed any more; you should use another spatial field. (David Smiley)

* LUCENE-4629: IndexWriter misses to delete documents if a document block is
indexed and the Iterator throws an exception. Documents were only rolled back
if the actual indexing process failed. (Simon Willnauer)

* LUCENE-4608: Handle large number of requested fragments better.
(Martijn van Groningen)

* LUCENE-4633: DirectoryTaxonomyWriter.replaceTaxonomy did not refresh its
internal reader, which could cause an existing category to be added twice.
(Shai Erera)

* LUCENE-4461: If you added the same FacetRequest more than once, you would get
inconsistent results. (Gilad Barkai via Shai Erera)

* LUCENE-4656: Fix regression in IndexWriter to work with empty TokenStreams
that have no TermToBytesRefAttribute (commonly provided by CharTermAttribute),
e.g., oal.analysis.miscellaneous.EmptyTokenStream.
(Uwe Schindler, Adrien Grand, Robert Muir)

* LUCENE-4660: ConcurrentMergeScheduler was taking too long to
un-pause incoming threads it had paused when too many merges were
queued up. (Mike McCandless)

* LUCENE-4662: Add missing elided articles and prepositions to FrenchAnalyzer's
DEFAULT_ARTICLES list passed to ElisionFilter. (David Leunen via Steve Rowe)

Changes in Runtime Behavior

* LUCENE-4586: Change default ResultMode of FacetRequest to PER_NODE_IN_TREE.

@@ -310,6 +389,10 @@ Optimizations

* LUCENE-4598: PayloadIterator no longer uses top-level IndexReader to iterate on the
posting's payload. (Shai Erera, Michael McCandless)

* LUCENE-4661: Drop default maxThreadCount to 1 and maxMergeCount to 2
in ConcurrentMergeScheduler, for faster merge performance on
spinning-magnet drives (Mike McCandless)

Documentation

@@ -318,6 +401,9 @@ Documentation

Build

* LUCENE-4650: Upgrade randomized testing to version 2.0.8: make the
test framework more robust under low memory conditions. (Dawid Weiss)

* LUCENE-4603: Upgrade randomized testing to version 2.0.5: print forked
JVM PIDs on heartbeat from hung tests (Dawid Weiss)
@@ -1,5 +1,5 @@
Apache Lucene
Copyright 2012 The Apache Software Foundation
Copyright 2013 The Apache Software Foundation

This product includes software developed by
The Apache Software Foundation (http://www.apache.org/).
@@ -37,6 +37,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*/
public class ArabicNormalizationFilterFactory extends TokenFilterFactory implements MultiTermAwareComponent {

@Override
public ArabicNormalizationFilter create(TokenStream input) {
return new ArabicNormalizationFilter(input);
}

@@ -37,6 +37,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
public class ArabicStemFilterFactory extends TokenFilterFactory {

@Override
public ArabicStemFilter create(TokenStream input) {
return new ArabicStemFilter(input);
}

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*
*/
public class BulgarianStemFilterFactory extends TokenFilterFactory {
@Override
public TokenStream create(TokenStream input) {
return new BulgarianStemFilter(input);
}

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*
*/
public class BrazilianStemFilterFactory extends TokenFilterFactory {
@Override
public BrazilianStemFilter create(TokenStream in) {
return new BrazilianStemFilter(in);
}

@@ -43,6 +43,7 @@ import java.util.regex.Pattern;
Set<String> escapedTags = null;
Pattern TAG_NAME_PATTERN = Pattern.compile("[^\\s,]+");

@Override
public HTMLStripCharFilter create(Reader input) {
HTMLStripCharFilter charFilter;
if (null == escapedTags) {
@@ -50,6 +50,7 @@ public class MappingCharFilterFactory extends CharFilterFactory implements
private String mapping;

// TODO: this should use inputstreams from the loader, not File!
@Override
public void inform(ResourceLoader loader) throws IOException {
mapping = args.get("mapping");

@@ -77,6 +78,7 @@ public class MappingCharFilterFactory extends CharFilterFactory implements
}
}

@Override
public Reader create(Reader input) {
// if the map is null, it means there's actually no mappings... just return the original stream
// as there is nothing to do here.

@@ -58,6 +58,7 @@ public final class CJKWidthFilter extends TokenFilter {
super(input);
}

@Override
public boolean incrementToken() throws IOException {
if (input.incrementToken()) {
char text[] = termAtt.buffer();

@@ -42,6 +42,7 @@ import org.apache.lucene.analysis.util.*;
public class CommonGramsFilterFactory extends TokenFilterFactory implements
ResourceLoaderAware {

@Override
public void inform(ResourceLoader loader) throws IOException {
String commonWordFiles = args.get("words");
ignoreCase = getBoolean("ignoreCase", false);

@@ -69,6 +70,7 @@ public class CommonGramsFilterFactory extends TokenFilterFactory implements
return commonWords;
}

@Override
public CommonGramsFilter create(TokenStream input) {
CommonGramsFilter commonGrams = new CommonGramsFilter(luceneMatchVersion, input, commonWords);
return commonGrams;

@@ -50,6 +50,7 @@ public class CommonGramsQueryFilterFactory extends TokenFilterFactory
assureMatchVersion();
}

@Override
public void inform(ResourceLoader loader) throws IOException {
String commonWordFiles = args.get("words");
ignoreCase = getBoolean("ignoreCase", false);

@@ -82,6 +83,7 @@ public class CommonGramsQueryFilterFactory extends TokenFilterFactory
/**
* Create a CommonGramsFilter and wrap it with a CommonGramsQueryFilter
*/
@Override
public CommonGramsQueryFilter create(TokenStream input) {
CommonGramsFilter commonGrams = new CommonGramsFilter(luceneMatchVersion, input, commonWords);
CommonGramsQueryFilter commonGramsQuery = new CommonGramsQueryFilter(

@@ -56,9 +56,11 @@ public class DictionaryCompoundWordTokenFilterFactory extends TokenFilterFactory
maxSubwordSize= getInt("maxSubwordSize",CompoundWordTokenFilterBase.DEFAULT_MAX_SUBWORD_SIZE);
onlyLongestMatch = getBoolean("onlyLongestMatch",true);
}
@Override
public void inform(ResourceLoader loader) throws IOException {
dictionary = super.getWordSet(loader, dictFile, false);
}
@Override
public TokenStream create(TokenStream input) {
// if the dictionary is null, it means it was empty
return dictionary == null ? input : new DictionaryCompoundWordTokenFilter(luceneMatchVersion,input,dictionary,minWordSize,minSubwordSize,maxSubwordSize,onlyLongestMatch);

@@ -85,6 +85,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends TokenFilterFactor
onlyLongestMatch = getBoolean("onlyLongestMatch", false);
}

@Override
public void inform(ResourceLoader loader) throws IOException {
InputStream stream = null;
try {

@@ -102,6 +103,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends TokenFilterFactor
}
}

@Override
public HyphenationCompoundWordTokenFilter create(TokenStream input) {
return new HyphenationCompoundWordTokenFilter(luceneMatchVersion, input, hyphenator, dictionary, minWordSize, minSubwordSize, maxSubwordSize, onlyLongestMatch);
}
@@ -414,6 +414,7 @@ public class HyphenationTree extends TernaryTree implements PatternConsumer {
* case characters, in this case a class for letter 'a', for example, should
* be defined as "aA", the first character being the normalization char.
*/
@Override
public void addClass(String chargroup) {
if (chargroup.length() > 0) {
char equivChar = chargroup.charAt(0);

@@ -435,6 +436,7 @@ public class HyphenationTree extends TernaryTree implements PatternConsumer {
* @param hyphenatedword a vector of alternating strings and
* {@link Hyphen hyphen} objects.
*/
@Override
public void addException(String word, ArrayList<Object> hyphenatedword) {
stoplist.put(word, hyphenatedword);
}

@@ -449,6 +451,7 @@ public class HyphenationTree extends TernaryTree implements PatternConsumer {
* priority of hyphenating at a given point within the pattern. It
* should contain only digit characters. (i.e. '0' to '9').
*/
@Override
public void addPattern(String pattern, String ivalue) {
int k = ivalues.find(ivalue);
if (k <= 0) {

@@ -515,6 +515,7 @@ public class TernaryTree implements Cloneable {
run();
}

@Override
public String nextElement() {
String res = new String(curkey);
cur = up();

@@ -529,6 +530,7 @@ public class TernaryTree implements Cloneable {
return 0;
}

@Override
public boolean hasMoreElements() {
return (cur != -1);
}

@@ -33,6 +33,7 @@ import java.io.Reader;
*
*/
public class KeywordTokenizerFactory extends TokenizerFactory {
@Override
public KeywordTokenizer create(Reader input) {
return new KeywordTokenizer(input);
}

@@ -41,6 +41,7 @@ public class LetterTokenizerFactory extends TokenizerFactory {
assureMatchVersion();
}

@Override
public LetterTokenizer create(Reader input) {
return new LetterTokenizer(luceneMatchVersion, input);
}

@@ -43,6 +43,7 @@ public class LowerCaseFilterFactory extends TokenFilterFactory implements MultiT
assureMatchVersion();
}

@Override
public LowerCaseFilter create(TokenStream input) {
return new LowerCaseFilter(luceneMatchVersion,input);
}

@@ -42,6 +42,7 @@ public class LowerCaseTokenizerFactory extends TokenizerFactory implements Multi
assureMatchVersion();
}

@Override
public LowerCaseTokenizer create(Reader input) {
return new LowerCaseTokenizer(luceneMatchVersion,input);
}

@@ -40,6 +40,7 @@ public class WhitespaceTokenizerFactory extends TokenizerFactory {
assureMatchVersion();
}

@Override
public WhitespaceTokenizer create(Reader input) {
return new WhitespaceTokenizer(luceneMatchVersion,input);
}
@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
* </fieldType></pre>
*/
public class CzechStemFilterFactory extends TokenFilterFactory {
@Override
public TokenStream create(TokenStream input) {
return new CzechStemFilter(input);
}

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*
*/
public class GermanLightStemFilterFactory extends TokenFilterFactory {
@Override
public TokenStream create(TokenStream input) {
return new GermanLightStemFilter(input);
}

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*
*/
public class GermanMinimalStemFilterFactory extends TokenFilterFactory {
@Override
public TokenStream create(TokenStream input) {
return new GermanMinimalStemFilter(input);
}

@@ -36,6 +36,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*/
public class GermanNormalizationFilterFactory extends TokenFilterFactory implements MultiTermAwareComponent {

@Override
public TokenStream create(TokenStream input) {
return new GermanNormalizationFilter(input);
}

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*
*/
public class GermanStemFilterFactory extends TokenFilterFactory {
@Override
public GermanStemFilter create(TokenStream in) {
return new GermanStemFilter(in);
}

@@ -48,10 +48,12 @@ public class GreekLowerCaseFilterFactory extends TokenFilterFactory implements M
+ "Please process your documents as Unicode instead.");
}

@Override
public GreekLowerCaseFilter create(TokenStream in) {
return new GreekLowerCaseFilter(luceneMatchVersion, in);
}

@Override
public AbstractAnalysisFactory getMultiTermComponent() {
return this;
}

@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
*/
public class GreekStemFilterFactory extends TokenFilterFactory {

@Override
public TokenStream create(TokenStream input) {
return new GreekStemFilter(input);
}
@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
|
|||
*
|
||||
*/
|
||||
public class EnglishMinimalStemFilterFactory extends TokenFilterFactory {
|
||||
@Override
|
||||
public TokenStream create(TokenStream input) {
|
||||
return new EnglishMinimalStemFilter(input);
|
||||
}
|
||||
|
|
|
@ -43,6 +43,7 @@ public class EnglishPossessiveFilterFactory extends TokenFilterFactory {
|
|||
assureMatchVersion();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenStream create(TokenStream input) {
|
||||
return new EnglishPossessiveFilter(luceneMatchVersion, input);
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
|
|||
*/
|
||||
public class KStemFilterFactory extends TokenFilterFactory {
|
||||
|
||||
@Override
|
||||
public TokenFilter create(TokenStream input) {
|
||||
return new KStemFilter(input);
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
|
|||
*
|
||||
*/
|
||||
public class PorterStemFilterFactory extends TokenFilterFactory {
|
||||
@Override
|
||||
public PorterStemFilter create(TokenStream input) {
|
||||
return new PorterStemFilter(input);
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
|
|||
*
|
||||
*/
|
||||
public class SpanishLightStemFilterFactory extends TokenFilterFactory {
|
||||
@Override
|
||||
public TokenStream create(TokenStream input) {
|
||||
return new SpanishLightStemFilter(input);
|
||||
}
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
|
|||
*
|
||||
*/
|
||||
public class PersianNormalizationFilterFactory extends TokenFilterFactory implements MultiTermAwareComponent {
|
||||
@Override
|
||||
public PersianNormalizationFilter create(TokenStream input) {
|
||||
return new PersianNormalizationFilter(input);
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
|
|||
*
|
||||
*/
|
||||
public class FinnishLightStemFilterFactory extends TokenFilterFactory {
|
||||
@Override
|
||||
public TokenStream create(TokenStream input) {
|
||||
return new FinnishLightStemFilter(input);
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ public final class FrenchAnalyzer extends StopwordAnalyzerBase {
|
|||
/** Default set of articles for ElisionFilter */
|
||||
public static final CharArraySet DEFAULT_ARTICLES = CharArraySet.unmodifiableSet(
|
||||
new CharArraySet(Version.LUCENE_CURRENT, Arrays.asList(
|
||||
"l", "m", "t", "qu", "n", "s", "j"), true));
|
||||
"l", "m", "t", "qu", "n", "s", "j", "d", "c", "jusqu", "quoiqu", "lorsqu", "puisqu"), true));
|
||||
|
||||
/**
|
||||
* Contains words that should be indexed but not stemmed.
|
||||
|
|
|
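The FrenchAnalyzer hunk is the one change in this stretch of the diff that is more than an added annotation: the default elision-article set gains d, c, jusqu, quoiqu, lorsqu and puisqu. A minimal sketch of building an equivalent case-insensitive, unmodifiable CharArraySet in client code, mirroring the constructor call shown above; the sketch class, and reusing Version.LUCENE_CURRENT outside the analyzer, are illustrative assumptions:

import java.util.Arrays;

import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.Version;

public class ArticleSetSketch {
  // Same construction pattern as DEFAULT_ARTICLES: the third constructor
  // argument is ignoreCase, and unmodifiableSet() freezes the result.
  public static final CharArraySet ARTICLES = CharArraySet.unmodifiableSet(
      new CharArraySet(Version.LUCENE_CURRENT, Arrays.asList(
          "l", "m", "t", "qu", "n", "s", "j",
          "d", "c", "jusqu", "quoiqu", "lorsqu", "puisqu"), true));

  public static void main(String[] args) {
    System.out.println(ARTICLES.contains("jusqu")); // true
  }
}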
@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class FrenchLightStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new FrenchLightStemFilter(input);
   }

@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class FrenchMinimalStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new FrenchMinimalStemFilter(input);
   }

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class GalicianMinimalStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new GalicianMinimalStemFilter(input);
   }

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class GalicianStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new GalicianStemFilter(input);
   }

@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class HindiNormalizationFilterFactory extends TokenFilterFactory implements MultiTermAwareComponent {
+  @Override
   public TokenStream create(TokenStream input) {
     return new HindiNormalizationFilter(input);
   }

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class HindiStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new HindiStemFilter(input);
   }

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class HungarianLightStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new HungarianLightStemFilter(input);
   }

@@ -447,6 +447,7 @@ public class HunspellDictionary {
     /**
      * {@inheritDoc}
      */
+    @Override
     public char[] parseFlags(String rawFlags) {
       return rawFlags.toCharArray();
     }
@@ -460,6 +461,7 @@ public class HunspellDictionary {
     /**
      * {@inheritDoc}
      */
+    @Override
     public char[] parseFlags(String rawFlags) {
       String[] rawFlagParts = rawFlags.trim().split(",");
       char[] flags = new char[rawFlagParts.length];
@@ -484,6 +486,7 @@ public class HunspellDictionary {
     /**
      * {@inheritDoc}
      */
+    @Override
     public char[] parseFlags(String rawFlags) {
       if (rawFlags.length() == 0) {
         return new char[0];

@@ -67,6 +67,7 @@ public class HunspellStemFilterFactory extends TokenFilterFactory implements Res
    *
    * @param loader ResourceLoader used to load the files
    */
+  @Override
   public void inform(ResourceLoader loader) throws IOException {
     assureMatchVersion();
     String dictionaryArg = args.get(PARAM_DICTIONARY);
@@ -116,6 +117,7 @@ public class HunspellStemFilterFactory extends TokenFilterFactory implements Res
    * @param tokenStream TokenStream that will be filtered
    * @return HunspellStemFilter that filters the TokenStream
    */
+  @Override
   public TokenStream create(TokenStream tokenStream) {
     return new HunspellStemFilter(tokenStream, dictionary);
   }

@@ -44,6 +44,7 @@ public class IndonesianStemFilterFactory extends TokenFilterFactory {
     stemDerivational = getBoolean("stemDerivational", true);
   }
 
+  @Override
   public TokenStream create(TokenStream input) {
     return new IndonesianStemFilter(input, stemDerivational);
   }

@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class IndicNormalizationFilterFactory extends TokenFilterFactory implements MultiTermAwareComponent {
+  @Override
   public TokenStream create(TokenStream input) {
     return new IndicNormalizationFilter(input);
   }

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class ItalianLightStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new ItalianLightStemFilter(input);
   }

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  * </fieldType></pre>
  */
 public class LatvianStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new LatvianStemFilter(input);
   }

@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.TokenStream;
  *
  */
 public class ASCIIFoldingFilterFactory extends TokenFilterFactory implements MultiTermAwareComponent {
+  @Override
   public ASCIIFoldingFilter create(TokenStream input) {
     return new ASCIIFoldingFilter(input);
   }

@@ -133,6 +133,7 @@ public class CapitalizationFilterFactory extends TokenFilterFactory {
     }
   }
 
+  @Override
   public CapitalizationFilter create(TokenStream input) {
     return new CapitalizationFilter(input, onlyFirstWord, keep,
         forceFirstLetter, okPrefix, minWordLength, maxWordCount, maxTokenLength);

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class HyphenatedWordsFilterFactory extends TokenFilterFactory {
+  @Override
   public HyphenatedWordsFilter create(TokenStream input) {
     return new HyphenatedWordsFilter(input);
   }

@@ -44,6 +44,7 @@ public class KeepWordFilterFactory extends TokenFilterFactory implements Resourc
     assureMatchVersion();
   }
 
+  @Override
   public void inform(ResourceLoader loader) throws IOException {
     String wordFiles = args.get("words");
     ignoreCase = getBoolean("ignoreCase", false);
@@ -85,6 +86,7 @@ public class KeepWordFilterFactory extends TokenFilterFactory implements Resourc
     return words;
   }
 
+  @Override
   public TokenStream create(TokenStream input) {
     // if the set is null, it means it was empty
     return words == null ? input : new KeepWordFilter(enablePositionIncrements, input, words);

@@ -39,6 +39,7 @@ public class KeywordMarkerFilterFactory extends TokenFilterFactory implements Re
   private CharArraySet protectedWords;
   private boolean ignoreCase;
 
+  @Override
   public void inform(ResourceLoader loader) throws IOException {
     String wordFiles = args.get(PROTECTED_TOKENS);
     ignoreCase = getBoolean("ignoreCase", false);
@@ -51,6 +52,7 @@ public class KeywordMarkerFilterFactory extends TokenFilterFactory implements Re
     return ignoreCase;
   }
 
+  @Override
   public TokenStream create(TokenStream input) {
     return protectedWords == null ? input : new KeywordMarkerFilter(input, protectedWords);
   }

@@ -53,6 +53,7 @@ public class LengthFilterFactory extends TokenFilterFactory {
     enablePositionIncrements = getBoolean("enablePositionIncrements",false);
   }
 
+  @Override
   public LengthFilter create(TokenStream input) {
     return new LengthFilter(enablePositionIncrements, input,min,max);
   }

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class RemoveDuplicatesTokenFilterFactory extends TokenFilterFactory {
+  @Override
   public RemoveDuplicatesTokenFilter create(TokenStream input) {
     return new RemoveDuplicatesTokenFilter(input);
   }

@@ -39,6 +39,7 @@ public class StemmerOverrideFilterFactory extends TokenFilterFactory implements
   private CharArrayMap<String> dictionary = null;
   private boolean ignoreCase;
 
+  @Override
   public void inform(ResourceLoader loader) throws IOException {
     String dictionaryFiles = args.get("dictionary");
     ignoreCase = getBoolean("ignoreCase", false);
@@ -63,6 +64,7 @@ public class StemmerOverrideFilterFactory extends TokenFilterFactory implements
     return ignoreCase;
   }
 
+  @Override
   public TokenStream create(TokenStream input) {
     return dictionary == null ? input : new StemmerOverrideFilter(luceneMatchVersion, input, dictionary);
   }

@@ -49,6 +49,7 @@ public class TrimFilterFactory extends TokenFilterFactory {
     }
   }
 
+  @Override
   public TrimFilter create(TokenStream input) {
     return new TrimFilter(input, updateOffsets);
   }

@@ -202,6 +202,7 @@ public final class WordDelimiterFilter extends TokenFilter {
     this(in, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, configurationFlags, protWords);
   }
 
+  @Override
   public boolean incrementToken() throws IOException {
     while (true) {
       if (!hasSavedState) {

@@ -53,6 +53,7 @@ public class WordDelimiterFilterFactory extends TokenFilterFactory implements Re
   public static final String PROTECTED_TOKENS = "protected";
   public static final String TYPES = "types";
 
+  @Override
   public void inform(ResourceLoader loader) throws IOException {
     String wordFiles = args.get(PROTECTED_TOKENS);
     if (wordFiles != null) {
@@ -106,6 +107,7 @@ public class WordDelimiterFilterFactory extends TokenFilterFactory implements Re
     }
   }
 
+  @Override
   public WordDelimiterFilter create(TokenStream input) {
     return new WordDelimiterFilter(input, typeTable == null ? WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE : typeTable,
                                    flags, protectedWords);
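The WordDelimiterFilter hunks show the four-argument constructor that the factory's create() ends up calling: the input stream, a character-type table (the default WordDelimiterIterator table unless a custom one is configured), an int of OR-ed configuration flags, and an optional protected-word set. A minimal sketch of calling it directly; the particular flag constants, Version constant, and sample text are assumptions for illustration, and null simply disables protected words:

import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;

public class WordDelimiterSketch {
  public static void main(String[] args) throws Exception {
    // OR together the desired behaviour; these are int flags on WordDelimiterFilter.
    int flags = WordDelimiterFilter.GENERATE_WORD_PARTS
              | WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;

    TokenStream ts = new WordDelimiterFilter(
        new WhitespaceTokenizer(Version.LUCENE_40, new StringReader("PowerShot wi-fi")),
        WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, // same default table the factory falls back to
        flags,
        null); // no protected words

    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    ts.reset();
    while (ts.incrementToken()) {
      System.out.println(term.toString());
    }
    ts.end();
    ts.close();
  }
}

With these flags the filter should split "PowerShot" on the case change and "wi-fi" on the hyphen.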
@@ -57,6 +57,7 @@ public class EdgeNGramFilterFactory extends TokenFilterFactory {
     }
   }
 
+  @Override
   public EdgeNGramTokenFilter create(TokenStream input) {
     return new EdgeNGramTokenFilter(input, side, minGramSize, maxGramSize);
   }

@@ -55,6 +55,7 @@ public class EdgeNGramTokenizerFactory extends TokenizerFactory {
     }
   }
 
+  @Override
   public EdgeNGramTokenizer create(Reader input) {
     return new EdgeNGramTokenizer(input, side, minGramSize, maxGramSize);
   }

@@ -51,6 +51,7 @@ public class NGramFilterFactory extends TokenFilterFactory {
         : NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
   }
 
+  @Override
   public NGramTokenFilter create(TokenStream input) {
     return new NGramTokenFilter(input, minGramSize, maxGramSize);
   }

@@ -50,6 +50,7 @@ public class NGramTokenizerFactory extends TokenizerFactory {
   }
 
   /** Creates the {@link TokenStream} of n-grams from the given {@link Reader}. */
+  @Override
   public NGramTokenizer create(Reader input) {
     return new NGramTokenizer(input, minGramSize, maxGramSize);
   }
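All four n-gram factories above forward minGramSize and maxGramSize straight into the corresponding constructor. A minimal sketch of driving NGramTokenizer directly through the (input, minGramSize, maxGramSize) constructor shown in the hunk; the gram sizes and the sample text are illustrative:

import java.io.StringReader;

import org.apache.lucene.analysis.ngram.NGramTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class NGramSketch {
  public static void main(String[] args) throws Exception {
    // Emit all 2- and 3-character grams of the input.
    NGramTokenizer tokenizer = new NGramTokenizer(new StringReader("lucene"), 2, 3);

    CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
    tokenizer.reset();
    while (tokenizer.incrementToken()) {
      System.out.println(term.toString()); // 2-grams first (lu, uc, ...), then 3-grams (luc, uce, ...)
    }
    tokenizer.end();
    tokenizer.close();
  }
}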
@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  * </fieldType></pre>
  */
 public class NorwegianLightStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new NorwegianLightStemFilter(input);
   }

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  * </fieldType></pre>
  */
 public class NorwegianMinimalStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new NorwegianMinimalStemFilter(input);
   }

@@ -120,6 +120,7 @@ public class PathHierarchyTokenizerFactory extends TokenizerFactory {
     }
   }
 
+  @Override
   public Tokenizer create(Reader input) {
     if( reverse ) {
       return new ReversePathHierarchyTokenizer(input, delimiter, replacement, skip);

@@ -54,6 +54,7 @@ public class PatternReplaceCharFilterFactory extends CharFilterFactory {
     // TODO: throw exception if you set maxBlockChars or blockDelimiters ?
   }
 
+  @Override
   public CharFilter create(Reader input) {
     return new PatternReplaceCharFilter( p, replacement, input );
   }

@@ -65,6 +65,7 @@ public class PatternReplaceFilterFactory extends TokenFilterFactory {
     }
 
   }
+  @Override
   public PatternReplaceFilter create(TokenStream input) {
     return new PatternReplaceFilter(input, p, replacement, all);
   }

@@ -90,6 +90,7 @@ public class PatternTokenizerFactory extends TokenizerFactory
   /**
    * Split the input using configured pattern
    */
+  @Override
   public Tokenizer create(final Reader in) {
     try {
       return new PatternTokenizer(in, pattern, group);

@@ -26,6 +26,7 @@ import org.apache.lucene.util.BytesRef;
  *
  **/
 public abstract class AbstractEncoder implements PayloadEncoder {
+  @Override
   public BytesRef encode(char[] buffer) {
     return encode(buffer, 0, buffer.length);
   }

@@ -49,6 +49,7 @@ public class DelimitedPayloadTokenFilterFactory extends TokenFilterFactory imple
   private PayloadEncoder encoder;
   private char delimiter = '|';
 
+  @Override
   public DelimitedPayloadTokenFilter create(TokenStream input) {
     return new DelimitedPayloadTokenFilter(input, delimiter, encoder);
   }
@@ -58,6 +59,7 @@ public class DelimitedPayloadTokenFilterFactory extends TokenFilterFactory imple
     super.init(args);
   }
 
+  @Override
   public void inform(ResourceLoader loader) {
     String encoderClass = args.get(ENCODER_ATTR);
     if (encoderClass == null) {

@@ -27,6 +27,7 @@ import org.apache.lucene.util.BytesRef;
  **/
 public class FloatEncoder extends AbstractEncoder implements PayloadEncoder {
 
+  @Override
   public BytesRef encode(char[] buffer, int offset, int length) {
     float payload = Float.parseFloat(new String(buffer, offset, length));//TODO: improve this so that we don't have to new Strings
     byte[] bytes = PayloadHelper.encodeFloat(payload);

@@ -37,6 +37,7 @@ public class IdentityEncoder extends AbstractEncoder implements PayloadEncoder{
     this.charset = charset;
   }
 
+  @Override
   public BytesRef encode(char[] buffer, int offset, int length) {
     final ByteBuffer bb = charset.encode(CharBuffer.wrap(buffer, offset, length));
     if (bb.hasArray()) {

@@ -28,6 +28,7 @@ import org.apache.lucene.util.BytesRef;
  **/
 public class IntegerEncoder extends AbstractEncoder implements PayloadEncoder {
 
+  @Override
   public BytesRef encode(char[] buffer, int offset, int length) {
     int payload = ArrayUtil.parseInt(buffer, offset, length);//TODO: improve this so that we don't have to new Strings
     byte[] bytes = PayloadHelper.encodeInt(payload);
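The payload encoders above delegate the byte-level work to PayloadHelper: FloatEncoder calls PayloadHelper.encodeFloat, IntegerEncoder calls PayloadHelper.encodeInt, and each encode() returns the bytes wrapped in a BytesRef. A minimal sketch of those calls, plus reading the values back; the decode helpers are assumed to come from the same PayloadHelper class, and the sample values are illustrative:

import org.apache.lucene.analysis.payloads.PayloadHelper;
import org.apache.lucene.util.BytesRef;

public class PayloadEncodingSketch {
  public static void main(String[] args) {
    // FloatEncoder path: a 4-byte IEEE-754 float wrapped as a payload.
    byte[] floatBytes = PayloadHelper.encodeFloat(1.5f);
    BytesRef floatPayload = new BytesRef(floatBytes);

    // IntegerEncoder path: a 4-byte int.
    byte[] intBytes = PayloadHelper.encodeInt(42);
    BytesRef intPayload = new BytesRef(intBytes);

    System.out.println(floatPayload.length + " " + intPayload.length); // 4 4

    // Decoding mirrors the encoding.
    float f = PayloadHelper.decodeFloat(floatPayload.bytes, floatPayload.offset);
    int i = PayloadHelper.decodeInt(intPayload.bytes, intPayload.offset);
    System.out.println(f + " " + i); // 1.5 42
  }
}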
@@ -46,6 +46,7 @@ public class NumericPayloadTokenFilterFactory extends TokenFilterFactory {
     }
     payload = Float.parseFloat(payloadArg);
   }
+  @Override
   public NumericPayloadTokenFilter create(TokenStream input) {
     return new NumericPayloadTokenFilter(input,payload,typeMatch);
   }

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class TokenOffsetPayloadTokenFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenOffsetPayloadTokenFilter create(TokenStream input) {
     return new TokenOffsetPayloadTokenFilter(input);
   }

@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class TypeAsPayloadTokenFilterFactory extends TokenFilterFactory {
+  @Override
   public TypeAsPayloadTokenFilter create(TokenStream input) {
     return new TypeAsPayloadTokenFilter(input);
   }

@@ -48,6 +48,7 @@ public class PositionFilterFactory extends TokenFilterFactory {
     positionIncrement = getInt("positionIncrement", 0);
   }
 
+  @Override
   public PositionFilter create(TokenStream input) {
     return new PositionFilter(input, positionIncrement);
   }

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  *
  */
 public class PortugueseLightStemFilterFactory extends TokenFilterFactory {
+  @Override
   public TokenStream create(TokenStream input) {
     return new PortugueseLightStemFilter(input);
   }
Some files were not shown because too many files have changed in this diff.