mirror of https://github.com/apache/lucene.git

commit 4be688e68d

    merged with trunk

    git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/LUCENE2793@1143719 13f79535-47bb-0310-9956-ffa450edef68

@@ -0,0 +1,74 @@
# /
/classes
build
dist
*~
velocity.log
build.properties
.idea
*.iml
*.ipr
*.iws
/.project
/.classpath
/.settings
prj.el
bin
pom.xml
target
lucene-libs

# /lucene/

# /lucene/contrib/db/bdb/
/lucene/contrib/db/bdb/lib
/lucene/contrib/db/bdb/index

# /lucene/contrib/db/bdb-je/
/lucene/contrib/db/bdb-je/lib
/lucene/contrib/db/bdb-je/index

# /lucene/src/java/org/apache/lucene/queryParser/
/lucene/src/java/org/apache/lucene/queryParser/QueryParser.java
/lucene/src/java/org/apache/lucene/queryParser/TokenMgrError.java
/lucene/src/java/org/apache/lucene/queryParser/ParseException.java
/lucene/src/java/org/apache/lucene/queryParser/Token.java
/lucene/src/java/org/apache/lucene/queryParser/TokenManager.java
/lucene/src/java/org/apache/lucene/queryParser/QueryParserConstants.java

# /lucene/src/java/org/apache/lucene/util/automaton/
/lucene/src/java/org/apache/lucene/util/automaton/moman

# /modules/analysis/common/src/java/org/apache/lucene/analysis/standard/
/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/Token.java
/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/StandardTokenizerTokenManager.java
/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/TokenMgrError.java
/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/CharStream.java
/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/StandardTokenizerConstants.java

# /modules/benchmark/
/modules/benchmark/temp
/modules/benchmark/work

# /solr/
/solr/.project
/solr/.classpath
/solr/.settings
/solr/bin
/solr/luke
/solr/package

# /solr/example
/solr/example/webapps
/solr/example/work
/solr/example/solr/data
/solr/example/exampledocs/post.jar

# /solr/client/ruby/flare/
/solr/client/ruby/flare/tmp

@@ -38,10 +38,17 @@
<classpathentry kind="src" path="modules/analysis/stempel/src/java"/>
<classpathentry kind="src" path="modules/analysis/stempel/src/resources"/>
<classpathentry kind="src" path="modules/analysis/stempel/src/test"/>
<classpathentry kind="src" path="modules/analysis/morfologik/src/java"/>
<classpathentry kind="src" path="modules/analysis/morfologik/src/test"/>
<classpathentry kind="src" path="modules/benchmark/src/java"/>
<classpathentry kind="src" path="modules/benchmark/src/test"/>
<classpathentry kind="src" path="modules/facet/src/java"/>
<classpathentry kind="src" path="modules/facet/src/examples"/>
<classpathentry kind="src" path="modules/facet/src/test"/>
<classpathentry kind="src" path="modules/grouping/src/java"/>
<classpathentry kind="src" path="modules/grouping/src/test"/>
<classpathentry kind="src" path="modules/queries/src/java"/>
<classpathentry kind="src" path="modules/queries/src/test"/>
<classpathentry kind="src" path="modules/suggest/src/java"/>
<classpathentry kind="src" path="modules/suggest/src/test"/>
<classpathentry kind="src" path="solr/src/java"/>

@@ -76,6 +83,9 @@
<classpathentry kind="lib" path="lucene/contrib/queries/lib/jakarta-regexp-1.4.jar"/>
<classpathentry kind="lib" path="modules/analysis/icu/lib/icu4j-4_8.jar"/>
<classpathentry kind="lib" path="modules/analysis/phonetic/lib/commons-codec-1.4.jar"/>
<classpathentry kind="lib" path="modules/analysis/morfologik/lib/morfologik-fsa-1.5.2.jar"/>
<classpathentry kind="lib" path="modules/analysis/morfologik/lib/morfologik-polish-1.5.2.jar"/>
<classpathentry kind="lib" path="modules/analysis/morfologik/lib/morfologik-stemming-1.5.2.jar"/>
<classpathentry kind="lib" path="modules/benchmark/lib/commons-beanutils-1.7.0.jar"/>
<classpathentry kind="lib" path="modules/benchmark/lib/commons-collections-3.1.jar"/>
<classpathentry kind="lib" path="modules/benchmark/lib/commons-compress-1.1.jar"/>

@@ -158,7 +158,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.compiler.source" value="1.5"/>
<setting id="org.eclipse.jdt.core.compiler.source" value="1.6"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>

@@ -167,7 +167,7 @@
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.5"/>
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.6"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>

@@ -203,7 +203,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.5"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.6"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="do not insert"/>

@@ -15,11 +15,14 @@
<buildFile url="file://$PROJECT_DIR$/lucene/contrib/xml-query-parser/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/analysis/common/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/analysis/icu/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/analysis/morfologik/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/analysis/phonetic/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/analysis/smartcn/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/analysis/stempel/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/benchmark/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/facet/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/grouping/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/join/build.xml" />
<buildFile url="file://$PROJECT_DIR$/modules/suggest/build.xml" />
<buildFile url="file://$PROJECT_DIR$/solr/build.xml" />
<buildFile url="file://$PROJECT_DIR$/solr/contrib/analysis-extras/build.xml" />

@@ -9,18 +9,22 @@
<module filepath="$PROJECT_DIR$/lucene/contrib/instantiated/instantiated.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/memory/memory.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/misc/misc.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/queries/queries.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/queryparser/queryparser.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/queries/queries-contrib.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/queryparser/queryparser-contrib.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/spatial/spatial.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/wordnet/wordnet.iml" />
<module filepath="$PROJECT_DIR$/lucene/contrib/xml-query-parser/xml-query-parser.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/common/common.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/common/analysis-common.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/icu/icu.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/morfologik/morfologik.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/phonetic/phonetic.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/smartcn/smartcn.iml" />
<module filepath="$PROJECT_DIR$/modules/analysis/stempel/stempel.iml" />
<module filepath="$PROJECT_DIR$/modules/benchmark/benchmark.iml" />
<module filepath="$PROJECT_DIR$/modules/facet/facet.iml" />
<module filepath="$PROJECT_DIR$/modules/grouping/grouping.iml" />
<module filepath="$PROJECT_DIR$/modules/join/join.iml" />
<module filepath="$PROJECT_DIR$/modules/queries/queries.iml" />
<module filepath="$PROJECT_DIR$/modules/suggest/suggest.iml" />
<module filepath="$PROJECT_DIR$/solr/solr.iml" />
<module filepath="$PROJECT_DIR$/solr/contrib/analysis-extras/analysis-extras.iml" />

@@ -23,7 +23,7 @@
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="common analysis module" type="JUnit" factoryName="JUnit">
<module name="common" />
<module name="analysis-common" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/modules/analysis/build/common" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />

@@ -50,6 +50,13 @@
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp -Djetty.testMode=1 -Djetty.insecurerandom=1 -Dsolr.directoryFactory=org.apache.solr.core.MockDirectoryFactory" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="facet module" type="JUnit" factoryName="JUnit">
<module name="facet" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/modules/facet/build" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="grouping module" type="JUnit" factoryName="JUnit">
<module name="grouping" />
<option name="TEST_OBJECT" value="package" />

@@ -78,6 +85,13 @@
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="join module" type="JUnit" factoryName="JUnit">
<module name="join" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/modules/join/build" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="lucene" type="JUnit" factoryName="JUnit">
<module name="lucene" />
<option name="TEST_OBJECT" value="package" />

@@ -99,6 +113,13 @@
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="morfologik analysis module" type="JUnit" factoryName="JUnit">
<module name="morfologik" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/modules/analysis/build/morfologik" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="phonetic analysis module" type="JUnit" factoryName="JUnit">
<module name="phonetic" />
<option name="TEST_OBJECT" value="package" />

@@ -107,14 +128,21 @@
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="queries contrib" type="JUnit" factoryName="JUnit">
<module name="queries" />
<module name="queries-contrib" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/lucene/build/contrib/queries" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="queries module" type="JUnit" factoryName="JUnit">
<module name="queries" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/modules/queries/build" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<configuration default="false" name="queryparser contrib" type="JUnit" factoryName="JUnit">
<module name="queryparser" />
<module name="queryparser-contrib" />
<option name="TEST_OBJECT" value="package" />
<option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/lucene/build/contrib/queryparser" />
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />

@@ -176,7 +204,7 @@
<option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
<option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
</configuration>
<list size="25">
<list size="29">
<item index="0" class="java.lang.String" itemvalue="JUnit.analysis-extras contrib" />
<item index="1" class="java.lang.String" itemvalue="JUnit.benchmark module" />
<item index="2" class="java.lang.String" itemvalue="JUnit.clustering contrib" />

@@ -184,24 +212,28 @@
<item index="4" class="java.lang.String" itemvalue="JUnit.dataimporthandler contrib" />
<item index="5" class="java.lang.String" itemvalue="JUnit.extraction contrib" />
<item index="6" class="java.lang.String" itemvalue="JUnit.extras from dataimporthandler contrib" />
<item index="7" class="java.lang.String" itemvalue="JUnit.grouping module" />
<item index="8" class="java.lang.String" itemvalue="JUnit.highlighter contrib" />
<item index="9" class="java.lang.String" itemvalue="JUnit.icu analysis module" />
<item index="10" class="java.lang.String" itemvalue="JUnit.instantiated contrib" />
<item index="11" class="java.lang.String" itemvalue="JUnit.lucene" />
<item index="12" class="java.lang.String" itemvalue="JUnit.memory contrib" />
<item index="13" class="java.lang.String" itemvalue="JUnit.misc contrib" />
<item index="14" class="java.lang.String" itemvalue="JUnit.phonetic analysis module" />
<item index="15" class="java.lang.String" itemvalue="JUnit.queries contrib" />
<item index="16" class="java.lang.String" itemvalue="JUnit.queryparser contrib" />
<item index="17" class="java.lang.String" itemvalue="JUnit.smartcn analysis module" />
<item index="18" class="java.lang.String" itemvalue="JUnit.solr" />
<item index="19" class="java.lang.String" itemvalue="JUnit.spatial contrib" />
<item index="20" class="java.lang.String" itemvalue="JUnit.stempel analysis module" />
<item index="21" class="java.lang.String" itemvalue="JUnit.suggest module" />
<item index="22" class="java.lang.String" itemvalue="JUnit.uima contrib" />
<item index="23" class="java.lang.String" itemvalue="JUnit.wordnet contrib" />
<item index="24" class="java.lang.String" itemvalue="JUnit.xml-query-parser contrib" />
<item index="7" class="java.lang.String" itemvalue="JUnit.facet module" />
<item index="8" class="java.lang.String" itemvalue="JUnit.grouping module" />
<item index="9" class="java.lang.String" itemvalue="JUnit.highlighter contrib" />
<item index="10" class="java.lang.String" itemvalue="JUnit.icu analysis module" />
<item index="11" class="java.lang.String" itemvalue="JUnit.instantiated contrib" />
<item index="12" class="java.lang.String" itemvalue="JUnit.join module" />
<item index="13" class="java.lang.String" itemvalue="JUnit.lucene" />
<item index="14" class="java.lang.String" itemvalue="JUnit.memory contrib" />
<item index="15" class="java.lang.String" itemvalue="JUnit.misc contrib" />
<item index="16" class="java.lang.String" itemvalue="JUnit.morfologik analysis module" />
<item index="17" class="java.lang.String" itemvalue="JUnit.phonetic analysis module" />
<item index="18" class="java.lang.String" itemvalue="JUnit.queries contrib" />
<item index="19" class="java.lang.String" itemvalue="JUnit.queries module" />
<item index="20" class="java.lang.String" itemvalue="JUnit.queryparser contrib" />
<item index="21" class="java.lang.String" itemvalue="JUnit.smartcn analysis module" />
<item index="22" class="java.lang.String" itemvalue="JUnit.solr" />
<item index="23" class="java.lang.String" itemvalue="JUnit.spatial contrib" />
<item index="24" class="java.lang.String" itemvalue="JUnit.stempel analysis module" />
<item index="25" class="java.lang.String" itemvalue="JUnit.suggest module" />
<item index="26" class="java.lang.String" itemvalue="JUnit.uima contrib" />
<item index="27" class="java.lang.String" itemvalue="JUnit.wordnet contrib" />
<item index="28" class="java.lang.String" itemvalue="JUnit.xml-query-parser contrib" />
</list>
</component>
</project>

@@ -11,7 +11,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -12,7 +12,7 @@
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="memory" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="lucene" />
</component>

@@ -11,7 +11,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="lucene" />
</component>

@@ -11,7 +11,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="lucene" />
</component>

@@ -11,7 +11,6 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -12,7 +12,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="lucene" />
</component>

@@ -11,7 +11,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="lucene" />
</component>

@@ -11,9 +11,8 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -14,9 +14,9 @@
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="library" name="Servlet API 2.4" level="project" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -23,7 +23,7 @@
</library>
</orderEntry>
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/../build/morfologik/classes/java" />
<output-test url="file://$MODULE_DIR$/../build/morfologik/classes/test" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module-library">
<library>
<CLASSES>
<root url="file://$MODULE_DIR$/lib" />
</CLASSES>
<JAVADOC />
<SOURCES />
<jarDirectory url="file://$MODULE_DIR$/lib" recursive="false" />
</library>
</orderEntry>
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -21,7 +21,7 @@
</library>
</orderEntry>
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -12,7 +12,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -12,7 +12,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -25,10 +25,10 @@
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="highlighter" />
<orderEntry type="module" module-name="icu" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="memory" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/build/classes/java" />
<output-test url="file://$MODULE_DIR$/build/classes/test" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/work" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/build/classes/java" />
<output-test url="file://$MODULE_DIR$/build/classes/test" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/work" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="grouping" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/build/classes/java" />
<output-test url="file://$MODULE_DIR$/build/classes/test" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/work" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -11,7 +11,6 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="JUnit" level="project" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -28,7 +28,7 @@
<orderEntry type="module" module-name="smartcn" />
<orderEntry type="module" module-name="solr" />
<orderEntry type="module" module-name="stempel" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -29,11 +29,11 @@
<orderEntry type="module" module-name="memory" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="phonetic" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="solr" />
<orderEntry type="module" module-name="spatial" />
<orderEntry type="module" module-name="suggest" />
<orderEntry type="module" module-name="lucene" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
</component>
</module>

@@ -16,7 +16,7 @@
<orderEntry type="library" name="Solr library" level="project" />
<orderEntry type="library" name="Solr DIH library" level="project" />
<orderEntry type="module" module-name="solr" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -26,11 +26,12 @@
<orderEntry type="module" module-name="grouping" />
<orderEntry type="module" module-name="highlighter" />
<orderEntry type="module" module-name="icu" />
<orderEntry type="module" module-name="queries-contrib" />
<orderEntry type="module" module-name="queries" />
<orderEntry type="module" module-name="misc" />
<orderEntry type="module" module-name="phonetic" />
<orderEntry type="module" module-name="suggest" />
<orderEntry type="module" module-name="common" />
<orderEntry type="module" module-name="analysis-common" />
<orderEntry type="module" module-name="lucene" />
</component>
</module>

@@ -21,7 +21,7 @@ A. How to use nightly Jenkins-built Lucene/Solr Maven artifacts

B. How to generate Lucene Maven artifacts

1. Prerequisites: JDK 1.5+ and Ant 1.7.X
1. Prerequisites: JDK 1.6+ and Ant 1.7.X

2. Run the following command from the lucene/ directory:

@@ -59,8 +59,7 @@ D. How to use Maven to build Lucene/Solr

The details, followed by some example Maven commands:

1. Prerequisites: JDK 1.5+ (for Lucene); JDK 1.6+ (for Solr);
Maven 2.2.1 or 3.0.X
1. Prerequisites: JDK 1.6+ and Maven 2.2.1 or 3.0.X

2. Make sure your sources are up to date. If you checked your sources out
from the Apache Subversion repository, run "svn update" from the top

@@ -56,7 +56,7 @@
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-queries</artifactId>
<artifactId>lucene-queries-contrib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>

@@ -54,11 +54,6 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>

@@ -27,9 +27,9 @@
<relativePath>../../pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
<artifactId>lucene-queries-contrib</artifactId>
<packaging>jar</packaging>
<name>Lucene Queries</name>
<name>Lucene Queries Contrib</name>
<description>
Queries - various query object exotica not in core
</description>

@@ -27,7 +27,7 @@
<relativePath>../../pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
<artifactId>lucene-queryparser-contrib</artifactId>
<packaging>jar</packaging>
<name>Lucene Query Parser</name>
<description>

@@ -49,7 +49,7 @@
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-queries</artifactId>
<artifactId>lucene-queries-contrib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>

@@ -47,11 +47,6 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>

@@ -49,7 +49,7 @@
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-queries</artifactId>
<artifactId>lucene-queries-contrib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>

@@ -0,0 +1,89 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-parent</artifactId>
<version>@version@</version>
<relativePath>../../../lucene/pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-morfologik</artifactId>
<packaging>jar</packaging>
<name>Lucene Morfologik Polish Lemmatizer</name>
<description>
A dictionary-driven lemmatizer for Polish (includes morphosyntactic annotations)
</description>
<properties>
<module-directory>modules/analysis/morfologik</module-directory>
<build-directory>../build/morfologik</build-directory>
</properties>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-test-framework</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.carrot2</groupId>
<artifactId>morfologik-polish</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<directory>${build-directory}</directory>
<outputDirectory>${build-directory}/classes/java</outputDirectory>
<testOutputDirectory>${build-directory}/classes/test</testOutputDirectory>
<sourceDirectory>src/java</sourceDirectory>
<testSourceDirectory>src/test</testSourceDirectory>
<testResources>
<testResource>
<directory>${project.build.testSourceDirectory}</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</testResource>
</testResources>
</build>
</project>

@@ -33,6 +33,7 @@
<modules>
<module>common</module>
<module>icu</module>
<module>morfologik</module>
<module>phonetic</module>
<module>smartcn</module>
<module>stempel</module>

@@ -0,0 +1,98 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-parent</artifactId>
<version>@version@</version>
<relativePath>../../lucene/pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-facet</artifactId>
<packaging>jar</packaging>
<name>Lucene Facets</name>
<description>
Package for Faceted Indexing and Search
</description>
<properties>
<module-directory>modules/facet</module-directory>
<build-directory>build</build-directory>
</properties>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-test-framework</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<directory>${build-directory}</directory>
<outputDirectory>${build-directory}/classes/java</outputDirectory>
<testOutputDirectory>${build-directory}/classes/test</testOutputDirectory>
<sourceDirectory>src/java</sourceDirectory>
<testSourceDirectory>src/test</testSourceDirectory>
<testResources>
<testResource>
<directory>${project.build.testSourceDirectory}</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>src/examples</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

@@ -0,0 +1,76 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-parent</artifactId>
<version>@version@</version>
<relativePath>../../lucene/pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-join</artifactId>
<packaging>jar</packaging>
<name>Lucene Join</name>
<description>Lucene Join Module</description>
<properties>
<module-directory>modules/join</module-directory>
<build-directory>build</build-directory>
</properties>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-grouping</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-test-framework</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<directory>${build-directory}</directory>
<outputDirectory>${build-directory}/classes/java</outputDirectory>
<testOutputDirectory>${build-directory}/classes/test</testOutputDirectory>
<sourceDirectory>src/java</sourceDirectory>
<testSourceDirectory>src/test</testSourceDirectory>
<testResources>
<testResource>
<directory>${project.build.testSourceDirectory}</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</testResource>
</testResources>
</build>
</project>

@@ -33,7 +33,10 @@
<modules>
<module>analysis</module>
<module>benchmark</module>
<module>facet</module>
<module>grouping</module>
<module>join</module>
<module>queries</module>
<module>suggest</module>
</modules>
<build>

@@ -0,0 +1,71 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-parent</artifactId>
<version>@version@</version>
<relativePath>../../lucene/pom.xml</relativePath>
</parent>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
<packaging>jar</packaging>
<name>Lucene Queries</name>
<description>Lucene Queries Module</description>
<properties>
<module-directory>modules/queries</module-directory>
<build-directory>build</build-directory>
</properties>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-test-framework</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<directory>${build-directory}</directory>
<outputDirectory>${build-directory}/classes/java</outputDirectory>
<testOutputDirectory>${build-directory}/classes/test</testOutputDirectory>
<sourceDirectory>src/java</sourceDirectory>
<testSourceDirectory>src/test</testSourceDirectory>
<testResources>
<testResource>
<directory>${project.build.testSourceDirectory}</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</testResource>
</testResources>
</build>
</project>

@@ -41,7 +41,7 @@
<properties>
<base.specification.version>4.0.0</base.specification.version>
<maven.build.timestamp.format>yyyy-MM-dd HH:mm:ss</maven.build.timestamp.format>
<java.compat.version>1.5</java.compat.version>
<java.compat.version>1.6</java.compat.version>
<jetty.version>6.1.26</jetty.version>
<patched.jetty.version>6.1.26-patched-JETTY-1340</patched.jetty.version>
<slf4j.version>1.6.1</slf4j.version>

@@ -280,6 +280,11 @@
<artifactId>carrot2-core</artifactId>
<version>3.5.0</version>
</dependency>
<dependency>
<groupId>org.carrot2</groupId>
<artifactId>morfologik-polish</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>wstx-asl</artifactId>

@@ -78,7 +78,12 @@
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
<artifactId>lucene-queries-contrib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries-contrib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>

@@ -96,6 +101,11 @@
<artifactId>lucene-grouping</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-commons-csv</artifactId>

@@ -151,6 +161,11 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-jdk14</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>wstx-asl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>

@@ -29,6 +29,9 @@ import HTMLParser

# http://s.apache.org/lusolr32rc2

JAVA5_HOME = '/usr/local/src/jdk1.5.0_22'
JAVA6_HOME = '/usr/local/src/jdk1.6.0_21'

# TODO
# + verify KEYS contains key that signed the release
# + make sure changes HTML looks ok

@@ -212,13 +215,40 @@ def testChanges(project, version, changesURLString):
raise RuntimeError('did not see Contrib-Changes.html link from %s' % changesURLString)

s = load(changesURL)
checkChangesContent(s, version, changesURL, project, True)

if s.find('Release %s' % version) == -1:
raise RuntimeError('did not see "Release %s" in %s' % (version, changesURL))
def testChangesText(dir, version, project):
"Checks all CHANGES.txt under this dir."
for root, dirs, files in os.walk(dir):

# NOTE: O(N) but N should be smallish:
if 'CHANGES.txt' in files:
fullPath = '%s/CHANGES.txt' % root
print 'CHECK %s' % fullPath
checkChangesContent(open(fullPath).read(), version, fullPath, project, False)

def checkChangesContent(s, version, name, project, isHTML):

if isHTML and s.find('Release %s' % version) == -1:
raise RuntimeError('did not see "Release %s" in %s' % (version, name))

if s.lower().find('not yet released') != -1:
raise RuntimeError('saw "not yet released" in %s' % name)

if not isHTML:
if project == 'lucene':
sub = 'Lucene %s' % version
else:
sub = version

if s.find(sub) == -1:
# contrib/benchmark never seems to include release info:
if name.find('/benchmark/') == -1:
raise RuntimeError('did not see "%s" in %s' % (sub, name))

def run(command, logFile):
if os.system('%s > %s 2>&1' % (command, logFile)):
raise RuntimeError('command "%s" failed; see log file %s' % (command, logFile))
raise RuntimeError('command "%s" failed; see log file %s/%s' % (command, os.getcwd(), logFile))

def verifyDigests(artifact, urlString, tmpDir):
print ' verify md5/sha1 digests'

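A minimal usage sketch of the helpers added in the hunk above (the version string and file paths here are made-up examples, and this call sequence is not part of the script itself):

    # Validate the HTML CHANGES page for a release: with isHTML=True the
    # check requires an explicit "Release <version>" heading.
    checkChangesContent(open('Changes.html').read(), '3.3.0', 'Changes.html', 'lucene', True)

    # Walk an unpacked source tree and vet every CHANGES.txt it contains;
    # any file still saying "not yet released" raises RuntimeError.
    testChangesText('.', '3.3.0', 'lucene')
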
@@ -327,26 +357,33 @@ def verifyUnpacked(project, artifact, unpackPath, version):
if isSrc:
if project == 'lucene':
print ' run tests w/ Java 5...'
run('export JAVA_HOME=/usr/local/src/jdk1.5.0_22; ant test', '%s/test.log' % unpackPath)
run('export JAVA_HOME=/usr/local/src/jdk1.5.0_22; ant jar', '%s/compile.log' % unpackPath)
testDemo(isSrc)
run('export JAVA_HOME=%s; ant test' % JAVA5_HOME, '%s/test.log' % unpackPath)
run('export JAVA_HOME=%s; ant jar' % JAVA5_HOME, '%s/compile.log' % unpackPath)
testDemo(isSrc, version)
else:
print ' run tests w/ Java 6...'
run('export JAVA_HOME=/usr/local/src/jdk1.6.0_21; ant test', '%s/test.log' % unpackPath)
run('export JAVA_HOME=%s; ant test' % JAVA6_HOME, '%s/test.log' % unpackPath)
else:
if project == 'lucene':
testDemo(isSrc)
testDemo(isSrc, version)

def testDemo(isSrc):
testChangesText('.', version, project)

def testDemo(isSrc, version):
print ' test demo...'
if isSrc:
cp = 'build/lucene-core-3.2-SNAPSHOT.jar:build/contrib/demo/lucene-demo-3.2-SNAPSHOT.jar'
# allow lucene dev version to be either 3.3 or 3.3.0:
if version.endswith('.0'):
cp = 'build/lucene-core-%s-SNAPSHOT.jar:build/contrib/demo/lucene-demo-%s-SNAPSHOT.jar' % (version, version)
cp += ':build/lucene-core-%s-SNAPSHOT.jar:build/contrib/demo/lucene-demo-%s-SNAPSHOT.jar' % (version[:-2], version[:-2])
else:
cp = 'build/lucene-core-%s-SNAPSHOT.jar:build/contrib/demo/lucene-demo-%s-SNAPSHOT.jar' % (version, version)
docsDir = 'src'
else:
cp = 'lucene-core-3.2.0.jar:contrib/demo/lucene-demo-3.2.0.jar'
cp = 'lucene-core-%s.jar:contrib/demo/lucene-demo-%s.jar' % (version, version)
docsDir = 'docs'
run('export JAVA_HOME=/usr/local/src/jdk1.5.0_22; java -cp %s org.apache.lucene.demo.IndexFiles -index index -docs %s' % (cp, docsDir), 'index.log')
run('export JAVA_HOME=/usr/local/src/jdk1.5.0_22; java -cp %s org.apache.lucene.demo.SearchFiles -index index -query lucene' % cp, 'search.log')
run('export JAVA_HOME=%s; %s/bin/java -cp %s org.apache.lucene.demo.IndexFiles -index index -docs %s' % (JAVA5_HOME, JAVA5_HOME, cp, docsDir), 'index.log')
run('export JAVA_HOME=%s; %s/bin/java -cp %s org.apache.lucene.demo.SearchFiles -index index -query lucene' % (JAVA5_HOME, JAVA5_HOME, cp), 'search.log')
reMatchingDocs = re.compile('(\d+) total matching documents')
m = reMatchingDocs.search(open('search.log', 'rb').read())
if m is None:

@@ -1,14 +1,14 @@
Lucene Build Instructions

Basic steps:
  0) Install JDK 1.5 (or greater), Ant 1.7.0 (or greater)
  0) Install JDK 1.5 (or greater), Ant 1.7.x (not 1.6.x, not 1.8.x)
  1) Download Lucene from Apache and unpack it
  2) Connect to the top-level of your Lucene installation
  3) Install JavaCC (optional)
  4) Run ant

Step 0) Set up your development environment (JDK 1.5 or greater,
Ant 1.7.0 or greater)
Ant 1.7.x)

We'll assume that you know how to get and set up the JDK - if you
don't, then we suggest starting at http://java.sun.com and learning
@@ -16,7 +16,7 @@ more about Java, before returning to this README. Lucene runs with
JDK 1.5 and later.

Like many Open Source java projects, Lucene uses Apache Ant for build
control. Specifically, you MUST use Ant version 1.7.0 or greater.
control. Specifically, you MUST use Ant version 1.7.x
@@ -149,6 +149,13 @@ Changes in backwards compatibility policy
  files holding stored fields and term vectors) while flushing a
  segment. (Mike McCandless)

* LUCENE-2548: Field names (eg in Term, FieldInfo) are no longer
  interned. (Mike McCandless)

* LUCENE-2883: The contents of o.a.l.search.function has been consolidated into
  the queries module and can be found at o.a.l.queries.function. See MIGRATE.txt
  for more information. (Chris Male)

Changes in Runtime Behavior

* LUCENE-2846: omitNorms now behaves like omitTermFrequencyAndPositions, if you
@@ -281,6 +288,17 @@ API Changes
* LUCENE-2953: In addition to changes in 3.x, PriorityQueue#initialize(int)
  function was moved into the ctor. (Uwe Schindler, Yonik Seeley)

* LUCENE-3219: SortField type properties have been moved to an enum
  SortField.Type. To be consistent, CachedArrayCreator.getSortTypeID() has
  been changed to CachedArrayCreator.getSortType(). (Chris Male)

* LUCENE-3225: Add TermsEnum.seekExact for faster seeking when you
  don't need the ceiling term; renamed existing seek methods to either
  seekCeil or seekExact; changed seekExact(ord) to return no value.
  Fixed MemoryCodec and SimpleTextCodec to optimize the seekExact
  case, and fixed places in Lucene to use seekExact when possible.
  (Mike McCandless)

New features

* LUCENE-2604: Added RegexpQuery support to QueryParser. Regular expressions
@@ -458,7 +476,7 @@ Optimizations
  MultiTermQuery now stores TermState per leaf reader during rewrite to re-
  seek the term dictionary in TermQuery / TermWeight.
  (Simon Willnauer, Mike McCandless, Robert Muir)


Bug fixes

* LUCENE-2633: PackedInts Packed32 and Packed64 did not support internal
@@ -474,6 +492,26 @@ Bug fixes

======================= Lucene 3.x (not yet released) ================

Bug fixes

* LUCENE-3251: Directory#copy failed to close target output if opening the
  source stream failed. (Simon Willnauer)

* LUCENE-3254: Fixed minor bug in how deletes were written to disk,
  causing the file to sometimes be larger than it needed to be. (Mike
  McCandless)

Optimizations

* LUCENE-3201, LUCENE-3218: CompoundFileSystem code has been consolidated
  into a Directory implementation. Reading is optimized for MMapDirectory,
  NIOFSDirectory and SimpleFSDirectory to only map requested parts of the
  CFS into an IndexInput. Writing to a CFS now tries to append to the CF
  directly if possible and merges separately written files on the fly instead
  of during close. (Simon Willnauer, Robert Muir)

======================= Lucene 3.3.0 =======================

Changes in backwards compatibility policy

* LUCENE-3140: IndexOutput.copyBytes now takes a DataInput (superclass
@@ -489,6 +527,13 @@ Changes in backwards compatibility policy
  overridden one of these methods, cut over to the non-deprecated
  implementation. (Uwe Schindler, Robert Muir, Yonik Seeley)

* LUCENE-3238: Made MultiTermQuery.rewrite() final, to prevent
  problems (such as not properly setting rewrite methods, or
  not working correctly with things like SpanMultiTermQueryWrapper).
  To rewrite to a simpler form, instead return a simpler enum
  from getEnum(IndexReader). For example, to rewrite to a single term,
  return a SingleTermEnum. (ludovic Boutros, Uwe Schindler, Robert Muir)

Changes in runtime behavior

* LUCENE-2834: the hash used to compute the lock file name when the
@@ -537,6 +582,14 @@ Bug fixes
  background optimize when documents are still being deleted
  concurrently with the optimize. (Mike McCandless)

* LUCENE-3222: The RAM accounting for buffered delete terms was
  failing to measure the space required to hold the term's field and
  text character data. (Mike McCandless)

* LUCENE-3238: Fixed bug where using WildcardQuery("prefix*") inside
  of a SpanMultiTermQueryWrapper rewrote incorrectly and returned
  an error instead. (ludovic Boutros, Uwe Schindler, Robert Muir)

API Changes

* LUCENE-3208: Renamed protected IndexSearcher.createWeight() to expert
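The LUCENE-3225 entry in the CHANGES excerpt above splits the old TermsEnum.seek into seekExact and seekCeil. A minimal sketch of the two call styles, assuming trunk-era signatures (the boolean is the terms-cache flag shown elsewhere in this commit) and a placeholder reader, field and term:

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    class SeekSketch {
      // Assumes a trunk-era reader and a field named "body"; both are placeholders.
      static void seekBothWays(IndexReader reader) throws IOException {
        Terms terms = MultiFields.getTerms(reader, "body");
        if (terms == null) return;              // field not present
        TermsEnum te = terms.iterator();

        // seekExact: cheaper when the ceiling term is not needed.
        if (te.seekExact(new BytesRef("lucene"), false)) {
          System.out.println("exact hit, docFreq=" + te.docFreq());
        }

        // seekCeil: reports FOUND, NOT_FOUND (positioned on the successor) or END.
        TermsEnum.SeekStatus status = te.seekCeil(new BytesRef("lucene"), false);
        System.out.println("seekCeil status: " + status);
      }
    }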
@@ -364,3 +364,21 @@ LUCENE-1458, LUCENE-2111: Flexible Indexing
  for applications that rely on Lucene's internal document ID
  assignment. If so, you should instead use LogByteSize/DocMergePolicy
  during indexing.

* LUCENE-2883: Lucene's o.a.l.search.function ValueSource based functionality was consolidated
  into module/queries along with Solr's similar functionality. The following classes were moved:
  - o.a.l.search.function.CustomScoreQuery -> o.a.l.queries.CustomScoreQuery
  - o.a.l.search.function.CustomScoreProvider -> o.a.l.queries.CustomScoreProvider
  - o.a.l.search.function.NumericIndexDocValueSource -> o.a.l.queries.function.valuesource.NumericIndexDocValueSource
  The following lists the replacement classes for those removed:
  - o.a.l.search.function.ByteFieldSource -> o.a.l.queries.function.valuesource.ByteFieldSource
  - o.a.l.search.function.DocValues -> o.a.l.queries.function.DocValues
  - o.a.l.search.function.FieldCacheSource -> o.a.l.queries.function.valuesource.FieldCacheSource
  - o.a.l.search.function.FieldScoreQuery -> o.a.l.queries.function.FunctionQuery
  - o.a.l.search.function.FloatFieldSource -> o.a.l.queries.function.valuesource.FloatFieldSource
  - o.a.l.search.function.IntFieldSource -> o.a.l.queries.function.valuesource.IntFieldSource
  - o.a.l.search.function.OrdFieldSource -> o.a.l.queries.function.valuesource.OrdFieldSource
  - o.a.l.search.function.ReverseOrdFieldSource -> o.a.l.queries.function.valuesource.ReverseOrdFieldSource
  - o.a.l.search.function.ShortFieldSource -> o.a.l.queries.function.valuesource.ShortFieldSource
  - o.a.l.search.function.ValueSource -> o.a.l.queries.function.ValueSource
  - o.a.l.search.function.ValueSourceQuery -> o.a.l.queries.function.FunctionQuery
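The table above only lists package moves; a minimal sketch of what migrated code might look like, assuming the module/queries classes as listed (the field name "price" is a placeholder, and the old ValueSourceQuery call in the comment is paraphrased, not taken from the patch):

    import org.apache.lucene.queries.function.FunctionQuery;
    import org.apache.lucene.queries.function.ValueSource;
    import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
    import org.apache.lucene.search.Query;

    class FunctionQueryMigration {
      // Before: roughly new ValueSourceQuery(new o.a.l.search.function.FloatFieldSource("price"))
      // After (assumed): the moved classes under o.a.l.queries.function
      static Query scoreByFloatField() {
        ValueSource source = new FloatFieldSource("price");
        return new FunctionQuery(source);
      }
    }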
@@ -22,6 +22,7 @@ lucene-core-XX-javadoc.jar

lucene-test-framework-XX.jar
  The compiled Lucene test-framework library.
  Depends on junit 4.7.x (not 4.6.x, not 4.8.x), and Apache Ant 1.7.x (not 1.6.x, not 1.8.x)

lucene-test-framework-XX-javadoc.jar
  The Javadoc jar for the compiled Lucene test-framework library.
@@ -95,15 +95,15 @@

  <property name="javac.deprecation" value="off"/>
  <property name="javac.debug" value="on"/>
  <property name="javac.source" value="1.5"/>
  <property name="javac.target" value="1.5"/>
  <property name="javac.source.backwards" value="1.5"/>
  <property name="javac.target.backwards" value="1.5"/>
  <property name="javac.source" value="1.6"/>
  <property name="javac.target" value="1.6"/>
  <property name="javac.source.backwards" value="1.6"/>
  <property name="javac.target.backwards" value="1.6"/>
  <!-- clover wants to run with -lib, otherwise we prefer a repeatable
       classpath -->
  <property name="javac.includeAntRuntime" value="${run.clover}"/>

  <property name="javadoc.link" value="http://java.sun.com/j2se/1.5/docs/api/"/>
  <property name="javadoc.link" value="http://download.oracle.com/javase/6/docs/api/"/>
  <property name="javadoc.access" value="protected"/>
  <property name="javadoc.charset" value="utf-8"/>
  <property name="javadoc.dir" value="${common.dir}/build/docs/api"/>
@@ -838,8 +838,6 @@
      <!-- Lucene -->
      <arg value="-c" />
      <arg value="${basedir}/lib" />
      <arg value="-c" />
      <arg value="${basedir}/contrib/queries/lib" />
    </java>
  </target>

@@ -4,6 +4,11 @@ For more information on past and future Lucene versions, please see:
http://s.apache.org/luceneversions

======================= Trunk (not yet released) =======================

Changes in runtime behavior

* LUCENE-3250: Wordnet's SynExpand requires a non-null Analyzer (it no longer
  treats null as StandardAnalyzer). (Robert Muir)

Build

@@ -55,6 +60,31 @@ Bug Fixes

======================= Lucene 3.x (not yet released) ================

New Features

* LUCENE-3234: provide a limit on phrase analysis in FastVectorHighlighter to
  speed up highlighting. Use FastVectorHighlighter.setPhraseLimit() to set the
  limit (e.g. 5000). (Mike Sokolov via Koji Sekiguchi)

* LUCENE-3079: a new facet module which provides faceted indexing & search
  capabilities. It allows managing a taxonomy of categories and indexing them
  with documents. It also provides a search API for aggregating (e.g. counting)
  the weights of the categories that are relevant to the search results.
  (Shai Erera)

* LUCENE-3171: Added BlockJoinQuery and BlockJoinCollector, under the
  new contrib/join module, to enable searches that require joining
  between parent and child documents. Joined (children + parent)
  documents must be indexed as a document block, using
  IndexWriter.add/updateDocuments. (Mark Harwood, Mike McCandless)

API Changes

Bug Fixes


======================= Lucene 3.3.0 =======================

New Features

* LUCENE-152: Add KStem (light stemmer for English).
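LUCENE-3171 above requires each joined group to be written as one document block. A minimal sketch, assuming the trunk-era IndexWriter.addDocuments(Collection) call; the field names and values are placeholders, and the parent document is added last in the block so the join code can locate it:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexWriter;

    class BlockIndexingSketch {
      // Assumes an already-configured IndexWriter; all field names are illustrative.
      static void addResumeBlock(IndexWriter writer) throws IOException {
        List<Document> block = new ArrayList<Document>();

        Document job = new Document();
        job.add(new Field("skill", "java", Field.Store.YES, Field.Index.NOT_ANALYZED));
        block.add(job);                          // child document

        Document resume = new Document();
        resume.add(new Field("type", "resume", Field.Store.YES, Field.Index.NOT_ANALYZED));
        block.add(resume);                       // parent document, last in the block

        writer.addDocuments(block);              // indexed as one contiguous block
      }
    }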
@ -75,7 +75,7 @@
|
|||
property="pom.xml.present">
|
||||
</available>
|
||||
|
||||
<target name="dist-maven" if="pom.xml.present" depends="compile-core, jar-src">
|
||||
<target name="dist-maven" if="pom.xml.present" depends="jar-core, jar-src, javadocs">
|
||||
<taskdef resource="org/apache/maven/artifact/ant/antlib.xml"
|
||||
uri="antlib:org.apache.maven.artifact.ant"
|
||||
classpathref="maven-ant-tasks.classpath"/>
|
||||
|
@ -116,6 +116,7 @@
|
|||
<macrodef name="contrib-uptodate">
|
||||
<attribute name="name"/>
|
||||
<attribute name="property" default="@{name}.uptodate"/>
|
||||
<attribute name="contrib-src-name" default="@{name}"/>
|
||||
<attribute name="classpath.property" default="@{name}.jar"/>
|
||||
<!-- set jarfile only, if the target jar file has no generic name, applies to analyzers with its common and smartcn subdir -->
|
||||
<attribute name="jarfile" default="${common.dir}/build/contrib/@{name}/lucene-@{name}-${version}.jar"/>
|
||||
|
@ -123,7 +124,7 @@
|
|||
<!--<echo message="Checking '@{jarfile}' against source folder '${common.dir}/contrib/@{name}/src/java'"/>-->
|
||||
<property name="@{classpath.property}" location="@{jarfile}"/>
|
||||
<uptodate property="@{property}" targetfile="@{jarfile}">
|
||||
<srcfiles dir="${common.dir}/contrib/@{name}/src/java" includes="**/*.java"/>
|
||||
<srcfiles dir="${common.dir}/contrib/@{contrib-src-name}/src/java" includes="**/*.java"/>
|
||||
</uptodate>
|
||||
</sequential>
|
||||
</macrodef>
|
||||
|
@ -131,13 +132,14 @@
|
|||
<macrodef name="module-uptodate">
|
||||
<attribute name="name"/>
|
||||
<attribute name="property" default="@{name}.uptodate"/>
|
||||
<attribute name="module-src-name" default="@{name}"/>
|
||||
<attribute name="classpath.property" default="@{name}.jar"/>
|
||||
<!-- set jarfile only, if the target jar file has no generic name, applies to analyzers with its common and smartcn subdir -->
|
||||
<attribute name="jarfile" default="${common.dir}/../modules/@{name}/build/lucene-@{name}-${version}.jar"/>
|
||||
<attribute name="jarfile" default="${common.dir}/../modules/@{module-src-name}/build/lucene-@{name}-${version}.jar"/>
|
||||
<sequential>
|
||||
<property name="@{classpath.property}" location="@{jarfile}"/>
|
||||
<uptodate property="@{property}" targetfile="@{jarfile}">
|
||||
<srcfiles dir="${common.dir}/../modules/@{name}/src/java" includes="**/*.java"/>
|
||||
<srcfiles dir="${common.dir}/../modules/@{module-src-name}/src/java" includes="**/*.java"/>
|
||||
</uptodate>
|
||||
</sequential>
|
||||
</macrodef>
|
||||
|
|
|
@@ -38,7 +38,11 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Date;

/** Index all text files under a directory. See http://lucene.apache.org/java/4_0/demo.html. */
/** Index all text files under a directory.
 * <p>
 * This is a command-line application demonstrating simple Lucene indexing.
 * Run it with no command-line arguments for usage information.
 */
public class IndexFiles {

  private IndexFiles() {}
@@ -47,8 +51,8 @@ public class IndexFiles {
  public static void main(String[] args) {
    String usage = "java org.apache.lucene.demo.IndexFiles"
                 + " [-index INDEX_PATH] [-docs DOCS_PATH] [-update]\n\n"
                 // TODO: Change the link with every release (or: fill in some less error-prone alternative here...)
                 + "See http://lucene.apache.org/java/4_0/demo.html for details.";
                 // note: the trailing space keeps "index" and "in INDEX_PATH" from running together
                 + "This indexes the documents in DOCS_PATH, creating a Lucene index "
                 + "in INDEX_PATH that can be searched with SearchFiles";
    String indexPath = "index";
    String docsPath = null;
    boolean create = true;
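The new usage string points users at SearchFiles, whose "N total matching documents" output is exactly what the smoke-test script greps for. A rough programmatic equivalent, assuming trunk-era core classes and the demo's "contents" field; none of these names come from the patch itself:

    import java.io.File;

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.queryParser.QueryParser;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.util.Version;

    class SearchSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = FSDirectory.open(new File("index"));   // demo index location
        IndexReader reader = IndexReader.open(dir);
        IndexSearcher searcher = new IndexSearcher(reader);

        QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "contents",
            new StandardAnalyzer(Version.LUCENE_CURRENT));
        Query query = parser.parse("lucene");

        TopDocs results = searcher.search(query, 10);
        System.out.println(results.totalHits + " total matching documents");

        searcher.close();
        reader.close();
        dir.close();
      }
    }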
@ -26,7 +26,7 @@
|
|||
<import file="../contrib-build.xml"/>
|
||||
|
||||
<contrib-uptodate name="memory" property="memory.uptodate" classpath.property="memory.jar"/>
|
||||
<contrib-uptodate name="queries" property="queries.uptodate" classpath.property="queries.jar"/>
|
||||
<contrib-uptodate name="queries-contrib" contrib-src-name="queries" property="queries.uptodate" classpath.property="queries.jar"/>
|
||||
|
||||
<path id="classpath">
|
||||
<pathelement path="${memory.jar}"/>
|
||||
|
|
|
@ -31,7 +31,6 @@ import org.apache.lucene.index.IndexReader;
|
|||
import org.apache.lucene.index.memory.MemoryIndex;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
|
||||
/**
|
||||
* {@link Scorer} implementation which scores text fragments by the number of
|
||||
|
@ -88,7 +87,7 @@ public class QueryScorer implements Scorer {
|
|||
* @param defaultField
|
||||
*/
|
||||
public QueryScorer(Query query, IndexReader reader, String field, String defaultField) {
|
||||
this.defaultField = StringHelper.intern(defaultField);
|
||||
this.defaultField = defaultField;
|
||||
init(query, field, reader, true);
|
||||
}
|
||||
|
||||
|
@ -96,7 +95,7 @@ public class QueryScorer implements Scorer {
|
|||
* @param defaultField - The default field for queries with the field name unspecified
|
||||
*/
|
||||
public QueryScorer(Query query, String field, String defaultField) {
|
||||
this.defaultField = StringHelper.intern(defaultField);
|
||||
this.defaultField = defaultField;
|
||||
init(query, field, null, true);
|
||||
}
|
||||
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.apache.lucene.search.BooleanClause;
|
|||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.FilteredQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
|
||||
/**
|
||||
* Utility class used to extract the terms used in a query, plus any weights.
|
||||
|
@ -94,10 +93,6 @@ public final class QueryTermExtractor
|
|||
public static final WeightedTerm[] getTerms(Query query, boolean prohibited, String fieldName)
|
||||
{
|
||||
HashSet<WeightedTerm> terms=new HashSet<WeightedTerm>();
|
||||
if(fieldName!=null)
|
||||
{
|
||||
fieldName= StringHelper.intern(fieldName);
|
||||
}
|
||||
getTerms(query,terms,prohibited,fieldName);
|
||||
return terms.toArray(new WeightedTerm[0]);
|
||||
}
|
||||
|
@ -114,7 +109,6 @@ public final class QueryTermExtractor
|
|||
return getTerms(query,prohibited,null);
|
||||
}
|
||||
|
||||
//fieldname MUST be interned prior to this call
|
||||
private static final void getTerms(Query query, HashSet<WeightedTerm> terms,boolean prohibited, String fieldName)
|
||||
{
|
||||
try
|
||||
|
@ -131,7 +125,7 @@ public final class QueryTermExtractor
|
|||
for (Iterator<Term> iter = nonWeightedTerms.iterator(); iter.hasNext();)
|
||||
{
|
||||
Term term = iter.next();
|
||||
if((fieldName==null)||(term.field()==fieldName))
|
||||
if((fieldName==null)||(term.field().equals(fieldName)))
|
||||
{
|
||||
terms.add(new WeightedTerm(query.getBoost(),term.text()));
|
||||
}
|
||||
|
|
|
@ -41,7 +41,6 @@ import org.apache.lucene.search.spans.SpanOrQuery;
|
|||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.apache.lucene.search.spans.Spans;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
|
||||
/**
|
||||
* Class used to extract {@link WeightedSpanTerm}s from a {@link Query} based on whether
|
||||
|
@ -63,7 +62,7 @@ public class WeightedSpanTermExtractor {
|
|||
|
||||
public WeightedSpanTermExtractor(String defaultField) {
|
||||
if (defaultField != null) {
|
||||
this.defaultField = StringHelper.intern(defaultField);
|
||||
this.defaultField = defaultField;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -314,8 +313,8 @@ public class WeightedSpanTermExtractor {
|
|||
* Necessary to implement matches for queries against <code>defaultField</code>
|
||||
*/
|
||||
private boolean fieldNameComparator(String fieldNameToCheck) {
|
||||
boolean rv = fieldName == null || fieldNameToCheck == fieldName
|
||||
|| fieldNameToCheck == defaultField;
|
||||
boolean rv = fieldName == null || fieldName.equals(fieldNameToCheck)
|
||||
|| (defaultField != null && defaultField.equals(fieldNameToCheck));
|
||||
return rv;
|
||||
}
|
||||
|
||||
|
@ -372,7 +371,7 @@ public class WeightedSpanTermExtractor {
|
|||
public Map<String,WeightedSpanTerm> getWeightedSpanTerms(Query query, TokenStream tokenStream,
|
||||
String fieldName) throws IOException {
|
||||
if (fieldName != null) {
|
||||
this.fieldName = StringHelper.intern(fieldName);
|
||||
this.fieldName = fieldName;
|
||||
} else {
|
||||
this.fieldName = null;
|
||||
}
|
||||
|
@ -408,7 +407,7 @@ public class WeightedSpanTermExtractor {
|
|||
public Map<String,WeightedSpanTerm> getWeightedSpanTermsWithScores(Query query, TokenStream tokenStream, String fieldName,
|
||||
IndexReader reader) throws IOException {
|
||||
if (fieldName != null) {
|
||||
this.fieldName = StringHelper.intern(fieldName);
|
||||
this.fieldName = fieldName;
|
||||
} else {
|
||||
this.fieldName = null;
|
||||
}
|
||||
|
|
|
@@ -35,6 +35,7 @@ public class FastVectorHighlighter {
  private final boolean fieldMatch;
  private final FragListBuilder fragListBuilder;
  private final FragmentsBuilder fragmentsBuilder;
  private int phraseLimit = Integer.MAX_VALUE;

  /**
   * the default constructor.
@@ -173,7 +174,7 @@ public class FastVectorHighlighter {
      final FieldQuery fieldQuery, IndexReader reader, int docId,
      String fieldName, int fragCharSize ) throws IOException {
    FieldTermStack fieldTermStack = new FieldTermStack( reader, docId, fieldName, fieldQuery );
    FieldPhraseList fieldPhraseList = new FieldPhraseList( fieldTermStack, fieldQuery );
    FieldPhraseList fieldPhraseList = new FieldPhraseList( fieldTermStack, fieldQuery, phraseLimit );
    return fragListBuilder.createFieldFragList( fieldPhraseList, fragCharSize );
  }

@@ -190,4 +191,15 @@ public class FastVectorHighlighter {
   * @return whether fieldMatch or not
   */
  public boolean isFieldMatch(){ return fieldMatch; }

  /**
   * @return the maximum number of phrases to analyze when searching for the highest-scoring phrase.
   */
  public int getPhraseLimit () { return phraseLimit; }

  /**
   * set the maximum number of phrases to analyze when searching for the highest-scoring phrase.
   * The default is unlimited (Integer.MAX_VALUE).
   */
  public void setPhraseLimit (int phraseLimit) { this.phraseLimit = phraseLimit; }
}
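A minimal usage sketch for the new phraseLimit knob, using the getFieldQuery/getBestFragment methods that already exist on FastVectorHighlighter; the 5000 limit echoes the CHANGES suggestion, and the field name and fragment size are placeholders:

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
    import org.apache.lucene.search.vectorhighlight.FieldQuery;

    class PhraseLimitSketch {
      // Caps phrase analysis so pathological documents cannot stall highlighting.
      static String highlight(Query query, IndexReader reader, int docId) throws IOException {
        FastVectorHighlighter highlighter = new FastVectorHighlighter();
        highlighter.setPhraseLimit(5000);        // analyze at most 5000 phrases per field
        FieldQuery fieldQuery = highlighter.getFieldQuery(query);
        return highlighter.getBestFragment(fieldQuery, reader, docId, "body", 100);
      }
    }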
@@ -30,21 +30,32 @@ import org.apache.lucene.search.vectorhighlight.FieldTermStack.TermInfo;
public class FieldPhraseList {

  LinkedList<WeightedPhraseInfo> phraseList = new LinkedList<WeightedPhraseInfo>();

  /**
   * create a FieldPhraseList that has no limit on the number of phrases to analyze
   *
   * @param fieldTermStack FieldTermStack object
   * @param fieldQuery FieldQuery object
   */
  public FieldPhraseList( FieldTermStack fieldTermStack, FieldQuery fieldQuery){
    this (fieldTermStack, fieldQuery, Integer.MAX_VALUE);
  }

  /**
   * a constructor.
   *
   * @param fieldTermStack FieldTermStack object
   * @param fieldQuery FieldQuery object
   * @param phraseLimit maximum size of phraseList
   */
  public FieldPhraseList( FieldTermStack fieldTermStack, FieldQuery fieldQuery ){
  public FieldPhraseList( FieldTermStack fieldTermStack, FieldQuery fieldQuery, int phraseLimit){
    final String field = fieldTermStack.getFieldName();

    LinkedList<TermInfo> phraseCandidate = new LinkedList<TermInfo>();
    QueryPhraseMap currMap = null;
    QueryPhraseMap nextMap = null;
    while( !fieldTermStack.isEmpty() ){

    while( !fieldTermStack.isEmpty() && (phraseList.size() < phraseLimit) )
    {
      phraseCandidate.clear();

      TermInfo ti = fieldTermStack.pop();
@ -34,8 +34,8 @@ import org.apache.lucene.search.TermQuery;
|
|||
import org.apache.lucene.search.vectorhighlight.FieldTermStack.TermInfo;
|
||||
|
||||
/**
|
||||
* FieldQuery breaks down query object into terms/phrases and keep
|
||||
* them in QueryPhraseMap structure.
|
||||
* FieldQuery breaks down query object into terms/phrases and keeps
|
||||
* them in a QueryPhraseMap structure.
|
||||
*/
|
||||
public class FieldQuery {
|
||||
|
||||
|
|
|
@ -115,8 +115,7 @@ public class FieldTermStack {
|
|||
* @param termInfo the TermInfo object to be put on the top of the stack
|
||||
*/
|
||||
public void push( TermInfo termInfo ){
|
||||
// termList.push( termInfo ); // avoid Java 1.6 feature
|
||||
termList.addFirst( termInfo );
|
||||
termList.push( termInfo );
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@@ -188,4 +188,34 @@ public class FieldPhraseListTest extends AbstractTestCase {
    assertEquals( 1, fpl.phraseList.size() );
    assertEquals( "sppeeeed(1.0)((88,93))", fpl.phraseList.get( 0 ).toString() );
  }

  /* This test shows a big speedup from limiting the number of analyzed phrases in
   * this bad case for FieldPhraseList */
  /* But it is not reliable as a unit test since it is timing-dependent
  public void testManyRepeatedTerms() throws Exception {
    long t = System.currentTimeMillis();
    testManyTermsWithLimit (-1);
    long t1 = System.currentTimeMillis();
    testManyTermsWithLimit (1);
    long t2 = System.currentTimeMillis();
    assertTrue (t2-t1 * 1000 < t1-t);
  }
  private void testManyTermsWithLimit (int limit) throws Exception {
    StringBuilder buf = new StringBuilder ();
    for (int i = 0; i < 16000; i++) {
      buf.append("a b c ");
    }
    make1d1fIndex( buf.toString());

    Query query = tq("a");
    FieldQuery fq = new FieldQuery( query, true, true );
    FieldTermStack stack = new FieldTermStack( reader, 0, F, fq );
    FieldPhraseList fpl = new FieldPhraseList( stack, fq, limit);
    if (limit < 0 || limit > 16000)
      assertEquals( 16000, fpl.phraseList.size() );
    else
      assertEquals( limit, fpl.phraseList.size() );
    assertEquals( "a(1.0)((0,1))", fpl.phraseList.get( 0 ).toString() );
  }
  */
}
@ -23,13 +23,13 @@ import org.apache.lucene.util.BytesRef;
|
|||
public class InstantiatedDocsAndPositionsEnum extends DocsAndPositionsEnum {
|
||||
private int upto;
|
||||
private int posUpto;
|
||||
private Bits skipDocs;
|
||||
private Bits liveDocs;
|
||||
private InstantiatedTerm term;
|
||||
protected InstantiatedTermDocumentInformation currentDoc;
|
||||
private final BytesRef payload = new BytesRef();
|
||||
|
||||
public InstantiatedDocsAndPositionsEnum reset(Bits skipDocs, InstantiatedTerm term) {
|
||||
this.skipDocs = skipDocs;
|
||||
public InstantiatedDocsAndPositionsEnum reset(Bits liveDocs, InstantiatedTerm term) {
|
||||
this.liveDocs = liveDocs;
|
||||
this.term = term;
|
||||
upto = -1;
|
||||
return this;
|
||||
|
@ -47,7 +47,7 @@ public class InstantiatedDocsAndPositionsEnum extends DocsAndPositionsEnum {
|
|||
return NO_MORE_DOCS;
|
||||
} else {
|
||||
currentDoc = term.getAssociatedDocuments()[upto];
|
||||
if (skipDocs == null || !skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
if (liveDocs == null || liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
posUpto = -1;
|
||||
return docID();
|
||||
} else {
|
||||
|
@ -69,7 +69,7 @@ public class InstantiatedDocsAndPositionsEnum extends DocsAndPositionsEnum {
|
|||
}
|
||||
currentDoc = term.getAssociatedDocuments()[upto];
|
||||
|
||||
if (skipDocs != null && skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
if (liveDocs != null && !liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
return nextDoc();
|
||||
} else {
|
||||
posUpto = -1;
|
||||
|
|
|
@ -21,12 +21,12 @@ import org.apache.lucene.util.Bits;
|
|||
|
||||
public class InstantiatedDocsEnum extends DocsEnum {
|
||||
private int upto;
|
||||
private Bits skipDocs;
|
||||
private Bits liveDocs;
|
||||
private InstantiatedTerm term;
|
||||
protected InstantiatedTermDocumentInformation currentDoc;
|
||||
|
||||
public InstantiatedDocsEnum reset(Bits skipDocs, InstantiatedTerm term) {
|
||||
this.skipDocs = skipDocs;
|
||||
public InstantiatedDocsEnum reset(Bits liveDocs, InstantiatedTerm term) {
|
||||
this.liveDocs = liveDocs;
|
||||
this.term = term;
|
||||
upto = -1;
|
||||
return this;
|
||||
|
@ -44,7 +44,7 @@ public class InstantiatedDocsEnum extends DocsEnum {
|
|||
return NO_MORE_DOCS;
|
||||
} else {
|
||||
currentDoc = term.getAssociatedDocuments()[upto];
|
||||
if (skipDocs == null || !skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
if (liveDocs == null || liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
return docID();
|
||||
} else {
|
||||
return nextDoc();
|
||||
|
@ -65,7 +65,7 @@ public class InstantiatedDocsEnum extends DocsEnum {
|
|||
}
|
||||
currentDoc = term.getAssociatedDocuments()[upto];
|
||||
|
||||
if (skipDocs != null && skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
if (liveDocs != null && !liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
|
||||
return nextDoc();
|
||||
} else {
|
||||
return docID();
|
||||
|
|
|
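This commit consistently flips skipDocs (deleted docs) to liveDocs, which inverts every check: a document is visited when liveDocs is null (no deletions) or liveDocs.get(doc) is true. A minimal sketch of the post-change idiom, assuming MultiFields.getLiveDocs and a placeholder field and term:

    import java.io.IOException;

    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.Bits;
    import org.apache.lucene.util.BytesRef;

    class LiveDocsSketch {
      // null liveDocs means "no deletions", so every document is live.
      static int countLiveOccurrences(IndexReader reader) throws IOException {
        Bits liveDocs = MultiFields.getLiveDocs(reader);
        Terms terms = MultiFields.getTerms(reader, "body");
        if (terms == null) return 0;
        TermsEnum te = terms.iterator();
        if (!te.seekExact(new BytesRef("lucene"), false)) return 0;

        int count = 0;
        DocsEnum docs = te.docs(liveDocs, null);   // the enum skips non-live docs for us
        while (docs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
          count++;
        }
        return count;
      }
    }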
@ -182,9 +182,9 @@ public class InstantiatedIndex
|
|||
}
|
||||
|
||||
// create documents
|
||||
final Bits delDocs = MultiFields.getDeletedDocs(sourceIndexReader);
|
||||
final Bits liveDocs = MultiFields.getLiveDocs(sourceIndexReader);
|
||||
for (int i = 0; i < sourceIndexReader.maxDoc(); i++) {
|
||||
if (delDocs != null && delDocs.get(i)) {
|
||||
if (liveDocs != null && !liveDocs.get(i)) {
|
||||
deletedDocuments.set(i);
|
||||
} else {
|
||||
InstantiatedDocument document = new InstantiatedDocument();
|
||||
|
@ -254,7 +254,7 @@ public class InstantiatedIndex
|
|||
// create term-document informations
|
||||
for (InstantiatedTerm term : orderedTerms) {
|
||||
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(sourceIndexReader,
|
||||
MultiFields.getDeletedDocs(sourceIndexReader),
|
||||
MultiFields.getLiveDocs(sourceIndexReader),
|
||||
term.getTerm().field(),
|
||||
new BytesRef(term.getTerm().text()));
|
||||
int position = 0;
|
||||
|
|
|
@ -107,11 +107,11 @@ public class InstantiatedIndexReader extends IndexReader {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Bits getDeletedDocs() {
|
||||
public Bits getLiveDocs() {
|
||||
return new Bits() {
|
||||
public boolean get(int n) {
|
||||
return (index.getDeletedDocuments() != null && index.getDeletedDocuments().get(n))
|
||||
|| (uncommittedDeletedDocuments != null && uncommittedDeletedDocuments.get(n));
|
||||
return !(index.getDeletedDocuments() != null && index.getDeletedDocuments().get(n))
|
||||
&& !(uncommittedDeletedDocuments != null && uncommittedDeletedDocuments.get(n));
|
||||
}
|
||||
|
||||
public int length() {
|
||||
|
@ -400,7 +400,7 @@ public class InstantiatedIndexReader extends IndexReader {
|
|||
if (i < 0) {
|
||||
i = -i - 1;
|
||||
}
|
||||
if (i >= orderedTerms.length || orderedTerms[i].field() != field) {
|
||||
if (i >= orderedTerms.length || !orderedTerms[i].field().equals(field)) {
|
||||
// field does not exist
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -44,7 +44,6 @@ import org.apache.lucene.index.Term;
|
|||
import org.apache.lucene.index.TermVectorOffsetInfo;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.SimilarityProvider;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
import org.apache.lucene.util.ArrayUtil;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.apache.lucene.util.AttributeImpl;
|
||||
|
@ -475,7 +474,7 @@ public class InstantiatedIndexWriter implements Closeable {
|
|||
FieldSetting fieldSetting = fieldSettingsByFieldName.get(field.name());
|
||||
if (fieldSetting == null) {
|
||||
fieldSetting = new FieldSetting();
|
||||
fieldSetting.fieldName = StringHelper.intern(field.name());
|
||||
fieldSetting.fieldName = field.name();
|
||||
fieldSettingsByFieldName.put(fieldSetting.fieldName, fieldSetting);
|
||||
fieldNameBuffer.add(fieldSetting.fieldName);
|
||||
}
|
||||
|
|
|
@ -44,7 +44,20 @@ public class InstantiatedTermsEnum extends TermsEnum {
|
|||
}
|
||||
|
||||
@Override
|
||||
public SeekStatus seek(BytesRef text, boolean useCache) {
|
||||
public boolean seekExact(BytesRef text, boolean useCache) {
|
||||
final Term t = new Term(field, text);
|
||||
int loc = Arrays.binarySearch(terms, t, InstantiatedTerm.termComparator);
|
||||
if (loc < 0) {
|
||||
return false;
|
||||
} else {
|
||||
upto = loc;
|
||||
br.copy(text);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public SeekStatus seekCeil(BytesRef text, boolean useCache) {
|
||||
final Term t = new Term(field, text);
|
||||
int loc = Arrays.binarySearch(terms, t, InstantiatedTerm.termComparator);
|
||||
if (loc < 0) {
|
||||
|
@ -63,19 +76,10 @@ public class InstantiatedTermsEnum extends TermsEnum {
|
|||
}
|
||||
|
||||
@Override
|
||||
public SeekStatus seek(long ord) {
|
||||
public void seekExact(long ord) {
|
||||
assert (start + (int) ord) < terms.length;
|
||||
upto = start + (int) ord;
|
||||
if (upto >= terms.length) {
|
||||
return SeekStatus.END;
|
||||
}
|
||||
|
||||
if (terms[upto].field() == field) {
|
||||
return SeekStatus.FOUND;
|
||||
} else {
|
||||
// make sure field was interned
|
||||
assert !terms[upto].field().equals(field);
|
||||
return SeekStatus.END;
|
||||
}
|
||||
assert field.equals(terms[upto].field());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -84,12 +88,10 @@ public class InstantiatedTermsEnum extends TermsEnum {
|
|||
if (upto >= terms.length) {
|
||||
return null;
|
||||
}
|
||||
if (terms[upto].field() == field) {
|
||||
if (terms[upto].field().equals(field)) {
|
||||
br.copy(terms[upto].getTerm().text());
|
||||
return br;
|
||||
} else {
|
||||
// make sure field was interned
|
||||
assert !terms[upto].field().equals(field);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
@ -116,19 +118,19 @@ public class InstantiatedTermsEnum extends TermsEnum {
|
|||
}
|
||||
|
||||
@Override
|
||||
public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
|
||||
public DocsEnum docs(Bits liveDocs, DocsEnum reuse) {
|
||||
if (reuse == null || !(reuse instanceof InstantiatedDocsEnum)) {
|
||||
reuse = new InstantiatedDocsEnum();
|
||||
}
|
||||
return ((InstantiatedDocsEnum) reuse).reset(skipDocs, terms[upto]);
|
||||
return ((InstantiatedDocsEnum) reuse).reset(liveDocs, terms[upto]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) {
|
||||
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) {
|
||||
if (reuse == null || !(reuse instanceof InstantiatedDocsAndPositionsEnum)) {
|
||||
reuse = new InstantiatedDocsAndPositionsEnum();
|
||||
}
|
||||
return ((InstantiatedDocsAndPositionsEnum) reuse).reset(skipDocs, terms[upto]);
|
||||
return ((InstantiatedDocsAndPositionsEnum) reuse).reset(liveDocs, terms[upto]);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -144,9 +146,9 @@ public class InstantiatedTermsEnum extends TermsEnum {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void seek(BytesRef term, TermState state) throws IOException {
|
||||
public void seekExact(BytesRef term, TermState state) throws IOException {
|
||||
assert state != null && state instanceof OrdTermState;
|
||||
seek(((OrdTermState)state).ord); // just use the ord for simplicity
|
||||
seekExact(((OrdTermState)state).ord); // just use the ord for simplicity
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -133,13 +133,13 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
|
||||
Term t = new Term("c", "danny");
|
||||
TermsEnum aprioriTermEnum = MultiFields.getTerms(aprioriReader, t.field()).iterator();
|
||||
aprioriTermEnum.seek(new BytesRef(t.text()));
|
||||
aprioriTermEnum.seekCeil(new BytesRef(t.text()));
|
||||
TermsEnum testTermEnum = MultiFields.getTerms(testReader, t.field()).iterator();
|
||||
testTermEnum.seek(new BytesRef(t.text()));
|
||||
testTermEnum.seekCeil(new BytesRef(t.text()));
|
||||
assertEquals(aprioriTermEnum.term(), testTermEnum.term());
|
||||
|
||||
DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), null);
|
||||
DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), null);
|
||||
DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), null);
|
||||
DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), null);
|
||||
|
||||
assertEquals(aprioriTermDocs.nextDoc(), testTermDocs.nextDoc());
|
||||
assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
|
||||
|
@ -186,8 +186,8 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
|
||||
assertEquals(aprioriTermEnum.next(), testTermEnum.next());
|
||||
|
||||
aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), aprioriTermDocs);
|
||||
testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), testTermDocs);
|
||||
aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), aprioriTermDocs);
|
||||
testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), testTermDocs);
|
||||
|
||||
while (aprioriTermDocs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
|
||||
assertTrue(testTermDocs.nextDoc() != DocsEnum.NO_MORE_DOCS);
|
||||
|
@ -309,13 +309,13 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
assertEquals(air.numDocs(), tir.numDocs());
|
||||
assertEquals(air.numDeletedDocs(), tir.numDeletedDocs());
|
||||
|
||||
final Bits aDelDocs = MultiFields.getDeletedDocs(air);
|
||||
final Bits tDelDocs = MultiFields.getDeletedDocs(tir);
|
||||
assertTrue((aDelDocs != null && tDelDocs != null) ||
|
||||
(aDelDocs == null && tDelDocs == null));
|
||||
if (aDelDocs != null) {
|
||||
final Bits aLiveDocs = MultiFields.getLiveDocs(air);
|
||||
final Bits tLiveDocs = MultiFields.getLiveDocs(tir);
|
||||
assertTrue((aLiveDocs != null && tLiveDocs != null) ||
|
||||
(aLiveDocs == null && tLiveDocs == null));
|
||||
if (aLiveDocs != null) {
|
||||
for (int d =0; d<air.maxDoc(); d++) {
|
||||
assertEquals(aDelDocs.get(d), tDelDocs.get(d));
|
||||
assertEquals(aLiveDocs.get(d), tLiveDocs.get(d));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -366,13 +366,13 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
final Bits apDelDocs = MultiFields.getDeletedDocs(aprioriReader);
|
||||
final Bits testDelDocs = MultiFields.getDeletedDocs(testReader);
|
||||
assertTrue((apDelDocs != null && testDelDocs != null) ||
|
||||
(apDelDocs == null && testDelDocs == null));
|
||||
if (apDelDocs != null) {
|
||||
final Bits apLiveDocs = MultiFields.getLiveDocs(aprioriReader);
|
||||
final Bits testLiveDocs = MultiFields.getLiveDocs(testReader);
|
||||
assertTrue((apLiveDocs != null && testLiveDocs != null) ||
|
||||
(apLiveDocs == null && testLiveDocs == null));
|
||||
if (apLiveDocs != null) {
|
||||
for (int docIndex = 0; docIndex < aprioriReader.numDocs(); docIndex++) {
|
||||
assertEquals(apDelDocs.get(docIndex), testDelDocs.get(docIndex));
|
||||
assertEquals(apLiveDocs.get(docIndex), testLiveDocs.get(docIndex));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -407,8 +407,8 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
|
||||
// compare termDocs seeking
|
||||
|
||||
DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), null);
|
||||
DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), null);
|
||||
DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), null);
|
||||
DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), null);
|
||||
|
||||
while (aprioriTermDocs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
|
||||
assertTrue(testTermDocs.advance(aprioriTermDocs.docID()) != DocsEnum.NO_MORE_DOCS);
|
||||
|
@ -419,8 +419,8 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
|
||||
assertEquals(aprioriReader.docFreq(aprioriField, aprioriTermEnum.term()), testReader.docFreq(aprioriField, testTermEnum.term()));
|
||||
|
||||
aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), aprioriTermDocs);
|
||||
testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), testTermDocs);
|
||||
aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), aprioriTermDocs);
|
||||
testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), testTermDocs);
|
||||
|
||||
while (true) {
|
||||
if (aprioriTermDocs.nextDoc() == DocsEnum.NO_MORE_DOCS) {
|
||||
|
@ -439,8 +439,8 @@ public class TestIndicesEquals extends LuceneTestCase {
|
|||
|
||||
// compare term positions
|
||||
|
||||
DocsAndPositionsEnum aprioriTermPositions = aprioriTermEnum.docsAndPositions(MultiFields.getDeletedDocs(aprioriReader), null);
|
||||
DocsAndPositionsEnum testTermPositions = testTermEnum.docsAndPositions(MultiFields.getDeletedDocs(testReader), null);
|
||||
DocsAndPositionsEnum aprioriTermPositions = aprioriTermEnum.docsAndPositions(MultiFields.getLiveDocs(aprioriReader), null);
|
||||
DocsAndPositionsEnum testTermPositions = testTermEnum.docsAndPositions(MultiFields.getLiveDocs(testReader), null);
|
||||
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: enum1=" + aprioriTermPositions + " enum2=" + testTermPositions);
|
||||
|
|
|
@ -53,8 +53,9 @@ public class TestUnoptimizedReaderOnConstructor extends LuceneTestCase {
|
|||
unoptimizedReader.deleteDocument(2);
|
||||
|
||||
try {
|
||||
new InstantiatedIndex(unoptimizedReader);
|
||||
new InstantiatedIndex(unoptimizedReader);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace(System.out);
|
||||
fail("No exceptions when loading an unoptimized reader!");
|
||||
}
|
||||
|
||||
|
|
|
@ -769,7 +769,7 @@ public class MemoryIndex {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Bits getDeletedDocs() {
|
||||
public Bits getLiveDocs() {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -860,7 +860,18 @@ public class MemoryIndex {
|
|||
}
|
||||
|
||||
@Override
|
||||
public SeekStatus seek(BytesRef text, boolean useCache) {
|
||||
public boolean seekExact(BytesRef text, boolean useCache) {
|
||||
termUpto = Arrays.binarySearch(info.sortedTerms, text, termComparator);
|
||||
if (termUpto >= 0) {
|
||||
br.copy(info.sortedTerms[termUpto].getKey());
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public SeekStatus seekCeil(BytesRef text, boolean useCache) {
|
||||
termUpto = Arrays.binarySearch(info.sortedTerms, text, termComparator);
|
||||
if (termUpto < 0) { // not found; choose successor
|
||||
termUpto = -termUpto -1;
|
||||
|
@ -877,13 +888,9 @@ public class MemoryIndex {
|
|||
}
|
||||
|
||||
@Override
|
||||
public SeekStatus seek(long ord) {
|
||||
public void seekExact(long ord) {
|
||||
assert ord < info.sortedTerms.length;
|
||||
termUpto = (int) ord;
|
||||
if (ord < info.sortedTerms.length) {
|
||||
return SeekStatus.FOUND;
|
||||
} else {
|
||||
return SeekStatus.END;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -918,19 +925,19 @@ public class MemoryIndex {
|
|||
}
|
||||
|
||||
@Override
|
||||
public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
|
||||
public DocsEnum docs(Bits liveDocs, DocsEnum reuse) {
|
||||
if (reuse == null || !(reuse instanceof MemoryDocsEnum)) {
|
||||
reuse = new MemoryDocsEnum();
|
||||
}
|
||||
return ((MemoryDocsEnum) reuse).reset(skipDocs, info.sortedTerms[termUpto].getValue());
|
||||
return ((MemoryDocsEnum) reuse).reset(liveDocs, info.sortedTerms[termUpto].getValue());
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) {
|
||||
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) {
|
||||
if (reuse == null || !(reuse instanceof MemoryDocsAndPositionsEnum)) {
|
||||
reuse = new MemoryDocsAndPositionsEnum();
|
||||
}
|
||||
return ((MemoryDocsAndPositionsEnum) reuse).reset(skipDocs, info.sortedTerms[termUpto].getValue());
|
||||
return ((MemoryDocsAndPositionsEnum) reuse).reset(liveDocs, info.sortedTerms[termUpto].getValue());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -939,9 +946,9 @@ public class MemoryIndex {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void seek(BytesRef term, TermState state) throws IOException {
|
||||
public void seekExact(BytesRef term, TermState state) throws IOException {
|
||||
assert state != null;
|
||||
this.seek(((OrdTermState)state).ord);
|
||||
this.seekExact(((OrdTermState)state).ord);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -955,10 +962,10 @@ public class MemoryIndex {
|
|||
private class MemoryDocsEnum extends DocsEnum {
|
||||
private ArrayIntList positions;
|
||||
private boolean hasNext;
|
||||
private Bits skipDocs;
|
||||
private Bits liveDocs;
|
||||
|
||||
public DocsEnum reset(Bits skipDocs, ArrayIntList positions) {
|
||||
this.skipDocs = skipDocs;
|
||||
public DocsEnum reset(Bits liveDocs, ArrayIntList positions) {
|
||||
this.liveDocs = liveDocs;
|
||||
this.positions = positions;
|
||||
hasNext = true;
|
||||
return this;
|
||||
|
@ -971,7 +978,7 @@ public class MemoryIndex {
|
|||
|
||||
@Override
|
||||
public int nextDoc() {
|
||||
if (hasNext && (skipDocs == null || !skipDocs.get(0))) {
|
||||
if (hasNext && (liveDocs == null || liveDocs.get(0))) {
|
||||
hasNext = false;
|
||||
return 0;
|
||||
} else {
|
||||
|
@ -994,10 +1001,10 @@ public class MemoryIndex {
|
|||
private ArrayIntList positions;
|
||||
private int posUpto;
|
||||
private boolean hasNext;
|
||||
private Bits skipDocs;
|
||||
private Bits liveDocs;
|
||||
|
||||
public DocsAndPositionsEnum reset(Bits skipDocs, ArrayIntList positions) {
|
||||
this.skipDocs = skipDocs;
|
||||
public DocsAndPositionsEnum reset(Bits liveDocs, ArrayIntList positions) {
|
||||
this.liveDocs = liveDocs;
|
||||
this.positions = positions;
|
||||
posUpto = 0;
|
||||
hasNext = true;
|
||||
|
@ -1011,7 +1018,7 @@ public class MemoryIndex {
|
|||
|
||||
@Override
|
||||
public int nextDoc() {
|
||||
if (hasNext && (skipDocs == null || !skipDocs.get(0))) {
|
||||
if (hasNext && (liveDocs == null || liveDocs.get(0))) {
|
||||
hasNext = false;
|
||||
return 0;
|
||||
} else {
|
||||
|
|
|
@ -27,22 +27,6 @@
|
|||
|
||||
<import file="../contrib-build.xml"/>
|
||||
|
||||
<module-uptodate name="analysis/common" jarfile="${common.dir}/../modules/analysis/build/common/lucene-analyzers-common-${version}.jar"
|
||||
property="analyzers-common.uptodate" classpath.property="analyzers-common.jar"/>
|
||||
|
||||
<path id="classpath">
|
||||
<pathelement path="${analyzers-common.jar}"/>
|
||||
<path refid="base.classpath"/>
|
||||
</path>
|
||||
|
||||
<target name="compile-core" depends="compile-analyzers-common, common.compile-core" />
|
||||
|
||||
<target name="compile-analyzers-common" unless="analyzers-common.uptodate">
|
||||
<subant target="default">
|
||||
<fileset dir="${common.dir}/../modules/analysis/common" includes="build.xml"/>
|
||||
</subant>
|
||||
</target>
|
||||
|
||||
<target name="build-native-unix" >
|
||||
<mkdir dir="${common.build.dir}/native"/>
|
||||
|
||||
|
|
|
@ -27,7 +27,6 @@ import org.apache.lucene.search.Similarity;
|
|||
import org.apache.lucene.search.SimilarityProvider;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.FSDirectory;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.ReaderUtil;
|
||||
|
||||
|
@ -111,7 +110,6 @@ public class FieldNormModifier {
|
|||
* @param field the field whose norms should be reset
|
||||
*/
|
||||
public void reSetNorms(String field) throws IOException {
|
||||
String fieldName = StringHelper.intern(field);
|
||||
Similarity fieldSim = sim.get(field);
|
||||
IndexReader reader = null;
|
||||
try {
|
||||
|
@ -122,7 +120,7 @@ public class FieldNormModifier {
|
|||
|
||||
final FieldInvertState invertState = new FieldInvertState();
|
||||
for(IndexReader subReader : subReaders) {
|
||||
final Bits delDocs = subReader.getDeletedDocs();
|
||||
final Bits liveDocs = subReader.getLiveDocs();
|
||||
|
||||
int[] termCounts = new int[subReader.maxDoc()];
|
||||
Fields fields = subReader.fields();
|
||||
|
@ -132,7 +130,7 @@ public class FieldNormModifier {
|
|||
TermsEnum termsEnum = terms.iterator();
|
||||
DocsEnum docs = null;
|
||||
while(termsEnum.next() != null) {
|
||||
docs = termsEnum.docs(delDocs, docs);
|
||||
docs = termsEnum.docs(liveDocs, docs);
|
||||
while(true) {
|
||||
int docID = docs.nextDoc();
|
||||
if (docID != docs.NO_MORE_DOCS) {
|
||||
|
@ -147,9 +145,9 @@ public class FieldNormModifier {
|
|||
|
||||
invertState.setBoost(1.0f);
|
||||
for (int d = 0; d < termCounts.length; d++) {
|
||||
if (delDocs == null || !delDocs.get(d)) {
|
||||
if (liveDocs == null || liveDocs.get(d)) {
|
||||
invertState.setLength(termCounts[d]);
|
||||
subReader.setNorm(d, fieldName, fieldSim.encodeNormValue(fieldSim.computeNorm(invertState)));
|
||||
subReader.setNorm(d, field, fieldSim.encodeNormValue(fieldSim.computeNorm(invertState)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,13 +21,12 @@ import java.io.File;
|
|||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
|
||||
import org.apache.lucene.index.IndexWriter; // javadoc
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.FSDirectory;
|
||||
import org.apache.lucene.util.OpenBitSet;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.OpenBitSet;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
/**
|
||||
|
@ -98,7 +97,7 @@ public class MultiPassIndexSplitter {
|
|||
}
|
||||
IndexWriter w = new IndexWriter(outputs[i], new IndexWriterConfig(
|
||||
Version.LUCENE_CURRENT,
|
||||
new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
|
||||
null)
|
||||
.setOpenMode(OpenMode.CREATE));
|
||||
System.err.println("Writing part " + (i + 1) + " ...");
|
||||
w.addIndexes(input);
|
||||
|
@ -178,27 +177,17 @@ public class MultiPassIndexSplitter {
|
|||
* Instead, deletions are buffered in a bitset and overlaid with the original
|
||||
* list of deletions.
|
||||
*/
|
||||
public static class FakeDeleteIndexReader extends FilterIndexReader {
|
||||
OpenBitSet dels;
|
||||
OpenBitSet oldDels = null;
|
||||
public static final class FakeDeleteIndexReader extends FilterIndexReader {
|
||||
OpenBitSet liveDocs;
|
||||
|
||||
public FakeDeleteIndexReader(IndexReader in) {
|
||||
super(new SlowMultiReaderWrapper(in));
|
||||
dels = new OpenBitSet(in.maxDoc());
|
||||
if (in.hasDeletions()) {
|
||||
oldDels = new OpenBitSet(in.maxDoc());
|
||||
final Bits oldDelBits = MultiFields.getDeletedDocs(in);
|
||||
assert oldDelBits != null;
|
||||
for (int i = 0; i < in.maxDoc(); i++) {
|
||||
if (oldDelBits.get(i)) oldDels.set(i);
|
||||
}
|
||||
dels.or(oldDels);
|
||||
}
|
||||
doUndeleteAll(); // initialize main bitset
|
||||
}
|
||||
|
||||
@Override
|
||||
public int numDocs() {
|
||||
return in.maxDoc() - (int)dels.cardinality();
|
||||
return (int) liveDocs.cardinality();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -206,26 +195,35 @@ public class MultiPassIndexSplitter {
|
|||
* deletions.
|
||||
*/
|
||||
@Override
|
||||
protected void doUndeleteAll() throws CorruptIndexException, IOException {
|
||||
dels = new OpenBitSet(in.maxDoc());
|
||||
if (oldDels != null) {
|
||||
dels.or(oldDels);
|
||||
protected void doUndeleteAll() {
|
||||
final int maxDoc = in.maxDoc();
|
||||
liveDocs = new OpenBitSet(maxDoc);
|
||||
if (in.hasDeletions()) {
|
||||
final Bits oldLiveDocs = in.getLiveDocs();
|
||||
assert oldLiveDocs != null;
|
||||
// this loop is a little bit ineffective, as Bits has no nextSetBit():
|
||||
for (int i = 0; i < maxDoc; i++) {
|
||||
if (oldLiveDocs.get(i)) liveDocs.fastSet(i);
|
||||
}
|
||||
} else {
|
||||
// mark all docs as valid
|
||||
liveDocs.set(0, maxDoc);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doDelete(int n) throws CorruptIndexException, IOException {
|
||||
dels.set(n);
|
||||
protected void doDelete(int n) {
|
||||
liveDocs.clear(n);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasDeletions() {
|
||||
return !dels.isEmpty();
|
||||
return (in.maxDoc() != this.numDocs());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bits getDeletedDocs() {
|
||||
return dels;
|
||||
public Bits getLiveDocs() {
|
||||
return liveDocs;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -351,7 +351,7 @@ public class NRTManager implements Closeable {
|
|||
}
|
||||
|
||||
/** NOTE: caller must separately close the writer. */
|
||||
// @Override -- not until Java 1.6
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
swapSearcher(null, indexingGen.getAndIncrement(), true);
|
||||
}
|
||||
|
|
|
@ -19,16 +19,16 @@ package org.apache.lucene.index;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.apache.lucene.search.TermRangeFilter;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.OpenBitSetDISI;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.OpenBitSetDISI;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
/**
|
||||
|
@ -87,13 +87,14 @@ public class PKIndexSplitter {
|
|||
}
|
||||
|
||||
public static class DocumentFilteredIndexReader extends FilterIndexReader {
|
||||
final Bits readerDels;
|
||||
final Bits liveDocs;
|
||||
final int numDocs;
|
||||
|
||||
public DocumentFilteredIndexReader(IndexReader reader, Filter preserveFilter, boolean negateFilter) throws IOException {
|
||||
super(new SlowMultiReaderWrapper(reader));
|
||||
|
||||
final OpenBitSetDISI bits = new OpenBitSetDISI(in.maxDoc());
|
||||
final int maxDoc = in.maxDoc();
|
||||
final OpenBitSetDISI bits = new OpenBitSetDISI(maxDoc);
|
||||
final DocIdSet docs = preserveFilter.getDocIdSet((AtomicReaderContext) in.getTopReaderContext());
|
||||
if (docs != null) {
|
||||
final DocIdSetIterator it = docs.iterator();
|
||||
|
@ -101,23 +102,24 @@ public class PKIndexSplitter {
|
|||
bits.inPlaceOr(it);
|
||||
}
|
||||
}
|
||||
// this is somehow inverse, if we negate the filter, we delete all documents it matches!
|
||||
if (!negateFilter) {
|
||||
bits.flip(0, in.maxDoc());
|
||||
if (negateFilter) {
|
||||
bits.flip(0, maxDoc);
|
||||
}
|
||||
|
||||
if (in.hasDeletions()) {
|
||||
final Bits oldDelBits = in.getDeletedDocs();
|
||||
assert oldDelBits != null;
|
||||
for (int i = 0; i < in.maxDoc(); i++) {
|
||||
if (oldDelBits.get(i)) {
|
||||
bits.set(i);
|
||||
final Bits oldLiveDocs = in.getLiveDocs();
|
||||
assert oldLiveDocs != null;
|
||||
final DocIdSetIterator it = bits.iterator();
|
||||
for (int i = it.nextDoc(); i < maxDoc; i = it.nextDoc()) {
|
||||
if (!oldLiveDocs.get(i)) {
|
||||
// we can safely modify the current bit, as the iterator already stepped over it:
|
||||
bits.fastClear(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.readerDels = bits;
|
||||
this.numDocs = in.maxDoc() - (int) bits.cardinality();
|
||||
this.liveDocs = bits;
|
||||
this.numDocs = (int) bits.cardinality();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -131,8 +133,8 @@ public class PKIndexSplitter {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Bits getDeletedDocs() {
|
||||
return readerDels;
|
||||
public Bits getLiveDocs() {
|
||||
return liveDocs;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,7 +15,6 @@ package org.apache.lucene.index;
*
*/

import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;

@@ -54,9 +53,6 @@ public class TermVectorAccessor {
* @throws IOException
*/
public void accept(IndexReader indexReader, int documentNumber, String fieldName, TermVectorMapper mapper) throws IOException {

fieldName = StringHelper.intern(fieldName);

decoratedMapper.decorated = mapper;
decoratedMapper.termVectorStored = false;
@@ -100,7 +96,7 @@ public class TermVectorAccessor {
positions.clear();
}

final Bits delDocs = MultiFields.getDeletedDocs(indexReader);
final Bits liveDocs = MultiFields.getLiveDocs(indexReader);

Terms terms = MultiFields.getTerms(indexReader, field);
boolean anyTerms = false;
@@ -113,9 +109,9 @@ public class TermVectorAccessor {
if (text != null) {
anyTerms = true;
if (!mapper.isIgnoringPositions()) {
docs = postings = termsEnum.docsAndPositions(delDocs, postings);
docs = postings = termsEnum.docsAndPositions(liveDocs, postings);
} else {
docs = termsEnum.docs(delDocs, docs);
docs = termsEnum.docs(liveDocs, docs);
}

int docID = docs.advance(documentNumber);

@@ -186,12 +186,12 @@ public class HighFreqTerms {
}

TermsEnum termsEnum = terms.iterator();
if (termsEnum.seek(termText) != TermsEnum.SeekStatus.FOUND) {
if (termsEnum.seekCeil(termText) != TermsEnum.SeekStatus.FOUND) {
return 0;
}

Bits skipDocs = MultiFields.getDeletedDocs(reader);
if (skipDocs == null) {
Bits liveDocs = MultiFields.getLiveDocs(reader);
if (liveDocs == null) {
// TODO: we could do this up front, during the scan
// (next()), instead of after-the-fact here w/ seek,
// if the codec supports it and there are no del
@@ -202,7 +202,7 @@ public class HighFreqTerms {
}
}

DocsEnum de = termsEnum.docs(skipDocs, null);
DocsEnum de = termsEnum.docs(liveDocs, null);

// use DocsEnum.read() and BulkResult api
final DocsEnum.BulkReadResult bulkresult = de.getBulkResult();

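HighFreqTerms now re-seeks the term with seekCeil() and passes the reader's liveDocs into TermsEnum.docs(), so deleted documents never reach the counting loop. A hedged sketch of that pattern with a plain nextDoc() loop instead of the bulk-read API (reader, field and termText are assumed to be in scope):

    Terms terms = MultiFields.getTerms(reader, field);
    int count = 0;
    if (terms != null) {
      TermsEnum termsEnum = terms.iterator();
      if (termsEnum.seekCeil(termText) == TermsEnum.SeekStatus.FOUND) {
        Bits liveDocs = MultiFields.getLiveDocs(reader);   // null when nothing is deleted
        DocsEnum de = termsEnum.docs(liveDocs, null);      // enum silently skips deleted docs
        while (de.nextDoc() != DocsEnum.NO_MORE_DOCS) {
          count++;                                         // doc freq over live documents only
        }
      }
    }
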
@@ -16,7 +16,6 @@ package org.apache.lucene.misc;
* limitations under the License.
*/

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
@@ -40,7 +39,7 @@ public class IndexMergeTool {
FSDirectory mergedIndex = FSDirectory.open(new File(args[0]));

IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(
Version.LUCENE_CURRENT, new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
Version.LUCENE_CURRENT, null)
.setOpenMode(OpenMode.CREATE));

Directory[] indexes = new Directory[args.length - 1];

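The change above drops the WhitespaceAnalyzer because a merge-only IndexWriter never analyzes text, so the IndexWriterConfig can take a null Analyzer. A short sketch of that setup; the output path is made up, and the trailing addIndexes() call is the tool's expected next step rather than part of this hunk:

    // Assumed example path; only the null-analyzer config is taken from the diff.
    FSDirectory mergedIndex = FSDirectory.open(new File("/path/to/merged-index"));
    IndexWriter writer = new IndexWriter(mergedIndex,
        new IndexWriterConfig(Version.LUCENE_CURRENT, null)   // no analyzer needed for merging
            .setOpenMode(OpenMode.CREATE));
    writer.addIndexes(indexes);   // indexes: the source Directory[] built from the arguments
    writer.close();
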
@@ -21,13 +21,8 @@ import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexWriter; // javadocs
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.store.RAMDirectory; // javadocs
import org.apache.lucene.util.IOUtils;

@@ -221,6 +216,26 @@ public class NRTCachingDirectory extends Directory {
}
}

@Override
public synchronized CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
if (cache.fileExists(name)) {
return cache.openCompoundInput(name, context);
} else {
return delegate.openCompoundInput(name, context);
}
}

@Override
public synchronized CompoundFileDirectory createCompoundOutput(String name, IOContext context)
throws IOException {
if (cache.fileExists(name)) {
throw new IOException("File " + name + "already exists");
} else {
return delegate.createCompoundOutput(name, context);
}
}

/** Close this directory, which flushes any cached files
* to the delegate and then closes the delegate. */
@Override

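The two new overrides route compound-file reads and writes the same way the existing methods do: whatever still lives in the RAM cache is served from there, everything else goes to the delegate. For context, a hedged usage sketch of the directory itself; the constructor arguments (size thresholds in MB) and the analyzer choice are illustrative assumptions, not taken from this diff:

    // Wrap the real directory so small, short-lived NRT segments stay in RAM.
    Directory fsDir = FSDirectory.open(new File("/path/to/index"));
    NRTCachingDirectory cachedDir = new NRTCachingDirectory(fsDir, 5.0, 60.0);
    IndexWriter writer = new IndexWriter(cachedDir,
        new IndexWriterConfig(Version.LUCENE_CURRENT,
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT)));
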
@@ -78,7 +78,7 @@ public class TestIndexSplitter extends LuceneTestCase {
_TestUtil.rmDir(destDir2);
destDir2.mkdirs();
IndexSplitter.main(new String[] {dir.getAbsolutePath(), destDir2.getAbsolutePath(), splitSegName});
assertEquals(3, destDir2.listFiles().length);
assertEquals(4, destDir2.listFiles().length);
Directory fsDirDest2 = newFSDirectory(destDir2);
r = IndexReader.open(fsDirDest2, true);
assertEquals(50, r.maxDoc());

@@ -73,7 +73,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
Document doc = ir.document(0);
assertEquals("0", doc.get("id"));
TermsEnum te = MultiFields.getTerms(ir, "id").iterator();
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef("1")));
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seekCeil(new BytesRef("1")));
assertNotSame("1", te.term().utf8ToString());
ir.close();
ir = IndexReader.open(dirs[1], true);
@@ -81,7 +81,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
doc = ir.document(0);
assertEquals("1", doc.get("id"));
te = MultiFields.getTerms(ir, "id").iterator();
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef("0")));
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seekCeil(new BytesRef("0")));

assertNotSame("0", te.term().utf8ToString());
ir.close();
@@ -91,10 +91,10 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
assertEquals("2", doc.get("id"));

te = MultiFields.getTerms(ir, "id").iterator();
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef("1")));
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seekCeil(new BytesRef("1")));
assertNotSame("1", te.term());

assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef("0")));
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seekCeil(new BytesRef("0")));
assertNotSame("0", te.term().utf8ToString());
ir.close();
for (Directory d : dirs)
@@ -132,7 +132,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
// make sure the deleted doc is not here
TermsEnum te = MultiFields.getTerms(ir, "id").iterator();
Term t = new Term("id", (NUM_DOCS - 1) + "");
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef(t.text())));
assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seekCeil(new BytesRef(t.text())));
assertNotSame(t.text(), te.term().utf8ToString());
ir.close();
for (Directory d : dirs)

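All of the assertions above rely on the seekCeil() contract that replaces the old seek(): FOUND when the exact term exists, NOT_FOUND when the enum lands on the next greater term, and END when no term at or after the target exists. A small sketch of how a caller reads that status (ir and the "id" field are the test's own names):

    TermsEnum te = MultiFields.getTerms(ir, "id").iterator();
    switch (te.seekCeil(new BytesRef("1"))) {
      case FOUND:     /* the term "1" is present; te is positioned on it         */ break;
      case NOT_FOUND: /* "1" is gone; te.term() is the smallest term after "1"   */ break;
      case END:       /* no term at or after "1" exists in this field            */ break;
    }
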
@@ -134,11 +134,11 @@ public class TestNRTManager extends LuceneTestCase {
System.out.println("TEST: now warm merged reader=" + reader);
}
final int maxDoc = reader.maxDoc();
final Bits delDocs = reader.getDeletedDocs();
final Bits liveDocs = reader.getLiveDocs();
int sum = 0;
final int inc = Math.max(1, maxDoc/50);
for(int docID=0;docID<maxDoc;docID += inc) {
if (delDocs == null || !delDocs.get(docID)) {
if (liveDocs == null || liveDocs.get(docID)) {
final Document doc = reader.document(docID);
sum += doc.getFields().size();
}
@@ -524,7 +524,7 @@ public class TestNRTManager extends LuceneTestCase {
//System.out.println("trigger " + trigger);
shift = random.nextInt(trigger);
}
termsEnum.seek(new BytesRef(""));
termsEnum.seekCeil(new BytesRef(""));
continue;
}
seenTermCount++;
@@ -670,7 +670,7 @@ public class TestNRTManager extends LuceneTestCase {

private int runQuery(IndexSearcher s, Query q) throws Exception {
s.search(q, 10);
return s.search(q, null, 10, new Sort(new SortField("title", SortField.STRING))).totalHits;
return s.search(q, null, 10, new Sort(new SortField("title", SortField.Type.STRING))).totalHits;
}

private void smokeTestSearcher(IndexSearcher s) throws Exception {

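The warmer now asks the reader for liveDocs instead of a deletion bitset; a null result means nothing is deleted, so the check inverts from "not deleted" to "live". The idiom as a standalone sketch (reader is any open IndexReader):

    Bits liveDocs = reader.getLiveDocs();
    for (int docID = 0; docID < reader.maxDoc(); docID++) {
      if (liveDocs == null || liveDocs.get(docID)) {   // null liveDocs: every doc is live
        Document doc = reader.document(docID);
        // ... visit the live document ...
      }
    }
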
@@ -87,9 +87,9 @@ public class TestPKIndexSplitter extends LuceneTestCase {
}

private void checkContents(IndexReader ir, String indexname) throws Exception {
final Bits delDocs = MultiFields.getDeletedDocs(ir);
final Bits liveDocs = MultiFields.getLiveDocs(ir);
for (int i = 0; i < ir.maxDoc(); i++) {
if (delDocs == null || !delDocs.get(i)) {
if (liveDocs == null || liveDocs.get(i)) {
assertEquals(indexname, ir.document(i).get("indexname"));
}
}

@@ -155,14 +155,14 @@ public class TestAppendingCodec extends LuceneTestCase {
Terms terms = fields.terms("f");
assertNotNull(terms);
TermsEnum te = terms.iterator();
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("quick")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("brown")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("fox")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("jumped")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("over")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("lazy")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("dog")));
assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("the")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("quick")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("brown")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("fox")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("jumped")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("over")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("lazy")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("dog")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("the")));
DocsEnum de = te.docs(null, null);
assertTrue(de.advance(0) != DocsEnum.NO_MORE_DOCS);
assertEquals(2, de.freq());

@@ -17,7 +17,7 @@
limitations under the License.
-->

<project name="queries" default="default">
<project name="queries-contrib" default="default">

<description>
Queries - various query object exotica not in core

@@ -86,7 +86,7 @@ public class DuplicateFilter

private OpenBitSet correctBits(IndexReader reader) throws IOException {
OpenBitSet bits = new OpenBitSet(reader.maxDoc()); //assume all are INvalid
final Bits delDocs = MultiFields.getDeletedDocs(reader);
final Bits liveDocs = MultiFields.getLiveDocs(reader);
Terms terms = reader.fields().terms(fieldName);
if (terms != null) {
TermsEnum termsEnum = terms.iterator();
@@ -96,7 +96,7 @@ public class DuplicateFilter
if (currTerm == null) {
break;
} else {
docs = termsEnum.docs(delDocs, docs);
docs = termsEnum.docs(liveDocs, docs);
int doc = docs.nextDoc();
if (doc != DocsEnum.NO_MORE_DOCS) {
if (keepMode == KM_USE_FIRST_OCCURRENCE) {
@@ -124,7 +124,7 @@ public class DuplicateFilter

OpenBitSet bits=new OpenBitSet(reader.maxDoc());
bits.set(0,reader.maxDoc()); //assume all are valid
final Bits delDocs = MultiFields.getDeletedDocs(reader);
final Bits liveDocs = MultiFields.getLiveDocs(reader);
Terms terms = reader.fields().terms(fieldName);
if (terms != null) {
TermsEnum termsEnum = terms.iterator();
@@ -136,7 +136,7 @@ public class DuplicateFilter
} else {
if (termsEnum.docFreq() > 1) {
// unset potential duplicates
docs = termsEnum.docs(delDocs, docs);
docs = termsEnum.docs(liveDocs, docs);
int doc = docs.nextDoc();
if (doc != DocsEnum.NO_MORE_DOCS) {
if (keepMode == KM_USE_FIRST_OCCURRENCE) {

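Both bitset builders above now hand the reader's liveDocs to TermsEnum.docs(), so a deleted document can no longer be chosen as the kept occurrence of a duplicate key. A hedged sketch of the "keep the first occurrence" variant, assuming termsEnum, liveDocs and the OpenBitSet named bits are set up as in correctBits():

    DocsEnum docs = null;
    BytesRef currTerm;
    while ((currTerm = termsEnum.next()) != null) {
      docs = termsEnum.docs(liveDocs, docs);     // reuse the enum, skip deleted docs
      int doc = docs.nextDoc();
      if (doc != DocsEnum.NO_MORE_DOCS) {
        bits.set(doc);                           // first live occurrence of this key wins
      }
    }
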
@@ -190,7 +190,6 @@ public class FuzzyLikeThisQuery extends Query
CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);

int corpusNumDocs=reader.numDocs();
Term internSavingTemplateTerm =new Term(f.fieldName); //optimization to avoid constructing new Term() objects
HashSet<String> processedTerms=new HashSet<String>();
ts.reset();
while (ts.incrementToken())
@@ -201,7 +200,7 @@ public class FuzzyLikeThisQuery extends Query
processedTerms.add(term);
ScoreTermQueue variantsQ=new ScoreTermQueue(MAX_VARIANTS_PER_TERM); //maxNum variants considered for any one term
float minScore=0;
Term startTerm=internSavingTemplateTerm.createTerm(term);
Term startTerm=new Term(f.fieldName, term);
AttributeSource atts = new AttributeSource();
MaxNonCompetitiveBoostAttribute maxBoostAtt =
atts.addAttribute(MaxNonCompetitiveBoostAttribute.class);

@@ -33,7 +33,7 @@ import org.apache.lucene.util.BytesRef;
* This class will be removed in Lucene 5.0
*/
@Deprecated
public final class SlowCollatedStringComparator extends FieldComparator<BytesRef> {
public final class SlowCollatedStringComparator extends FieldComparator<String> {

private final String[] values;
private DocTerms currentDocTerms;
@@ -99,13 +99,12 @@ public final class SlowCollatedStringComparator extends FieldComparator<BytesRef
}

@Override
public BytesRef value(int slot) {
final String s = values[slot];
return s == null ? null : new BytesRef(values[slot]);
public String value(int slot) {
return values[slot];
}

@Override
public int compareValues(BytesRef first, BytesRef second) {
public int compareValues(String first, String second) {
if (first == null) {
if (second == null) {
return 0;

@@ -63,7 +63,7 @@ public class TermsFilter
OpenBitSet result=new OpenBitSet(reader.maxDoc());
Fields fields = reader.fields();
BytesRef br = new BytesRef();
Bits delDocs = reader.getDeletedDocs();
Bits liveDocs = reader.getLiveDocs();
if (fields != null) {
String lastField = null;
Terms termsC = null;
@@ -71,7 +71,7 @@ public class TermsFilter
DocsEnum docs = null;
for (Iterator<Term> iter = terms.iterator(); iter.hasNext();) {
Term term = iter.next();
if (term.field() != lastField) {
if (!term.field().equals(lastField)) {
termsC = fields.terms(term.field());
termsEnum = termsC.iterator();
lastField = term.field();
@@ -79,8 +79,8 @@ public class TermsFilter

if (terms != null) {
br.copy(term.bytes());
if (termsEnum.seek(br) == TermsEnum.SeekStatus.FOUND) {
docs = termsEnum.docs(delDocs, docs);
if (termsEnum.seekCeil(br) == TermsEnum.SeekStatus.FOUND) {
docs = termsEnum.docs(liveDocs, docs);
while(docs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
result.set(docs.docID());
}

@@ -19,6 +19,7 @@ package org.apache.lucene.search.regex;

import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.FilteredTermsEnum;
import org.apache.lucene.search.RegexpQuery; // javadoc
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.AttributeSource;
@@ -29,7 +30,10 @@ import java.io.IOException;
/** Implements the regular expression term search query.
* The expressions supported depend on the regular expression implementation
* used by way of the {@link RegexCapabilities} interface.
*
* <p>
* NOTE: You may wish to consider using the regex query support
* in {@link RegexpQuery} instead, as it has better performance.
*
* @see RegexTermsEnum
*/
public class RegexQuery extends MultiTermQuery implements RegexQueryCapable {

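The new javadoc note points callers at the automaton-based RegexpQuery in core as the faster alternative. A hedged sketch of that alternative; the field name and pattern are made up for illustration:

    Query q = new RegexpQuery(new Term("body", "luc.*e"));
    TopDocs hits = searcher.search(q, 10);
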
@@ -139,7 +139,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
Document d=searcher.doc(hits[i].doc);
String url=d.get(KEY_FIELD);
DocsEnum td = MultiFields.getTermDocsEnum(reader,
MultiFields.getDeletedDocs(reader),
MultiFields.getLiveDocs(reader),
KEY_FIELD,
new BytesRef(url));
int lastDoc=0;
@@ -163,7 +163,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
Document d=searcher.doc(hits[i].doc);
String url=d.get(KEY_FIELD);
DocsEnum td = MultiFields.getTermDocsEnum(reader,
MultiFields.getDeletedDocs(reader),
MultiFields.getLiveDocs(reader),
KEY_FIELD,
new BytesRef(url));
int lastDoc=0;

@@ -41,6 +41,7 @@ public class TestSlowCollationMethods extends LuceneTestCase {
private static IndexReader reader;
private static Directory dir;
private static int numDocs;
private static String splitDoc;

@BeforeClass
public static void beforeClass() throws Exception {
@@ -59,6 +60,7 @@ public class TestSlowCollationMethods extends LuceneTestCase {
doc.add(field);
iw.addDocument(doc);
}
splitDoc = _TestUtil.randomUnicodeString(random);
reader = iw.getReader();
iw.close();
@@ -76,6 +78,15 @@ public class TestSlowCollationMethods extends LuceneTestCase {
dir = null;
}

private void doCheckSorting(TopDocs docs) throws Exception {
String prev = "";
for (ScoreDoc doc : docs.scoreDocs) {
String value = reader.document(doc.doc).get("field");
assertTrue(collator.compare(value, prev) >= 0);
prev = value;
}
}

public void testSort() throws Exception {
SortField sf = new SortField("field", new FieldComparatorSource() {
@Override
@@ -83,13 +94,16 @@ public class TestSlowCollationMethods extends LuceneTestCase {
return new SlowCollatedStringComparator(numHits, fieldname, collator);
}
});
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), null, numDocs, new Sort(sf));
String prev = "";
for (ScoreDoc doc : docs.scoreDocs) {
String value = reader.document(doc.doc).get("field");
assertTrue(collator.compare(value, prev) >= 0);
prev = value;
}
final Sort sort = new Sort(sf);

final TopDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random.nextInt(4)), sort);
doCheckSorting(docs1);

final TopDocs docs2 = searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random.nextInt(4)), sort);
doCheckSorting(docs2);

final TopDocs docs = TopDocs.merge(sort, numDocs/(1+random.nextInt(4)), new TopDocs[]{docs1, docs2});
doCheckSorting(docs);
}

private void doTestRanges(String startPoint, String endPoint, Query query) throws Exception {

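The rewritten testSort() now checks ordering on two range-restricted searches and then on their combination via TopDocs.merge(), which merges already-sorted results using the same Sort that produced them. The merge step in isolation (the queries and result sizes are illustrative):

    TopDocs left   = searcher.search(queryA, null, 10, sort);
    TopDocs right  = searcher.search(queryB, null, 10, sort);
    TopDocs merged = TopDocs.merge(sort, 10, new TopDocs[] { left, right });
    // merged.scoreDocs is globally ordered by 'sort' across both inputs
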
@@ -17,7 +17,7 @@
limitations under the License.
-->

<project name="queryparser" default="default">
<project name="queryparser-contrib" default="default">

<description>
Flexible Query Parser

@@ -0,0 +1,118 @@
package org.apache.lucene.queryParser.core.config;

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import java.util.HashMap;

/**
* <p>
* This class is the base of {@link QueryConfigHandler} and {@link FieldConfig}.
* It has operations to set, unset and get configuration values.
* </p>
* <p>
* Each configuration is is a key->value pair. The key should be an unique
* {@link ConfigurationKey} instance and it also holds the value's type.
* </p>
*
* @see ConfigurationKey
*/
public abstract class AbstractQueryConfig {

final private HashMap<ConfigurationKey<?>, Object> configMap = new HashMap<ConfigurationKey<?>, Object>();

AbstractQueryConfig() {
// although this class is public, it can only be constructed from package
}

/**
* Returns the value held by the given key.
*
* @param <T> the value's type
*
* @param key the key, cannot be <code>null</code>
*
* @return the value held by the given key
*/
@SuppressWarnings("unchecked")
public <T> T get(ConfigurationKey<T> key) {

if (key == null) {
throw new IllegalArgumentException("key cannot be null!");
}

return (T) this.configMap.get(key);

}

/**
* Returns true if there is a value set with the given key, otherwise false.
*
* @param <T> @param <T> the value's type
* @param key the key, cannot be <code>null</code>
* @return true if there is a value set with the given key, otherwise false
*/
public <T> boolean has(ConfigurationKey<T> key) {

if (key == null) {
throw new IllegalArgumentException("key cannot be null!");
}

return this.configMap.containsKey(key);

}

/**
* Sets a key and its value.
*
* @param <T> the value's type
* @param key the key, cannot be <code>null</code>
* @param value
*/
public <T> void set(ConfigurationKey<T> key, T value) {

if (key == null) {
throw new IllegalArgumentException("key cannot be null!");
}

if (value == null) {
unset(key);

} else {
this.configMap.put(key, value);
}

}

/**
* Unsets the given key and its value.
*
* @param <T> the value's type
* @param key the key
* @return true if the key and value was set and removed, otherwise false
*/
public <T> boolean unset(ConfigurationKey<T> key) {

if (key == null) {
throw new IllegalArgumentException("key cannot be null!");
}

return this.configMap.remove(key) != null;

}

}

@@ -0,0 +1,42 @@
package org.apache.lucene.queryParser.core.config;

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
* An instance of this class represents a key that is used to retrieve a value
* from {@link AbstractQueryConfig}. It also holds the value's type, which is
* defined in the generic argument.
*
* @see AbstractQueryConfig
*/
final public class ConfigurationKey<T> {

private ConfigurationKey() {}

/**
* Creates a new instance.
*
* @param <T> the value's type
*
* @return a new instance
*/
public static <T> ConfigurationKey<T> newInstance() {
return new ConfigurationKey<T>();
}

}

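Together, the two new files replace the old AttributeSource-based configuration with typed key/value pairs: a ConfigurationKey<T> is created once and then used against any AbstractQueryConfig subclass, such as the FieldConfig changed below. A hypothetical usage sketch (the key name and value are made up):

    // The generic parameter carries the value type, so get() needs no cast.
    static final ConfigurationKey<Boolean> ALLOW_LEADING_WILDCARD = ConfigurationKey.newInstance();

    void configure(FieldConfig config) {
      config.set(ALLOW_LEADING_WILDCARD, Boolean.TRUE);
      if (config.has(ALLOW_LEADING_WILDCARD)) {
        boolean allowed = config.get(ALLOW_LEADING_WILDCARD);
      }
    }
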
@@ -17,16 +17,10 @@ package org.apache.lucene.queryParser.core.config;
* limitations under the License.
*/

import org.apache.lucene.util.AttributeSource;

/**
* This class represents a field configuration. Every configuration should be
* set using the methods inherited from {@link AttributeSource}.
*
* @see QueryConfigHandler
* @see org.apache.lucene.util.Attribute
* This class represents a field configuration.
*/
public class FieldConfig extends AttributeSource {
public class FieldConfig extends AbstractQueryConfig {

private String fieldName;

@@ -57,7 +51,7 @@ public class FieldConfig extends AttributeSource {

@Override
public String toString() {
return "<fieldconfig name=\"" + this.fieldName + "\" attributes=\""
return "<fieldconfig name=\"" + this.fieldName + "\" configurations=\""
+ super.toString() + "\"/>";
}

Some files were not shown because too many files have changed in this diff.