LUCENE-3998: consolidate facet module examples under demo
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1440578 13f79535-47bb-0310-9956-ffa450edef68
commit 1e63ff0b6e
parent 9bfa9dc767
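For context, the example code consolidated by this commit indexes documents together with facet categories through the Lucene 4.2-era facet API that appears throughout the diff below (FacetFields, CategoryPath, DirectoryTaxonomyWriter). The following is a minimal, self-contained sketch of that indexing flow, assembled only from calls visible in this diff; it is not part of the commit, and the in-memory directories, field values, and the "a/b" category are illustrative assumptions.

import java.util.Collections;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version;

/** Hypothetical standalone sketch; not part of this commit. */
public class FacetIndexingSketch {
  public static void main(String[] args) throws Exception {
    // In-memory directories for the main index and the taxonomy (illustrative choice).
    Directory indexDir = new RAMDirectory();
    Directory taxoDir = new RAMDirectory();

    IndexWriter indexWriter = new IndexWriter(indexDir,
        new IndexWriterConfig(Version.LUCENE_42, new WhitespaceAnalyzer(Version.LUCENE_42)));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

    // FacetFields adds the category fields for a document, backed by the taxonomy writer.
    FacetFields facetFields = new FacetFields(taxoWriter);

    Document doc = new Document();
    doc.add(new TextField("content", "sample text", Field.Store.NO));
    // "a/b" is an arbitrary example category path.
    facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a/b", '/')));
    indexWriter.addDocument(doc);

    IOUtils.close(indexWriter, taxoWriter);
  }
}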
@@ -191,8 +191,7 @@
<exclude name="analysis/stempel/classes/java/org/egothor/stemmer/Compile.class"/>
<exclude name="analysis/stempel/classes/java/org/egothor/stemmer/DiffIt.class"/>
<exclude name="benchmark/**"/>
<exclude name="demo/classes/java/org/apache/lucene/demo/IndexFiles.class"/>
<exclude name="demo/classes/java/org/apache/lucene/demo/SearchFiles.class"/>
<exclude name="demo/classes/java/org/apache/lucene/**"/>
<exclude name="misc/classes/java/org/apache/lucene/index/CompoundFileExtractor.class"/>
<exclude name="misc/classes/java/org/apache/lucene/index/IndexSplitter.class"/>
<exclude name="misc/classes/java/org/apache/lucene/index/MultiPassIndexSplitter.class"/>
@@ -33,20 +33,22 @@
<pathelement path="${analyzers-common.jar}"/>
<pathelement path="${queryparser.jar}"/>
<pathelement path="${lucene-core.jar}"/>
<pathelement path="${facet.jar}"/>
<fileset dir="lib"/>
</path>

<target name="javadocs" depends="javadocs-analyzers-common,javadocs-queryparser,compile-core">
<target name="javadocs" depends="javadocs-analyzers-common,javadocs-queryparser,javadocs-facet,compile-core">
<!-- we link the example source in the javadocs, as its ref'ed elsewhere -->
<invoke-module-javadoc linksource="yes">
<links>
<link href="../analyzers-common"/>
<link href="../queryparser"/>
<link href="../facet"/>
</links>
</invoke-module-javadoc>
</target>

<target name="compile-core" depends="jar-analyzers-common,jar-queryparser,common.compile-core" />
<target name="compile-core" depends="jar-analyzers-common,jar-queryparser,jar-facet,common.compile-core" />

<target name="default" depends="jar-core,build-web-demo"/>

@@ -63,6 +65,7 @@
<lib file="${queries.jar}"/>
<lib file="${queryparser.jar}"/>
<lib file="${lucene-core.jar}"/>
<lib file="${facet.jar}"/>
</war>
</target>
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example;
package org.apache.lucene.demo.facet;

import java.util.List;

@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example;
package org.apache.lucene.demo.facet;

import org.apache.lucene.util.Version;
@@ -1,14 +1,14 @@
package org.apache.lucene.facet.example.adaptive;
package org.apache.lucene.demo.facet.adaptive;

import java.util.List;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.facet.example.simple.SimpleIndexer;
import org.apache.lucene.facet.example.simple.SimpleSearcher;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.demo.facet.simple.SimpleIndexer;
import org.apache.lucene.demo.facet.simple.SimpleSearcher;
import org.apache.lucene.facet.search.AdaptiveFacetsAccumulator;
import org.apache.lucene.facet.search.results.FacetResult;

@@ -1,9 +1,9 @@
package org.apache.lucene.facet.example.adaptive;
package org.apache.lucene.demo.facet.adaptive;

import java.util.List;

import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.facet.example.simple.SimpleUtils;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.demo.facet.simple.SimpleUtils;
import org.apache.lucene.facet.search.AdaptiveFacetsAccumulator;
import org.apache.lucene.facet.search.ScoredDocIdCollector;
import org.apache.lucene.facet.search.params.CountFacetRequest;
@@ -0,0 +1,22 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html><head></head>
<body>
Facets example code for using AdaptiveFacetsAccumulator.
</body>
</html>
@@ -1,13 +1,13 @@
package org.apache.lucene.facet.example.association;
package org.apache.lucene.demo.facet.association;

import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.demo.facet.simple.SimpleUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.associations.AssociationsFacetFields;
import org.apache.lucene.facet.associations.CategoryAssociation;
import org.apache.lucene.facet.associations.CategoryAssociationsContainer;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.facet.example.simple.SimpleUtils;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@@ -1,12 +1,12 @@
package org.apache.lucene.facet.example.association;
package org.apache.lucene.demo.facet.association;

import java.util.List;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.facet.search.results.FacetResult;

/*
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example.association;
package org.apache.lucene.demo.facet.association;

import java.util.List;

@@ -6,7 +6,7 @@ import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;

import org.apache.lucene.facet.example.simple.SimpleSearcher;
import org.apache.lucene.demo.facet.simple.SimpleSearcher;
import org.apache.lucene.facet.search.params.associations.AssociationFloatSumFacetRequest;
import org.apache.lucene.facet.search.params.associations.AssociationIntSumFacetRequest;
import org.apache.lucene.facet.search.results.FacetResult;
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example.association;
package org.apache.lucene.demo.facet.association;

import org.apache.lucene.facet.associations.CategoryAssociation;
import org.apache.lucene.facet.associations.CategoryFloatAssociation;
@@ -0,0 +1,22 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html><head></head>
<body>
Facets example code for using associations.
</body>
</html>
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example.multiCL;
package org.apache.lucene.demo.facet.multiCL;

import java.util.Arrays;
import java.util.HashMap;
@@ -6,11 +6,11 @@ import java.util.List;
import java.util.Map;
import java.util.Random;

import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.demo.facet.simple.SimpleUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.facet.example.simple.SimpleUtils;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -90,12 +90,16 @@ public class MultiCLIndexer {

/**
* Create an index, and adds to it sample documents and facets.
* @param indexDir Directory in which the index should be created.
* @param taxoDir Directory in which the taxonomy index should be created.
* @throws Exception on error (no detailed exception handling here for sample simplicity
*
* @param indexDir
*          Directory in which the index should be created.
* @param taxoDir
*          Directory in which the taxonomy index should be created.
* @throws Exception
*           on error (no detailed exception handling here for sample
*           simplicity
*/
public static void index(Directory indexDir, Directory taxoDir)
throws Exception {
public static void index(Directory indexDir, Directory taxoDir) throws Exception {

Random random = new Random(2003);
@@ -1,12 +1,12 @@
package org.apache.lucene.facet.example.multiCL;
package org.apache.lucene.demo.facet.multiCL;

import java.util.List;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.facet.search.results.FacetResult;

/*
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example.multiCL;
package org.apache.lucene.demo.facet.multiCL;

import java.util.ArrayList;
import java.util.List;
@@ -13,8 +13,8 @@ import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.Directory;

import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.facet.example.simple.SimpleUtils;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.demo.facet.simple.SimpleUtils;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.search.params.CountFacetRequest;
@@ -0,0 +1,22 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html><head></head>
<body>
Facets example code for using multiple category lists.
</body>
</html>
@@ -0,0 +1,22 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html><head></head>
<body>
Facets example code.
</body>
</html>
@@ -1,12 +1,12 @@
package org.apache.lucene.facet.example.simple;
package org.apache.lucene.demo.facet.simple;

import java.util.Arrays;
import java.util.List;

import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example.simple;
package org.apache.lucene.demo.facet.simple;

import java.util.List;

@@ -7,8 +7,8 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.facet.search.results.FacetResult;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@@ -1,9 +1,9 @@
package org.apache.lucene.facet.example.simple;
package org.apache.lucene.demo.facet.simple;

import java.util.Iterator;
import java.util.List;

import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.DrillDown;
import org.apache.lucene.facet.search.FacetsCollector;
@@ -1,8 +1,8 @@
package org.apache.lucene.facet.example.simple;
package org.apache.lucene.demo.facet.simple;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.facet.example.ExampleUtils;
import org.apache.lucene.demo.facet.ExampleUtils;
import org.apache.lucene.facet.taxonomy.CategoryPath;

/*
@@ -0,0 +1,22 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html><head></head>
<body>
Facets simple example code.
</body>
</html>
@@ -1,10 +1,10 @@
package org.apache.lucene.facet.example;
package org.apache.lucene.demo.facet;

import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.adaptive.AdaptiveMain;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.adaptive.AdaptiveMain;

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -1,10 +1,10 @@
package org.apache.lucene.facet.example;
package org.apache.lucene.demo.facet;

import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.association.CategoryAssociationsMain;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.association.CategoryAssociationsMain;
import org.apache.lucene.facet.search.results.FacetResultNode;

/*
@@ -1,4 +1,4 @@
package org.apache.lucene.facet.example;
package org.apache.lucene.demo.facet;

import java.util.Iterator;
import java.util.List;
@@ -6,7 +6,8 @@ import java.util.List;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.facet.example.multiCL.MultiCLMain;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.multiCL.MultiCLMain;
import org.apache.lucene.facet.search.results.FacetResult;
import org.apache.lucene.facet.search.results.FacetResultNode;

@@ -1,12 +1,12 @@
package org.apache.lucene.facet.example;
package org.apache.lucene.demo.facet;

import java.util.Iterator;

import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.facet.example.ExampleResult;
import org.apache.lucene.facet.example.simple.SimpleMain;
import org.apache.lucene.demo.facet.ExampleResult;
import org.apache.lucene.demo.facet.simple.SimpleMain;
import org.apache.lucene.facet.search.results.FacetResult;
import org.apache.lucene.facet.search.results.FacetResultNode;
@@ -28,52 +28,6 @@

<import file="../module-build.xml"/>

<property name="examples.dir" location="src/examples"/>

<path id="examples.classpath">
<path refid="classpath" />
<pathelement location="${build.dir}/classes/java" />
<pathelement path="${analyzers-common.jar}" />
</path>

<path id="test.classpath">
<path refid="test.base.classpath" />
<pathelement location="${build.dir}/classes/examples" />
</path>

<path id="classpath">
<!-- TODO, cut over tests to MockAnalyzer etc and nuke this dependency -->
<pathelement path="${analyzers-common.jar}" />
<path refid="base.classpath"/>
</path>

<target name="compile-examples" description="Compiles Facets examples">
<compile srcdir="${examples.dir}" destdir="${build.dir}/classes/examples">
<classpath refid="examples.classpath" />
</compile>
</target>

<target name="jar-examples" depends="compile-examples">
<jarify basedir="${build.dir}/classes/examples"
destfile="${build.dir}/${final.name}-examples.jar"
title="Lucene Search Engine: ${ant.project.name}-examples"
manifest.file="${build.dir}/EXAMPLES-MANIFEST.MF">
<fileset dir="src/examples" />
</jarify>
</target>

<target name="compile-core" depends="jar-analyzers-common,common.compile-core,compile-examples" description="Compiles facet classes" />

<target name="jar-core" depends="common.jar-core,jar-examples" />

<target name="javadocs" depends="javadocs-analyzers-common,compile-core">
<invoke-module-javadoc>
<links>
<link href="../analyzers-common"/>
</links>
</invoke-module-javadoc>
</target>

<target name="run-encoding-benchmark" depends="compile-test">
<java classname="org.apache.lucene.util.encoding.EncodingSpeed" fork="true" failonerror="true">
<classpath refid="test.classpath" />
@@ -1,17 +0,0 @@
<html>
<head>
<title>Simple faceted indexing and search sample</title>
</head>
<body>
<h1>Simple faceted indexing and search sample</h1>

A simple faceted example, showing how to:
<ol>
<li>Create an index.</li>
<li>Add documents with facets to the index.</li>
<li>Search the index.</li>
</ol>

For more complex examples see the other sample code packages.
</body>
</html>
@ -1,9 +1,8 @@
|
||||
package org.apache.lucene.facet.example.merge;
|
||||
package org.apache.lucene.facet.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.facet.example.ExampleUtils;
|
||||
import org.apache.lucene.facet.index.OrdinalMappingAtomicReader;
|
||||
import org.apache.lucene.facet.index.params.FacetIndexingParams;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
|
||||
@ -17,6 +16,7 @@ import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.MultiReader;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
@ -52,8 +52,7 @@ public class TaxonomyMergeUtils {
|
||||
*/
|
||||
public static void merge(Directory srcIndexDir, Directory srcTaxDir, Directory destIndexDir, Directory destTaxDir,
|
||||
FacetIndexingParams params) throws IOException {
|
||||
IndexWriter destIndexWriter = new IndexWriter(destIndexDir,
|
||||
new IndexWriterConfig(ExampleUtils.EXAMPLE_VER, null));
|
||||
IndexWriter destIndexWriter = new IndexWriter(destIndexDir, new IndexWriterConfig(Version.LUCENE_42, null));
|
||||
DirectoryTaxonomyWriter destTaxWriter = new DirectoryTaxonomyWriter(destTaxDir);
|
||||
merge(srcIndexDir, srcTaxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter, params);
|
||||
destTaxWriter.close();
|
@ -1,19 +1,7 @@
|
||||
package org.apache.lucene.facet;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.facet.search.results.FacetResult;
|
||||
import org.apache.lucene.facet.search.results.FacetResultNode;
|
||||
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
@ -34,68 +22,6 @@ import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
public class FacetTestUtils {
|
||||
|
||||
public static class IndexTaxonomyReaderPair {
|
||||
public DirectoryReader indexReader;
|
||||
public DirectoryTaxonomyReader taxReader;
|
||||
public IndexSearcher indexSearcher;
|
||||
|
||||
public void close() throws IOException {
|
||||
indexReader.close();
|
||||
taxReader.close();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static class IndexTaxonomyWriterPair {
|
||||
public IndexWriter indexWriter;
|
||||
public TaxonomyWriter taxWriter;
|
||||
|
||||
public void close() throws IOException {
|
||||
indexWriter.close();
|
||||
taxWriter.close();
|
||||
}
|
||||
|
||||
public void commit() throws IOException {
|
||||
indexWriter.commit();
|
||||
taxWriter.commit();
|
||||
}
|
||||
}
|
||||
|
||||
public static Directory[][] createIndexTaxonomyDirs(int number) {
|
||||
Directory[][] dirs = new Directory[number][2];
|
||||
for (int i = 0; i < number; i++) {
|
||||
dirs[i][0] = LuceneTestCase.newDirectory();
|
||||
dirs[i][1] = LuceneTestCase.newDirectory();
|
||||
}
|
||||
return dirs;
|
||||
}
|
||||
|
||||
public static IndexTaxonomyReaderPair[] createIndexTaxonomyReaderPair(Directory[][] dirs) throws IOException {
|
||||
IndexTaxonomyReaderPair[] pairs = new IndexTaxonomyReaderPair[dirs.length];
|
||||
for (int i = 0; i < dirs.length; i++) {
|
||||
IndexTaxonomyReaderPair pair = new IndexTaxonomyReaderPair();
|
||||
pair.indexReader = DirectoryReader.open(dirs[i][0]);
|
||||
pair.indexSearcher = new IndexSearcher(pair.indexReader);
|
||||
pair.taxReader = new DirectoryTaxonomyReader(dirs[i][1]);
|
||||
pairs[i] = pair;
|
||||
}
|
||||
return pairs;
|
||||
}
|
||||
|
||||
public static IndexTaxonomyWriterPair[] createIndexTaxonomyWriterPair(Directory[][] dirs) throws IOException {
|
||||
IndexTaxonomyWriterPair[] pairs = new IndexTaxonomyWriterPair[dirs.length];
|
||||
for (int i = 0; i < dirs.length; i++) {
|
||||
IndexTaxonomyWriterPair pair = new IndexTaxonomyWriterPair();
|
||||
pair.indexWriter = new IndexWriter(dirs[i][0], new IndexWriterConfig(
|
||||
LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(LuceneTestCase.random())));
|
||||
pair.taxWriter = new DirectoryTaxonomyWriter(dirs[i][1]);
|
||||
pair.indexWriter.commit();
|
||||
pair.taxWriter.commit();
|
||||
pairs[i] = pair;
|
||||
}
|
||||
return pairs;
|
||||
}
|
||||
|
||||
public static String toSimpleString(FacetResult fr) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
toSimpleString(0, sb, fr.getFacetResultNode(), "");
|
||||
|
@ -8,7 +8,6 @@ import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.analysis.MockTokenizer;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.facet.FacetTestCase;
|
||||
import org.apache.lucene.facet.example.merge.TaxonomyMergeUtils;
|
||||
import org.apache.lucene.facet.index.params.FacetIndexingParams;
|
||||
import org.apache.lucene.facet.search.FacetsCollector;
|
||||
import org.apache.lucene.facet.search.params.CountFacetRequest;
|
||||
@ -18,6 +17,7 @@ import org.apache.lucene.facet.search.results.FacetResultNode;
|
||||
import org.apache.lucene.facet.taxonomy.CategoryPath;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
|
||||
import org.apache.lucene.facet.util.TaxonomyMergeUtils;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.RandomIndexWriter;
|
||||
|
@ -14,7 +14,6 @@ import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.TextField;
|
||||
import org.apache.lucene.facet.FacetTestCase;
|
||||
import org.apache.lucene.facet.FacetTestUtils;
|
||||
import org.apache.lucene.facet.index.FacetFields;
|
||||
import org.apache.lucene.facet.index.params.CategoryListParams;
|
||||
import org.apache.lucene.facet.index.params.FacetIndexingParams;
|
||||
@ -77,12 +76,14 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
|
||||
@Test
|
||||
public void testDefault() throws Exception {
|
||||
Directory[][] dirs = getDirs();
|
||||
Directory indexDir = newDirectory();
|
||||
Directory taxoDir = newDirectory();
|
||||
|
||||
// create and open an index writer
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), dirs[0][0], newIndexWriterConfig(
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), indexDir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
// create and open a taxonomy writer
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
|
||||
|
||||
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(Collections.<CategoryPath, CategoryListParams>emptyMap());
|
||||
|
||||
@ -92,7 +93,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
tw.commit();
|
||||
|
||||
// prepare index reader and taxonomy.
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(dirs[0][1]);
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
|
||||
|
||||
// prepare searcher to search against
|
||||
IndexSearcher searcher = newSearcher(ir);
|
||||
@ -105,17 +106,19 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
assertOrdinalsExist("$facets", ir);
|
||||
|
||||
IOUtils.close(tr, ir, iw, tw);
|
||||
IOUtils.close(dirs[0]);
|
||||
IOUtils.close(indexDir, taxoDir);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCustom() throws Exception {
|
||||
Directory[][] dirs = getDirs();
|
||||
Directory indexDir = newDirectory();
|
||||
Directory taxoDir = newDirectory();
|
||||
|
||||
// create and open an index writer
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), dirs[0][0], newIndexWriterConfig(
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), indexDir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
// create and open a taxonomy writer
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
|
||||
|
||||
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(
|
||||
Collections.singletonMap(new CategoryPath("Author"), new CategoryListParams("$author")));
|
||||
@ -125,7 +128,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
tw.commit();
|
||||
|
||||
// prepare index reader and taxonomy.
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(dirs[0][1]);
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
|
||||
|
||||
// prepare searcher to search against
|
||||
IndexSearcher searcher = newSearcher(ir);
|
||||
@ -139,17 +142,19 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
assertOrdinalsExist("$author", ir);
|
||||
|
||||
IOUtils.close(tr, ir, iw, tw);
|
||||
IOUtils.close(dirs[0]);
|
||||
IOUtils.close(indexDir, taxoDir);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTwoCustomsSameField() throws Exception {
|
||||
Directory[][] dirs = getDirs();
|
||||
Directory indexDir = newDirectory();
|
||||
Directory taxoDir = newDirectory();
|
||||
|
||||
// create and open an index writer
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), dirs[0][0], newIndexWriterConfig(
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), indexDir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
// create and open a taxonomy writer
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
|
||||
|
||||
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
|
||||
paramsMap.put(new CategoryPath("Band"), new CategoryListParams("$music"));
|
||||
@ -161,7 +166,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
tw.commit();
|
||||
|
||||
// prepare index reader and taxonomy.
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(dirs[0][1]);
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
|
||||
|
||||
// prepare searcher to search against
|
||||
IndexSearcher searcher = newSearcher(ir);
|
||||
@ -176,7 +181,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
assertOrdinalsExist("$music", ir);
|
||||
|
||||
IOUtils.close(tr, ir, iw, tw);
|
||||
IOUtils.close(dirs[0]);
|
||||
IOUtils.close(indexDir, taxoDir);
|
||||
}
|
||||
|
||||
private void assertOrdinalsExist(String field, IndexReader ir) throws IOException {
|
||||
@ -191,12 +196,14 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
|
||||
@Test
|
||||
public void testDifferentFieldsAndText() throws Exception {
|
||||
Directory[][] dirs = getDirs();
|
||||
Directory indexDir = newDirectory();
|
||||
Directory taxoDir = newDirectory();
|
||||
|
||||
// create and open an index writer
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), dirs[0][0], newIndexWriterConfig(
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), indexDir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
// create and open a taxonomy writer
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
|
||||
|
||||
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
|
||||
paramsMap.put(new CategoryPath("Band"), new CategoryListParams("$bands"));
|
||||
@ -208,7 +215,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
tw.commit();
|
||||
|
||||
// prepare index reader and taxonomy.
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(dirs[0][1]);
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
|
||||
|
||||
// prepare searcher to search against
|
||||
IndexSearcher searcher = newSearcher(ir);
|
||||
@ -222,17 +229,19 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
assertOrdinalsExist("$composers", ir);
|
||||
|
||||
IOUtils.close(tr, ir, iw, tw);
|
||||
IOUtils.close(dirs[0]);
|
||||
IOUtils.close(indexDir, taxoDir);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSomeSameSomeDifferent() throws Exception {
|
||||
Directory[][] dirs = getDirs();
|
||||
Directory indexDir = newDirectory();
|
||||
Directory taxoDir = newDirectory();
|
||||
|
||||
// create and open an index writer
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), dirs[0][0], newIndexWriterConfig(
|
||||
RandomIndexWriter iw = new RandomIndexWriter(random(), indexDir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
// create and open a taxonomy writer
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
|
||||
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
|
||||
|
||||
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
|
||||
paramsMap.put(new CategoryPath("Band"), new CategoryListParams("$music"));
|
||||
@ -246,7 +255,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
tw.commit();
|
||||
|
||||
// prepare index reader and taxonomy.
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(dirs[0][1]);
|
||||
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
|
||||
|
||||
// prepare searcher to search against
|
||||
IndexSearcher searcher = newSearcher(ir);
|
||||
@ -259,11 +268,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
|
||||
assertOrdinalsExist("$literature", ir);
|
||||
|
||||
IOUtils.close(tr, ir, iw, tw);
|
||||
IOUtils.close(dirs[0]);
|
||||
}
|
||||
|
||||
private Directory[][] getDirs() {
|
||||
return FacetTestUtils.createIndexTaxonomyDirs(1);
|
||||
IOUtils.close(indexDir, taxoDir);
|
||||
}
|
||||
|
||||
private void assertCorrectResults(FacetsCollector facetsCollector) throws IOException {
|
||||
|
@ -3,14 +3,20 @@ package org.apache.lucene.facet.search;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.facet.FacetTestCase;
|
||||
import org.apache.lucene.facet.FacetTestUtils;
|
||||
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyReaderPair;
|
||||
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyWriterPair;
|
||||
import org.apache.lucene.facet.index.FacetFields;
|
||||
import org.apache.lucene.facet.index.params.CategoryListParams;
|
||||
import org.apache.lucene.facet.index.params.FacetIndexingParams;
|
||||
import org.apache.lucene.facet.taxonomy.CategoryPath;
|
||||
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
|
||||
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util._TestUtil;
|
||||
@ -35,9 +41,9 @@ import org.junit.Test;
|
||||
|
||||
public class TestTotalFacetCounts extends FacetTestCase {
|
||||
|
||||
private static void initCache(int numEntries) {
|
||||
private static void initCache() {
|
||||
TotalFacetCountsCache.getSingleton().clear();
|
||||
TotalFacetCountsCache.getSingleton().setCacheSize(numEntries); // Set to keep one in mem
|
||||
TotalFacetCountsCache.getSingleton().setCacheSize(1); // Set to keep one in mem
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -50,12 +56,13 @@ public class TestTotalFacetCounts extends FacetTestCase {
|
||||
}
|
||||
|
||||
private void doTestWriteRead(final int partitionSize) throws IOException {
|
||||
initCache(1);
|
||||
initCache();
|
||||
|
||||
// Create temporary RAMDirectories
|
||||
Directory[][] dirs = FacetTestUtils.createIndexTaxonomyDirs(1);
|
||||
// Create our index/taxonomy writers
|
||||
IndexTaxonomyWriterPair[] writers = FacetTestUtils.createIndexTaxonomyWriterPair(dirs);
|
||||
Directory indexDir = newDirectory();
|
||||
Directory taxoDir = newDirectory();
|
||||
IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
|
||||
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
|
||||
|
||||
FacetIndexingParams iParams = new FacetIndexingParams() {
|
||||
@Override
|
||||
public int getPartitionSize() {
|
||||
@ -75,36 +82,34 @@ public class TestTotalFacetCounts extends FacetTestCase {
|
||||
// The counts that the TotalFacetCountsArray should have after adding
|
||||
// the below facets to the index.
|
||||
int[] expectedCounts = new int[] { 0, 3, 1, 3, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1 };
|
||||
|
||||
// Add a facet to the index
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "a", "b");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "c", "d");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "a", "e");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "a", "d");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "c", "g");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "c", "z");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "b", "a");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "1", "2");
|
||||
TestTotalFacetCountsCache.addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "b", "c");
|
||||
String[] categories = new String[] { "a/b", "c/d", "a/e", "a/d", "c/g", "c/z", "b/a", "1/2", "b/c" };
|
||||
|
||||
FacetFields facetFields = new FacetFields(taxoWriter, iParams);
|
||||
for (String cat : categories) {
|
||||
Document doc = new Document();
|
||||
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(cat, '/')));
|
||||
indexWriter.addDocument(doc);
|
||||
}
|
||||
|
||||
// Commit Changes
|
||||
writers[0].close();
|
||||
IOUtils.close(indexWriter, taxoWriter);
|
||||
|
||||
IndexTaxonomyReaderPair[] readers = FacetTestUtils.createIndexTaxonomyReaderPair(dirs);
|
||||
DirectoryReader indexReader = DirectoryReader.open(indexDir);
|
||||
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
|
||||
|
||||
int[] intArray = new int[iParams.getPartitionSize()];
|
||||
|
||||
TotalFacetCountsCache tfcc = TotalFacetCountsCache.getSingleton();
|
||||
File tmpFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
|
||||
tfcc.store(tmpFile, readers[0].indexReader, readers[0].taxReader, iParams);
|
||||
tfcc.store(tmpFile, indexReader, taxoReader, iParams);
|
||||
tfcc.clear(); // not really required because TFCC overrides on load(), but in the test we need not rely on this.
|
||||
tfcc.load(tmpFile, readers[0].indexReader, readers[0].taxReader, iParams);
|
||||
tfcc.load(tmpFile, indexReader, taxoReader, iParams);
|
||||
|
||||
// now retrieve the one just loaded
|
||||
TotalFacetCounts totalCounts = tfcc.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
|
||||
TotalFacetCounts totalCounts = tfcc.getTotalCounts(indexReader, taxoReader, iParams);
|
||||
|
||||
int partition = 0;
|
||||
for (int i=0; i<expectedCounts.length; i+=partitionSize) {
|
||||
for (int i = 0; i < expectedCounts.length; i += partitionSize) {
|
||||
totalCounts.fillTotalCountsForPartition(intArray, partition);
|
||||
int[] partitionExpectedCounts = new int[partitionSize];
|
||||
int nToCopy = Math.min(partitionSize,expectedCounts.length-i);
|
||||
@ -115,8 +120,8 @@ public class TestTotalFacetCounts extends FacetTestCase {
|
||||
Arrays.equals(partitionExpectedCounts, intArray));
|
||||
++partition;
|
||||
}
|
||||
readers[0].close();
|
||||
IOUtils.close(dirs[0]);
|
||||
IOUtils.close(indexReader, taxoReader);
|
||||
IOUtils.close(indexDir, taxoDir);
|
||||
tmpFile.delete();
|
||||
}
|
||||
|
||||
|
@ -4,21 +4,17 @@ import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.analysis.MockTokenizer;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.facet.FacetTestCase;
|
||||
import org.apache.lucene.facet.FacetTestUtils;
|
||||
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyReaderPair;
|
||||
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyWriterPair;
|
||||
import org.apache.lucene.facet.example.ExampleResult;
|
||||
import org.apache.lucene.facet.example.TestMultiCLExample;
|
||||
import org.apache.lucene.facet.example.multiCL.MultiCLIndexer;
|
||||
import org.apache.lucene.facet.example.multiCL.MultiCLSearcher;
|
||||
import org.apache.lucene.facet.index.FacetFields;
|
||||
import org.apache.lucene.facet.index.params.FacetIndexingParams;
|
||||
import org.apache.lucene.facet.search.TotalFacetCounts.CreationType;
|
||||
import org.apache.lucene.facet.search.params.CountFacetRequest;
|
||||
import org.apache.lucene.facet.search.params.FacetSearchParams;
|
||||
import org.apache.lucene.facet.search.results.FacetResult;
|
||||
import org.apache.lucene.facet.search.results.FacetResultNode;
|
||||
import org.apache.lucene.facet.taxonomy.CategoryPath;
|
||||
@ -30,6 +26,8 @@ import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
@ -86,7 +84,7 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
|
||||
}
|
||||
|
||||
/** Utility method to add a document and facets to an index/taxonomy. */
|
||||
static void addFacets(FacetIndexingParams iParams, IndexWriter iw,
|
||||
private static void addFacets(FacetIndexingParams iParams, IndexWriter iw,
|
||||
TaxonomyWriter tw, String... strings) throws IOException {
|
||||
Document doc = new Document();
|
||||
FacetFields facetFields = new FacetFields(tw, iParams);
|
||||
@ -95,7 +93,7 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
|
||||
}
|
||||
|
||||
/** Clears the cache and sets its size to one. */
|
||||
static void initCache() {
|
||||
private static void initCache() {
|
||||
TFC.clear();
|
||||
TFC.setCacheSize(1); // Set to keep one in memory
|
||||
}
|
||||
@ -107,37 +105,35 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
|
||||
initCache();
|
||||
}
|
||||
|
||||
/** runs a few instances of {@link MultiCLSearcher} in parallel */
|
||||
/** runs few searches in parallel */
|
||||
public void testGeneralSynchronization() throws Exception {
|
||||
int numIters = atLeast(2);
|
||||
int numIters = atLeast(4);
|
||||
Random random = random();
|
||||
for (int i = 0; i < numIters; i++) {
|
||||
doTestGeneralSynchronization(_TestUtil.nextInt(random(), 2, 4),
|
||||
random().nextBoolean() ? -1 : _TestUtil.nextInt(random(), 1, 10),
|
||||
_TestUtil.nextInt(random(), 0, 3));
|
||||
int numThreads = random.nextInt(3) + 2; // 2-4
|
||||
int sleepMillis = random.nextBoolean() ? -1 : random.nextInt(10) + 1 /*1-10*/;
|
||||
int cacheSize = random.nextInt(4); // 0-3
|
||||
doTestGeneralSynchronization(numThreads, sleepMillis, cacheSize);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run many instances of {@link MultiCLSearcher} in parallel, results should
|
||||
* be sane. Each instance has a random delay for reading bytes, to ensure
|
||||
* that threads finish in different order than started.
|
||||
*/
|
||||
@Test @Nightly
|
||||
public void testGeneralSynchronizationBig() throws Exception {
|
||||
int[] numThreads = new int[] { 2, 3, 5, 8 };
|
||||
int[] sleepMillis = new int[] { -1, 1, 20, 33 };
|
||||
int[] cacheSize = new int[] { 0,1,2,3,5 };
|
||||
for (int size : cacheSize) {
|
||||
for (int sleep : sleepMillis) {
|
||||
for (int nThreads : numThreads) {
|
||||
doTestGeneralSynchronization(nThreads, sleep, size);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
private static final String[] CATEGORIES = new String[] { "a/b", "c/d", "a/e", "a/d", "c/g", "c/z", "b/a", "1/2", "b/c" };
|
||||
|
||||
private void doTestGeneralSynchronization(int numThreads, int sleepMillis,
|
||||
int cacheSize) throws Exception {
|
||||
private void index(Directory indexDir, Directory taxoDir) throws IOException {
|
||||
IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
|
||||
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
|
||||
FacetFields facetFields = new FacetFields(taxoWriter);
|
||||
|
||||
for (String cat : CATEGORIES) {
|
||||
Document doc = new Document();
|
||||
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(cat, '/')));
|
||||
indexWriter.addDocument(doc);
|
||||
}
|
||||
|
||||
IOUtils.close(indexWriter, taxoWriter);
|
||||
}
|
||||
|
||||
private void doTestGeneralSynchronization(int numThreads, int sleepMillis, int cacheSize) throws Exception {
|
||||
TFC.setCacheSize(cacheSize);
|
||||
SlowRAMDirectory slowIndexDir = new SlowRAMDirectory(-1, random());
|
||||
MockDirectoryWrapper indexDir = new MockDirectoryWrapper(random(), slowIndexDir);
|
||||
@ -145,7 +141,7 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
|
||||
MockDirectoryWrapper taxoDir = new MockDirectoryWrapper(random(), slowTaxoDir);
|
||||
|
||||
// Index documents without the "slowness"
|
||||
MultiCLIndexer.index(indexDir, taxoDir);
|
||||
index(indexDir, taxoDir);
|
||||
|
||||
slowIndexDir.setSleepMillis(sleepMillis);
|
||||
slowTaxoDir.setSleepMillis(sleepMillis);
|
||||
@ -161,80 +157,64 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
|
||||
private IndexReader indexReader;
|
||||
private TaxonomyReader taxoReader;
|
||||
|
||||
public Multi(IndexReader indexReader, TaxonomyReader taxoReader,
|
||||
FacetIndexingParams iParams) {
|
||||
public Multi(IndexReader indexReader, TaxonomyReader taxoReader, FacetIndexingParams iParams) {
|
||||
this.indexReader = indexReader;
|
||||
this.taxoReader = taxoReader;
|
||||
this.iParams = iParams;
|
||||
}
|
||||
|
||||
public ExampleResult getResults() {
|
||||
ExampleResult exampleRes = new ExampleResult();
|
||||
exampleRes.setFacetResults(results);
|
||||
return exampleRes;
|
||||
public List<FacetResult> getResults() {
|
||||
return results;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
results = MultiCLSearcher.searchWithFacets(indexReader, taxoReader, iParams);
|
||||
FacetSearchParams fsp = new FacetSearchParams(iParams, new CountFacetRequest(new CategoryPath("a"), 10),
|
||||
new CountFacetRequest(new CategoryPath("b"), 10));
|
||||
IndexSearcher searcher = new IndexSearcher(indexReader);
|
||||
FacetsCollector fc = FacetsCollector.create(fsp, indexReader, taxoReader);
|
||||
searcher.search(new MatchAllDocsQuery(), fc);
|
||||
results = fc.getFacetResults();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Instantiate threads, but do not start them
|
||||
Multi[] multis = new Multi[numThreads];
|
||||
for (int i = 0; i < numThreads - 1; i++) {
|
||||
multis[i] = new Multi(slowIndexReader, slowTaxoReader, MultiCLIndexer.MULTI_IPARAMS);
|
||||
for (int i = 0; i < numThreads; i++) {
|
||||
multis[i] = new Multi(slowIndexReader, slowTaxoReader, FacetIndexingParams.ALL_PARENTS);
|
||||
}
|
||||
// The last thread uses ONLY the DefaultFacetIndexingParams so that
|
||||
// it references a different TFC cache. This will still result
|
||||
// in valid results, but will only search one of the category lists
|
||||
// instead of all of them.
|
||||
multis[numThreads - 1] = new Multi(slowIndexReader, slowTaxoReader, FacetIndexingParams.ALL_PARENTS);
|
||||
|
||||
// Gentleman, start your engines
|
||||
for (Multi m : multis) {
|
||||
m.start();
|
||||
}
|
||||
|
||||
// Wait for threads and get results
|
||||
ExampleResult[] multiResults = new ExampleResult[numThreads];
|
||||
for (int i = 0; i < numThreads; i++) {
|
||||
multis[i].join();
|
||||
multiResults[i] = multis[i].getResults();
|
||||
String[] expLabelsA = new String[] { "a/d", "a/e", "a/b" };
|
||||
String[] expLabelsB = new String[] { "b/c", "b/a" };
|
||||
for (Multi m : multis) {
|
||||
m.join();
|
||||
List<FacetResult> facetResults = m.getResults();
|
||||
assertEquals("expected two results", 2, facetResults.size());
|
||||
|
||||
FacetResultNode nodeA = facetResults.get(0).getFacetResultNode();
|
||||
int i = 0;
|
||||
for (FacetResultNode node : nodeA.subResults) {
|
||||
assertEquals("wrong count", 1, (int) node.value);
|
||||
assertEquals(expLabelsA[i++], node.label.toString('/'));
|
||||
}
|
||||
|
||||
FacetResultNode nodeB = facetResults.get(1).getFacetResultNode();
|
||||
i = 0;
|
||||
for (FacetResultNode node : nodeB.subResults) {
|
||||
assertEquals("wrong count", 1, (int) node.value);
|
||||
assertEquals(expLabelsB[i++], node.label.toString('/'));
|
||||
}
|
||||
}
|
||||
|
||||
// Each of the (numThreads-1) should have the same predictable
|
||||
// results, which we test for here.
|
||||
for (int i = 0; i < numThreads - 1; i++) {
|
||||
ExampleResult eResults = multiResults[i];
|
||||
TestMultiCLExample.assertCorrectMultiResults(eResults);
|
||||
}
|
||||
|
||||
// The last thread, which only searched over the
|
||||
// DefaultFacetIndexingParams,
|
||||
// has its own results
|
||||
ExampleResult eResults = multiResults[numThreads - 1];
|
||||
List<FacetResult> results = eResults.getFacetResults();
|
||||
assertEquals(3, results.size());
|
||||
String[] expLabels = new String[] { "5", "5/5", "6/2" };
|
||||
double[] expValues = new double[] { 0.0, 0.0, 1.0 };
|
||||
for (int i = 0; i < 3; i++) {
|
||||
FacetResult result = results.get(i);
|
||||
assertNotNull("Result should not be null", result);
|
||||
FacetResultNode resNode = result.getFacetResultNode();
|
||||
assertEquals("Invalid label", expLabels[i], resNode.label.toString());
|
||||
assertEquals("Invalid value", expValues[i], resNode.value, 0.0);
|
||||
assertEquals("Invalid number of subresults", 0, resNode.subResults.size());
|
||||
}
|
||||
// we're done, close the index reader and the taxonomy.
|
||||
slowIndexReader.close();
|
||||
slowTaxoReader.close();
|
||||
indexDir.close();
|
||||
taxoDir.close();
|
||||
|
||||
IOUtils.close(slowIndexReader, slowTaxoReader, indexDir, taxoDir);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -245,77 +225,78 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
@Test
public void testGenerationalConsistency() throws Exception {
// Create temporary RAMDirectories
Directory[][] dirs = FacetTestUtils.createIndexTaxonomyDirs(1);
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();

// Create our index/taxonomy writers
IndexTaxonomyWriterPair[] writers = FacetTestUtils.createIndexTaxonomyWriterPair(dirs);
IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
FacetIndexingParams iParams = FacetIndexingParams.ALL_PARENTS;

// Add a facet to the index
addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "a", "b");
addFacets(iParams, indexWriter, taxoWriter, "a", "b");

// Commit Changes
writers[0].indexWriter.commit();
writers[0].taxWriter.commit();
indexWriter.commit();
taxoWriter.commit();

// Open readers
IndexTaxonomyReaderPair[] readers = FacetTestUtils.createIndexTaxonomyReaderPair(dirs);
DirectoryReader indexReader = DirectoryReader.open(indexDir);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

// As this is the first time we have invoked the TotalFacetCountsManager,
// we should expect to compute and not read from disk.
TotalFacetCounts totalCounts =
TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
TotalFacetCounts totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
int prevGen = assertRecomputed(totalCounts, 0, "after first attempt to get it!");

// Repeating same operation should pull from the cache - not recomputed.
assertTrue("Should be obtained from cache at 2nd attempt",totalCounts ==
TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams));
TFC.getTotalCounts(indexReader, taxoReader, iParams));

// Repeat the same operation as above. but clear first - now should recompute again
initCache();
totalCounts = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
prevGen = assertRecomputed(totalCounts, prevGen, "after cache clear, 3rd attempt to get it!");

//store to file
File outputFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
initCache();
TFC.store(outputFile, readers[0].indexReader, readers[0].taxReader, iParams);
totalCounts = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
TFC.store(outputFile, indexReader, taxoReader, iParams);
totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
prevGen = assertRecomputed(totalCounts, prevGen, "after cache clear, 4th attempt to get it!");

//clear and load
initCache();
TFC.load(outputFile, readers[0].indexReader, readers[0].taxReader, iParams);
totalCounts = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
TFC.load(outputFile, indexReader, taxoReader, iParams);
totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
prevGen = assertReadFromDisc(totalCounts, prevGen, "after 5th attempt to get it!");

// Add a new facet to the index, commit and refresh readers
addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "c", "d");
writers[0].indexWriter.close();
writers[0].taxWriter.close();
addFacets(iParams, indexWriter, taxoWriter, "c", "d");
IOUtils.close(indexWriter, taxoWriter);

DirectoryTaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(readers[0].taxReader);
TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(taxoReader);
assertNotNull(newTaxoReader);
assertTrue("should have received more cagtegories in updated taxonomy", newTaxoReader.getSize() > readers[0].taxReader.getSize());
|
||||
readers[0].taxReader.close();
|
||||
readers[0].taxReader = newTaxoReader;
|
||||
assertTrue("should have received more cagtegories in updated taxonomy", newTaxoReader.getSize() > taxoReader.getSize());
|
||||
taxoReader.close();
taxoReader = newTaxoReader;

DirectoryReader r2 = DirectoryReader.openIfChanged(readers[0].indexReader);
DirectoryReader r2 = DirectoryReader.openIfChanged(indexReader);
assertNotNull(r2);
readers[0].indexReader.close();
readers[0].indexReader = r2;
indexReader.close();
indexReader = r2;

// now use the new reader - should recompute
totalCounts = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
prevGen = assertRecomputed(totalCounts, prevGen, "after updating the index - 7th attempt!");

// try again - should not recompute
assertTrue("Should be obtained from cache at 8th attempt",totalCounts ==
TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams));
TFC.getTotalCounts(indexReader, taxoReader, iParams));

readers[0].close();
IOUtils.close(indexReader, taxoReader);
outputFile.delete();
IOUtils.close(dirs[0]);
IOUtils.close(indexDir, taxoDir);
}

private int assertReadFromDisc(TotalFacetCounts totalCounts, int prevGen, String errMsg) {
@ -341,10 +322,12 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
*/
@Test
public void testGrowingTaxonomy() throws Exception {
// Create temporary RAMDirectories
Directory[][] dirs = FacetTestUtils.createIndexTaxonomyDirs(1);
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();

// Create our index/taxonomy writers
IndexTaxonomyWriterPair[] writers = FacetTestUtils.createIndexTaxonomyWriterPair(dirs);
IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
FacetIndexingParams iParams = new FacetIndexingParams() {
@Override
public int getPartitionSize() {
@ -352,37 +335,38 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
}
};
// Add a facet to the index
addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "a", "b");
addFacets(iParams, indexWriter, taxoWriter, "a", "b");
// Commit Changes
writers[0].indexWriter.commit();
writers[0].taxWriter.commit();
indexWriter.commit();
taxoWriter.commit();

IndexTaxonomyReaderPair[] readers = FacetTestUtils.createIndexTaxonomyReaderPair(dirs);
DirectoryReader indexReader = DirectoryReader.open(indexDir);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

// Create TFC and write cache to disk
File outputFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
TFC.store(outputFile, readers[0].indexReader, readers[0].taxReader, iParams);
TFC.store(outputFile, indexReader, taxoReader, iParams);

// Make the taxonomy grow without touching the index
for (int i = 0; i < 10; i++) {
writers[0].taxWriter.addCategory(new CategoryPath("foo", Integer.toString(i)));
taxoWriter.addCategory(new CategoryPath("foo", Integer.toString(i)));
}
writers[0].taxWriter.commit();
DirectoryTaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(readers[0].taxReader);
taxoWriter.commit();
TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(taxoReader);
assertNotNull(newTaxoReader);
readers[0].taxReader.close();
readers[0].taxReader = newTaxoReader;
taxoReader.close();
taxoReader = newTaxoReader;

initCache();

// With the bug, this next call should result in an exception
TFC.load(outputFile, readers[0].indexReader, readers[0].taxReader, iParams);
TotalFacetCounts totalCounts = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
TFC.load(outputFile, indexReader, taxoReader, iParams);
TotalFacetCounts totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
assertReadFromDisc(totalCounts, 0, "after reading from disk.");

outputFile.delete();
writers[0].close();
readers[0].close();
IOUtils.close(dirs[0]);
IOUtils.close(indexWriter, taxoWriter, indexReader, taxoReader);
IOUtils.close(indexDir, taxoDir);
}

/**
@ -445,46 +429,52 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
*/
@Test
public void testMultipleIndices() throws IOException {
// Create temporary RAMDirectories
Directory[][] dirs = FacetTestUtils.createIndexTaxonomyDirs(2);
Directory indexDir1 = newDirectory(), indexDir2 = newDirectory();
Directory taxoDir1 = newDirectory(), taxoDir2 = newDirectory();

// Create our index/taxonomy writers
IndexTaxonomyWriterPair[] writers = FacetTestUtils.createIndexTaxonomyWriterPair(dirs);
IndexWriter indexWriter1 = new IndexWriter(indexDir1, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter indexWriter2 = new IndexWriter(indexDir2, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
TaxonomyWriter taxoWriter1 = new DirectoryTaxonomyWriter(taxoDir1);
TaxonomyWriter taxoWriter2 = new DirectoryTaxonomyWriter(taxoDir2);
FacetIndexingParams iParams = FacetIndexingParams.ALL_PARENTS;

// Add a facet to the index
addFacets(iParams, writers[0].indexWriter, writers[0].taxWriter, "a", "b");
addFacets(iParams, writers[1].indexWriter, writers[1].taxWriter, "d", "e");
addFacets(iParams, indexWriter1, taxoWriter1, "a", "b");
addFacets(iParams, indexWriter1, taxoWriter1, "d", "e");
// Commit Changes
writers[0].indexWriter.commit();
writers[0].taxWriter.commit();
writers[1].indexWriter.commit();
writers[1].taxWriter.commit();
indexWriter1.commit();
indexWriter2.commit();
taxoWriter1.commit();
taxoWriter2.commit();

// Open two readers
IndexTaxonomyReaderPair[] readers = FacetTestUtils.createIndexTaxonomyReaderPair(dirs);
DirectoryReader indexReader1 = DirectoryReader.open(indexDir1);
DirectoryReader indexReader2 = DirectoryReader.open(indexDir2);
TaxonomyReader taxoReader1 = new DirectoryTaxonomyReader(taxoDir1);
TaxonomyReader taxoReader2 = new DirectoryTaxonomyReader(taxoDir2);

// As this is the first time we have invoked the TotalFacetCountsManager, we
// should expect to compute.
TotalFacetCounts totalCounts0 =
TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
TotalFacetCounts totalCounts0 = TFC.getTotalCounts(indexReader1, taxoReader1, iParams);
int prevGen = -1;
prevGen = assertRecomputed(totalCounts0, prevGen, "after attempt 1");
assertTrue("attempt 1b for same input [0] shout find it in cache",
|
||||
totalCounts0 == TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams));
totalCounts0 == TFC.getTotalCounts(indexReader1, taxoReader1, iParams));

// 2nd Reader - As this is the first time we have invoked the
// TotalFacetCountsManager, we should expect a state of NEW to be returned.
TotalFacetCounts totalCounts1 = TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams);
TotalFacetCounts totalCounts1 = TFC.getTotalCounts(indexReader2, taxoReader2, iParams);
prevGen = assertRecomputed(totalCounts1, prevGen, "after attempt 2");
assertTrue("attempt 2b for same input [1] shout find it in cache",
|
||||
totalCounts1 == TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams));
totalCounts1 == TFC.getTotalCounts(indexReader2, taxoReader2, iParams));

// Right now cache size is one, so first TFC is gone and should be recomputed
totalCounts0 = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
totalCounts0 = TFC.getTotalCounts(indexReader1, taxoReader1, iParams);
prevGen = assertRecomputed(totalCounts0, prevGen, "after attempt 3");

// Similarly will recompute the second result
totalCounts1 = TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams);
totalCounts1 = TFC.getTotalCounts(indexReader2, taxoReader2, iParams);
prevGen = assertRecomputed(totalCounts1, prevGen, "after attempt 4");

// Now we set the cache size to two, meaning both should exist in the
@ -492,23 +482,19 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
TFC.setCacheSize(2);

// Re-compute totalCounts0 (was evicted from the cache when the cache was smaller)
totalCounts0 = TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams);
totalCounts0 = TFC.getTotalCounts(indexReader1, taxoReader1, iParams);
prevGen = assertRecomputed(totalCounts0, prevGen, "after attempt 5");

// now both are in the larger cache and should not be recomputed
totalCounts1 = TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams);
totalCounts1 = TFC.getTotalCounts(indexReader2, taxoReader2, iParams);
assertTrue("with cache of size 2 res no. 0 should come from cache",
totalCounts0 == TFC.getTotalCounts(readers[0].indexReader, readers[0].taxReader, iParams));
totalCounts0 == TFC.getTotalCounts(indexReader1, taxoReader1, iParams));
assertTrue("with cache of size 2 res no. 1 should come from cache",
totalCounts1 == TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams));
totalCounts1 == TFC.getTotalCounts(indexReader2, taxoReader2, iParams));

writers[0].close();
writers[1].close();
readers[0].close();
readers[1].close();
for (Directory[] dirset : dirs) {
IOUtils.close(dirset);
}
IOUtils.close(indexWriter1, indexWriter2, taxoWriter1, taxoWriter2);
IOUtils.close(indexReader1, indexReader2, taxoReader1, taxoReader2);
IOUtils.close(indexDir1, indexDir2, taxoDir1, taxoDir2);
}

}
@ -3,7 +3,6 @@ package org.apache.lucene.facet.search.sampling;
import java.io.IOException;
import java.util.Collections;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
@ -95,7 +94,7 @@ public class OversampleWithDepthTest extends FacetTestCase {
}

private void index100Docs(Directory indexDir, Directory taxoDir, FacetIndexingParams fip) throws IOException {
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer());
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriter w = new IndexWriter(indexDir, iwc);
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);

@ -3,7 +3,7 @@ package org.apache.lucene.facet.taxonomy.directory;
import java.io.IOException;
import java.util.Random;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
@ -256,7 +256,7 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase {
// hold onto IW to forceMerge
// note how we don't close it, since DTW will close it.
final IndexWriter iw = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer())
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(new LogByteSizeMergePolicy()));
DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriter(dir) {
@Override
@ -299,7 +299,7 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase {
// hold onto IW to forceMerge
// note how we don't close it, since DTW will close it.
final IndexWriter iw = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer())
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(new LogByteSizeMergePolicy()));
DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriter(dir) {
@Override