SOLR-781: facet.sort=false for distrib search + change facet.sort to count/lex
git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@724065 13f79535-47bb-0310-9956-ffa450edef68
parent 4111e3b758
commit 8f184caf79
@@ -103,6 +103,11 @@ New Features

21. SOLR-819: Added factories for Arabic support (gsingers)

22. SOLR-781: Distributed search ability to sort field.facet values
    lexicographically. facet.sort values "true" and "false" are
    also deprecated and replaced with "count" and "lex".
    (Lars Kotthoff via yonik)
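For orientation, a minimal SolrJ sketch of the new parameter values (not part of this commit; the field name is an illustrative assumption):

import org.apache.solr.client.solrj.SolrQuery;

public class FacetSortParamsDemo {
  public static void main(String[] args) {
    SolrQuery q = new SolrQuery("*:*");
    q.setFacet(true);
    q.addFacetField("cat");      // illustrative field name
    q.setFacetSort("lex");       // new string value; "count" is the other accepted value
    System.out.println(q);       // toString() shows the assembled parameters, including facet.sort=lex
  }
}

On the wire this is simply facet=true&facet.field=cat&facet.sort=lex; the old true/false values remain accepted for back-compatibility.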

Optimizations
----------------------
 1. SOLR-374: Use IndexReader.reopen to save resources by re-using parts of the

@@ -199,10 +199,21 @@ public class SolrQuery extends ModifiableSolrParams

  /** get facet sort
   *
   * @return facet sort or default of true
   * @return facet sort or default of FacetParams.FACET_SORT_COUNT
   */
  public String getFacetSortString() {
    return this.get(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT);
  }

  /** get facet sort
   *
   * @return facet sort or default of true
   * @deprecated Use {@link #getFacetSortString()} instead, true corresponds to
   * FacetParams.FACET_SORT_COUNT and false to FacetParams.FACET_SORT_LEX.
   */
  @Deprecated
  public boolean getFacetSort() {
    return this.getBool(FacetParams.FACET_SORT, true);
    return this.get(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT).equals(FacetParams.FACET_SORT_COUNT);
  }

  /** set facet sort

@@ -210,11 +221,24 @@ public class SolrQuery extends ModifiableSolrParams
   * @param sort sort facets
   * @return this
   */
  public SolrQuery setFacetSort(Boolean sort) {
  public SolrQuery setFacetSort(String sort) {
    this.set(FacetParams.FACET_SORT, sort);
    return this;
  }

  /** set facet sort
   *
   * @param sort sort facets
   * @return this
   * @deprecated Use {@link #setFacetSort(String)} instead, true corresponds to
   * FacetParams.FACET_SORT_COUNT and false to FacetParams.FACET_SORT_LEX.
   */
  @Deprecated
  public SolrQuery setFacetSort(boolean sort) {
    this.set(FacetParams.FACET_SORT, sort == true ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_LEX);
    return this;
  }
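To make the old/new interplay concrete, a small sketch that only exercises the methods shown above (this class is not part of the patch):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.FacetParams;

public class FacetSortMigrationDemo {
  public static void main(String[] args) {
    SolrQuery q = new SolrQuery("dog");

    // Deprecated boolean setter: false is translated to FACET_SORT_LEX ("lex").
    q.setFacetSort(false);
    System.out.println(q.getFacetSortString());   // lex

    // New string setter, using the constants introduced in FacetParams.
    q.setFacetSort(FacetParams.FACET_SORT_COUNT);
    System.out.println(q.getFacetSortString());   // count
    System.out.println(q.getFacetSort());         // true, via the deprecated getter
  }
}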

  /** add highlight field
   *
   * @param f field to enable for highlighting
@@ -93,6 +93,13 @@ public class SolrQueryTest extends TestCase {
  }

  public void testFacetSort() {
    SolrQuery q = new SolrQuery("dog");
    assertEquals("count", q.getFacetSortString());
    q.setFacetSort("lex");
    assertEquals("lex", q.getFacetSortString());
  }

  public void testFacetSortLegacy() {
    SolrQuery q = new SolrQuery("dog");
    assertTrue("expected default value to be true", q.getFacetSort());
    q.setFacetSort(false);

@@ -103,7 +110,7 @@ public class SolrQueryTest extends TestCase {
    SolrQuery q = new SolrQuery("foo");
    assertEquals(10, q.setFacetLimit(10).getFacetLimit());
    assertEquals(10, q.setFacetMinCount(10).getFacetMinCount());
    assertEquals(true, q.setFacetSort(true).getFacetSort());
    assertEquals("lex", q.setFacetSort("lex").getFacetSortString());
    assertEquals(10, q.setHighlightSnippets(10).getHighlightSnippets());
    assertEquals(10, q.setHighlightFragsize(10).getHighlightFragsize());
    assertEquals(true, q.setHighlightRequireFieldMatch(true).getHighlightRequireFieldMatch());

@@ -88,11 +88,16 @@ public interface FacetParams {
  public static final String FACET_MISSING = FACET + ".missing";

  /**
   * Boolean option: true causes facets to be sorted
   * by the count, false results in natural index order.
   * String option: "count" causes facets to be sorted
   * by the count, "lex" results in lexicographical order.
   */
  public static final String FACET_SORT = FACET + ".sort";

  public static final String FACET_SORT_COUNT = "count";
  public static final String FACET_SORT_COUNT_LEGACY = "true";
  public static final String FACET_SORT_LEX = "lex";
  public static final String FACET_SORT_LEX_LEGACY = "false";
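The two *_LEGACY constants exist only for back-compatibility with the deprecated true/false values. A hypothetical normalizer (not part of the patch) shows how the four accepted strings collapse onto the two canonical ones, mirroring the equals() checks used in SimpleFacets and FacetComponent below:

import org.apache.solr.common.params.FacetParams;

public final class FacetSortNormalizer {
  private FacetSortNormalizer() {}

  // Maps "count"/"true" to FACET_SORT_COUNT and "lex"/"false" to FACET_SORT_LEX.
  public static String normalize(String sort) {
    if (FacetParams.FACET_SORT_COUNT.equals(sort)
        || FacetParams.FACET_SORT_COUNT_LEGACY.equals(sort)) {
      return FacetParams.FACET_SORT_COUNT;
    }
    if (FacetParams.FACET_SORT_LEX.equals(sort)
        || FacetParams.FACET_SORT_LEX_LEGACY.equals(sort)) {
      return FacetParams.FACET_SORT_LEX;
    }
    // Unknown values: fall back to count ordering (an arbitrary choice for this sketch).
    return FacetParams.FACET_SORT_COUNT;
  }
}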

  /**
   * Only return constraints of a facet field with the given prefix.
   */

@@ -1538,6 +1538,13 @@ public final class SolrCore implements SolrInfoMBean {
        log.warn( "adding ShowFileRequestHandler with hidden files: "+hide );
      }
    }

    String facetSort = solrConfig.get("//bool[@name='facet.sort']", null);
    if (facetSort != null) {
      log.warn(
          "solrconfig.xml uses deprecated <bool name='facet.sort'>. Please "+
          "update your config to use <string name='facet.sort'>.");
    }
  }

  public CoreDescriptor getCoreDescriptor() {

@@ -122,7 +122,7 @@ public class FacetComponent extends SearchComponent
    }

    refine.purpose |= ShardRequest.PURPOSE_REFINE_FACETS;
    refine.params.set(FacetParams.FACET,"true");
    refine.params.set(FacetParams.FACET,FacetParams.FACET_SORT_COUNT_LEGACY);
    refine.params.remove(FacetParams.FACET_FIELD);
    // TODO: perhaps create a more compact facet.terms method?
    refine.params.set(FacetParams.FACET_QUERY, fqueries.toArray(new String[fqueries.size()]));
@@ -148,14 +148,14 @@ public class FacetComponent extends SearchComponent
      rb._facetInfo = fi = new FacetInfo();
      fi.parse(rb.req.getParams(), rb);
      // should already be true...
      // sreq.params.set(FacetParams.FACET, "true");
      // sreq.params.set(FacetParams.FACET, FacetParams.FACET_SORT_COUNT_LEGACY);
    }

    sreq.params.remove(FacetParams.FACET_MINCOUNT);
    sreq.params.remove(FacetParams.FACET_OFFSET);
    sreq.params.remove(FacetParams.FACET_LIMIT);

    for (DistribFieldFacet dff : fi.topFacets.values()) {
    for (DistribFieldFacet dff : fi.facets.values()) {
      String paramStart = "f." + dff.field + '.';
      sreq.params.remove(paramStart + FacetParams.FACET_MINCOUNT);
      sreq.params.remove(paramStart + FacetParams.FACET_OFFSET);
@@ -176,7 +176,7 @@ public class FacetComponent extends SearchComponent
        }
      } else {
        // turn off faceting on other requests
        sreq.params.set(FacetParams.FACET, "false");
        sreq.params.set(FacetParams.FACET, FacetParams.FACET_SORT_LEX_LEGACY);
        // we could optionally remove faceting params
      }
    }
@@ -216,7 +216,7 @@ public class FacetComponent extends SearchComponent

    // step through each facet.field, adding results from this shard
    NamedList facet_fields = (NamedList)facet_counts.get("facet_fields");
    for (DistribFieldFacet dff : fi.topFacets.values()) {
    for (DistribFieldFacet dff : fi.facets.values()) {
      dff.add(shardNum, (NamedList)facet_fields.get(dff.field), dff.initialLimit);
    }
  }
@@ -236,9 +236,10 @@ public class FacetComponent extends SearchComponent
    }


    for (DistribFieldFacet dff : fi.topFacets.values()) {
    for (DistribFieldFacet dff : fi.facets.values()) {
      if (dff.limit <= 0) continue; // no need to check these facets for refinement
      ShardFacetCount[] counts = dff.getSorted();
      if (dff.minCount <= 1 && (dff.sort.equals(FacetParams.FACET_SORT_LEX) || dff.sort.equals(FacetParams.FACET_SORT_LEX_LEGACY))) continue;
      ShardFacetCount[] counts = dff.getCountSorted();
      int ntop = Math.min(counts.length, dff.offset + dff.limit);
      long smallestCount = counts.length == 0 ? 0 : counts[ntop-1].count;
@@ -306,7 +307,7 @@ public class FacetComponent extends SearchComponent
      String val = qparams.get(QueryParsing.V);

      // Find the right field.facet for this field
      DistribFieldFacet dff = fi.topFacets.get(field);
      DistribFieldFacet dff = fi.facets.get(field);
      if (dff == null) continue; // maybe this wasn't for facet count refinement

      // Find the right constraint count for this value
@@ -351,13 +352,20 @@ public class FacetComponent extends SearchComponent
      NamedList facet_fields = new SimpleOrderedMap();
      facet_counts.add("facet_fields", facet_fields);

      for (DistribFieldFacet dff : fi.topFacets.values()) {
      for (DistribFieldFacet dff : fi.facets.values()) {
        NamedList fieldCounts = new NamedList(); // order is more important for facets
        facet_fields.add(dff.field, fieldCounts);

        ShardFacetCount[] counts = dff.countSorted;
        if (counts == null || dff.needRefinements) {
          counts = dff.getSorted();
        ShardFacetCount[] counts;
        if (dff.sort.equals(FacetParams.FACET_SORT_COUNT) || dff.sort.equals(FacetParams.FACET_SORT_COUNT_LEGACY)) {
          counts = dff.countSorted;
          if (counts == null || dff.needRefinements) {
            counts = dff.getCountSorted();
          }
        } else if (dff.sort.equals(FacetParams.FACET_SORT_LEX) || dff.sort.equals(FacetParams.FACET_SORT_LEX_LEGACY)) {
          counts = dff.getLexSorted();
        } else { // TODO: log error or throw exception?
          counts = dff.getLexSorted();
        }

        int end = dff.limit < 0 ? counts.length : Math.min(dff.offset + dff.limit, counts.length);
@@ -429,8 +437,7 @@ class FacetInfo {

  void parse(SolrParams params, ResponseBuilder rb) {
    queryFacets = new LinkedHashMap<String,Long>();
    topFacets = new LinkedHashMap<String,DistribFieldFacet>();
    listFacets = new LinkedHashMap<String,DistribFieldFacet>();
    facets = new LinkedHashMap<String,DistribFieldFacet>();

    String[] facetQs = params.getParams(FacetParams.FACET_QUERY);
    if (facetQs != null) {

@@ -444,18 +451,13 @@ class FacetInfo {
      for (String field : facetFs) {
        DistribFieldFacet ff = new DistribFieldFacet(rb, field);
        ff.fillParams(params, field);
        if (ff.sort) {
          topFacets.put(field, ff);
        } else {
          listFacets.put(field, ff);
        }
        facets.put(field, ff);
      }
    }
  }

  LinkedHashMap<String,Long> queryFacets;
  LinkedHashMap<String,DistribFieldFacet> topFacets; // field facets that order by constraint count (sort=true)
  LinkedHashMap<String,DistribFieldFacet> listFacets; // field facets that list values in term order
  LinkedHashMap<String,DistribFieldFacet> facets;
}
@@ -464,7 +466,7 @@ class FieldFacet {
  int offset;
  int limit;
  int minCount;
  boolean sort;
  String sort;
  boolean missing;
  String prefix;
  long missingCount;
@@ -482,8 +484,8 @@ class FieldFacet {
    }
    this.minCount = mincount;
    this.missing = params.getFieldBool(field, FacetParams.FACET_MISSING, false);
    // default to sorting if there is a limit.
    this.sort = params.getFieldBool(field, FacetParams.FACET_SORT, limit>0);
    // default to sorting by count if there is a limit.
    this.sort = params.getFieldParam(field, FacetParams.FACET_SORT, limit>0 ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_LEX);
    this.prefix = params.getFieldParam(field,FacetParams.FACET_PREFIX);
  }
}
@@ -550,8 +552,18 @@ class DistribFieldFacet extends FieldFacet {
    counted[shardNum] = terms;
  }

  ShardFacetCount[] getLexSorted() {
    ShardFacetCount[] arr = counts.values().toArray(new ShardFacetCount[counts.size()]);
    Arrays.sort(arr, new Comparator<ShardFacetCount>() {
      public int compare(ShardFacetCount o1, ShardFacetCount o2) {
        return o1.name.compareTo(o2.name);
      }
    });
    countSorted = arr;
    return arr;
  }

  ShardFacetCount[] getSorted() {
  ShardFacetCount[] getCountSorted() {
    ShardFacetCount[] arr = counts.values().toArray(new ShardFacetCount[counts.size()]);
    Arrays.sort(arr, new Comparator<ShardFacetCount>() {
      public int compare(ShardFacetCount o1, ShardFacetCount o2) {

@@ -145,7 +145,7 @@ public class SimpleFacets {
    }
    boolean missing = params.getFieldBool(field, FacetParams.FACET_MISSING, false);
    // default to sorting if there is a limit.
    boolean sort = params.getFieldBool(field, FacetParams.FACET_SORT, limit>0);
    String sort = params.getFieldParam(field, FacetParams.FACET_SORT, limit>0 ? "count" : "lex");
    String prefix = params.getFieldParam(field,FacetParams.FACET_PREFIX);
@@ -229,7 +229,7 @@ public class SimpleFacets {
   * Use the Lucene FieldCache to get counts for each unique field value in <code>docs</code>.
   * The field must have at most one indexed token per document.
   */
  public static NamedList getFieldCacheCounts(SolrIndexSearcher searcher, DocSet docs, String fieldName, int offset, int limit, int mincount, boolean missing, boolean sort, String prefix) throws IOException {
  public static NamedList getFieldCacheCounts(SolrIndexSearcher searcher, DocSet docs, String fieldName, int offset, int limit, int mincount, boolean missing, String sort, String prefix) throws IOException {
    // TODO: If the number of terms is high compared to docs.size(), and zeros==false,
    // we should use an alternate strategy to avoid
    // 1) creating another huge int[] for the counts
@@ -286,7 +286,7 @@ public class SimpleFacets {
    int off=offset;
    int lim=limit>=0 ? limit : Integer.MAX_VALUE;

    if (sort) {
    if (sort.equals(FacetParams.FACET_SORT_COUNT) || sort.equals(FacetParams.FACET_SORT_COUNT_LEGACY)) {
      int maxsize = limit>0 ? offset+limit : Integer.MAX_VALUE-1;
      maxsize = Math.min(maxsize, nTerms);
      final BoundedTreeSet<CountPair<String,Integer>> queue = new BoundedTreeSet<CountPair<String,Integer>>(maxsize);
@@ -344,7 +344,7 @@ public class SimpleFacets {
   * @see FacetParams#FACET_ZEROS
   * @see FacetParams#FACET_MISSING
   */
  public NamedList getFacetTermEnumCounts(SolrIndexSearcher searcher, DocSet docs, String field, int offset, int limit, int mincount, boolean missing, boolean sort, String prefix)
  public NamedList getFacetTermEnumCounts(SolrIndexSearcher searcher, DocSet docs, String field, int offset, int limit, int mincount, boolean missing, String sort, String prefix)
    throws IOException {

    /* :TODO: potential optimization...
@@ -360,7 +360,7 @@ public class SimpleFacets {
    FieldType ft = schema.getFieldType(field);

    final int maxsize = limit>=0 ? offset+limit : Integer.MAX_VALUE-1;
    final BoundedTreeSet<CountPair<String,Integer>> queue = sort ? new BoundedTreeSet<CountPair<String,Integer>>(maxsize) : null;
    final BoundedTreeSet<CountPair<String,Integer>> queue = (sort.equals("count") || sort.equals("true")) ? new BoundedTreeSet<CountPair<String,Integer>>(maxsize) : null;
    final NamedList res = new NamedList();

    int min=mincount-1; // the smallest value in the top 'N' values
@@ -400,7 +400,7 @@ public class SimpleFacets {
        }
      }

      if (sort) {
      if (sort.equals("count") || sort.equals("true")) {
        if (c>min) {
          queue.add(new CountPair<String,Integer>(t.text(), c));
          if (queue.size()>=maxsize) min=queue.last().val;
@@ -415,7 +415,7 @@ public class SimpleFacets {
      } while (te.next());
    }

    if (sort) {
    if (sort.equals("count") || sort.equals("true")) {
      for (CountPair<String,Integer> p : queue) {
        if (--off>=0) continue;
        if (--lim<0) break;

@@ -22,6 +22,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.TermQuery;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.SolrCore;
@@ -416,7 +417,7 @@ class UnInvertedField {


  public NamedList getCounts(SolrIndexSearcher searcher, DocSet baseDocs, int offset, int limit, int mincount, boolean missing, boolean sort, String prefix) throws IOException {
  public NamedList getCounts(SolrIndexSearcher searcher, DocSet baseDocs, int offset, int limit, Integer mincount, boolean missing, String sort, String prefix) throws IOException {
    FieldType ft = searcher.getSchema().getFieldType(field);

    NamedList res = new NamedList(); // order is important
@@ -528,7 +529,7 @@ class UnInvertedField {
    int off=offset;
    int lim=limit>=0 ? limit : Integer.MAX_VALUE;

    if (sort) {
    if (sort.equals(FacetParams.FACET_SORT_COUNT) || sort.equals(FacetParams.FACET_SORT_COUNT_LEGACY)) {
      int maxsize = limit>0 ? offset+limit : Integer.MAX_VALUE-1;
      maxsize = Math.min(maxsize, numTermsInField);
      final BoundedTreeSet<Long> queue = new BoundedTreeSet<Long>(maxsize);

@@ -537,7 +537,8 @@ public class TestDistributedSearch extends TestCase {


    query("q","*:*", "rows",100, "facet","true", "facet.field",t1);
    query("q","*:*", "rows",100, "facet","true", "facet.field",t1, "facet.limit",-1, "facet.sort",true);
    query("q","*:*", "rows",100, "facet","true", "facet.field",t1, "facet.limit",-1, "facet.sort","count");
    query("q","*:*", "rows",100, "facet","true", "facet.field",t1, "facet.limit",-1, "facet.sort","lex");
    query("q","*:*", "rows",100, "facet","true", "facet.field",t1,"facet.limit",1);
    query("q","*:*", "rows",100, "facet","true", "facet.query","quick", "facet.query","all", "facet.query","*:*");
    query("q","*:*", "rows",100, "facet","true", "facet.field",t1, "facet.offset",1);

@@ -0,0 +1,98 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.request;

import org.apache.solr.util.AbstractSolrTestCase;

public class SimpleFacetsLegacySortTest extends AbstractSolrTestCase {

  public String getSchemaFile() { return "schema.xml"; }
  public String getSolrConfigFile() { return "solrconfig-facet-sort.xml"; }
  public String getCoreName() { return "basic"; }

  public void testFacetSortLegacy() {
    String f = "t_s1";
    String pre = "//lst[@name='"+f+"']";

    assertU(adoc("id", "1", f, "A"));
    assertU(adoc("id", "2", f, "B"));
    assertU(adoc("id", "3", f, "C"));
    assertU(adoc("id", "4", f, "C"));
    assertU(adoc("id", "5", f, "D"));
    assertU(adoc("id", "6", f, "E"));
    assertU(adoc("id", "7", f, "E"));
    assertU(adoc("id", "8", f, "E"));
    assertU(adoc("id", "9", f, "F"));
    assertU(adoc("id", "10", f, "G"));
    assertU(adoc("id", "11", f, "G"));
    assertU(adoc("id", "12", f, "G"));
    assertU(adoc("id", "13", f, "G"));
    assertU(adoc("id", "14", f, "G"));
    assertU(commit());

    assertQ("check for facet.sort=true",
            req("q", "id:[* TO *]"
                ,"facet", "true"
                ,"facet.field", f
                ,"facet.sort", "true"
                )
            ,"*[count(//lst[@name='facet_fields']/lst/int)=7]"

            ,pre+"/int[1][@name='G'][.='5']"
            ,pre+"/int[2][@name='E'][.='3']"
            ,pre+"/int[3][@name='C'][.='2']"
            ,pre+"/int[4][@name='A'][.='1']"
            ,pre+"/int[5][@name='B'][.='1']"
            ,pre+"/int[6][@name='D'][.='1']"
            ,pre+"/int[7][@name='F'][.='1']"
            );

    assertQ("check for facet.sort=false",
            req("q", "id:[* TO *]"
                ,"facet", "true"
                ,"facet.field", f
                ,"facet.sort", "false"
                )
            ,"*[count(//lst[@name='facet_fields']/lst/int)=7]"

            ,pre+"/int[1][@name='A'][.='1']"
            ,pre+"/int[2][@name='B'][.='1']"
            ,pre+"/int[3][@name='C'][.='2']"
            ,pre+"/int[4][@name='D'][.='1']"
            ,pre+"/int[5][@name='E'][.='3']"
            ,pre+"/int[6][@name='F'][.='1']"
            ,pre+"/int[7][@name='G'][.='5']"
            );

    assertQ("check for solrconfig default (false)",
            req("q", "id:[* TO *]"
                ,"facet", "true"
                ,"facet.field", f
                )
            ,"*[count(//lst[@name='facet_fields']/lst/int)=7]"

            ,pre+"/int[1][@name='A'][.='1']"
            ,pre+"/int[2][@name='B'][.='1']"
            ,pre+"/int[3][@name='C'][.='2']"
            ,pre+"/int[4][@name='D'][.='1']"
            ,pre+"/int[5][@name='E'][.='3']"
            ,pre+"/int[6][@name='F'][.='1']"
            ,pre+"/int[7][@name='G'][.='5']"
            );
  }
}

@@ -418,7 +418,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","1"
            ,"facet.offset","3"
            ,"facet.limit","2"
            ,"facet.sort","true"
            ,"facet.sort","count"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=2]"
    ,pre+"/int[1][@name='B'][.='1']"

@@ -433,7 +433,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","1"
            ,"facet.offset","0"
            ,"facet.limit","6"
            ,"facet.sort","false"
            ,"facet.sort","lex"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=6]"
    ,pre+"/int[1][@name='A'][.='1']"

@@ -452,7 +452,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","1"
            ,"facet.offset","3"
            ,"facet.limit","2"
            ,"facet.sort","false"
            ,"facet.sort","lex"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=2]"
    ,pre+"/int[1][@name='E'][.='3']"

@@ -467,7 +467,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","2"
            ,"facet.offset","1"
            ,"facet.limit","2"
            ,"facet.sort","false"
            ,"facet.sort","lex"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=1]"
    ,pre+"/int[1][@name='G'][.='5']"

@@ -514,7 +514,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","B"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=3]"

@@ -531,7 +531,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","false"
            ,"facet.sort","lex"
            ,"facet.prefix","B"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=3]"

@@ -549,7 +549,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","false"
            ,"facet.sort","lex"
            ,"facet.prefix","B"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=3]"

@@ -567,7 +567,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","1"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","B"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=2]"

@@ -583,7 +583,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","1"
            ,"facet.limit","1"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","B"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=1]"

@@ -598,7 +598,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","1"
            ,"facet.limit","1"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","B"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=1]"

@@ -613,7 +613,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","C"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=2]"

@@ -629,7 +629,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","CC"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=2]"

@@ -645,7 +645,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","X"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=0]"

@@ -659,7 +659,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","1"
            ,"facet.limit","-1"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","X"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=0]"

@@ -673,7 +673,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","AAA"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=1]"

@@ -687,7 +687,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","AA"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=1]"

@@ -701,7 +701,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","AA"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=1]"

@@ -715,7 +715,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","999"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=0]"

@@ -729,7 +729,7 @@ public class SimpleFacetsTest extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","2"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"facet.prefix","999"
            )
    ,"*[count(//lst[@name='facet_fields']/lst/int)=0]"

@@ -149,7 +149,7 @@ public class TestWriterPerf extends AbstractSolrTestCase {
            ,"facet.mincount","0"
            ,"facet.offset","0"
            ,"facet.limit","100"
            ,"facet.sort","true"
            ,"facet.sort","count"
            ,"hl","true"
            ,"hl.fl","t1"
            );

@@ -0,0 +1,420 @@
<?xml version="1.0" ?>

<!--
 Licensed to the Apache Software Foundation (ASF) under one or more
 contributor license agreements. See the NOTICE file distributed with
 this work for additional information regarding copyright ownership.
 The ASF licenses this file to You under the Apache License, Version 2.0
 (the "License"); you may not use this file except in compliance with
 the License. You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-->

<!-- $Id$
     $Source$
     $Name$
-->

<config>

  <jmx />

  <!-- Used to specify an alternate directory to hold all index data.
       It defaults to "index" if not present, and should probably
       not be changed if replication is in use. -->
  <dataDir>${solr.data.dir:./solr/data}</dataDir>

  <indexDefaults>
    <!-- Values here affect all index writers and act as a default
         unless overridden. -->
    <!-- Values here affect all index writers and act as a default unless overridden. -->
    <useCompoundFile>false</useCompoundFile>
    <mergeFactor>10</mergeFactor>
    <!-- If both ramBufferSizeMB and maxBufferedDocs is set, then Lucene will flush based on whichever limit is hit first.
    -->
    <!--<maxBufferedDocs>1000</maxBufferedDocs>-->
    <!-- Tell Lucene when to flush documents to disk.
         Giving Lucene more memory for indexing means faster indexing at the cost of more RAM

         If both ramBufferSizeMB and maxBufferedDocs is set, then Lucene will flush based on whichever limit is hit first.

    -->
    <ramBufferSizeMB>32</ramBufferSizeMB>
    <maxMergeDocs>2147483647</maxMergeDocs>
    <maxFieldLength>10000</maxFieldLength>
    <writeLockTimeout>1000</writeLockTimeout>
    <commitLockTimeout>10000</commitLockTimeout>

    <!--
     Expert: Turn on Lucene's auto commit capability.

     NOTE: Despite the name, this value does not have any relation to Solr's autoCommit functionality

    -->
    <luceneAutoCommit>false</luceneAutoCommit>

    <!--
     Expert:
     The Merge Policy in Lucene controls how merging is handled by Lucene. The default in 2.3 is the LogByteSizeMergePolicy, previous
     versions used LogDocMergePolicy.

     LogByteSizeMergePolicy chooses segments to merge based on their size. The Lucene 2.2 default, LogDocMergePolicy chose when
     to merge based on number of documents

     Other implementations of MergePolicy must have a no-argument constructor
    -->
    <mergePolicy>org.apache.lucene.index.LogByteSizeMergePolicy</mergePolicy>

    <!--
     Expert:
     The Merge Scheduler in Lucene controls how merges are performed. The ConcurrentMergeScheduler (Lucene 2.3 default)
     can perform merges in the background using separate threads. The SerialMergeScheduler (Lucene 2.2 default) does not.
    -->
    <mergeScheduler>org.apache.lucene.index.ConcurrentMergeScheduler</mergeScheduler>
    <!-- these are global... can't currently override per index -->
    <writeLockTimeout>1000</writeLockTimeout>
    <commitLockTimeout>10000</commitLockTimeout>

    <lockType>single</lockType>
  </indexDefaults>

  <mainIndex>
    <!-- lucene options specific to the main on-disk lucene index -->
    <useCompoundFile>false</useCompoundFile>
    <mergeFactor>10</mergeFactor>
    <ramBufferSizeMB>32</ramBufferSizeMB>
    <maxMergeDocs>2147483647</maxMergeDocs>
    <maxFieldLength>10000</maxFieldLength>

    <unlockOnStartup>true</unlockOnStartup>
  </mainIndex>

  <updateHandler class="solr.DirectUpdateHandler2">

    <!-- autocommit pending docs if certain criteria are met
    <autoCommit>
      <maxDocs>10000</maxDocs>
      <maxTime>3600000</maxTime>
    </autoCommit>
    -->
    <!-- represents a lower bound on the frequency that commits may
         occur (in seconds). NOTE: not yet implemented

    <commitIntervalLowerBound>0</commitIntervalLowerBound>
    -->

    <!-- The RunExecutableListener executes an external command.
         exe - the name of the executable to run
         dir - dir to use as the current working directory. default="."
         wait - the calling thread waits until the executable returns. default="true"
         args - the arguments to pass to the program. default=nothing
         env - environment variables to set. default=nothing
    -->
    <!-- A postCommit event is fired after every commit
    <listener event="postCommit" class="solr.RunExecutableListener">
      <str name="exe">/var/opt/resin3/__PORT__/scripts/solr/snapshooter</str>
      <str name="dir">/var/opt/resin3/__PORT__</str>
      <bool name="wait">true</bool>
      <arr name="args"> <str>arg1</str> <str>arg2</str> </arr>
      <arr name="env"> <str>MYVAR=val1</str> </arr>
    </listener>
    -->

  </updateHandler>


  <query>
    <!-- Maximum number of clauses in a boolean query... can affect
         range or wildcard queries that expand to big boolean
         queries. An exception is thrown if exceeded.
    -->
    <maxBooleanClauses>1024</maxBooleanClauses>


    <!-- Cache specification for Filters or DocSets - unordered set of *all* documents
         that match a particular query.
    -->
    <filterCache
      class="solr.search.LRUCache"
      size="512"
      initialSize="512"
      autowarmCount="256"/>

    <queryResultCache
      class="solr.search.LRUCache"
      size="512"
      initialSize="512"
      autowarmCount="1024"/>

    <documentCache
      class="solr.search.LRUCache"
      size="512"
      initialSize="512"
      autowarmCount="0"/>

    <!-- If true, stored fields that are not requested will be loaded lazily.
    -->
    <enableLazyFieldLoading>true</enableLazyFieldLoading>

    <!--

    <cache name="myUserCache"
      class="solr.search.LRUCache"
      size="4096"
      initialSize="1024"
      autowarmCount="1024"
      regenerator="MyRegenerator"
      />
    -->


    <useFilterForSortedQuery>true</useFilterForSortedQuery>

    <queryResultWindowSize>10</queryResultWindowSize>

    <!-- set maxSize artificially low to exercise both types of sets -->
    <HashDocSet maxSize="3" loadFactor="0.75"/>


    <!-- boolToFilterOptimizer converts boolean clauses with zero boost
         into cached filters if the number of docs selected by the clause exceeds
         the threshold (represented as a fraction of the total index)
    -->
    <boolTofilterOptimizer enabled="false" cacheSize="32" threshold=".05"/>


    <!-- a newSearcher event is fired whenever a new searcher is being prepared
         and there is a current searcher handling requests (aka registered). -->
    <!-- QuerySenderListener takes an array of NamedList and executes a
         local query request for each NamedList in sequence. -->
    <!--
    <listener event="newSearcher" class="solr.QuerySenderListener">
      <arr name="queries">
        <lst> <str name="q">solr</str> <str name="start">0</str> <str name="rows">10</str> </lst>
        <lst> <str name="q">rocks</str> <str name="start">0</str> <str name="rows">10</str> </lst>
      </arr>
    </listener>
    -->

    <!-- a firstSearcher event is fired whenever a new searcher is being
         prepared but there is no current registered searcher to handle
         requests or to gain prewarming data from. -->
    <!--
    <listener event="firstSearcher" class="solr.QuerySenderListener">
      <arr name="queries">
        <lst> <str name="q">fast_warm</str> <str name="start">0</str> <str name="rows">10</str> </lst>
      </arr>
    </listener>
    -->


  </query>

  <!-- An alternate set representation that uses an integer hash to store filters (sets of docids).
       If the set cardinality <= maxSize elements, then HashDocSet will be used instead of the bitset
       based HashBitset. -->

  <!-- requestHandler plugins... incoming queries will be dispatched to the
       correct handler based on the qt (query type) param matching the
       name of registered handlers.
       The "standard" request handler is the default and will be used if qt
       is not specified in the request.
  -->
  <requestHandler name="standard" class="solr.StandardRequestHandler">
    <bool name="httpCaching">true</bool>
    <lst name="defaults">
      <bool name="facet.sort">false</bool>
    </lst>
  </requestHandler>
  <requestHandler name="dismaxOldStyleDefaults"
                  class="solr.DisMaxRequestHandler" >
    <!-- for historic reasons, DisMaxRequestHandler will use all of
         it's init params as "defaults" if there is no "defaults" list
         specified
    -->
    <float name="tie">0.01</float>
    <str name="qf">
      text^0.5 features_t^1.0 subject^1.4 title_stemmed^2.0
    </str>
    <str name="pf">
      text^0.2 features_t^1.1 subject^1.4 title_stemmed^2.0 title^1.5
    </str>
    <str name="bf">
      ord(weight)^0.5 recip(rord(iind),1,1000,1000)^0.3
    </str>
    <str name="mm">
      3<-1 5<-2 6<90%
    </str>
    <int name="ps">100</int>
  </requestHandler>
  <requestHandler name="dismax" class="solr.DisMaxRequestHandler" >
    <lst name="defaults">
      <str name="q.alt">*:*</str>
      <float name="tie">0.01</float>
      <str name="qf">
        text^0.5 features_t^1.0 subject^1.4 title_stemmed^2.0
      </str>
      <str name="pf">
        text^0.2 features_t^1.1 subject^1.4 title_stemmed^2.0 title^1.5
      </str>
      <str name="bf">
        ord(weight)^0.5 recip(rord(iind),1,1000,1000)^0.3
      </str>
      <str name="mm">
        3<-1 5<-2 6<90%
      </str>
      <int name="ps">100</int>
    </lst>
  </requestHandler>
  <requestHandler name="old" class="solr.tst.OldRequestHandler" >
    <int name="myparam">1000</int>
    <float name="ratio">1.4142135</float>
    <arr name="myarr"><int>1</int><int>2</int></arr>
    <str>foo</str>
  </requestHandler>
  <requestHandler name="oldagain" class="solr.tst.OldRequestHandler" >
    <lst name="lst1"> <str name="op">sqrt</str> <int name="val">2</int> </lst>
    <lst name="lst2"> <str name="op">log</str> <float name="val">10</float> </lst>
  </requestHandler>

  <requestHandler name="test" class="solr.tst.TestRequestHandler" />

  <!-- test query parameter defaults -->
  <requestHandler name="defaults" class="solr.StandardRequestHandler">
    <lst name="defaults">
      <int name="rows">4</int>
      <bool name="hl">true</bool>
      <str name="hl.fl">text,name,subject,title,whitetok</str>
    </lst>
  </requestHandler>

  <!-- test query parameter defaults -->
  <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
    <lst name="defaults">
      <int name="rows">4</int>
      <bool name="hl">true</bool>
      <str name="hl.fl">text,name,subject,title,whitetok</str>
    </lst>
  </requestHandler>

  <requestHandler name="/update" class="solr.XmlUpdateRequestHandler" />
  <requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy">
    <bool name="httpCaching">false</bool>
  </requestHandler>

  <!-- test elevation -->
  <searchComponent name="elevate" class="org.apache.solr.handler.component.QueryElevationComponent" >
    <str name="queryFieldType">string</str>
    <str name="config-file">elevate.xml</str>
  </searchComponent>

  <requestHandler name="/elevate" class="org.apache.solr.handler.component.SearchHandler">
    <lst name="defaults">
      <str name="echoParams">explicit</str>
    </lst>
    <arr name="last-components">
      <str>elevate</str>
    </arr>
  </requestHandler>

  <searchComponent name="spellcheck" class="org.apache.solr.handler.component.SpellCheckComponent">
    <str name="queryAnalyzerFieldType">lowerfilt</str>

    <lst name="spellchecker">
      <str name="name">default</str>
      <str name="field">lowerfilt</str>
      <str name="spellcheckIndexDir">spellchecker1</str>
      <str name="buildOnCommit">true</str>
    </lst>
    <!-- Example of using different distance measure -->
    <lst name="spellchecker">
      <str name="name">jarowinkler</str>
      <str name="field">lowerfilt</str>
      <!-- Use a different Distance Measure -->
      <str name="distanceMeasure">org.apache.lucene.search.spell.JaroWinklerDistance</str>
      <str name="spellcheckIndexDir">spellchecker2</str>

    </lst>
    <lst name="spellchecker">
      <str name="classname">solr.FileBasedSpellChecker</str>
      <str name="name">external</str>
      <str name="sourceLocation">spellings.txt</str>
      <str name="characterEncoding">UTF-8</str>
      <str name="spellcheckIndexDir">spellchecker3</str>
    </lst>
  </searchComponent>
  <!--
   The SpellingQueryConverter to convert raw (CommonParams.Q) queries into tokens. Uses a simple regular expression
   to strip off field markup, boosts, ranges, etc. but it is not guaranteed to match an exact parse from the query parser.
  -->
  <queryConverter name="queryConverter" class="org.apache.solr.spelling.SpellingQueryConverter"/>

  <requestHandler name="spellCheckCompRH" class="org.apache.solr.handler.component.SearchHandler">
    <lst name="defaults">
      <!-- omp = Only More Popular -->
      <str name="spellcheck.onlyMorePopular">false</str>
      <!-- exr = Extended Results -->
      <str name="spellcheck.extendedResults">false</str>
      <!-- The number of suggestions to return -->
      <str name="spellcheck.count">1</str>
    </lst>
    <arr name="last-components">
      <str>spellcheck</str>
    </arr>
  </requestHandler>

  <highlighting>
    <!-- Configure the standard fragmenter -->
    <fragmenter name="gap" class="org.apache.solr.highlight.GapFragmenter" default="true">
      <lst name="defaults">
        <int name="hl.fragsize">100</int>
      </lst>
    </fragmenter>

    <fragmenter name="regex" class="org.apache.solr.highlight.RegexFragmenter">
      <lst name="defaults">
        <int name="hl.fragsize">70</int>
      </lst>
    </fragmenter>

    <!-- Configure the standard formatter -->
    <formatter name="html" class="org.apache.solr.highlight.HtmlFormatter" default="true">
      <lst name="defaults">
        <str name="hl.simple.pre"><![CDATA[<em>]]></str>
        <str name="hl.simple.post"><![CDATA[</em>]]></str>
      </lst>
    </formatter>
  </highlighting>


  <!-- enable streaming for testing... -->
  <requestDispatcher handleSelect="true" >
    <requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="2048" />
    <httpCaching lastModifiedFrom="openTime" etagSeed="Solr" never304="false">
      <cacheControl>max-age=30, public</cacheControl>
    </httpCaching>
  </requestDispatcher>

  <admin>
    <defaultQuery>solr</defaultQuery>
    <gettableFiles>solrconfig.xml scheam.xml admin-extra.html</gettableFiles>
  </admin>

  <!-- test getting system property -->
  <propTest attr1="${solr.test.sys.prop1}-$${literal}"
            attr2="${non.existent.sys.prop:default-from-config}">prefix-${solr.test.sys.prop2}-suffix</propTest>

  <queryParser name="foo" class="FooQParserPlugin"/>

</config>