mirror of https://github.com/apache/lucene.git
Merged /lucene/dev/trunk:r1445907-1446753
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4765@1446801 13f79535-47bb-0310-9956-ffa450edef68
commit 8b4207e278
@@ -48,7 +48,7 @@
   <jetty.version>8.1.8.v20121106</jetty.version>
   <slf4j.version>1.6.4</slf4j.version>
   <tika.version>1.2</tika.version>
-  <httpcomponents.version>4.1.3</httpcomponents.version>
+  <httpcomponents.version>4.2.3</httpcomponents.version>
 
   <!-- RandomizedTesting library system properties -->
   <tests.iters>1</tests.iters>
@@ -168,6 +168,9 @@ New Features
   is faster than others, however consumes much more RAM.
   (Michael McCandless, Shai Erera)
 
+* LUCENE-4778: Add a getter for the delegate in RateLimitedDirectoryWrapper.
+  (Mark Miller)
+
 API Changes
 
 * LUCENE-4709: FacetResultNode no longer has a residue field. (Shai Erera)
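The LUCENE-4778 entry above is implemented in the RateLimitedDirectoryWrapper hunk further down. A minimal usage sketch, assuming only what this diff shows (the one-argument constructor implied by `this.delegate = wrapped;`; RAMDirectory is just a convenient stand-in Directory for illustration):

```java
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.RateLimitedDirectoryWrapper;

public class GetDelegateExample {
  public static void main(String[] args) {
    Directory raw = new RAMDirectory();
    RateLimitedDirectoryWrapper limited = new RateLimitedDirectoryWrapper(raw);
    // New in LUCENE-4778: the wrapper exposes the Directory it delegates to,
    // so callers can unwrap it without keeping a separate reference.
    Directory unwrapped = limited.getDelegate();
    System.out.println(unwrapped == raw); // prints true
  }
}
```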
@@ -216,6 +219,19 @@ Bug Fixes
   cases, for example if you had an index with more than 260M documents and a
   VAR_INT field. (Simon Willnauer, Adrien Grand, Mike McCandless, Robert Muir)
 
+* LUCENE-4775: Remove SegmentInfo.sizeInBytes() and make
+  MergePolicy.OneMerge.totalBytesSize thread safe (Josh Bronson via
+  Robert Muir, Mike McCandless)
+
+* LUCENE-4770: If spatial's TermQueryPrefixTreeStrategy was used to search
+  indexed non-point shapes, then there was an edge case where a query should
+  find a shape but it didn't. The fix is the removal of an optimization that
+  simplifies some leaf cells into a parent. The index data for such a field is
+  now ~20% larger. This optimization is still done for the query shape, and for
+  indexed data for RecursivePrefixTreeStrategy. Furthermore, this optimization
+  is enhanced to roll up beyond the bottom cell level. (David Smiley,
+  Florian Schilling)
+
 Documentation
 
 * LUCENE-4718: Fixed documentation of oal.queryparser.classic.
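The thread-safety half of the LUCENE-4775 entry shows up in the MergePolicy hunks below: `estimatedMergeBytes` becomes `volatile` and a new `volatile long totalMergeBytes` is set by IndexWriter's mergeInit and read by merge threads. A self-contained sketch of that publication pattern, using hypothetical names rather than the real Lucene classes:

```java
// Why volatile matters here: a 64-bit long written by one thread and read by
// another is not safely published without it (plain long accesses are not
// even guaranteed atomic by the Java memory model).
class OneMergeSketch {
  // Written by the indexing thread (cf. IW.mergeInit in this diff),
  // read by concurrent merge threads.
  volatile long totalMergeBytes;

  long totalBytesSize() {
    return totalMergeBytes; // volatile read: safe, and sees the latest write
  }
}
```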
@@ -520,7 +520,7 @@ class DocumentsWriterPerThread {
     }
 
     if (infoStream.isEnabled("DWPT")) {
-      final double newSegmentSize = segmentInfo.sizeInBytes()/1024./1024.;
+      final double newSegmentSize = segmentInfoPerCommit.sizeInBytes()/1024./1024.;
       infoStream.message("DWPT", "flushed: segment=" + segmentInfo.name +
               " ramUsed=" + nf.format(startMBUsed) + " MB" +
               " newFlushedSize(includes docstores)=" + nf.format(newSegmentSize) + " MB" +
@@ -557,7 +557,7 @@ class DocumentsWriterPerThread {
 
     IndexWriter.setDiagnostics(newSegment.info, "flush");
 
-    IOContext context = new IOContext(new FlushInfo(newSegment.info.getDocCount(), newSegment.info.sizeInBytes()));
+    IOContext context = new IOContext(new FlushInfo(newSegment.info.getDocCount(), newSegment.sizeInBytes()));
 
     boolean success = false;
     try {
@@ -2301,7 +2301,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         infoStream.message("IW", "addIndexes: process segment origName=" + info.info.name + " newName=" + newSegName + " info=" + info);
       }
 
-      IOContext context = new IOContext(new MergeInfo(info.info.getDocCount(), info.info.sizeInBytes(), true, -1));
+      IOContext context = new IOContext(new MergeInfo(info.info.getDocCount(), info.sizeInBytes(), true, -1));
 
       for(FieldInfo fi : getFieldInfos(info.info)) {
         globalFieldNumberMap.addOrGet(fi.name, fi.number, fi.getDocValuesType());
@@ -3458,7 +3458,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         final int delCount = numDeletedDocs(info);
         assert delCount <= info.info.getDocCount();
         final double delRatio = ((double) delCount)/info.info.getDocCount();
-        merge.estimatedMergeBytes += info.info.sizeInBytes() * (1.0 - delRatio);
+        merge.estimatedMergeBytes += info.sizeInBytes() * (1.0 - delRatio);
+        merge.totalMergeBytes += info.sizeInBytes();
       }
     }
   }
@@ -3759,7 +3760,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         // lost...
 
         if (infoStream.isEnabled("IW")) {
-          infoStream.message("IW", String.format(Locale.ROOT, "merged segment size=%.3f MB vs estimate=%.3f MB", merge.info.info.sizeInBytes()/1024./1024., merge.estimatedMergeBytes/1024/1024.));
+          infoStream.message("IW", String.format(Locale.ROOT, "merged segment size=%.3f MB vs estimate=%.3f MB", merge.info.sizeInBytes()/1024./1024., merge.estimatedMergeBytes/1024/1024.));
        }
 
        final IndexReaderWarmer mergedSegmentWarmer = config.getMergedSegmentWarmer();
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 
@@ -74,7 +73,11 @@ public abstract class MergePolicy implements java.io.Closeable, Cloneable {
     int maxNumSegments = -1;        // used by IndexWriter
 
     /** Estimated size in bytes of the merged segment. */
-    public long estimatedMergeBytes;       // used by IndexWriter
+    public volatile long estimatedMergeBytes;       // used by IndexWriter
+
+    // Sum of sizeInBytes of all SegmentInfos; set by IW.mergeInit
+    volatile long totalMergeBytes;
 
     List<SegmentReader> readers;        // used by IndexWriter
 
     /** Segments to be merged. */
@@ -187,14 +190,12 @@ public abstract class MergePolicy implements java.io.Closeable, Cloneable {
 
     /**
      * Returns the total size in bytes of this merge. Note that this does not
-     * indicate the size of the merged segment, but the input total size.
-     * */
+     * indicate the size of the merged segment, but the
+     * input total size. This is only set once the merge is
+     * initialized by IndexWriter.
+     */
     public long totalBytesSize() throws IOException {
-      long total = 0;
-      for (SegmentInfoPerCommit info : segments) {
-        total += info.info.sizeInBytes();
-      }
-      return total;
+      return totalMergeBytes;
    }
 
    /**
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
  */
 
 
-import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -57,8 +56,6 @@ public final class SegmentInfo {
 
   private boolean isCompoundFile;
 
-  private volatile long sizeInBytes = -1;         // total byte size of all files (computed on demand)
-
   private Codec codec;
 
   private Map<String,String> diagnostics;
@@ -100,23 +97,6 @@ public final class SegmentInfo {
     this.attributes = attributes;
   }
 
-  /**
-   * Returns total size in bytes of all of files used by
-   * this segment. Note that this will not include any live
-   * docs for the segment; to include that use {@link
-   * SegmentInfoPerCommit#sizeInBytes()} instead.
-   */
-  public long sizeInBytes() throws IOException {
-    if (sizeInBytes == -1) {
-      long sum = 0;
-      for (final String fileName : files()) {
-        sum += dir.fileLength(fileName);
-      }
-      sizeInBytes = sum;
-    }
-    return sizeInBytes;
-  }
-
   /**
    * Mark whether this segment is stored as a compound file.
    *
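The method deleted above is a classic compute-on-demand cache, which is also why the next three hunks delete the `sizeInBytes = -1;` invalidation from setFiles/addFiles/addFile. A self-contained sketch of the removed idiom (hypothetical class, not Lucene API):

```java
import java.util.ArrayList;
import java.util.List;

// Lazy size cache in the style of the removed SegmentInfo.sizeInBytes():
// compute on first use, cache in a volatile, and reset to -1 whenever the
// file set changes so the next call recomputes.
class LazySizeCache {
  private final List<Long> fileLengths = new ArrayList<Long>();
  private volatile long sizeInBytes = -1;

  long sizeInBytes() {
    if (sizeInBytes == -1) {      // not yet computed, or invalidated
      long sum = 0;
      for (long len : fileLengths) {
        sum += len;
      }
      sizeInBytes = sum;          // cache for subsequent calls
    }
    return sizeInBytes;
  }

  void addFile(long length) {
    fileLengths.add(length);
    sizeInBytes = -1;             // invalidate the cached total
  }
}
```

After LUCENE-4775, callers use SegmentInfoPerCommit.sizeInBytes() instead, as the IndexWriter and TieredMergePolicy hunks in this commit show.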
@@ -254,7 +234,6 @@ public final class SegmentInfo {
   public void setFiles(Set<String> files) {
     checkFileNames(files);
     setFiles = files;
-    sizeInBytes = -1;
   }
 
   /** Add these files to the set of files written for this
@@ -262,7 +241,6 @@ public final class SegmentInfo {
   public void addFiles(Collection<String> files) {
     checkFileNames(files);
     setFiles.addAll(files);
-    sizeInBytes = -1;
   }
 
   /** Add this file to the set of files written for this
@@ -270,7 +248,6 @@ public final class SegmentInfo {
   public void addFile(String file) {
     checkFileNames(Collections.singleton(file));
     setFiles.add(file);
-    sizeInBytes = -1;
   }
 
   private void checkFileNames(Collection<String> files) {
@@ -374,7 +374,7 @@ public class TieredMergePolicy extends MergePolicy {
     for(int idx = tooBigCount; idx<infosSorted.size(); idx++) {
       final SegmentInfoPerCommit info = infosSorted.get(idx);
       if (merging.contains(info)) {
-        mergingBytes += info.info.sizeInBytes();
+        mergingBytes += info.sizeInBytes();
       } else if (!toBeMerged.contains(info)) {
         eligible.add(info);
       }
@@ -470,7 +470,7 @@ public class TieredMergePolicy extends MergePolicy {
         final long segBytes = size(info);
         totAfterMergeBytes += segBytes;
         totAfterMergeBytesFloored += floorSize(segBytes);
-        totBeforeMergeBytes += info.info.sizeInBytes();
+        totBeforeMergeBytes += info.sizeInBytes();
       }
 
       // Measure "skew" of the merge, which can range
@@ -670,7 +670,7 @@ public class TieredMergePolicy extends MergePolicy {
 
   // Segment size in bytes, pro-rated by % deleted
   private long size(SegmentInfoPerCommit info) throws IOException {
-    final long byteSize = info.info.sizeInBytes();
+    final long byteSize = info.sizeInBytes();
     final int delCount = writer.get().numDeletedDocs(info);
     final double delRatio = (info.info.getDocCount() <= 0 ? 0.0f : ((double)delCount / (double)info.info.getDocCount()));
     assert delRatio <= 1.0;
@@ -91,7 +91,7 @@ public interface FieldCache {
     };
   }
 
-  /** Field values as 32-bit signed long integers */
+  /** Field values as 64-bit signed long integers */
  public static abstract class Longs {
    /** Return an long representation of this field's value. */
    public abstract long get(int docID);
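The one-word fix above corrects a copy-paste from the Ints javadoc: Longs yields 64-bit values. A hedged usage sketch of this 4.x-era API (the getLongs signature is recalled from that API generation, not shown in this diff):

```java
import java.io.IOException;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.search.FieldCache;

public class LongsExample {
  static long firstValue(AtomicReader reader) throws IOException {
    // Longs.get(docID) returns the field's 64-bit value for that document,
    // as the corrected comment now says.
    FieldCache.Longs longs = FieldCache.DEFAULT.getLongs(reader, "price", false);
    return longs.get(0); // value for docID 0
  }
}
```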
@@ -40,6 +40,10 @@ public final class RateLimitedDirectoryWrapper extends Directory {
     this.delegate = wrapped;
   }
 
+  public Directory getDelegate() {
+    return delegate;
+  }
+
   @Override
   public String[] listAll() throws IOException {
     ensureOpen();
@@ -324,4 +324,40 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
     w.close(false);
     dir.close();
   }
+
+
+  private static class TrackingCMS extends ConcurrentMergeScheduler {
+    long totMergedBytes;
+
+    public TrackingCMS() {
+      setMaxMergeCount(5);
+      setMaxThreadCount(5);
+    }
+
+    @Override
+    public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+      totMergedBytes += merge.totalBytesSize();
+      super.doMerge(merge);
+    }
+  }
+
+  public void testTotalBytesSize() throws Exception {
+    Directory d = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    iwc.setMaxBufferedDocs(5);
+    iwc.setMergeScheduler(new TrackingCMS());
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    for(int i=0;i<100000;i++) {
+      Document doc = new Document();
+      doc.add(newStringField("id", ""+i, Field.Store.NO));
+      doc.add(newTextField("field", "here is some text", Field.Store.NO));
+      w.addDocument(doc);
+
+      if (random().nextBoolean()) {
+        w.deleteDocuments(new Term("id", ""+random().nextInt(i+1)));
+      }
+    }
+    w.close();
+    d.close();
+  }
 }
@@ -20,11 +20,6 @@ package org.apache.lucene.search;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.BitSet;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Random;
 import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
@@ -44,7 +39,6 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -53,13 +47,10 @@ import org.apache.lucene.index.StoredDocument;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.FieldValueHitQueue.Entry;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.DocIdBitSet;
-import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.NamedThreadFactory;
 import org.apache.lucene.util._TestUtil;
@@ -351,11 +342,6 @@ public class TestSort extends LuceneTestCase {
     return getIndex(false, true);
   }
 
-  private IndexSearcher getEmptyIndex()
-  throws IOException {
-    return getIndex(false, false);
-  }
-
   // Set to true if the DV "string" field is indexed as a
   // sorted source:
   private boolean dvStringSorted;
@@ -392,64 +378,6 @@ public class TestSort extends LuceneTestCase {
     super.tearDown();
   }
 
-  // test the sorts by score and document number
-  public void testBuiltInSorts() throws Exception {
-    sort = new Sort();
-    assertMatches(full, queryX, sort, "ACEGI");
-    assertMatches(full, queryY, sort, "BDFHJ");
-
-    sort.setSort(SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "ACEGI");
-    assertMatches(full, queryY, sort, "BDFHJ");
-  }
-
-  // test sorts where the type of field is specified
-  public void testTypedSort() throws Exception {
-    sort.setSort(new SortField("int", SortField.Type.INT), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "IGAEC");
-    assertMatches(full, queryY, sort, "DHFJB");
-
-    sort.setSort(new SortField("float", SortField.Type.FLOAT), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "GCIEA");
-    assertMatches(full, queryY, sort, "DHJFB");
-
-    sort.setSort(new SortField("long", SortField.Type.LONG), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "EACGI");
-    assertMatches(full, queryY, sort, "FBJHD");
-
-    sort.setSort(new SortField("double", SortField.Type.DOUBLE), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "AGICE");
-    assertMatches(full, queryY, sort, "DJHBF");
-
-    sort.setSort(new SortField("byte", SortField.Type.BYTE), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "CIGAE");
-    assertMatches(full, queryY, sort, "DHFBJ");
-
-    sort.setSort(new SortField("short", SortField.Type.SHORT), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "IAGCE");
-    assertMatches(full, queryY, sort, "DFHBJ");
-
-    sort.setSort(new SortField("string", SortField.Type.STRING), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "AIGEC");
-    assertMatches(full, queryY, sort, "DJHFB");
-
-    sort.setSort(new SortField("int_dv", SortField.Type.INT), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "IGAEC");
-    assertMatches(full, queryY, sort, "DHFJB");
-
-    sort.setSort(new SortField("float_dv", SortField.Type.FLOAT), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "GCIEA");
-    assertMatches(full, queryY, sort, "DHJFB");
-
-    sort.setSort(new SortField("double_dv", SortField.Type.DOUBLE), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "AGICE");
-    assertMatches(full, queryY, sort, "DJHBF");
-
-    sort.setSort(new SortField("string_dv", getDVStringSortType()), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "AIGEC");
-    assertMatches(full, queryY, sort, "DJHFB");
-  }
-
   private SortField.Type getDVStringSortType() {
     return getDVStringSortType(true);
   }
@@ -609,142 +537,6 @@ public class TestSort extends LuceneTestCase {
     assertFalse("Found sort results out of order", fail);
     searcher.getIndexReader().close();
   }
-
-  /**
-   * test sorts where the type of field is specified and a custom field parser
-   * is used, that uses a simple char encoding. The sorted string contains a
-   * character beginning from 'A' that is mapped to a numeric value using some
-   * "funny" algorithm to be different for each data type.
-   */
-  public void testCustomFieldParserSort() throws Exception {
-    // since tests explicilty uses different parsers on the same fieldname
-    // we explicitly check/purge the FieldCache between each assertMatch
-    FieldCache fc = FieldCache.DEFAULT;
-
-
-    sort.setSort(new SortField("parser", new FieldCache.IntParser(){
-      @Override
-      public final int parseInt(final BytesRef term) {
-        return (term.bytes[term.offset]-'A') * 123456;
-      }
-
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    assertMatches (full, queryA, sort, "JIHGFEDCBA");
-    assertSaneFieldCaches(getTestName() + " IntParser");
-    fc.purgeAllCaches();
-
-    sort.setSort(new SortField("parser", new FieldCache.FloatParser(){
-      @Override
-      public final float parseFloat(final BytesRef term) {
-        return (float) Math.sqrt( term.bytes[term.offset]);
-      }
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    assertMatches (full, queryA, sort, "JIHGFEDCBA");
-    assertSaneFieldCaches(getTestName() + " FloatParser");
-    fc.purgeAllCaches();
-
-    sort.setSort(new SortField("parser", new FieldCache.LongParser(){
-      @Override
-      public final long parseLong(final BytesRef term) {
-        return (term.bytes[term.offset]-'A') * 1234567890L;
-      }
-
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    assertMatches (full, queryA, sort, "JIHGFEDCBA");
-    assertSaneFieldCaches(getTestName() + " LongParser");
-    fc.purgeAllCaches();
-
-    sort.setSort(new SortField("parser", new FieldCache.DoubleParser(){
-      @Override
-      public final double parseDouble(final BytesRef term) {
-        return Math.pow( term.bytes[term.offset], (term.bytes[term.offset]-'A'));
-      }
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    assertMatches (full, queryA, sort, "JIHGFEDCBA");
-    assertSaneFieldCaches(getTestName() + " DoubleParser");
-    fc.purgeAllCaches();
-
-    sort.setSort(new SortField("parser", new FieldCache.ByteParser(){
-      @Override
-      public final byte parseByte(final BytesRef term) {
-        return (byte) (term.bytes[term.offset]-'A');
-      }
-
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    assertMatches (full, queryA, sort, "JIHGFEDCBA");
-    assertSaneFieldCaches(getTestName() + " ByteParser");
-    fc.purgeAllCaches();
-
-    sort.setSort(new SortField("parser", new FieldCache.ShortParser(){
-      @Override
-      public final short parseShort(final BytesRef term) {
-        return (short) (term.bytes[term.offset]-'A');
-      }
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    assertMatches (full, queryA, sort, "JIHGFEDCBA");
-    assertSaneFieldCaches(getTestName() + " ShortParser");
-    fc.purgeAllCaches();
-  }
-
-  // test sorts when there's nothing in the index
-  public void testEmptyIndex() throws Exception {
-    IndexSearcher empty = getEmptyIndex();
-
-    sort = new Sort();
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(SortField.FIELD_DOC);
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("int", SortField.Type.INT), SortField.FIELD_DOC);
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("int_dv", SortField.Type.INT), SortField.FIELD_DOC);
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("string", SortField.Type.STRING, true), SortField.FIELD_DOC);
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("float", SortField.Type.FLOAT), new SortField("string", SortField.Type.STRING));
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("float_dv", SortField.Type.FLOAT), new SortField("string", SortField.Type.STRING));
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("string_dv", getDVStringSortType(false), true), SortField.FIELD_DOC);
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("float_dv", SortField.Type.FLOAT),
-                 new SortField("string_dv", getDVStringSortType(false)));
-    assertMatches(empty, queryX, sort, "");
-
-    sort.setSort(new SortField("float_dv", SortField.Type.FLOAT), new SortField("string_dv", getDVStringSortType(false)));
-    assertMatches(empty, queryX, sort, "");
-  }
 
   static class MyFieldComparator extends FieldComparator<Integer> {
     FieldCache.Ints docValues;
@@ -822,41 +614,6 @@ public class TestSort extends LuceneTestCase {
     assertMatches(full, queryA, sort, "JIHGFEDCBA");
   }
 
-  // test sorts in reverse
-  public void testReverseSort() throws Exception {
-    sort.setSort(new SortField(null, SortField.Type.SCORE, true), SortField.FIELD_DOC);
-    assertMatches(full, queryX, sort, "IEGCA");
-    assertMatches(full, queryY, sort, "JFHDB");
-
-    sort.setSort(new SortField(null, SortField.Type.DOC, true));
-    assertMatches(full, queryX, sort, "IGECA");
-    assertMatches(full, queryY, sort, "JHFDB");
-
-    sort.setSort(new SortField("int", SortField.Type.INT, true));
-    assertMatches(full, queryX, sort, "CAEGI");
-    assertMatches(full, queryY, sort, "BJFHD");
-
-    sort.setSort(new SortField("float", SortField.Type.FLOAT, true));
-    assertMatches(full, queryX, sort, "AECIG");
-    assertMatches(full, queryY, sort, "BFJHD");
-
-    sort.setSort(new SortField("string", SortField.Type.STRING, true));
-    assertMatches(full, queryX, sort, "CEGIA");
-    assertMatches(full, queryY, sort, "BFHJD");
-
-    sort.setSort(new SortField("int_dv", SortField.Type.INT, true));
-    assertMatches(full, queryX, sort, "CAEGI");
-    assertMatches(full, queryY, sort, "BJFHD");
-
-    sort.setSort(new SortField("float_dv", SortField.Type.FLOAT, true));
-    assertMatches(full, queryX, sort, "AECIG");
-    assertMatches(full, queryY, sort, "BFJHD");
-
-    sort.setSort(new SortField("string_dv", getDVStringSortType(), true));
-    assertMatches(full, queryX, sort, "CEGIA");
-    assertMatches(full, queryY, sort, "BFHJD");
-  }
-
   // test sorting when the sort field is empty (undefined) for some of the documents
   public void testEmptyFieldSort() throws Exception {
 
@@ -980,219 +737,6 @@ public class TestSort extends LuceneTestCase {
     assertEquals(docs1.scoreDocs[0].score, docs2.scoreDocs[0].score, 1e-6);
   }
 
-  public void testSortWithoutFillFields() throws Exception {
-
-    // There was previously a bug in TopFieldCollector when fillFields was set
-    // to false - the same doc and score was set in ScoreDoc[] array. This test
-    // asserts that if fillFields is false, the documents are set properly. It
-    // does not use Searcher's default search methods (with Sort) since all set
-    // fillFields to true.
-    Sort[] sort = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() };
-    for(int i = 0; i < sort.length; i++) {
-      Query q = new MatchAllDocsQuery();
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, false,
-          false, false, true);
-
-      full.search(q, tdc);
-
-      ScoreDoc[] sd = tdc.topDocs().scoreDocs;
-      for(int j = 1; j < sd.length; j++) {
-        assertTrue(sd[j].doc != sd[j - 1].doc);
-      }
-
-    }
-  }
-
-  public void testSortWithoutScoreTracking() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
-    for(int i = 0; i < sort.length; i++) {
-      Query q = new MatchAllDocsQuery();
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, false,
-          false, true);
-
-      full.search(q, tdc);
-
-      TopDocs td = tdc.topDocs();
-      ScoreDoc[] sd = td.scoreDocs;
-      for(int j = 0; j < sd.length; j++) {
-        assertTrue(Float.isNaN(sd[j].score));
-      }
-      assertTrue(Float.isNaN(td.getMaxScore()));
-    }
-  }
-
-  public void testSortWithScoreNoMaxScoreTracking() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
-    for(int i = 0; i < sort.length; i++) {
-      Query q = new MatchAllDocsQuery();
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
-          false, true);
-
-      full.search(q, tdc);
-
-      TopDocs td = tdc.topDocs();
-      ScoreDoc[] sd = td.scoreDocs;
-      for(int j = 0; j < sd.length; j++) {
-        assertTrue(!Float.isNaN(sd[j].score));
-      }
-      assertTrue(Float.isNaN(td.getMaxScore()));
-    }
-  }
-
-  // MultiComparatorScoringNoMaxScoreCollector
-  public void testSortWithScoreNoMaxScoreTrackingMulti() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC, SortField.FIELD_SCORE) };
-    for(int i = 0; i < sort.length; i++) {
-      Query q = new MatchAllDocsQuery();
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
-          false, true);
-
-      full.search(q, tdc);
-
-      TopDocs td = tdc.topDocs();
-      ScoreDoc[] sd = td.scoreDocs;
-      for(int j = 0; j < sd.length; j++) {
-        assertTrue(!Float.isNaN(sd[j].score));
-      }
-      assertTrue(Float.isNaN(td.getMaxScore()));
-    }
-  }
-
-  public void testSortWithScoreAndMaxScoreTracking() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
-    for(int i = 0; i < sort.length; i++) {
-      Query q = new MatchAllDocsQuery();
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
-          true, true);
-
-      full.search(q, tdc);
-
-      TopDocs td = tdc.topDocs();
-      ScoreDoc[] sd = td.scoreDocs;
-      for(int j = 0; j < sd.length; j++) {
-        assertTrue(!Float.isNaN(sd[j].score));
-      }
-      assertTrue(!Float.isNaN(td.getMaxScore()));
-    }
-  }
-
-  public void testOutOfOrderDocsScoringSort() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
-    boolean[][] tfcOptions = new boolean[][] {
-        new boolean[] { false, false, false },
-        new boolean[] { false, false, true },
-        new boolean[] { false, true, false },
-        new boolean[] { false, true, true },
-        new boolean[] { true, false, false },
-        new boolean[] { true, false, true },
-        new boolean[] { true, true, false },
-        new boolean[] { true, true, true },
-    };
-    String[] actualTFCClasses = new String[] {
-        "OutOfOrderOneComparatorNonScoringCollector",
-        "OutOfOrderOneComparatorScoringMaxScoreCollector",
-        "OutOfOrderOneComparatorScoringNoMaxScoreCollector",
-        "OutOfOrderOneComparatorScoringMaxScoreCollector",
-        "OutOfOrderOneComparatorNonScoringCollector",
-        "OutOfOrderOneComparatorScoringMaxScoreCollector",
-        "OutOfOrderOneComparatorScoringNoMaxScoreCollector",
-        "OutOfOrderOneComparatorScoringMaxScoreCollector"
-    };
-
-    BooleanQuery bq = new BooleanQuery();
-    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
-    // which delegates to BS if there are no mandatory clauses.
-    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
-    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
-    // the clause instead of BQ.
-    bq.setMinimumNumberShouldMatch(1);
-    for(int i = 0; i < sort.length; i++) {
-      for(int j = 0; j < tfcOptions.length; j++) {
-        TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10,
-            tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);
-
-        assertTrue(tdc.getClass().getName().endsWith("$"+actualTFCClasses[j]));
-
-        full.search(bq, tdc);
-
-        TopDocs td = tdc.topDocs();
-        ScoreDoc[] sd = td.scoreDocs;
-        assertEquals(10, sd.length);
-      }
-    }
-  }
-
-  // OutOfOrderMulti*Collector
-  public void testOutOfOrderDocsScoringSortMulti() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC, SortField.FIELD_SCORE) };
-    boolean[][] tfcOptions = new boolean[][] {
-        new boolean[] { false, false, false },
-        new boolean[] { false, false, true },
-        new boolean[] { false, true, false },
-        new boolean[] { false, true, true },
-        new boolean[] { true, false, false },
-        new boolean[] { true, false, true },
-        new boolean[] { true, true, false },
-        new boolean[] { true, true, true },
-    };
-    String[] actualTFCClasses = new String[] {
-        "OutOfOrderMultiComparatorNonScoringCollector",
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector",
-        "OutOfOrderMultiComparatorScoringNoMaxScoreCollector",
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector",
-        "OutOfOrderMultiComparatorNonScoringCollector",
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector",
-        "OutOfOrderMultiComparatorScoringNoMaxScoreCollector",
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector"
-    };
-
-    BooleanQuery bq = new BooleanQuery();
-    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
-    // which delegates to BS if there are no mandatory clauses.
-    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
-    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
-    // the clause instead of BQ.
-    bq.setMinimumNumberShouldMatch(1);
-    for(int i = 0; i < sort.length; i++) {
-      for(int j = 0; j < tfcOptions.length; j++) {
-        TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10,
-            tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);
-
-        assertTrue(tdc.getClass().getName().endsWith("$"+actualTFCClasses[j]));
-
-        full.search(bq, tdc);
-
-        TopDocs td = tdc.topDocs();
-        ScoreDoc[] sd = td.scoreDocs;
-        assertEquals(10, sd.length);
-      }
-    }
-  }
-
-  public void testSortWithScoreAndMaxScoreTrackingNoResults() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
-    for(int i = 0; i < sort.length; i++) {
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
-      TopDocs td = tdc.topDocs();
-      assertEquals(0, td.totalHits);
-      assertTrue(Float.isNaN(td.getMaxScore()));
-    }
-  }
-
   // runs a variety of sorts useful for multisearchers
   private void runMultiSorts(IndexSearcher multi, boolean isFull) throws Exception {
     sort.setSort(SortField.FIELD_DOC);
@@ -1331,255 +875,4 @@ public class TestSort extends LuceneTestCase {
     }
     assertEquals(msg, expectedResult, buff.toString());
   }
-
-  public void testEmptyStringVsNullStringSort() throws Exception {
-    Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-                        TEST_VERSION_CURRENT, new MockAnalyzer(random())));
-    Document doc = new Document();
-    doc.add(newStringField("f", "", Field.Store.NO));
-    doc.add(newStringField("t", "1", Field.Store.NO));
-    w.addDocument(doc);
-    w.commit();
-    doc = new Document();
-    doc.add(newStringField("t", "1", Field.Store.NO));
-    w.addDocument(doc);
-
-    IndexReader r = DirectoryReader.open(w, true);
-    w.close();
-    IndexSearcher s = newSearcher(r);
-    TopDocs hits = s.search(new TermQuery(new Term("t", "1")), null, 10, new Sort(new SortField("f", SortField.Type.STRING)));
-    assertEquals(2, hits.totalHits);
-    // null sorts first
-    assertEquals(1, hits.scoreDocs[0].doc);
-    assertEquals(0, hits.scoreDocs[1].doc);
-    r.close();
-    dir.close();
-  }
-
-  public void testLUCENE2142() throws IOException {
-    Directory indexStore = newDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(random())));
-    for(int i=0; i<5; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("string", "a"+i, Field.Store.NO));
-      doc.add(new StringField("string", "b"+i, Field.Store.NO));
-      writer.addDocument(doc);
-    }
-    writer.forceMerge(1); // enforce one segment to have a higher unique term count in all cases
-    writer.close();
-    sort.setSort(
-        new SortField("string", SortField.Type.STRING),
-        SortField.FIELD_DOC);
-    // this should not throw AIOOBE or RuntimeEx
-    IndexReader reader = DirectoryReader.open(indexStore);
-    IndexSearcher searcher = new IndexSearcher(reader);
-    searcher.search(new MatchAllDocsQuery(), null, 500, sort);
-    reader.close();
-    indexStore.close();
-  }
-
-  public void testCountingCollector() throws Exception {
-    Directory indexStore = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), indexStore);
-    for(int i=0; i<5; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("string", "a"+i, Field.Store.NO));
-      doc.add(new StringField("string", "b"+i, Field.Store.NO));
-      writer.addDocument(doc);
-    }
-    IndexReader reader = writer.getReader();
-    writer.close();
-
-    IndexSearcher searcher = newSearcher(reader);
-    TotalHitCountCollector c = new TotalHitCountCollector();
-    searcher.search(new MatchAllDocsQuery(), null, c);
-    assertEquals(5, c.getTotalHits());
-    reader.close();
-    indexStore.close();
-  }
-
-  private static class RandomFilter extends Filter {
-    private final Random random;
-    private float density;
-    private final List<BytesRef> docValues;
-    public final List<BytesRef> matchValues = Collections.synchronizedList(new ArrayList<BytesRef>());
-
-    // density should be 0.0 ... 1.0
-    public RandomFilter(Random random, float density, List<BytesRef> docValues) {
-      this.random = random;
-      this.density = density;
-      this.docValues = docValues;
-    }
-
-    @Override
-    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-      final int maxDoc = context.reader().maxDoc();
-      final FieldCache.Ints idSource = FieldCache.DEFAULT.getInts(context.reader(), "id", false);
-      assertNotNull(idSource);
-      final FixedBitSet bits = new FixedBitSet(maxDoc);
-      for(int docID=0;docID<maxDoc;docID++) {
-        if (random.nextFloat() <= density && (acceptDocs == null || acceptDocs.get(docID))) {
-          bits.set(docID);
-          //System.out.println("  acc id=" + idSource.getInt(docID) + " docID=" + docID);
-          matchValues.add(docValues.get(idSource.get(docID)));
-        }
-      }
-
-      return bits;
-    }
-  }
-
-  public void testRandomStringSort() throws Exception {
-    Random random = new Random(random().nextLong());
-
-    final int NUM_DOCS = atLeast(100);
-    final Directory dir = newDirectory();
-    final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
-    final boolean allowDups = random.nextBoolean();
-    final Set<String> seen = new HashSet<String>();
-    final int maxLength = _TestUtil.nextInt(random, 5, 100);
-    if (VERBOSE) {
-      System.out.println("TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups);
-    }
-
-    int numDocs = 0;
-    final List<BytesRef> docValues = new ArrayList<BytesRef>();
-    // TODO: deletions
-    while (numDocs < NUM_DOCS) {
-      final String s;
-      if (random.nextBoolean()) {
-        s = _TestUtil.randomSimpleString(random, maxLength);
-      } else {
-        s = _TestUtil.randomUnicodeString(random, maxLength);
-      }
-      final BytesRef br = new BytesRef(s);
-
-      if (!allowDups) {
-        if (seen.contains(s)) {
-          continue;
-        }
-        seen.add(s);
-      }
-
-      if (VERBOSE) {
-        System.out.println("  " + numDocs + ": s=" + s);
-      }
-
-      final Document doc = new Document();
-      doc.add(new SortedDocValuesField("stringdv", br));
-      doc.add(newStringField("string", s, Field.Store.NO));
-      doc.add(new NumericDocValuesField("id", numDocs));
-      docValues.add(br);
-      writer.addDocument(doc);
-      numDocs++;
-
-      if (random.nextInt(40) == 17) {
-        // force flush
-        writer.getReader().close();
-      }
-    }
-
-    final IndexReader r = writer.getReader();
-    writer.close();
-    if (VERBOSE) {
-      System.out.println("  reader=" + r);
-    }
-
-    final IndexSearcher s = newSearcher(r, false);
-    final int ITERS = atLeast(100);
-    for(int iter=0;iter<ITERS;iter++) {
-      final boolean reverse = random.nextBoolean();
-      final TopFieldDocs hits;
-      final SortField sf;
-      if (random.nextBoolean()) {
-        sf = new SortField("stringdv", SortField.Type.STRING, reverse);
-      } else {
-        sf = new SortField("string", SortField.Type.STRING, reverse);
-      }
-      final Sort sort = new Sort(sf);
-      final int hitCount = _TestUtil.nextInt(random, 1, r.maxDoc() + 20);
-      final RandomFilter f = new RandomFilter(random, random.nextFloat(), docValues);
-      if (random.nextBoolean()) {
-        hits = s.search(new ConstantScoreQuery(f),
-                        hitCount,
-                        sort);
-      } else {
-        hits = s.search(new MatchAllDocsQuery(),
-                        f,
-                        hitCount,
-                        sort);
-      }
-
-      if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + iter + " " + hits.totalHits + " hits; topN=" + hitCount + "; reverse=" + reverse);
-      }
-
-      // Compute expected results:
-      Collections.sort(f.matchValues);
-      if (reverse) {
-        Collections.reverse(f.matchValues);
-      }
-      final List<BytesRef> expected = f.matchValues;
-      if (VERBOSE) {
-        System.out.println("  expected:");
-        for(int idx=0;idx<expected.size();idx++) {
-          System.out.println("    " + idx + ": " + expected.get(idx).utf8ToString());
-          if (idx == hitCount-1) {
-            break;
-          }
-        }
-      }
-
-      if (VERBOSE) {
-        System.out.println("  actual:");
-        for(int hitIDX=0;hitIDX<hits.scoreDocs.length;hitIDX++) {
-          final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
-          System.out.println("    " + hitIDX + ": " + ((BytesRef) fd.fields[0]).utf8ToString());
-        }
-      }
-      for(int hitIDX=0;hitIDX<hits.scoreDocs.length;hitIDX++) {
-        final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
-        assertEquals(expected.get(hitIDX), (BytesRef) fd.fields[0]);
-      }
-    }
-
-    r.close();
-    dir.close();
-  }
-
-  public void testMaxScore() throws Exception {
-    Directory d = newDirectory();
-    // Not RIW because we need exactly 2 segs:
-    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
-    int id = 0;
-    for(int seg=0;seg<2;seg++) {
-      for(int docIDX=0;docIDX<10;docIDX++) {
-        Document doc = new Document();
-        doc.add(newStringField("id", ""+docIDX, Field.Store.YES));
-        StringBuilder sb = new StringBuilder();
-        for(int i=0;i<id;i++) {
-          sb.append(' ');
-          sb.append("text");
-        }
-        doc.add(newTextField("body", sb.toString(), Field.Store.NO));
-        w.addDocument(doc);
-        id++;
-      }
-      w.commit();
-    }
-
-    IndexReader r = DirectoryReader.open(w, true);
-    w.close();
-    Query q = new TermQuery(new Term("body", "text"));
-    IndexSearcher s = newSearcher(r);
-    float maxScore = s.search(q , 10).getMaxScore();
-    assertEquals(maxScore, s.search(q, null, 3, Sort.INDEXORDER, random().nextBoolean(), true).getMaxScore(), 0.0);
-    assertEquals(maxScore, s.search(q, null, 3, Sort.RELEVANCE, random().nextBoolean(), true).getMaxScore(), 0.0);
-    assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, false)}), random().nextBoolean(), true).getMaxScore(), 0.0);
-    assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, true)}), random().nextBoolean(), true).getMaxScore(), 0.0);
-    r.close();
-    d.close();
-  }
 }
File diff suppressed because it is too large
@@ -0,0 +1,643 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+
+/** Tests basic sorting on docvalues fields.
+ * These are mostly like TestSort's tests, except each test
+ * indexes the field up-front as docvalues, and checks no fieldcaches were made */
+public class TestSortDocValues extends LuceneTestCase {
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    // ensure there is nothing in fieldcache before test starts
+    FieldCache.DEFAULT.purgeAllCaches();
+  }
+
+  private void assertNoFieldCaches() {
+    // docvalues sorting should NOT create any fieldcache entries!
+    assertEquals(0, FieldCache.DEFAULT.getCacheEntries().length);
+  }
+
+  /** Tests sorting on type string */
+  public void testString() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("foo")));
+    doc.add(newStringField("value", "foo", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("bar")));
+    doc.add(newStringField("value", "bar", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.STRING));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // 'bar' comes before 'foo'
+    assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("foo", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests reverse sorting on type string */
+  public void testStringReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("bar")));
+    doc.add(newStringField("value", "bar", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("foo")));
+    doc.add(newStringField("value", "foo", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.STRING, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // 'foo' comes after 'bar' in reverse order
+    assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type string_val */
+  public void testStringVal() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new BinaryDocValuesField("value", new BytesRef("foo")));
+    doc.add(newStringField("value", "foo", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new BinaryDocValuesField("value", new BytesRef("bar")));
+    doc.add(newStringField("value", "bar", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // 'bar' comes before 'foo'
+    assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("foo", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests reverse sorting on type string_val */
+  public void testStringValReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new BinaryDocValuesField("value", new BytesRef("bar")));
+    doc.add(newStringField("value", "bar", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new BinaryDocValuesField("value", new BytesRef("foo")));
+    doc.add(newStringField("value", "foo", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // 'foo' comes after 'bar' in reverse order
+    assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type string_val, but with a SortedDocValuesField */
+  public void testStringValSorted() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("foo")));
+    doc.add(newStringField("value", "foo", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("bar")));
+    doc.add(newStringField("value", "bar", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // 'bar' comes before 'foo'
+    assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("foo", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests reverse sorting on type string_val, but with a SortedDocValuesField */
+  public void testStringValReverseSorted() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("bar")));
+    doc.add(newStringField("value", "bar", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new SortedDocValuesField("value", new BytesRef("foo")));
+    doc.add(newStringField("value", "foo", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // 'foo' comes after 'bar' in reverse order
+    assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type byte */
+  public void testByte() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("value", 23));
+    doc.add(newStringField("value", "23", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", -1));
+    doc.add(newStringField("value", "-1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", 4));
+    doc.add(newStringField("value", "4", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.BYTE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("23", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type byte in reverse */
+  public void testByteReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("value", 23));
+    doc.add(newStringField("value", "23", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", -1));
+    doc.add(newStringField("value", "-1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", 4));
+    doc.add(newStringField("value", "4", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.BYTE, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("23", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type short */
+  public void testShort() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("value", 300));
+    doc.add(newStringField("value", "300", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", -1));
+    doc.add(newStringField("value", "-1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", 4));
+    doc.add(newStringField("value", "4", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.SHORT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("300", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type short in reverse */
+  public void testShortReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("value", 300));
+    doc.add(newStringField("value", "300", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", -1));
+    doc.add(newStringField("value", "-1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", 4));
+    doc.add(newStringField("value", "4", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.SHORT, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("300", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type int */
+  public void testInt() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new NumericDocValuesField("value", 300000));
+    doc.add(newStringField("value", "300000", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", -1));
+    doc.add(newStringField("value", "-1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new NumericDocValuesField("value", 4));
+    doc.add(newStringField("value", "4", Field.Store.YES));
+    writer.addDocument(doc);
+    IndexReader ir = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(ir);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("300000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNoFieldCaches();
+
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type int in reverse */
+  public void testIntReverse() throws IOException {
+    Directory dir = newDirectory();
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", 300000));
|
||||
doc.add(newStringField("value", "300000", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", -1));
|
||||
doc.add(newStringField("value", "-1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", 4));
|
||||
doc.add(newStringField("value", "4", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.INT, true));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(3, td.totalHits);
|
||||
// reverse numeric order
|
||||
assertEquals("300000", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
/** Tests sorting on type long */
|
||||
public void testLong() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", 3000000000L));
|
||||
doc.add(newStringField("value", "3000000000", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", -1));
|
||||
doc.add(newStringField("value", "-1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", 4));
|
||||
doc.add(newStringField("value", "4", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(3, td.totalHits);
|
||||
// numeric order
|
||||
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("3000000000", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
/** Tests sorting on type long in reverse */
|
||||
public void testLongReverse() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", 3000000000L));
|
||||
doc.add(newStringField("value", "3000000000", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", -1));
|
||||
doc.add(newStringField("value", "-1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new NumericDocValuesField("value", 4));
|
||||
doc.add(newStringField("value", "4", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(3, td.totalHits);
|
||||
// reverse numeric order
|
||||
assertEquals("3000000000", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
/** Tests sorting on type float */
|
||||
public void testFloat() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new FloatDocValuesField("value", 30.1F));
|
||||
doc.add(newStringField("value", "30.1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new FloatDocValuesField("value", -1.3F));
|
||||
doc.add(newStringField("value", "-1.3", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new FloatDocValuesField("value", 4.2F));
|
||||
doc.add(newStringField("value", "4.2", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(3, td.totalHits);
|
||||
// numeric order
|
||||
assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("30.1", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
/** Tests sorting on type float in reverse */
|
||||
public void testFloatReverse() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new FloatDocValuesField("value", 30.1F));
|
||||
doc.add(newStringField("value", "30.1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new FloatDocValuesField("value", -1.3F));
|
||||
doc.add(newStringField("value", "-1.3", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new FloatDocValuesField("value", 4.2F));
|
||||
doc.add(newStringField("value", "4.2", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(3, td.totalHits);
|
||||
// reverse numeric order
|
||||
assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("-1.3", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
/** Tests sorting on type double */
|
||||
public void testDouble() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", 30.1));
|
||||
doc.add(newStringField("value", "30.1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", -1.3));
|
||||
doc.add(newStringField("value", "-1.3", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", 4.2333333333333));
|
||||
doc.add(newStringField("value", "4.2333333333333", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", 4.2333333333332));
|
||||
doc.add(newStringField("value", "4.2333333333332", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(4, td.totalHits);
|
||||
// numeric order
|
||||
assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertEquals("30.1", searcher.doc(td.scoreDocs[3].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
/** Tests sorting on type double in reverse */
|
||||
public void testDoubleReverse() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
Document doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", 30.1));
|
||||
doc.add(newStringField("value", "30.1", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", -1.3));
|
||||
doc.add(newStringField("value", "-1.3", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", 4.2333333333333));
|
||||
doc.add(newStringField("value", "4.2333333333333", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new DoubleDocValuesField("value", 4.2333333333332));
|
||||
doc.add(newStringField("value", "4.2333333333332", Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
IndexReader ir = writer.getReader();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(ir);
|
||||
Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true));
|
||||
|
||||
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
|
||||
assertEquals(4, td.totalHits);
|
||||
// numeric order
|
||||
assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
|
||||
assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value"));
|
||||
assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
|
||||
assertEquals("-1.3", searcher.doc(td.scoreDocs[3].doc).get("value"));
|
||||
assertNoFieldCaches();
|
||||
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,207 @@
package org.apache.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

/** random sorting tests */
public class TestSortRandom extends LuceneTestCase {

  public void testRandomStringSort() throws Exception {
    Random random = new Random(random().nextLong());

    final int NUM_DOCS = atLeast(100);
    final Directory dir = newDirectory();
    final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
    final boolean allowDups = random.nextBoolean();
    final Set<String> seen = new HashSet<String>();
    final int maxLength = _TestUtil.nextInt(random, 5, 100);
    if (VERBOSE) {
      System.out.println("TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups);
    }

    int numDocs = 0;
    final List<BytesRef> docValues = new ArrayList<BytesRef>();
    // TODO: deletions
    while (numDocs < NUM_DOCS) {
      final String s;
      if (random.nextBoolean()) {
        s = _TestUtil.randomSimpleString(random, maxLength);
      } else {
        s = _TestUtil.randomUnicodeString(random, maxLength);
      }
      final BytesRef br = new BytesRef(s);

      if (!allowDups) {
        if (seen.contains(s)) {
          continue;
        }
        seen.add(s);
      }

      if (VERBOSE) {
        System.out.println("  " + numDocs + ": s=" + s);
      }

      final Document doc = new Document();
      doc.add(new SortedDocValuesField("stringdv", br));
      doc.add(newStringField("string", s, Field.Store.NO));
      doc.add(new NumericDocValuesField("id", numDocs));
      docValues.add(br);
      writer.addDocument(doc);
      numDocs++;

      if (random.nextInt(40) == 17) {
        // force flush
        writer.getReader().close();
      }
    }

    final IndexReader r = writer.getReader();
    writer.close();
    if (VERBOSE) {
      System.out.println("  reader=" + r);
    }

    final IndexSearcher s = newSearcher(r, false);
    final int ITERS = atLeast(100);
    for(int iter=0;iter<ITERS;iter++) {
      final boolean reverse = random.nextBoolean();
      final TopFieldDocs hits;
      final SortField sf;
      if (random.nextBoolean()) {
        sf = new SortField("stringdv", SortField.Type.STRING, reverse);
      } else {
        sf = new SortField("string", SortField.Type.STRING, reverse);
      }
      final Sort sort;
      if (random.nextBoolean()) {
        sort = new Sort(sf);
      } else {
        sort = new Sort(sf, SortField.FIELD_DOC);
      }
      final int hitCount = _TestUtil.nextInt(random, 1, r.maxDoc() + 20);
      final RandomFilter f = new RandomFilter(random, random.nextFloat(), docValues);
      int queryType = random.nextInt(3);
      if (queryType == 0) {
        // force out of order
        BooleanQuery bq = new BooleanQuery();
        // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
        // which delegates to BS if there are no mandatory clauses.
        bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
        // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
        // the clause instead of BQ.
        bq.setMinimumNumberShouldMatch(1);
        hits = s.search(bq, f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
      } else if (queryType == 1) {
        hits = s.search(new ConstantScoreQuery(f),
                        null, hitCount, sort, random.nextBoolean(), random.nextBoolean());
      } else {
        hits = s.search(new MatchAllDocsQuery(),
                        f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
      }

      if (VERBOSE) {
        System.out.println("\nTEST: iter=" + iter + " " + hits.totalHits + " hits; topN=" + hitCount + "; reverse=" + reverse);
      }

      // Compute expected results:
      Collections.sort(f.matchValues);
      if (reverse) {
        Collections.reverse(f.matchValues);
      }
      final List<BytesRef> expected = f.matchValues;
      if (VERBOSE) {
        System.out.println("  expected:");
        for(int idx=0;idx<expected.size();idx++) {
          System.out.println("    " + idx + ": " + expected.get(idx).utf8ToString());
          if (idx == hitCount-1) {
            break;
          }
        }
      }

      if (VERBOSE) {
        System.out.println("  actual:");
        for(int hitIDX=0;hitIDX<hits.scoreDocs.length;hitIDX++) {
          final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
          System.out.println("    " + hitIDX + ": " + ((BytesRef) fd.fields[0]).utf8ToString());
        }
      }
      for(int hitIDX=0;hitIDX<hits.scoreDocs.length;hitIDX++) {
        final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
        assertEquals(expected.get(hitIDX), (BytesRef) fd.fields[0]);
      }
    }

    r.close();
    dir.close();
  }

  private static class RandomFilter extends Filter {
    private final Random random;
    private float density;
    private final List<BytesRef> docValues;
    public final List<BytesRef> matchValues = Collections.synchronizedList(new ArrayList<BytesRef>());

    // density should be 0.0 ... 1.0
    public RandomFilter(Random random, float density, List<BytesRef> docValues) {
      this.random = random;
      this.density = density;
      this.docValues = docValues;
    }

    @Override
    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
      final int maxDoc = context.reader().maxDoc();
      final FieldCache.Ints idSource = FieldCache.DEFAULT.getInts(context.reader(), "id", false);
      assertNotNull(idSource);
      final FixedBitSet bits = new FixedBitSet(maxDoc);
      for(int docID=0;docID<maxDoc;docID++) {
        if (random.nextFloat() <= density && (acceptDocs == null || acceptDocs.get(docID))) {
          bits.set(docID);
          //System.out.println("  acc id=" + idSource.getInt(docID) + " docID=" + docID);
          matchValues.add(docValues.get(idSource.get(docID)));
        }
      }

      return bits;
    }
  }
}

@@ -0,0 +1,267 @@
package org.apache.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.FieldValueHitQueue.Entry;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

public class TestTopFieldCollector extends LuceneTestCase {
  private IndexSearcher is;
  private IndexReader ir;
  private Directory dir;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      iw.addDocument(doc);
    }
    ir = iw.getReader();
    iw.close();
    is = newSearcher(ir);
  }

  @Override
  public void tearDown() throws Exception {
    ir.close();
    dir.close();
    super.tearDown();
  }

  public void testSortWithoutFillFields() throws Exception {

    // There was previously a bug in TopFieldCollector when fillFields was set
    // to false - the same doc and score was set in ScoreDoc[] array. This test
    // asserts that if fillFields is false, the documents are set properly. It
    // does not use Searcher's default search methods (with Sort) since all set
    // fillFields to true.
    Sort[] sort = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() };
    for(int i = 0; i < sort.length; i++) {
      Query q = new MatchAllDocsQuery();
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, false,
          false, false, true);

      is.search(q, tdc);

      ScoreDoc[] sd = tdc.topDocs().scoreDocs;
      for(int j = 1; j < sd.length; j++) {
        assertTrue(sd[j].doc != sd[j - 1].doc);
      }

    }
  }

  public void testSortWithoutScoreTracking() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
    for(int i = 0; i < sort.length; i++) {
      Query q = new MatchAllDocsQuery();
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, false,
          false, true);

      is.search(q, tdc);

      TopDocs td = tdc.topDocs();
      ScoreDoc[] sd = td.scoreDocs;
      for(int j = 0; j < sd.length; j++) {
        assertTrue(Float.isNaN(sd[j].score));
      }
      assertTrue(Float.isNaN(td.getMaxScore()));
    }
  }

  public void testSortWithScoreNoMaxScoreTracking() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
    for(int i = 0; i < sort.length; i++) {
      Query q = new MatchAllDocsQuery();
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
          false, true);

      is.search(q, tdc);

      TopDocs td = tdc.topDocs();
      ScoreDoc[] sd = td.scoreDocs;
      for(int j = 0; j < sd.length; j++) {
        assertTrue(!Float.isNaN(sd[j].score));
      }
      assertTrue(Float.isNaN(td.getMaxScore()));
    }
  }

  // MultiComparatorScoringNoMaxScoreCollector
  public void testSortWithScoreNoMaxScoreTrackingMulti() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC, SortField.FIELD_SCORE) };
    for(int i = 0; i < sort.length; i++) {
      Query q = new MatchAllDocsQuery();
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
          false, true);

      is.search(q, tdc);

      TopDocs td = tdc.topDocs();
      ScoreDoc[] sd = td.scoreDocs;
      for(int j = 0; j < sd.length; j++) {
        assertTrue(!Float.isNaN(sd[j].score));
      }
      assertTrue(Float.isNaN(td.getMaxScore()));
    }
  }

  public void testSortWithScoreAndMaxScoreTracking() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
    for(int i = 0; i < sort.length; i++) {
      Query q = new MatchAllDocsQuery();
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
          true, true);

      is.search(q, tdc);

      TopDocs td = tdc.topDocs();
      ScoreDoc[] sd = td.scoreDocs;
      for(int j = 0; j < sd.length; j++) {
        assertTrue(!Float.isNaN(sd[j].score));
      }
      assertTrue(!Float.isNaN(td.getMaxScore()));
    }
  }

  public void testOutOfOrderDocsScoringSort() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
    boolean[][] tfcOptions = new boolean[][] {
        new boolean[] { false, false, false },
        new boolean[] { false, false, true },
        new boolean[] { false, true, false },
        new boolean[] { false, true, true },
        new boolean[] { true, false, false },
        new boolean[] { true, false, true },
        new boolean[] { true, true, false },
        new boolean[] { true, true, true },
    };
    String[] actualTFCClasses = new String[] {
        "OutOfOrderOneComparatorNonScoringCollector",
        "OutOfOrderOneComparatorScoringMaxScoreCollector",
        "OutOfOrderOneComparatorScoringNoMaxScoreCollector",
        "OutOfOrderOneComparatorScoringMaxScoreCollector",
        "OutOfOrderOneComparatorNonScoringCollector",
        "OutOfOrderOneComparatorScoringMaxScoreCollector",
        "OutOfOrderOneComparatorScoringNoMaxScoreCollector",
        "OutOfOrderOneComparatorScoringMaxScoreCollector"
    };

    BooleanQuery bq = new BooleanQuery();
    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
    // which delegates to BS if there are no mandatory clauses.
    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
    // the clause instead of BQ.
    bq.setMinimumNumberShouldMatch(1);
    for(int i = 0; i < sort.length; i++) {
      for(int j = 0; j < tfcOptions.length; j++) {
        TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10,
            tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);

        assertTrue(tdc.getClass().getName().endsWith("$"+actualTFCClasses[j]));

        is.search(bq, tdc);

        TopDocs td = tdc.topDocs();
        ScoreDoc[] sd = td.scoreDocs;
        assertEquals(10, sd.length);
      }
    }
  }

  // OutOfOrderMulti*Collector
  public void testOutOfOrderDocsScoringSortMulti() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC, SortField.FIELD_SCORE) };
    boolean[][] tfcOptions = new boolean[][] {
        new boolean[] { false, false, false },
        new boolean[] { false, false, true },
        new boolean[] { false, true, false },
        new boolean[] { false, true, true },
        new boolean[] { true, false, false },
        new boolean[] { true, false, true },
        new boolean[] { true, true, false },
        new boolean[] { true, true, true },
    };
    String[] actualTFCClasses = new String[] {
        "OutOfOrderMultiComparatorNonScoringCollector",
        "OutOfOrderMultiComparatorScoringMaxScoreCollector",
        "OutOfOrderMultiComparatorScoringNoMaxScoreCollector",
        "OutOfOrderMultiComparatorScoringMaxScoreCollector",
        "OutOfOrderMultiComparatorNonScoringCollector",
        "OutOfOrderMultiComparatorScoringMaxScoreCollector",
        "OutOfOrderMultiComparatorScoringNoMaxScoreCollector",
        "OutOfOrderMultiComparatorScoringMaxScoreCollector"
    };

    BooleanQuery bq = new BooleanQuery();
    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
    // which delegates to BS if there are no mandatory clauses.
    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
    // the clause instead of BQ.
    bq.setMinimumNumberShouldMatch(1);
    for(int i = 0; i < sort.length; i++) {
      for(int j = 0; j < tfcOptions.length; j++) {
        TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10,
            tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);

        assertTrue(tdc.getClass().getName().endsWith("$"+actualTFCClasses[j]));

        is.search(bq, tdc);

        TopDocs td = tdc.topDocs();
        ScoreDoc[] sd = td.scoreDocs;
        assertEquals(10, sd.length);
      }
    }
  }

  public void testSortWithScoreAndMaxScoreTrackingNoResults() throws Exception {

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
    for(int i = 0; i < sort.length; i++) {
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
      TopDocs td = tdc.topDocs();
      assertEquals(0, td.totalHits);
      assertTrue(Float.isNaN(td.getMaxScore()));
    }
  }
}

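The six arguments to TopFieldCollector.create are what the tests above toggle. A minimal sketch against the 4.x API (the class name and the searcher parameter are illustrative, not part of this patch):

import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;

class TopFieldCollectorSketch {
  // Collect the top 10 docs in index order without score tracking; with
  // trackDocScores and trackMaxScore false, ScoreDoc.score and
  // TopDocs.getMaxScore() stay NaN, which is what the tests above assert.
  static TopDocs top10InOrder(IndexSearcher searcher) throws IOException {
    TopFieldCollector collector = TopFieldCollector.create(
        new Sort(SortField.FIELD_DOC),
        10,     // numHits
        true,   // fillFields: populate FieldDoc.fields for each hit
        false,  // trackDocScores
        false,  // trackMaxScore
        true);  // docsScoredInOrder
    searcher.search(new MatchAllDocsQuery(), collector);
    return collector.topDocs();
  }
}
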
@@ -0,0 +1,49 @@
package org.apache.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

public class TestTotalHitCountCollector extends LuceneTestCase {

  public void testBasics() throws Exception {
    Directory indexStore = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), indexStore);
    for(int i=0; i<5; i++) {
      Document doc = new Document();
      doc.add(new StringField("string", "a"+i, Field.Store.NO));
      doc.add(new StringField("string", "b"+i, Field.Store.NO));
      writer.addDocument(doc);
    }
    IndexReader reader = writer.getReader();
    writer.close();

    IndexSearcher searcher = newSearcher(reader);
    TotalHitCountCollector c = new TotalHitCountCollector();
    searcher.search(new MatchAllDocsQuery(), null, c);
    assertEquals(5, c.getTotalHits());
    reader.close();
    indexStore.close();
  }
}

@@ -145,7 +145,9 @@ public class TestDocValuesFieldSources extends LuceneTestCase {

  public void test() throws IOException {
    for (DocValuesType type : DocValuesType.values()) {
      test(type);
      if (type != DocValuesType.SORTED_SET) {
        test(type);
      }
    }
  }

@@ -78,12 +78,14 @@ import java.util.concurrent.ConcurrentHashMap;
public abstract class PrefixTreeStrategy extends SpatialStrategy {
  protected final SpatialPrefixTree grid;
  private final Map<String, PointPrefixTreeFieldCacheProvider> provider = new ConcurrentHashMap<String, PointPrefixTreeFieldCacheProvider>();
  protected final boolean simplifyIndexedCells;
  protected int defaultFieldValuesArrayLen = 2;
  protected double distErrPct = SpatialArgs.DEFAULT_DISTERRPCT;// [ 0 TO 0.5 ]

  public PrefixTreeStrategy(SpatialPrefixTree grid, String fieldName) {
  public PrefixTreeStrategy(SpatialPrefixTree grid, String fieldName, boolean simplifyIndexedCells) {
    super(grid.getSpatialContext(), fieldName);
    this.grid = grid;
    this.simplifyIndexedCells = simplifyIndexedCells;
  }

  /**

@@ -123,7 +125,7 @@ public abstract class PrefixTreeStrategy extends SpatialStrategy {

  public Field[] createIndexableFields(Shape shape, double distErr) {
    int detailLevel = grid.getLevelForDistance(distErr);
    List<Node> cells = grid.getNodes(shape, detailLevel, true);//true=intermediates cells
    List<Node> cells = grid.getNodes(shape, detailLevel, true, simplifyIndexedCells);//intermediates cells

    //TODO is CellTokenStream supposed to be re-used somehow? see Uwe's comments:
    // http://code.google.com/p/lucene-spatial-playground/issues/detail?id=4

@@ -38,7 +38,8 @@ public class RecursivePrefixTreeStrategy extends PrefixTreeStrategy {
  private int prefixGridScanLevel;

  public RecursivePrefixTreeStrategy(SpatialPrefixTree grid, String fieldName) {
    super(grid, fieldName);
    super(grid, fieldName,
        true);//simplify indexed cells
    prefixGridScanLevel = grid.getMaxLevels() - 4;//TODO this default constant is dependent on the prefix grid size
  }

@@ -32,7 +32,7 @@ import java.util.List;
/**
 * A basic implementation of {@link PrefixTreeStrategy} using a large {@link
 * TermsFilter} of all the nodes from {@link SpatialPrefixTree#getNodes(com.spatial4j.core.shape.Shape,
 * int, boolean)}. It only supports the search of indexed Point shapes.
 * int, boolean, boolean)}. It only supports the search of indexed Point shapes.
 * <p/>
 * The precision of query shapes (distErrPct) is an important factor in using
 * this Strategy. If the precision is too precise then it will result in many

@@ -43,7 +43,8 @@ import java.util.List;
public class TermQueryPrefixTreeStrategy extends PrefixTreeStrategy {

  public TermQueryPrefixTreeStrategy(SpatialPrefixTree grid, String fieldName) {
    super(grid, fieldName);
    super(grid, fieldName,
        false);//do not simplify indexed cells
  }

  @Override

@@ -54,7 +55,9 @@ public class TermQueryPrefixTreeStrategy extends PrefixTreeStrategy {

    Shape shape = args.getShape();
    int detailLevel = grid.getLevelForDistance(args.resolveDistErr(ctx, distErrPct));
    List<Node> cells = grid.getNodes(shape, detailLevel, false);
    List<Node> cells = grid.getNodes(shape, detailLevel,
        false,//no parents
        true);//simplify
    BytesRef[] terms = new BytesRef[cells.size()];
    int i = 0;
    for (Node cell : cells) {

@@ -93,12 +93,6 @@ public class GeohashPrefixTree extends SpatialPrefixTree {
    return new GhCell(bytes, offset, len);
  }

  @Override
  public List<Node> getNodes(Shape shape, int detailLevel, boolean inclParents) {
    return shape instanceof Point ? super.getNodesAltPoint((Point) shape, detailLevel, inclParents) :
        super.getNodes(shape, detailLevel, inclParents);
  }

  class GhCell extends Node {
    GhCell(String token) {
      super(token);

@@ -156,14 +156,6 @@ public class QuadPrefixTree extends SpatialPrefixTree {
    return new QuadCell(bytes, offset, len);
  }

  @Override //for performance
  public List<Node> getNodes(Shape shape, int detailLevel, boolean inclParents) {
    if (shape instanceof Point)
      return super.getNodesAltPoint((Point) shape, detailLevel, inclParents);
    else
      return super.getNodes(shape, detailLevel, inclParents);
  }

  private void build(
      double x,
      double y,

@@ -116,88 +116,86 @@ public abstract class SpatialPrefixTree {
  }

  /**
   * Gets the intersecting & including cells for the specified shape, without exceeding detail level.
   * The result is a set of cells (no dups), sorted. Unmodifiable.
   * Gets the intersecting cells for the specified shape, without exceeding
   * detail level. If a cell is within the query shape then it's marked as a
   * leaf and none of its children are added.
   * <p/>
   * This implementation checks if shape is a Point and if so uses an implementation that
   * recursively calls {@link Node#getSubCell(com.spatial4j.core.shape.Point)}. Cell subclasses
   * ideally implement that method with a quick implementation, otherwise, subclasses should
   * override this method to invoke {@link #getNodesAltPoint(com.spatial4j.core.shape.Point, int, boolean)}.
   * TODO consider another approach returning an iterator -- won't build up all cells in memory.
   * This implementation checks if shape is a Point and if so returns {@link
   * #getNodes(com.spatial4j.core.shape.Point, int, boolean)}.
   *
   * @param shape       the shape; non-null
   * @param detailLevel the maximum detail level to get cells for
   * @param inclParents if true then all parent cells of leaves are returned
   *                    too. The top world cell is never returned.
   * @param simplify    for non-point shapes, this will simplify/aggregate sets of
   *                    complete leaves in a cell to its parent, resulting in
   *                    ~20-25% fewer cells.
   * @return a set of cells (no dups), sorted, immutable, non-null
   */
  public List<Node> getNodes(Shape shape, int detailLevel, boolean inclParents) {
  public List<Node> getNodes(Shape shape, int detailLevel, boolean inclParents,
                             boolean simplify) {
    //TODO consider an on-demand iterator -- it won't build up all cells in memory.
    if (detailLevel > maxLevels) {
      throw new IllegalArgumentException("detailLevel > maxLevels");
    }

    List<Node> cells;
    if (shape instanceof Point) {
      //optimized point algorithm
      final int initialCapacity = inclParents ? 1 + detailLevel : 1;
      cells = new ArrayList<Node>(initialCapacity);
      recursiveGetNodes(getWorldNode(), (Point) shape, detailLevel, true, cells);
      assert cells.size() == initialCapacity;
    } else {
      cells = new ArrayList<Node>(inclParents ? 1024 : 512);
      recursiveGetNodes(getWorldNode(), shape, detailLevel, inclParents, cells);
    }
    if (inclParents) {
      Node c = cells.remove(0);//remove getWorldNode()
      assert c.getLevel() == 0;
      return getNodes((Point) shape, detailLevel, inclParents);
    }
    List<Node> cells = new ArrayList<Node>(inclParents ? 4096 : 2048);
    recursiveGetNodes(getWorldNode(), shape, detailLevel, inclParents, simplify, cells);
    return cells;
  }

  private void recursiveGetNodes(Node node, Shape shape, int detailLevel, boolean inclParents,
                                 Collection<Node> result) {
    if (node.isLeaf()) {//cell is within shape
  /**
   * Returns true if node was added as a leaf. If it wasn't it recursively
   * descends.
   */
  private boolean recursiveGetNodes(Node node, Shape shape, int detailLevel,
                                    boolean inclParents, boolean simplify,
                                    List<Node> result) {
    if (node.getLevel() == detailLevel) {
      node.setLeaf();//FYI might already be a leaf
    }
    if (node.isLeaf()) {
      result.add(node);
      return;
      return true;
    }
    final Collection<Node> subCells = node.getSubCells(shape);
    if (node.getLevel() == detailLevel - 1) {
      if (subCells.size() < node.getSubCellsSize() || node.getLevel() == 0) {
        if (inclParents)
          result.add(node);
        for (Node subCell : subCells) {
          subCell.setLeaf();
        }
        result.addAll(subCells);
      } else {//a bottom level (i.e. detail level) optimization where all boxes intersect, so use parent cell.
        node.setLeaf();//the cell may not be strictly within but its close
        result.add(node);
      }
    } else {
      if (inclParents) {
        result.add(node);
      }
      for (Node subCell : subCells) {
        recursiveGetNodes(subCell, shape, detailLevel, inclParents, result);//tail call
      }
    }
  }
    if (inclParents && node.getLevel() != 0)
      result.add(node);

  private void recursiveGetNodes(Node node, Point point, int detailLevel, boolean inclParents,
                                 Collection<Node> result) {
    if (inclParents) {
      result.add(node);
    Collection<Node> subCells = node.getSubCells(shape);
    int leaves = 0;
    for (Node subCell : subCells) {
      if (recursiveGetNodes(subCell, shape, detailLevel, inclParents, simplify, result))
        leaves++;
    }
    final Node pCell = node.getSubCell(point);
    if (node.getLevel() == detailLevel - 1) {
      pCell.setLeaf();
      result.add(pCell);
    } else {
      recursiveGetNodes(pCell, point, detailLevel, inclParents, result);//tail call
    //can we simplify?
    if (simplify && leaves == node.getSubCellsSize() && node.getLevel() != 0) {
      //Optimization: substitute the parent as a leaf instead of adding all
      // children as leaves

      //remove the leaves
      do {
        result.remove(result.size() - 1);//remove last
      } while (--leaves > 0);
      //add node as the leaf
      node.setLeaf();
      if (!inclParents) // otherwise it was already added up above
        result.add(node);
      return true;
    }
    return false;
  }

  /**
   * Subclasses might override {@link #getNodes(com.spatial4j.core.shape.Shape, int, boolean)}
   * and check if the argument is a shape and if so, delegate
   * to this implementation, which calls {@link #getNode(com.spatial4j.core.shape.Point, int)} and
   * then calls {@link #getNode(String)} repeatedly if inclParents is true.
   * A Point-optimized implementation of
   * {@link #getNodes(com.spatial4j.core.shape.Shape, int, boolean, boolean)}. That
   * method in fact calls this for points.
   * <p/>
   * This implementation depends on {@link #getNode(String)} being fast, as it's
   * called repeatedly when inclParents is true.
   */
  protected final List<Node> getNodesAltPoint(Point p, int detailLevel, boolean inclParents) {
  public List<Node> getNodes(Point p, int detailLevel, boolean inclParents) {
    Node cell = getNode(p, detailLevel);
    if (!inclParents) {
      return Collections.singletonList(cell);

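For orientation, the reworked getNodes contract can be exercised directly. A sketch under the APIs shown in this patch (grid depth and the choice of GeohashPrefixTree are arbitrary for the example):

import java.util.List;

import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.Node;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;

class GetNodesSketch {
  // simplify=true rolls complete sets of leaf sub-cells up into their parent,
  // giving fewer but coarser cells; index-time callers that must not miss
  // matches, like TermQueryPrefixTreeStrategy above, now pass false instead.
  static List<Node> coveringCells(Shape shape) {
    SpatialPrefixTree grid = new GeohashPrefixTree(SpatialContext.GEO, 4);
    return grid.getNodes(shape,
        grid.getMaxLevels(), // detailLevel; must not exceed maxLevels
        true,                // inclParents: also return ancestors of leaves
        true);               // simplify
  }
}
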
@@ -18,9 +18,19 @@ package org.apache.lucene.spatial.prefix;
 */

import com.spatial4j.core.context.SpatialContextFactory;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.spatial.StrategyTestCase;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.junit.Test;

@@ -70,4 +80,39 @@ public class JtsPolygonTest extends StrategyTestCase {
    return args;
  }

  /**
   * A PrefixTree pruning optimization gone bad.
   * See <a href="https://issues.apache.org/jira/browse/LUCENE-4770">LUCENE-4770</a>.
   */
  @Test
  public void testBadPrefixTreePrune() throws Exception {

    Shape area = ctx.readShape("POLYGON((-122.83 48.57, -122.77 48.56, -122.79 48.53, -122.83 48.57))");

    SpatialPrefixTree trie = new QuadPrefixTree(ctx, 12);
    TermQueryPrefixTreeStrategy strategy = new TermQueryPrefixTreeStrategy(trie, "geo");
    Document doc = new Document();
    doc.add(new TextField("id", "1", Store.YES));

    Field[] fields = strategy.createIndexableFields(area, 0.025);
    for (Field field : fields) {
      doc.add(field);
    }
    addDocument(doc);

    Point upperleft = ctx.makePoint(-122.88, 48.54);
    Point lowerright = ctx.makePoint(-122.82, 48.62);

    Query query = strategy.makeQuery(new SpatialArgs(SpatialOperation.Intersects, ctx.makeRectangle(upperleft, lowerright)));
    commit();

    TopDocs search = indexSearcher.search(query, 10);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    for (ScoreDoc scoreDoc : scoreDocs) {
      System.out.println(indexSearcher.doc(scoreDoc.doc));
    }

    assertEquals(1, search.totalHits);
  }

}

@@ -113,7 +113,7 @@ public class SpatialOpRecursivePrefixTreeTest extends StrategyTestCase {
    double distErrPct = ((PrefixTreeStrategy) strategy).getDistErrPct();
    double distErr = SpatialArgs.calcDistanceFromErrPct(snapMe, distErrPct, ctx);
    int detailLevel = grid.getLevelForDistance(distErr);
    List<Node> cells = grid.getNodes(snapMe, detailLevel, false);
    List<Node> cells = grid.getNodes(snapMe, detailLevel, false, true);

    //calc bounding box of cells.
    double minX = Double.POSITIVE_INFINITY, maxX = Double.NEGATIVE_INFINITY;

@@ -18,14 +18,24 @@
package org.apache.lucene.spatial.prefix.tree;

import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.spatial.SpatialTestCase;
import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.junit.Before;
import org.junit.Test;

public class SpatialPrefixTreeTest extends LuceneTestCase {
public class SpatialPrefixTreeTest extends SpatialTestCase {

  //TODO plug in others and test them
  private SpatialContext ctx;

@@ -36,11 +46,12 @@ public class SpatialPrefixTreeTest extends LuceneTestCase {
  public void setUp() throws Exception {
    super.setUp();
    ctx = SpatialContext.GEO;
    trie = new GeohashPrefixTree(ctx,4);
  }

  @Test
  public void testNodeTraverse() {
    trie = new GeohashPrefixTree(ctx,4);

    Node prevN = null;
    Node n = trie.getWorldNode();
    assertEquals(0,n.getLevel());

@@ -57,4 +68,40 @@ public class SpatialPrefixTreeTest extends LuceneTestCase {
      assertTrue(prevNShape.getHeight() > sbox.getHeight());
    }
  }
}
  /**
   * A PrefixTree pruning optimization gone bad.
   * See <a href="https://issues.apache.org/jira/browse/LUCENE-4770">LUCENE-4770</a>.
   */
  @Test
  public void testBadPrefixTreePrune() throws Exception {

    trie = new QuadPrefixTree(ctx, 12);
    TermQueryPrefixTreeStrategy strategy = new TermQueryPrefixTreeStrategy(trie, "geo");
    Document doc = new Document();
    doc.add(new TextField("id", "1", Store.YES));

    Shape area = ctx.makeRectangle(-122.82, -122.78, 48.54, 48.56);

    Field[] fields = strategy.createIndexableFields(area, 0.025);
    for (Field field : fields) {
      doc.add(field);
    }
    addDocument(doc);

    Point upperleft = ctx.makePoint(-122.88, 48.54);
    Point lowerright = ctx.makePoint(-122.82, 48.62);

    Query query = strategy.makeQuery(new SpatialArgs(SpatialOperation.Intersects, ctx.makeRectangle(upperleft, lowerright)));

    commit();

    TopDocs search = indexSearcher.search(query, 10);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    for (ScoreDoc scoreDoc : scoreDocs) {
      System.out.println(indexSearcher.doc(scoreDoc.doc));
    }

    assertEquals(1, search.totalHits);
  }

}

@@ -80,7 +80,7 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
  static class AssertingTermVectorsWriter extends TermVectorsWriter {
    private final TermVectorsWriter in;
    private Status docStatus, fieldStatus, termStatus;
    private int fieldCount, termCount, positionCount;
    private int docCount, fieldCount, termCount, positionCount;
    boolean hasPositions;

    AssertingTermVectorsWriter(TermVectorsWriter in) {

@@ -98,6 +98,7 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
      in.startDocument(numVectorFields);
      docStatus = Status.STARTED;
      fieldCount = numVectorFields;
      docCount++;
    }

    @Override

@@ -167,6 +168,7 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {

    @Override
    public void finish(FieldInfos fis, int numDocs) throws IOException {
      assert docCount == numDocs;
      assert docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED);
      assert fieldStatus != Status.STARTED;
      assert termStatus != Status.STARTED;

@@ -181,9 +183,6 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
    @Override
    public void close() throws IOException {
      in.close();
      assert docStatus != Status.STARTED;
      assert fieldStatus != Status.STARTED;
      assert termStatus != Status.STARTED;
    }

  }

@@ -67,4 +67,7 @@ grant {
  permission java.security.SecurityPermission "getProperty.networkaddress.cache.ttl";
  permission java.security.SecurityPermission "getProperty.networkaddress.cache.negative.ttl";

  // SSL related properties for Solr tests
  permission java.security.SecurityPermission "getProperty.ssl.*";

};

@@ -41,6 +41,8 @@ Detailed Change List
Other Changes
----------------------

* SOLR-4394: Tests and example configs demonstrating SSL with both server
  and client certs (hossman)

==================  4.2.0 ==================

@@ -71,6 +73,10 @@ New Features
* SOLR-4370: Allow configuring commitWithin to do hard commits.
  (Mark Miller, Senthuran Sivananthan)

* SOLR-4451: SolrJ, and SolrCloud internals, now use SystemDefaultHttpClient
  under the covers -- allowing many HTTP connection related properties to be
  controlled via 'standard' java system properties. (hossman)

Bug Fixes
----------------------

@@ -117,6 +123,8 @@ Bug Fixes
* SOLR-4426: NRTCachingDirectoryFactory does not initialize maxCachedMB and maxMergeSizeMB
  if <directoryFactory> is not present in solrconfig.xml (Jack Krupansky via shalin)

* SOLR-4463: Fix SolrCoreState reference counting. (Mark Miller)

Optimizations
----------------------

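As a hedged illustration of the SOLR-4451 entry: the property names below are the standard java.net ones, the exact set consulted depends on the HttpClient version, and the class here is invented for the example.

public class SolrJSystemPropsSketch {
  public static void main(String[] args) {
    // Standard JVM networking properties that a SystemDefaultHttpClient-based
    // SolrJ client can pick up:
    System.setProperty("http.keepAlive", "true");      // reuse persistent connections
    System.setProperty("http.maxConnections", "128");  // per-destination cap
    // JVM-wide SSL stores then apply to https:// Solr URLs as well:
    System.setProperty("javax.net.ssl.trustStore", "/path/to/truststore");
    // ...construct SolrJ clients (e.g. HttpSolrServer) afterwards as usual.
  }
}
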
@@ -23,6 +23,9 @@ import java.util.LinkedList;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;

import java.net.URL;
import java.net.MalformedURLException;

import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.FilterChain;

@@ -38,8 +41,11 @@ import org.apache.solr.servlet.SolrDispatchFilter;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.bio.SocketConnector;
import org.eclipse.jetty.server.handler.GzipHandler;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.server.ssl.SslConnector;
import org.eclipse.jetty.server.ssl.SslSocketConnector;
import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
import org.eclipse.jetty.server.handler.GzipHandler;
import org.eclipse.jetty.server.session.HashSessionIdManager;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;

@@ -47,6 +53,7 @@ import org.eclipse.jetty.util.component.LifeCycle;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.eclipse.jetty.util.ssl.SslContextFactory;

/**
 * Run solr using jetty
@@ -155,22 +162,59 @@ public class JettySolrRunner {
    System.setProperty("solr.solr.home", solrHome);
    if (System.getProperty("jetty.testMode") != null) {
      final String connectorName = System.getProperty("tests.jettyConnector", "SelectChannel");

      // if this property is true, then jetty will be configured to use SSL
      // leveraging the same system properties as java to specify
      // the keystore/truststore if they are set
      //
      // This means we will use the same truststore, keystore (and keys) for
      // the server as well as any client actions taken by this JVM in
      // talking to that server, but for the purposes of testing that should
      // be good enough
      final boolean useSsl = Boolean.getBoolean("tests.jettySsl");
      final SslContextFactory sslcontext = new SslContextFactory(false);

      if (useSsl) {
        if (null != System.getProperty("javax.net.ssl.keyStore")) {
          sslcontext.setKeyStorePath
            (System.getProperty("javax.net.ssl.keyStore"));
        }
        if (null != System.getProperty("javax.net.ssl.keyStorePassword")) {
          sslcontext.setKeyStorePassword
            (System.getProperty("javax.net.ssl.keyStorePassword"));
        }
        if (null != System.getProperty("javax.net.ssl.trustStore")) {
          sslcontext.setTrustStore
            (System.getProperty("javax.net.ssl.trustStore"));
        }
        if (null != System.getProperty("javax.net.ssl.trustStorePassword")) {
          sslcontext.setTrustStorePassword
            (System.getProperty("javax.net.ssl.trustStorePassword"));
        }
        sslcontext.setNeedClientAuth(Boolean.getBoolean("tests.jettySsl.clientAuth"));
      }

      final Connector connector;
      final QueuedThreadPool threadPool;
      if ("SelectChannel".equals(connectorName)) {
        final SelectChannelConnector c = new SelectChannelConnector();
        final SelectChannelConnector c = useSsl
          ? new SslSelectChannelConnector(sslcontext)
          : new SelectChannelConnector();
        c.setReuseAddress(true);
        c.setLowResourcesMaxIdleTime(1500);
        connector = c;
        threadPool = (QueuedThreadPool) c.getThreadPool();
      } else if ("Socket".equals(connectorName)) {
        final SocketConnector c = new SocketConnector();
        final SocketConnector c = useSsl
          ? new SslSocketConnector(sslcontext)
          : new SocketConnector();
        c.setReuseAddress(true);
        connector = c;
        threadPool = (QueuedThreadPool) c.getThreadPool();
      } else {
        throw new IllegalArgumentException("Illegal value for system property 'tests.jettyConnector': " + connectorName);
      }

      connector.setPort(port);
      connector.setHost("127.0.0.1");
      if (threadPool != null) {
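
  For orientation, a hedged sketch of driving this test-mode path from a
  JVM; the solr home, context path, and port are illustrative, and the
  keystore is the example/etc/solrtest.keystore added by this patch:

    import org.apache.solr.client.solrj.embedded.JettySolrRunner;

    public class SslJettySketch {
      public static void main(String[] args) throws Exception {
        // Opt in to the test-mode SSL path shown in the hunk above.
        System.setProperty("jetty.testMode", "true");
        System.setProperty("tests.jettySsl", "true");
        System.setProperty("javax.net.ssl.keyStore", "example/etc/solrtest.keystore");
        System.setProperty("javax.net.ssl.keyStorePassword", "secret");
        System.setProperty("javax.net.ssl.trustStore", "example/etc/solrtest.keystore");
        System.setProperty("javax.net.ssl.trustStorePassword", "secret");

        JettySolrRunner jetty = new JettySolrRunner("example/solr", "/solr", 8984);
        jetty.start();                          // starts an SslSelectChannelConnector
        System.out.println("listening at " + jetty.getBaseUrl());
        jetty.stop();
      }
    }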
@@ -346,6 +390,31 @@ public class JettySolrRunner {
    return lastPort;
  }

  /**
   * Returns a base URL consisting of the protocol, host, and port for a
   * Connector in use by the Jetty Server contained in this runner.
   */
  public URL getBaseUrl() {
    String protocol = null;
    try {
      Connector[] conns = server.getConnectors();
      if (0 == conns.length) {
        throw new IllegalStateException("Jetty Server has no Connectors");
      }
      Connector c = conns[0];
      if (c.getLocalPort() < 0) {
        throw new IllegalStateException("Jetty Connector is not open: " +
                                        c.getLocalPort());
      }
      protocol = (c instanceof SslConnector) ? "https" : "http";
      return new URL(protocol, c.getHost(), c.getLocalPort(), context);

    } catch (MalformedURLException e) {
      throw new IllegalStateException
        ("Java could not make sense of protocol: " + protocol, e);
    }
  }

  public DebugFilter getDebugFilter() {
    return (DebugFilter)debugFilter.getFilter();
  }
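
  A hedged usage note (the variable names are illustrative): tests can now
  derive the full root URL, including the http/https scheme, instead of
  assembling it by hand, e.g.

    URL base = jetty.getBaseUrl();                 // https iff the first Connector is an SslConnector
    HttpSolrServer server = new HttpSolrServer(base.toString());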
@@ -185,7 +185,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
    } catch (SolrException ex) {
      SolrException.log(log, "Collection " + operation + " of " + operation
          + " failed");
      results.add("Operation " + operation + " cause exception:", ex);
      results.add("Operation " + operation + " caused exception:", ex);
    } finally {
      return new OverseerSolrResponse(results);
    }
@@ -44,7 +44,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;

@@ -150,7 +149,6 @@ public final class SolrCore implements SolrInfoMBean {
  private final String dataDir;
  private final UpdateHandler updateHandler;
  private final SolrCoreState solrCoreState;
  private int solrCoreStateRefCnt = 1;

  private final long startTime;
  private final RequestHandlers reqHandlers;
@@ -396,7 +394,7 @@ public final class SolrCore implements SolrInfoMBean {
    IndexSchema schema = new IndexSchema(config,
        getSchema().getResourceName(), null);

    increfSolrCoreState();
    solrCoreState.increfSolrCoreState();

    SolrCore core = new SolrCore(getName(), getDataDir(), config,
        schema, coreDescriptor, updateHandler, prev);
@@ -867,32 +865,6 @@ public final class SolrCore implements SolrInfoMBean {
  public SolrCoreState getSolrCoreState() {
    return solrCoreState;
  }

  private void increfSolrCoreState() {
    synchronized (solrCoreState) {
      if (solrCoreStateRefCnt == 0) {
        throw new IllegalStateException("IndexWriter has been closed");
      }
      solrCoreStateRefCnt++;
    }
  }

  private void decrefSolrCoreState(IndexWriterCloser closer) {
    synchronized (solrCoreState) {

      solrCoreStateRefCnt--;
      if (solrCoreStateRefCnt == 0) {

        try {
          log.info("Closing SolrCoreState");
          solrCoreState.close(closer);
        } catch (Throwable t) {
          log.error("Error closing SolrCoreState", t);
        }

      }
    }
  }

  /**
   * @return an update processor registered to the given name. Throws an exception if this chain is undefined
@@ -976,10 +948,12 @@ public final class SolrCore implements SolrInfoMBean {
    }

    try {
      if (updateHandler instanceof IndexWriterCloser) {
        decrefSolrCoreState((IndexWriterCloser) updateHandler);
      } else {
        decrefSolrCoreState(null);
      if (solrCoreState != null) {
        if (updateHandler instanceof IndexWriterCloser) {
          solrCoreState.decrefSolrCoreState((IndexWriterCloser) updateHandler);
        } else {
          solrCoreState.decrefSolrCoreState(null);
        }
      }
    } catch (Throwable e) {
      SolrException.log(log, e);
@@ -1005,15 +979,14 @@ public final class SolrCore implements SolrInfoMBean {
    }

    if (solrCoreState != null) { // bad startup case
      synchronized (solrCoreState) {
        if (solrCoreStateRefCnt == 0) {
          try {
            directoryFactory.close();
          } catch (Throwable t) {
            SolrException.log(log, t);
          }
      if (solrCoreState.getSolrCoreStateRefCnt() == 0) {
        try {
          directoryFactory.close();
        } catch (Throwable t) {
          SolrException.log(log, t);
        }
      }
    }
@@ -25,6 +25,8 @@ import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The state in this class can be easily shared between SolrCores across
@@ -32,12 +34,46 @@ import org.apache.solr.util.RefCounted;
 *
 */
public abstract class SolrCoreState {
  public static Logger log = LoggerFactory.getLogger(SolrCoreState.class);

  private final Object deleteLock = new Object();

  public Object getUpdateLock() {
    return deleteLock;
  }

  private int solrCoreStateRefCnt = 1;

  public synchronized int getSolrCoreStateRefCnt() {
    return solrCoreStateRefCnt;
  }

  public void increfSolrCoreState() {
    synchronized (this) {
      if (solrCoreStateRefCnt == 0) {
        throw new IllegalStateException("IndexWriter has been closed");
      }
      solrCoreStateRefCnt++;
    }
  }

  public void decrefSolrCoreState(IndexWriterCloser closer) {
    synchronized (this) {

      solrCoreStateRefCnt--;
      if (solrCoreStateRefCnt == 0) {

        try {
          log.info("Closing SolrCoreState");
          close(closer);
        } catch (Throwable t) {
          log.error("Error closing SolrCoreState", t);
        }

      }
    }
  }
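
  // A hedged usage sketch (the calls exist in this class; the pairing
  // shown is illustrative): every consumer takes a reference before
  // sharing the state and releases it when done, and the final decref
  // closes the underlying state.
  //
  //   SolrCoreState state = core.getSolrCoreState();
  //   state.increfSolrCoreState();        // e.g. when handing the state to a reloaded core
  //   ...
  //   state.decrefSolrCoreState(null);    // last reference triggers close(closer)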

  public abstract Lock getCommitLock();

  /**
@@ -22,6 +22,8 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

@@ -77,6 +79,33 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
      System.clearProperty("shareSchema");
    }
  }

  @Test
  public void testReload() throws Exception {
    final CoreContainer cc = h.getCoreContainer();

    class TestThread extends Thread {
      @Override
      public void run() {
        cc.reload("collection1");
      }
    }

    List<Thread> threads = new ArrayList<Thread>();
    int numThreads = 4;
    for (int i = 0; i < numThreads; i++) {
      threads.add(new TestThread());
    }

    for (Thread thread : threads) {
      thread.start();
    }

    for (Thread thread : threads) {
      thread.join();
    }

  }

  @Test
  public void testPersist() throws Exception {
@@ -0,0 +1,37 @@
#!/bin/bash -ex

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

############

# This script shows how the solrtest.keystore file used for solr tests
# and these example configs was generated.
#
# Running this script should only be necessary if the keystore file
# needs to be replaced, which shouldn't be required until sometime around
# the year 4751.
#
# NOTE: the "-ext" option used in the "keytool" command requires that you have
# the java7 version of keytool, but the generated key will work with any
# version of java

echo "### remove old keystore"
rm -f solrtest.keystore

echo "### create keystore and keys"
keytool -keystore solrtest.keystore -storepass "secret" -alias solrtest -keypass "secret" -genkey -keyalg RSA -dname "cn=localhost, ou=SolrTest, o=lucene.apache.org, c=US" -ext "san=ip:127.0.0.1" -validity 999999
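
  A hedged verification sketch (not part of the script; the file name,
  password, and alias follow the keytool command above) showing that the
  generated keystore loads as expected:

    import java.io.FileInputStream;
    import java.security.KeyStore;

    public class CheckSolrTestKeystore {
      public static void main(String[] args) throws Exception {
        KeyStore ks = KeyStore.getInstance("JKS");
        ks.load(new FileInputStream("solrtest.keystore"), "secret".toCharArray());
        // The script generated a single RSA key under this alias.
        System.out.println("solrtest alias present: " + ks.containsAlias("solrtest"));
      }
    }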
@@ -60,6 +60,43 @@
        </Arg>
    </Call>

    <!-- if the connector below is uncommented, then jetty will also accept SSL
         connections on port 8984, using a self signed certificate and can
         optionally require the client to authenticate with a certificate.
         (which can be the same as the server certificate)

         # Run solr example with SSL on port 8984
         java -jar start.jar
         #
         # Run post.jar so that it trusts the server cert...
         java -Djavax.net.ssl.trustStore=../etc/solrtest.keystore -Durl=https://localhost:8984/solr/update -jar post.jar *.xml

         # Run solr example with SSL requiring client certs on port 8984
         java -Djetty.ssl.clientAuth=true -jar start.jar
         #
         # Run post.jar so that it trusts the server cert,
         # and authenticates with a client cert
         java -Djavax.net.ssl.keyStorePassword=secret -Djavax.net.ssl.keyStore=../etc/solrtest.keystore -Djavax.net.ssl.trustStore=../etc/solrtest.keystore -Durl=https://localhost:8984/solr/update -jar post.jar *.xml

      -->
    <!--
    <Call name="addConnector">
      <Arg>
          <New class="org.eclipse.jetty.server.ssl.SslSelectChannelConnector">
            <Arg>
              <New class="org.eclipse.jetty.http.ssl.SslContextFactory">
                <Set name="keyStore"><SystemProperty name="jetty.home" default="."/>/etc/solrtest.keystore</Set>
                <Set name="keyStorePassword">secret</Set>
                <Set name="needClientAuth"><SystemProperty name="jetty.ssl.clientAuth" default="false"/></Set>
              </New>
            </Arg>
            <Set name="port"><SystemProperty name="jetty.ssl.port" default="8984"/></Set>
            <Set name="maxIdleTime">30000</Set>
          </New>
      </Arg>
    </Call>
    -->

    <!-- =========================================================== -->
    <!-- Set handler Collection Structure                            -->
    <!-- =========================================================== -->
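
  A hedged client-side sketch to pair with the commented connector above,
  mirroring the post.jar invocations in the comment (the URL and keystore
  path are illustrative): a SolrJ client only needs to trust the
  self-signed certificate.

    import org.apache.solr.client.solrj.impl.HttpSolrServer;

    public class SslClientSketch {
      public static void main(String[] args) throws Exception {
        System.setProperty("javax.net.ssl.trustStore", "example/etc/solrtest.keystore");
        HttpSolrServer solr = new HttpSolrServer("https://localhost:8984/solr");
        System.out.println(solr.ping().getStatus());   // 0 on success
      }
    }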
Binary file not shown.

@@ -1 +0,0 @@
16cf5a6b78951f50713d29bfae3230a611dc01f0

@@ -0,0 +1 @@
37ced84d839a02fb856255eca85f0a4be95aa634

@@ -1 +0,0 @@
31cc0a151d458c4b99476805ede9c8accafb734c

@@ -0,0 +1 @@
b76bee23cd3f3ee9b98bc7c2c14670e821ddbbfd

@@ -1 +0,0 @@
d97e400d31bbeb36c1c60d2c3a9bbf2cdccf85a8

@@ -0,0 +1 @@
118ae1bc7f3aeeddfe564f0edfd79c11d09d17d1
@@ -21,9 +21,24 @@

  <dependencies>
    <dependency org="org.apache.zookeeper" name="zookeeper" rev="3.4.5" transitive="false"/>
    <dependency org="org.apache.httpcomponents" name="httpcore" rev="4.1.4" transitive="false"/>
    <dependency org="org.apache.httpcomponents" name="httpclient" rev="4.1.3" transitive="false"/>
    <dependency org="org.apache.httpcomponents" name="httpmime" rev="4.1.3" transitive="false"/>
    <dependency org="org.apache.httpcomponents" name="httpclient" rev="4.2.3" transitive="false"/>
    <!--
      The httpmime rev should be kept in sync with the httpclient rev, since both the
      httpclient and the httpmime modules are sub-modules of the httpcomponents-client
      module, and both sub-modules draw their version from the httpcomponents-client
      parent POM.
    -->
    <dependency org="org.apache.httpcomponents" name="httpmime" rev="4.2.3" transitive="false"/>
    <!--
      The httpcore rev should be drawn from the httpcomponents-client POM (the parent POM
      of the httpclient POM - see the comment above) - the httpclient version and the httpcore
      version are NOT always in sync with each other.

      E.g. httpclient v4.2.3 depends on httpcore v4.2.2 - see the httpcomponents-client POM
      - look for <httpcore.version> under <properties>:
      https://svn.apache.org/repos/asf/httpcomponents/httpclient/tags/4.2.3/pom.xml
    -->
    <dependency org="org.apache.httpcomponents" name="httpcore" rev="4.2.2" transitive="false"/>
    <dependency org="commons-io" name="commons-io" rev="${commons-io.version}" transitive="false"/>
    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="1.6.4" transitive="false"/>
    <dependency org="org.codehaus.woodstox" name="wstx-asl" rev="3.2.7" transitive="false"/>
@@ -79,8 +79,7 @@ public class ConcurrentUpdateSolrServer extends SolrServer {
  final int threadCount;

  /**
   * Uses an internal ThreadSafeClientConnManager to manage http
   * connections.
   * Uses an internally managed HttpClient instance.
   *
   * @param solrServerUrl
   *          The Solr server URL

@@ -95,9 +94,7 @@ public class ConcurrentUpdateSolrServer extends SolrServer {
  }

  /**
   * Uses the supplied HttpClient to send documents to the Solr server, the
   * HttpClient should be instantiated using a
   * ThreadSafeClientConnManager.
   * Uses the supplied HttpClient to send documents to the Solr server.
   */
  public ConcurrentUpdateSolrServer(String solrServerUrl,
      HttpClient client, int queueSize, int threadCount) {
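
  A hedged construction sketch for the two flavors documented above (the
  URL, queue size, and thread count are illustrative):

    import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
    import org.apache.solr.client.solrj.impl.HttpClientUtil;

    public class CusConstructionSketch {
      public static void main(String[] args) {
        // Internally managed client:
        ConcurrentUpdateSolrServer a =
            new ConcurrentUpdateSolrServer("http://localhost:8983/solr", 10, 2);
        // Caller-supplied client; any HttpClient now works, no special
        // connection manager is required:
        ConcurrentUpdateSolrServer b =
            new ConcurrentUpdateSolrServer("http://localhost:8983/solr",
                HttpClientUtil.createClient(null), 10, 2);
        a.shutdown();
        b.shutdown();
      }
    }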
@@ -35,8 +35,10 @@ import org.apache.http.client.HttpClient;
import org.apache.http.client.params.ClientParamBean;
import org.apache.http.entity.HttpEntityWrapper;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.SystemDefaultHttpClient;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; // jdoc
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.protocol.HttpContext;
import org.apache.solr.common.params.ModifiableSolrParams;

@@ -94,14 +96,12 @@ public class HttpClientUtil {
   *
   * @param params
   *          http client configuration, if null a client with default
   *          configuration (no additional configuration) is created that uses
   *          ThreadSafeClientConnManager.
   *          configuration (no additional configuration) is created.
   */
  public static HttpClient createClient(final SolrParams params) {
    final ModifiableSolrParams config = new ModifiableSolrParams(params);
    logger.info("Creating new http client, config:" + config);
    final ThreadSafeClientConnManager mgr = new ThreadSafeClientConnManager();
    final DefaultHttpClient httpClient = new DefaultHttpClient(mgr);
    final DefaultHttpClient httpClient = new SystemDefaultHttpClient();
    configureClient(httpClient, config);
    return httpClient;
  }
@@ -153,25 +153,35 @@ public class HttpClientUtil {

  /**
   * Set max connections allowed per host. This call will only work when
   * {@link ThreadSafeClientConnManager} is used.
   * {@link ThreadSafeClientConnManager} or
   * {@link PoolingClientConnectionManager} is used.
   */
  public static void setMaxConnectionsPerHost(HttpClient httpClient,
      int max) {
    if(httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
    // would have been nice if there was a common interface
    if (httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
      ThreadSafeClientConnManager mgr = (ThreadSafeClientConnManager)httpClient.getConnectionManager();
      mgr.setDefaultMaxPerRoute(max);
    } else if (httpClient.getConnectionManager() instanceof PoolingClientConnectionManager) {
      PoolingClientConnectionManager mgr = (PoolingClientConnectionManager)httpClient.getConnectionManager();
      mgr.setDefaultMaxPerRoute(max);
    }
  }

  /**
   * Set max total connections allowed. This call will only work when
   * {@link ThreadSafeClientConnManager} is used.
   * {@link ThreadSafeClientConnManager} or
   * {@link PoolingClientConnectionManager} is used.
   */
  public static void setMaxConnections(final HttpClient httpClient,
      int max) {
    if(httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
    // would have been nice if there was a common interface
    if (httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
      ThreadSafeClientConnManager mgr = (ThreadSafeClientConnManager)httpClient.getConnectionManager();
      mgr.setMaxTotal(max);
    } else if (httpClient.getConnectionManager() instanceof PoolingClientConnectionManager) {
      PoolingClientConnectionManager mgr = (PoolingClientConnectionManager)httpClient.getConnectionManager();
      mgr.setMaxTotal(max);
    }
  }
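
  A hedged usage sketch tying the utilities together (the limits are
  illustrative). Both setters now recognize the PoolingClientConnectionManager
  that SystemDefaultHttpClient uses internally:

    import org.apache.http.client.HttpClient;
    import org.apache.solr.client.solrj.impl.HttpClientUtil;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class PoolLimitsSketch {
      public static void main(String[] args) {
        HttpClient client = HttpClientUtil.createClient(new ModifiableSolrParams());
        HttpClientUtil.setMaxConnections(client, 128);       // pool-wide cap
        HttpClientUtil.setMaxConnectionsPerHost(client, 32); // per-route cap
      }
    }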
@@ -20,10 +20,19 @@ package org.apache.solr.client.solrj.embedded;
import org.apache.solr.client.solrj.SolrExampleTests;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.util.ExternalPaths;

import java.io.File;
import java.util.Map;
import java.util.HashMap;

import org.junit.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * TODO? perhaps use:
 *  http://docs.codehaus.org/display/JETTY/ServletTester

@@ -32,11 +41,73 @@ import org.junit.Test;
 */
public class SolrExampleJettyTest extends SolrExampleTests {

  private static Logger log = LoggerFactory.getLogger(SolrExampleJettyTest.class);

  private static boolean manageSslProps = true;

  private static final File TEST_KEYSTORE = new File(ExternalPaths.SOURCE_HOME,
      "example/etc/solrtest.keystore");

  private static final Map<String,String> SSL_PROPS = new HashMap<String,String>();
  static {
    SSL_PROPS.put("tests.jettySsl","false");
    SSL_PROPS.put("tests.jettySsl.clientAuth","false");
    SSL_PROPS.put("javax.net.ssl.keyStore", TEST_KEYSTORE.getAbsolutePath());
    SSL_PROPS.put("javax.net.ssl.keyStorePassword","secret");
    SSL_PROPS.put("javax.net.ssl.trustStore", TEST_KEYSTORE.getAbsolutePath());
    SSL_PROPS.put("javax.net.ssl.trustStorePassword","secret");
  }

  @BeforeClass
  public static void beforeTest() throws Exception {

    // // //

    // :TODO: SOLR-4394 promote SSL up to SolrJettyTestBase?

    // consume the same amount of random no matter what
    final boolean trySsl = random().nextBoolean();
    final boolean trySslClientAuth = random().nextBoolean();

    // only randomize SSL if none of the SSL_PROPS are already set
    final Map<Object,Object> sysprops = System.getProperties();
    for (String prop : SSL_PROPS.keySet()) {
      if (sysprops.containsKey(prop)) {
        log.info("System property explicitly set, so skipping randomized ssl properties: " + prop);
        manageSslProps = false;
        break;
      }
    }

    assertTrue("test keystore does not exist, can't be used for randomized " +
               "ssl testing: " + TEST_KEYSTORE.getAbsolutePath(),
               TEST_KEYSTORE.exists() );

    if (manageSslProps) {
      log.info("Randomized ssl ({}) and clientAuth ({})", trySsl, trySslClientAuth);
      for (String prop : SSL_PROPS.keySet()) {
        System.setProperty(prop, SSL_PROPS.get(prop));
      }
      // now explicitly re-set the two random values
      System.setProperty("tests.jettySsl", String.valueOf(trySsl));
      System.setProperty("tests.jettySsl.clientAuth", String.valueOf(trySslClientAuth));
    }
    // // //

    createJetty(ExternalPaths.EXAMPLE_HOME, null, null);
  }

  @AfterClass
  public static void afterTest() throws Exception {
    if (manageSslProps) {
      for (String prop : SSL_PROPS.keySet()) {
        System.clearProperty(prop);
      }
    }
  }

  @Test
  public void testBadSetup()
  {
@@ -23,8 +23,8 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.http.auth.AuthScope;
import org.apache.http.client.HttpClient;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.params.HttpConnectionParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;

@@ -52,9 +52,9 @@ public class HttpClientUtilTest {
    params.set(HttpClientUtil.PROP_USE_RETRY, false);
    DefaultHttpClient client = (DefaultHttpClient) HttpClientUtil.createClient(params);
    assertEquals(12345, HttpConnectionParams.getConnectionTimeout(client.getParams()));
    assertEquals(ThreadSafeClientConnManager.class, client.getConnectionManager().getClass());
    assertEquals(22345, ((ThreadSafeClientConnManager)client.getConnectionManager()).getMaxTotal());
    assertEquals(32345, ((ThreadSafeClientConnManager)client.getConnectionManager()).getDefaultMaxPerRoute());
    assertEquals(PoolingClientConnectionManager.class, client.getConnectionManager().getClass());
    assertEquals(22345, ((PoolingClientConnectionManager)client.getConnectionManager()).getMaxTotal());
    assertEquals(32345, ((PoolingClientConnectionManager)client.getConnectionManager()).getDefaultMaxPerRoute());
    assertEquals(42345, HttpConnectionParams.getSoTimeout(client.getParams()));
    assertEquals(HttpClientUtil.NO_RETRY, client.getHttpRequestRetryHandler());
    assertEquals("pass", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getPassword());
@@ -7,7 +7,6 @@ import static org.junit.Assert.*;

import java.net.MalformedURLException;

import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.solr.client.solrj.ResponseParser;
import org.junit.Test;

@@ -28,6 +27,8 @@ import org.junit.Test;
 * limitations under the License.
 */

import org.apache.solr.common.params.ModifiableSolrParams;

/**
 * Test the LBHttpSolrServer.
 */

@@ -42,14 +43,14 @@ public class LBHttpSolrServerTest {
   */
  @Test
  public void testLBHttpSolrServerHttpClientResponseParserStringArray() throws MalformedURLException {
    LBHttpSolrServer testServer = new LBHttpSolrServer(new DefaultHttpClient(), (ResponseParser) null);
    LBHttpSolrServer testServer = new LBHttpSolrServer(HttpClientUtil.createClient(new ModifiableSolrParams()), (ResponseParser) null);
    HttpSolrServer httpServer = testServer.makeServer("http://127.0.0.1:8080");
    assertNull("Generated server should have null parser.", httpServer.getParser());

    ResponseParser parser = new BinaryResponseParser();
    testServer = new LBHttpSolrServer(new DefaultHttpClient(), parser);
    testServer = new LBHttpSolrServer(HttpClientUtil.createClient(new ModifiableSolrParams()), parser);
    httpServer = testServer.makeServer("http://127.0.0.1:8080");
    assertEquals("Invalid parser passed to generated server.", parser, httpServer.getParser());
  }
}
@@ -86,7 +86,7 @@ abstract public class SolrJettyTestBase extends SolrTestCaseJ4
    if (jetty != null) {
      try {
        // setup the server...
        String url = "http://127.0.0.1:"+port+context;
        String url = jetty.getBaseUrl().toString();
        HttpSolrServer s = new HttpSolrServer( url );
        s.setConnectionTimeout(DEFAULT_CONNECTION_TIMEOUT);
        s.setDefaultMaxConnectionsPerHost(100);