diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 34d8bd85975..993d8e6452e 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -58,6 +58,9 @@ Optimizations
* LUCENE-3298: FST can now be larger than 2.1 GB / 2.1 B nodes.
(James Dyer, Mike McCandless)
+* LUCENE-4690: Performance improvements and non-hashing versions
+ of NumericUtils.*ToPrefixCoded() (yonik)
+
New Features
* LUCENE-4686: New specialized DGapVInt8IntEncoder for facets (now the
@@ -70,9 +73,23 @@ New Features
compresses term vectors into chunks of documents similarly to
CompressingStoredFieldsFormat. (Adrien Grand)
+* LUCENE-4695: Added LiveFieldValues utility class, for getting the
+ current (live, real-time) value for any indexed doc/field. The
+ class buffers recently indexed doc/field values until a new
+ near-real-time reader is opened that contains those changes.
+ (Robert Muir, Mike McCandless)
+
API Changes
* LUCENE-4709: FacetResultNode no longer has a residue field. (Shai Erera)
+
+* LUCENE-4716: DrillDown.query now takes Occur, allowing you to specify
+ whether categories should be OR'ed or AND'ed. (Shai Erera)
+
+* LUCENE-4695: ReferenceManager.RefreshListener.afterRefresh now takes
+ a boolean indicating whether a new reference was in fact opened, and
+ a new beforeRefresh method notifies you when a refresh attempt is
+ starting. (Robert Muir, Mike McCandless)
Bug Fixes
@@ -414,6 +431,13 @@ Changes in Runtime Behavior
This only affects requests with depth>1. If you execute such requests and
rely on the facet results being returned flat (i.e. no hierarchy), you should
set the ResultMode to GLOBAL_FLAT. (Shai Erera, Gilad Barkai)
+
+* LUCENE-1822: Improve the text window selection in FastVectorHighlighter by
+ recalculating the starting margin once all phrases in the fragment have been
+ identified. This way, if a single word is matched in a fragment, it appears in
+ the middle of the highlight instead of 6 characters from the beginning, and
+ the entirety of a short text can be guaranteed to appear in a fragment by
+ specifying a large enough fragCharSize.
Optimizations
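For the LUCENE-1822 entry above, a minimal usage sketch of how fragCharSize interacts with the recentered window; the query, reader, docId and "content" field are illustrative assumptions, not part of this patch:

```java
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.search.vectorhighlight.FieldQuery;

// Hedged sketch: with the recalculated starting margin, a lone match is
// centered in the fragment, and a fragCharSize at least as large as the
// stored text returns a short field whole. query/reader/docId are assumed.
FastVectorHighlighter highlighter = new FastVectorHighlighter();
FieldQuery fieldQuery = highlighter.getFieldQuery(query);
String fragment = highlighter.getBestFragment(fieldQuery, reader, docId,
                                              "content", 200 /* fragCharSize */);
```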
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java
index 700258fdd28..c1c4648e0e2 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java
@@ -45,7 +45,7 @@ public abstract class CompressionMode {
@Override
public Compressor newCompressor() {
- return LZ4_FAST_COMPRESSOR;
+ return new LZ4FastCompressor();
}
@Override
@@ -95,7 +95,7 @@ public abstract class CompressionMode {
@Override
public Compressor newCompressor() {
- return LZ4_HIGH_COMPRESSOR;
+ return new LZ4HighCompressor();
}
@Override
@@ -147,25 +147,37 @@ public abstract class CompressionMode {
};
- private static final Compressor LZ4_FAST_COMPRESSOR = new Compressor() {
+ private static final class LZ4FastCompressor extends Compressor {
+
+ private final LZ4.HashTable ht;
+
+ LZ4FastCompressor() {
+ ht = new LZ4.HashTable();
+ }
@Override
public void compress(byte[] bytes, int off, int len, DataOutput out)
throws IOException {
- LZ4.compress(bytes, off, len, out);
+ LZ4.compress(bytes, off, len, out, ht);
}
- };
+ }
- private static final Compressor LZ4_HIGH_COMPRESSOR = new Compressor() {
+ private static final class LZ4HighCompressor extends Compressor {
+
+ private final LZ4.HCHashTable ht;
+
+ LZ4HighCompressor() {
+ ht = new LZ4.HCHashTable();
+ }
@Override
public void compress(byte[] bytes, int off, int len, DataOutput out)
throws IOException {
- LZ4.compressHC(bytes, off, len, out);
+ LZ4.compressHC(bytes, off, len, out, ht);
}
- };
+ }
private static final class DeflateDecompressor extends Decompressor {
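A sketch of the intent behind returning fresh instances from newCompressor(): each Compressor now owns a mutable LZ4 hash table, so a caller keeps one instance per thread and recycles it across chunks. The buffer sizing and the chunks loop below are assumptions:

```java
import org.apache.lucene.codecs.compressing.CompressionMode;
import org.apache.lucene.codecs.compressing.Compressor;
import org.apache.lucene.store.ByteArrayDataOutput;

// One Compressor per thread: its internal hash table is reset, not
// reallocated, on each compress() call.
Compressor compressor = CompressionMode.FAST.newCompressor();
byte[] buffer = new byte[1 << 16];
for (byte[] chunk : chunks) { // chunks: assumed per-thread input
  ByteArrayDataOutput out = new ByteArrayDataOutput(buffer);
  compressor.compress(chunk, 0, chunk.length, out);
}
```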
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/LZ4.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/LZ4.java
index 7e52339657d..022b58036ad 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/LZ4.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/LZ4.java
@@ -30,7 +30,7 @@ import org.apache.lucene.util.packed.PackedInts;
* http://code.google.com/p/lz4/
* http://fastcompression.blogspot.fr/p/lz4.html
*/
-class LZ4 {
+final class LZ4 {
private LZ4() {}
@@ -181,11 +181,29 @@ class LZ4 {
}
}
+ static final class HashTable {
+ private int hashLog;
+ private PackedInts.Mutable hashTable;
+
+ void reset(int len) {
+ final int bitsPerOffset = PackedInts.bitsRequired(len - LAST_LITERALS);
+ final int bitsPerOffsetLog = 32 - Integer.numberOfLeadingZeros(bitsPerOffset - 1);
+ hashLog = MEMORY_USAGE + 3 - bitsPerOffsetLog;
+ if (hashTable == null || hashTable.size() < 1 << hashLog || hashTable.getBitsPerValue() < bitsPerOffset) {
+ hashTable = PackedInts.getMutable(1 << hashLog, bitsPerOffset, PackedInts.DEFAULT);
+ } else {
+ hashTable.clear();
+ }
+ }
+
+ }
+
/**
* Compress bytes[off:off+len] into out using
- * at most 16KB of memory.
+ * at most 16KB of memory. ht shouldn't be shared across threads
+ * but can safely be reused.
*/
- public static void compress(byte[] bytes, int off, int len, DataOutput out) throws IOException {
+ public static void compress(byte[] bytes, int off, int len, DataOutput out, HashTable ht) throws IOException {
final int base = off;
final int end = off + len;
@@ -196,11 +214,9 @@ class LZ4 {
final int limit = end - LAST_LITERALS;
final int matchLimit = limit - MIN_MATCH;
-
- final int bitsPerOffset = PackedInts.bitsRequired(len - LAST_LITERALS);
- final int bitsPerOffsetLog = 32 - Integer.numberOfLeadingZeros(bitsPerOffset - 1);
- final int hashLog = MEMORY_USAGE + 3 - bitsPerOffsetLog;
- final PackedInts.Mutable hashTable = PackedInts.getMutable(1 << hashLog, bitsPerOffset, PackedInts.DEFAULT);
+ ht.reset(len);
+ final int hashLog = ht.hashLog;
+ final PackedInts.Mutable hashTable = ht.hashTable;
main:
while (off < limit) {
@@ -256,20 +272,24 @@ class LZ4 {
m2.ref = m1.ref;
}
- private static class HashTable {
+ static final class HCHashTable {
static final int MAX_ATTEMPTS = 256;
static final int MASK = MAX_DISTANCE - 1;
int nextToUpdate;
- private final int base;
+ private int base;
private final int[] hashTable;
private final short[] chainTable;
- HashTable(int base) {
+ HCHashTable() {
+ hashTable = new int[HASH_TABLE_SIZE_HC];
+ chainTable = new short[MAX_DISTANCE];
+ }
+
+ private void reset(int base) {
this.base = base;
nextToUpdate = base;
- hashTable = new int[HASH_TABLE_SIZE_HC];
Arrays.fill(hashTable, -1);
- chainTable = new short[MAX_DISTANCE];
+ Arrays.fill(chainTable, (short) 0);
}
private int hashPointer(byte[] bytes, int off) {
@@ -355,12 +375,14 @@ class LZ4 {
/**
* Compress bytes[off:off+len] into out. Compared to
- * {@link LZ4#compress(byte[], int, int, DataOutput)}, this method is slower,
- * uses more memory (~ 256KB), but should provide better compression ratios
- * (especially on large inputs) because it chooses the best match among up to
- * 256 candidates and then performs trade-offs to fix overlapping matches.
+ * {@link LZ4#compress(byte[], int, int, DataOutput, HashTable)}, this method
+ * is slower and uses more memory (~ 256KB per thread) but should provide
+ * better compression ratios (especially on large inputs) because it chooses
+ * the best match among up to 256 candidates and then performs trade-offs to
+ * fix overlapping matches. ht shouldn't be shared across threads
+ * but can safely be reused.
*/
- public static void compressHC(byte[] src, int srcOff, int srcLen, DataOutput out) throws IOException {
+ public static void compressHC(byte[] src, int srcOff, int srcLen, DataOutput out, HCHashTable ht) throws IOException {
final int srcEnd = srcOff + srcLen;
final int matchLimit = srcEnd - LAST_LITERALS;
@@ -368,7 +390,7 @@ class LZ4 {
int sOff = srcOff;
int anchor = sOff++;
- final HashTable ht = new HashTable(srcOff);
+ ht.reset(srcOff);
final Match match0 = new Match();
final Match match1 = new Match();
final Match match2 = new Match();
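A sketch of the new calling convention, as it would look from code inside org.apache.lucene.codecs.compressing (the LZ4 class is package-private); the data/out setup is an assumption:

```java
import org.apache.lucene.store.ByteArrayDataOutput;

// The caller now owns the hash table and threads it through each call;
// reset() only reallocates when the packed array is too small for the input.
LZ4.HashTable ht = new LZ4.HashTable();        // keep one per thread
byte[] data = new byte[4096];                  // assumed input chunk
ByteArrayDataOutput out = new ByteArrayDataOutput(new byte[1 << 16]);
LZ4.compress(data, 0, data.length, out, ht);   // allocates table on first use
LZ4.compress(data, 0, data.length, out, ht);   // reuses it via ht.reset(len)
```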
diff --git a/lucene/core/src/java/org/apache/lucene/search/LiveFieldValues.java b/lucene/core/src/java/org/apache/lucene/search/LiveFieldValues.java
new file mode 100644
index 00000000000..c0a28edf169
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/LiveFieldValues.java
@@ -0,0 +1,133 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/** Tracks live field values across NRT reader reopens.
+ * This holds a map for all updated ids since
+ * the last reader reopen. Once the NRT reader is reopened,
+ * it prunes the map. This means you must reopen your NRT
+ * reader periodically otherwise the RAM consumption of
+ * this class will grow unbounded!
+ *
+ * <p>NOTE: you must ensure the same id is never updated at
+ * the same time by two threads, because in this case you
+ * cannot in general know which thread "won". */
+
+public abstract class LiveFieldValues<T> implements ReferenceManager.RefreshListener, Closeable {
+
+ private volatile Map<String,T> current = new ConcurrentHashMap<String,T>();
+ private volatile Map<String,T> old = new ConcurrentHashMap<String,T>();
+ private final ReferenceManager<IndexSearcher> mgr;
+ private final T missingValue;
+
+ public LiveFieldValues(ReferenceManager<IndexSearcher> mgr, T missingValue) {
+ this.missingValue = missingValue;
+ this.mgr = mgr;
+ mgr.addListener(this);
+ }
+
+ @Override
+ public void close() {
+ mgr.removeListener(this);
+ }
+
+ @Override
+ public void beforeRefresh() throws IOException {
+ old = current;
+ // Start sending all updates after this point to the new
+ // map. While reopen is running, any lookup will first
+ // try this new map, then fallback to old, then to the
+ // current searcher:
+ current = new ConcurrentHashMap<String,T>();
+ }
+
+ @Override
+ public void afterRefresh(boolean didRefresh) throws IOException {
+ // Now drop all the old values because they are now
+ // visible via the searcher that was just opened; if
+ // didRefresh is false, it's possible old has some
+ // entries in it, which is fine: it means they were
+ // actually already included in the previously opened
+ // reader. So we can safely clear old here:
+ old = new ConcurrentHashMap<String,T>();
+ }
+
+ /** Call this after you've successfully added a document
+ * to the index, to record what value you just set the
+ * field to. */
+ public void add(String id, T value) {
+ current.put(id, value);
+ }
+
+ /** Call this after you've successfully deleted a document
+ * from the index. */
+ public void delete(String id) {
+ current.put(id, missingValue);
+ }
+
+ /** Returns the [approximate] number of id/value pairs
+ * buffered in RAM. */
+ public int size() {
+ return current.size() + old.size();
+ }
+
+ /** Returns the current value for this id, or null if the
+ * id isn't in the index or was deleted. */
+ public T get(String id) throws IOException {
+ // First try to get the "live" value:
+ T value = current.get(id);
+ if (value == missingValue) {
+ // Deleted but the deletion is not yet reflected in
+ // the reader:
+ return null;
+ } else if (value != null) {
+ return value;
+ } else {
+ value = old.get(id);
+ if (value == missingValue) {
+ // Deleted but the deletion is not yet reflected in
+ // the reader:
+ return null;
+ } else if (value != null) {
+ return value;
+ } else {
+ // It either does not exist in the index, or, it was
+ // already flushed & NRT reader was opened on the
+ // segment, so fallback to current searcher:
+ IndexSearcher s = mgr.acquire();
+ try {
+ return lookupFromSearcher(s, id);
+ } finally {
+ mgr.release(s);
+ }
+ }
+ }
+ }
+
+ /** This is called when the id/value was already flushed & opened
+ * in an NRT IndexSearcher. You must implement this to
+ * go look up the value (eg, via doc values, field cache,
+ * stored fields, etc.). */
+ protected abstract T lookupFromSearcher(IndexSearcher s, String id) throws IOException;
+}
+
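A hedged usage sketch for the new class (not part of the patch): the "id" and "price" fields, searcherManager and writer are illustrative assumptions.

```java
// Get-your-own-writes on top of any ReferenceManager<IndexSearcher>:
final LiveFieldValues<Long> prices =
    new LiveFieldValues<Long>(searcherManager, -1L /* missingValue */) {
      @Override
      protected Long lookupFromSearcher(IndexSearcher s, String id) throws IOException {
        TopDocs hits = s.search(new TermQuery(new Term("id", id)), 1);
        if (hits.totalHits == 0) {
          return null; // not in the index at all
        }
        StoredDocument doc = s.doc(hits.scoreDocs[0].doc);
        return (Long) doc.getField("price").numericValue();
      }
    };

writer.updateDocument(new Term("id", "sku-7"), doc); // doc stores price=42
prices.add("sku-7", 42L);          // record the value just indexed
Long price = prices.get("sku-7");  // 42, even before the next NRT reopen
```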
diff --git a/lucene/core/src/java/org/apache/lucene/search/ReferenceManager.java b/lucene/core/src/java/org/apache/lucene/search/ReferenceManager.java
index 1973f0583cd..62a7fc3d6e0 100755
--- a/lucene/core/src/java/org/apache/lucene/search/ReferenceManager.java
+++ b/lucene/core/src/java/org/apache/lucene/search/ReferenceManager.java
@@ -151,6 +151,7 @@ public abstract class ReferenceManager<G> implements Closeable {
try {
final G reference = acquire();
try {
+ notifyRefreshListenersBefore();
G newReference = refreshIfNeeded(reference);
if (newReference != null) {
assert newReference != reference : "refreshIfNeeded should return null if refresh wasn't needed";
@@ -165,11 +166,9 @@ public abstract class ReferenceManager implements Closeable {
}
} finally {
release(reference);
+ notifyRefreshListenersRefreshed(refreshed);
}
afterMaybeRefresh();
- if (refreshed) {
- notifyRefreshListeners();
- }
} finally {
refreshLock.unlock();
}
@@ -254,9 +253,15 @@ public abstract class ReferenceManager implements Closeable {
decRef(reference);
}
- private void notifyRefreshListeners() {
+ private void notifyRefreshListenersBefore() throws IOException {
for (RefreshListener refreshListener : refreshListeners) {
- refreshListener.afterRefresh();
+ refreshListener.beforeRefresh();
+ }
+ }
+
+ private void notifyRefreshListenersRefreshed(boolean didRefresh) throws IOException {
+ for (RefreshListener refreshListener : refreshListeners) {
+ refreshListener.afterRefresh(didRefresh);
}
}
@@ -284,9 +289,13 @@ public abstract class ReferenceManager implements Closeable {
* finished. See {@link #addListener}. */
public interface RefreshListener {
- /**
- * Called after a successful refresh and a new reference has been installed. When this is called {@link #acquire()} is guaranteed to return a new instance.
- */
- void afterRefresh();
+ /** Called right before a refresh attempt starts. */
+ void beforeRefresh() throws IOException;
+
+ /** Called after the attempted refresh; if the refresh
+ * did open a new reference then didRefresh will be true
+ * and {@link #acquire()} is guaranteed to return the new
+ * reference. */
+ void afterRefresh(boolean didRefresh) throws IOException;
}
}
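A hedged sketch of the revised listener contract: the two callbacks are always paired, and didRefresh reports whether acquire() will now return a new reference (mgr is any ReferenceManager, e.g. a SearcherManager):

```java
mgr.addListener(new ReferenceManager.RefreshListener() {
  private long startNS;

  @Override
  public void beforeRefresh() {
    startNS = System.nanoTime(); // a refresh attempt is starting
  }

  @Override
  public void afterRefresh(boolean didRefresh) {
    System.out.println("refresh took " + (System.nanoTime() - startNS)
        + " ns; new reference opened=" + didRefresh);
  }
});
```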
diff --git a/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java b/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java
index 0815ecb73de..f4fcc632339 100644
--- a/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java
+++ b/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java
@@ -82,7 +82,7 @@ public final class NumericUtils {
/**
* The maximum term length (used for byte[] buffer size)
* for encoding long values.
- * @see #longToPrefixCoded(long,int,BytesRef)
+ * @see #longToPrefixCodedBytes
*/
public static final int BUF_SIZE_LONG = 63/7 + 2;
@@ -95,7 +95,7 @@ public final class NumericUtils {
/**
* The maximum term length (used for byte[] buffer size)
* for encoding int values.
- * @see #intToPrefixCoded(int,int,BytesRef)
+ * @see #intToPrefixCodedBytes
*/
public static final int BUF_SIZE_INT = 31/7 + 2;
@@ -109,15 +109,42 @@ public final class NumericUtils {
* @return the hash code for indexing (TermsHash)
*/
public static int longToPrefixCoded(final long val, final int shift, final BytesRef bytes) {
- if (shift>63 || shift<0)
+ longToPrefixCodedBytes(val, shift, bytes);
+ return bytes.hashCode();
+ }
+
+ /**
+ * Returns prefix coded bits after reducing the precision by shift bits.
+ * This method is used by {@link NumericTokenStream}.
+ * After encoding, {@code bytes.offset} will always be 0.
+ * @param val the numeric value
+ * @param shift how many bits to strip from the right
+ * @param bytes will contain the encoded value
+ * @return the hash code for indexing (TermsHash)
+ */
+ public static int intToPrefixCoded(final int val, final int shift, final BytesRef bytes) {
+ intToPrefixCodedBytes(val, shift, bytes);
+ return bytes.hashCode();
+ }
+
+ /**
+ * Returns prefix coded bits after reducing the precision by shift bits.
+ * This method is used by {@link NumericTokenStream}.
+ * After encoding, {@code bytes.offset} will always be 0.
+ * @param val the numeric value
+ * @param shift how many bits to strip from the right
+ * @param bytes will contain the encoded value
+ */
+ public static void longToPrefixCodedBytes(final long val, final int shift, final BytesRef bytes) {
+ if ((shift & ~0x3f) != 0) // ensure shift is 0..63
throw new IllegalArgumentException("Illegal shift value, must be 0..63");
- int hash, nChars = (63-shift)/7 + 1;
+ int nChars = (((63-shift)*37)>>8) + 1; // i/7 is the same as (i*37)>>8 for i in 0..63
bytes.offset = 0;
- bytes.length = nChars+1;
+ bytes.length = nChars+1; // one extra for the byte that contains the shift info
if (bytes.bytes.length < bytes.length) {
- bytes.grow(NumericUtils.BUF_SIZE_LONG);
+ bytes.bytes = new byte[NumericUtils.BUF_SIZE_LONG]; // use the max
}
- bytes.bytes[0] = (byte) (hash = (SHIFT_START_LONG + shift));
+ bytes.bytes[0] = (byte)(SHIFT_START_LONG + shift);
long sortableBits = val ^ 0x8000000000000000L;
sortableBits >>>= shift;
while (nChars > 0) {
@@ -126,13 +153,9 @@ public final class NumericUtils {
bytes.bytes[nChars--] = (byte)(sortableBits & 0x7f);
sortableBits >>>= 7;
}
- // calculate hash
- for (int i = 1; i < bytes.length; i++) {
- hash = 31*hash + bytes.bytes[i];
- }
- return hash;
}
+
/**
* Returns prefix coded bits after reducing the precision by shift bits.
* This method is used by {@link NumericTokenStream}.
@@ -140,18 +163,17 @@ public final class NumericUtils {
* @param val the numeric value
* @param shift how many bits to strip from the right
* @param bytes will contain the encoded value
- * @return the hash code for indexing (TermsHash)
*/
- public static int intToPrefixCoded(final int val, final int shift, final BytesRef bytes) {
- if (shift>31 || shift<0)
+ public static void intToPrefixCodedBytes(final int val, final int shift, final BytesRef bytes) {
+ if ((shift & ~0x1f) != 0) // ensure shift is 0..31
throw new IllegalArgumentException("Illegal shift value, must be 0..31");
- int hash, nChars = (31-shift)/7 + 1;
+ int nChars = (((31-shift)*37)>>8) + 1; // i/7 is the same as (i*37)>>8 for i in 0..63
bytes.offset = 0;
- bytes.length = nChars+1;
+ bytes.length = nChars+1; // one extra for the byte that contains the shift info
if (bytes.bytes.length < bytes.length) {
- bytes.grow(NumericUtils.BUF_SIZE_INT);
+ bytes.bytes = new byte[NumericUtils.BUF_SIZE_LONG]; // use the max
}
- bytes.bytes[0] = (byte) (hash = (SHIFT_START_INT + shift));
+ bytes.bytes[0] = (byte)(SHIFT_START_INT + shift);
int sortableBits = val ^ 0x80000000;
sortableBits >>>= shift;
while (nChars > 0) {
@@ -160,13 +182,9 @@ public final class NumericUtils {
bytes.bytes[nChars--] = (byte)(sortableBits & 0x7f);
sortableBits >>>= 7;
}
- // calculate hash
- for (int i = 1; i < bytes.length; i++) {
- hash = 31*hash + bytes.bytes[i];
- }
- return hash;
}
+
/**
* Returns the shift value from a prefix encoded {@code long}.
* @throws NumberFormatException if the supplied {@link BytesRef} is
@@ -197,7 +215,7 @@ public final class NumericUtils {
* This method can be used to decode a term's value.
* @throws NumberFormatException if the supplied {@link BytesRef} is
* not correctly prefix encoded.
- * @see #longToPrefixCoded(long,int,BytesRef)
+ * @see #longToPrefixCodedBytes
*/
public static long prefixCodedToLong(final BytesRef val) {
long sortableBits = 0L;
@@ -221,7 +239,7 @@ public final class NumericUtils {
* This method can be used to decode a term's value.
* @throws NumberFormatException if the supplied {@link BytesRef} is
* not correctly prefix encoded.
- * @see #intToPrefixCoded(int,int,BytesRef)
+ * @see #intToPrefixCodedBytes
*/
public static int prefixCodedToInt(final BytesRef val) {
int sortableBits = 0;
@@ -402,8 +420,8 @@ public final class NumericUtils {
*/
public void addRange(final long min, final long max, final int shift) {
final BytesRef minBytes = new BytesRef(BUF_SIZE_LONG), maxBytes = new BytesRef(BUF_SIZE_LONG);
- longToPrefixCoded(min, shift, minBytes);
- longToPrefixCoded(max, shift, maxBytes);
+ longToPrefixCodedBytes(min, shift, minBytes);
+ longToPrefixCodedBytes(max, shift, maxBytes);
addRange(minBytes, maxBytes);
}
@@ -431,8 +449,8 @@ public final class NumericUtils {
*/
public void addRange(final int min, final int max, final int shift) {
final BytesRef minBytes = new BytesRef(BUF_SIZE_INT), maxBytes = new BytesRef(BUF_SIZE_INT);
- intToPrefixCoded(min, shift, minBytes);
- intToPrefixCoded(max, shift, maxBytes);
+ intToPrefixCodedBytes(min, shift, minBytes);
+ intToPrefixCodedBytes(max, shift, maxBytes);
addRange(minBytes, maxBytes);
}
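A hedged sketch of the split API: the *ToPrefixCodedBytes variants only encode, while the old names keep returning the hash for TermsHash, which is now simply the BytesRef's own hashCode; the final loop checks the (i*37)>>8 shortcut used above:

```java
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;

BytesRef b = new BytesRef(NumericUtils.BUF_SIZE_LONG);
NumericUtils.longToPrefixCodedBytes(1234L, 0, b);
assert NumericUtils.prefixCodedToLong(b) == 1234L;       // round-trips

BytesRef b2 = new BytesRef(NumericUtils.BUF_SIZE_LONG);
int hash = NumericUtils.longToPrefixCoded(1234L, 0, b2); // hashing variant
assert hash == b2.hashCode() && b.bytesEquals(b2);

// nChars shortcut: i/7 == (i*37)>>8 holds for every i in 0..63
for (int i = 0; i <= 63; i++) {
  assert (i / 7) == ((i * 37) >> 8);
}
```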
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java b/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java
new file mode 100644
index 00000000000..d13f148ceac
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java
@@ -0,0 +1,180 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.StoredDocument;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.NRTManager.TrackingIndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
+
+public class TestLiveFieldValues extends LuceneTestCase {
+ public void test() throws Exception {
+
+ Directory dir = newFSDirectory(_TestUtil.getTempDir("livefieldupdates"));
+ IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+
+ final IndexWriter _w = new IndexWriter(dir, iwc);
+ final TrackingIndexWriter w = new TrackingIndexWriter(_w);
+
+ final NRTManager mgr = new NRTManager(w, new SearcherFactory() {
+ @Override
+ public IndexSearcher newSearcher(IndexReader r) {
+ return new IndexSearcher(r);
+ }
+ });
+
+ final Integer missing = -1;
+
+ final LiveFieldValues<Integer> rt = new LiveFieldValues<Integer>(mgr, missing) {
+ @Override
+ protected Integer lookupFromSearcher(IndexSearcher s, String id) throws IOException {
+ TermQuery tq = new TermQuery(new Term("id", id));
+ TopDocs hits = s.search(tq, 1);
+ assertTrue(hits.totalHits <= 1);
+ if (hits.totalHits == 0) {
+ return null;
+ } else {
+ StoredDocument doc = s.doc(hits.scoreDocs[0].doc);
+ return (Integer) doc.getField("field").numericValue();
+ }
+ }
+ };
+
+ int numThreads = _TestUtil.nextInt(random(), 2, 5);
+ if (VERBOSE) {
+ System.out.println(numThreads + " threads");
+ }
+
+ final CountDownLatch startingGun = new CountDownLatch(1);
+ List<Thread> threads = new ArrayList<Thread>();
+
+ final int iters = atLeast(1000);
+ final int idCount = _TestUtil.nextInt(random(), 100, 10000);
+
+ final double reopenChance = random().nextDouble()*0.01;
+ final double deleteChance = random().nextDouble()*0.25;
+ final double addChance = random().nextDouble()*0.5;
+
+ for(int t=0;t<numThreads;t++) {
+ final int threadID = t;
+ final Random threadRandom = new Random(random().nextLong());
+ Thread thread = new Thread() {
+ @Override
+ public void run() {
+ try {
+ Map<String,Integer> values = new HashMap<String,Integer>();
+ List<String> allIDs = Collections.synchronizedList(new ArrayList<String>());
+
+ startingGun.await();
+ for(int iter=0; iter<iters; iter++) {
+ // Add/update a random document; ids embed the threadID, so
+ // the same id is never updated by two threads at once:
+ if (threadRandom.nextDouble() <= addChance) {
+ String id = String.format(Locale.ROOT, "%d_%04x", threadID, threadRandom.nextInt(idCount));
+ Integer field = threadRandom.nextInt(Integer.MAX_VALUE);
+ Document doc = new Document();
+ doc.add(new StringField("id", id, Field.Store.YES));
+ doc.add(new IntField("field", field.intValue(), Field.Store.YES));
+ w.updateDocument(new Term("id", id), doc);
+ rt.add(id, field);
+ if (values.put(id, field) == null) {
+ allIDs.add(id);
+ }
+ }
+
+ if (allIDs.size() > 0 && threadRandom.nextDouble() <= deleteChance) {
+ String randomID = allIDs.get(threadRandom.nextInt(allIDs.size()));
+ w.deleteDocuments(new Term("id", randomID));
+ rt.delete(randomID);
+ values.put(randomID, missing);
+ }
+
+ if (threadRandom.nextDouble() <= reopenChance || rt.size() > 10000) {
+ //System.out.println("refresh @ " + rt.size());
+ mgr.maybeRefresh();
+ if (VERBOSE) {
+ IndexSearcher s = mgr.acquire();
+ try {
+ System.out.println("TEST: reopen " + s);
+ } finally {
+ mgr.release(s);
+ }
+ System.out.println("TEST: " + values.size() + " values");
+ }
+ }
+
+ if (threadRandom.nextInt(10) == 7) {
+ assertEquals(null, rt.get("foo"));
+ }
+
+ if (allIDs.size() > 0) {
+ String randomID = allIDs.get(threadRandom.nextInt(allIDs.size()));
+ Integer expected = values.get(randomID);
+ if (expected == missing) {
+ expected = null;
+ }
+ assertEquals("id=" + randomID, expected, rt.get(randomID));
+ }
+ }
+ } catch (Throwable t) {
+ throw new RuntimeException(t);
+ }
+ }
+ };
+ threads.add(thread);
+ thread.start();
+ }
+
+ startingGun.countDown();
+
+ for(Thread thread : threads) {
+ thread.join();
+ }
+ mgr.maybeRefresh();
+ assertEquals(0, rt.size());
+
+ rt.close();
+ mgr.close();
+ _w.close();
+ dir.close();
+ }
+}
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNRTManager.java b/lucene/core/src/test/org/apache/lucene/search/TestNRTManager.java
index 38cc749051c..ded7c9d9159 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestNRTManager.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestNRTManager.java
@@ -423,8 +423,13 @@ public class TestNRTManager extends ThreadedIndexingAndSearchingTestCase {
NRTManager sm = new NRTManager(new NRTManager.TrackingIndexWriter(iw),new SearcherFactory());
sm.addListener(new ReferenceManager.RefreshListener() {
@Override
- public void afterRefresh() {
- afterRefreshCalled.set(true);
+ public void beforeRefresh() {
+ }
+ @Override
+ public void afterRefresh(boolean didRefresh) {
+ if (didRefresh) {
+ afterRefreshCalled.set(true);
+ }
}
});
iw.addDocument(new Document());
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java
index 6711f7add70..4ce557524f3 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java
@@ -380,8 +380,8 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
int a=lower; lower=upper; upper=a;
}
final BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_INT), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
- NumericUtils.intToPrefixCoded(lower, 0, lowerBytes);
- NumericUtils.intToPrefixCoded(upper, 0, upperBytes);
+ NumericUtils.intToPrefixCodedBytes(lower, 0, lowerBytes);
+ NumericUtils.intToPrefixCodedBytes(upper, 0, upperBytes);
// test inclusive range
NumericRangeQuery tq=NumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java
index ede30d9ed44..648c7c75516 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java
@@ -405,8 +405,8 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
long a=lower; lower=upper; upper=a;
}
final BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG);
- NumericUtils.longToPrefixCoded(lower, 0, lowerBytes);
- NumericUtils.longToPrefixCoded(upper, 0, upperBytes);
+ NumericUtils.longToPrefixCodedBytes(lower, 0, lowerBytes);
+ NumericUtils.longToPrefixCodedBytes(upper, 0, upperBytes);
// test inclusive range
NumericRangeQuery tq=NumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
index 5a0cd46e0cc..9306f0a9d9b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
@@ -331,8 +331,13 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
SearcherManager sm = new SearcherManager(iw, false, new SearcherFactory());
sm.addListener(new ReferenceManager.RefreshListener() {
@Override
- public void afterRefresh() {
- afterRefreshCalled.set(true);
+ public void beforeRefresh() {
+ }
+ @Override
+ public void afterRefresh(boolean didRefresh) {
+ if (didRefresh) {
+ afterRefreshCalled.set(true);
+ }
}
});
iw.addDocument(new Document());
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestNumericUtils.java b/lucene/core/src/test/org/apache/lucene/util/TestNumericUtils.java
index 9153a1b0db9..125fd6d27a0 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestNumericUtils.java
+++ b/lucene/core/src/test/org/apache/lucene/util/TestNumericUtils.java
@@ -28,7 +28,7 @@ public class TestNumericUtils extends LuceneTestCase {
// generate a series of encoded longs, each numerical one bigger than the one before
BytesRef last=null, act=new BytesRef(NumericUtils.BUF_SIZE_LONG);
for (long l=-100000L; l<100000L; l++) {
- NumericUtils.longToPrefixCoded(l, 0, act);
+ NumericUtils.longToPrefixCodedBytes(l, 0, act);
if (last!=null) {
// test if smaller
assertTrue("actual bigger than last (BytesRef)", last.compareTo(act) < 0 );
@@ -46,7 +46,7 @@ public class TestNumericUtils extends LuceneTestCase {
// generate a series of encoded ints, each numerical one bigger than the one before
BytesRef last=null, act=new BytesRef(NumericUtils.BUF_SIZE_INT);
for (int i=-100000; i<100000; i++) {
- NumericUtils.intToPrefixCoded(i, 0, act);
+ NumericUtils.intToPrefixCodedBytes(i, 0, act);
if (last!=null) {
// test if smaller
assertTrue("actual bigger than last (BytesRef)", last.compareTo(act) < 0 );
@@ -69,7 +69,7 @@ public class TestNumericUtils extends LuceneTestCase {
for (int i=0; i<vals.length; i++) {
- NumericUtils.longToPrefixCoded(vals[i], 0, prefixVals[i]);
+ NumericUtils.longToPrefixCodedBytes(vals[i], 0, prefixVals[i]);
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestBase.java b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestBase.java
--- a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestBase.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestBase.java
+ dirsPerPartitionSize = new HashMap();
}
@AfterClass
@@ -181,8 +180,10 @@ public abstract class FacetTestBase extends LuceneTestCase {
return newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
}
- /** Returns a default facet indexing params */
+ /** Returns a {@link FacetIndexingParams} per the given partition size. */
protected FacetIndexingParams getFacetIndexingParams(final int partSize) {
+ // several of our encoders don't support the value 0,
+ // which is one of the values encoded when dealing w/ partitions.
return new FacetIndexingParams() {
@Override
public int getPartitionSize() {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java
new file mode 100644
index 00000000000..a6cf3b8ec3b
--- /dev/null
+++ b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java
@@ -0,0 +1,64 @@
+package org.apache.lucene.facet;
+
+import java.util.Random;
+
+import org.apache.lucene.facet.index.params.CategoryListParams;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.encoding.DGapIntEncoder;
+import org.apache.lucene.util.encoding.DGapVInt8IntEncoder;
+import org.apache.lucene.util.encoding.EightFlagsIntEncoder;
+import org.apache.lucene.util.encoding.FourFlagsIntEncoder;
+import org.apache.lucene.util.encoding.IntEncoder;
+import org.apache.lucene.util.encoding.NOnesIntEncoder;
+import org.apache.lucene.util.encoding.SortingIntEncoder;
+import org.apache.lucene.util.encoding.UniqueValuesIntEncoder;
+import org.apache.lucene.util.encoding.VInt8IntEncoder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class FacetTestCase extends LuceneTestCase {
+
+ private static final IntEncoder[] ENCODERS = new IntEncoder[] {
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new VInt8IntEncoder())),
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))),
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapVInt8IntEncoder())),
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new EightFlagsIntEncoder()))),
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new FourFlagsIntEncoder()))),
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new NOnesIntEncoder(3)))),
+ new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new NOnesIntEncoder(4)))),
+ };
+
+ /** Returns a {@link CategoryListParams} with random {@link IntEncoder} and field. */
+ public static CategoryListParams randomCategoryListParams() {
+ final String field = CategoryListParams.DEFAULT_FIELD + "$" + random().nextInt();
+ return randomCategoryListParams(field);
+ }
+
+ /** Returns a {@link CategoryListParams} with random {@link IntEncoder}. */
+ public static CategoryListParams randomCategoryListParams(String field) {
+ Random random = random();
+ final IntEncoder encoder = ENCODERS[random.nextInt(ENCODERS.length)];
+ return new CategoryListParams(field) {
+ @Override
+ public IntEncoder createEncoder() {
+ return encoder;
+ }
+ };
+ }
+
+}
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestUtils.java b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestUtils.java
index 2c8f0f323e4..7846f154e25 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestUtils.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestUtils.java
@@ -1,30 +1,17 @@
package org.apache.lucene.facet;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.facet.index.params.FacetIndexingParams;
-import org.apache.lucene.facet.search.FacetsCollector;
-import org.apache.lucene.facet.search.params.CountFacetRequest;
-import org.apache.lucene.facet.search.params.FacetRequest;
-import org.apache.lucene.facet.search.params.FacetSearchParams;
import org.apache.lucene.facet.search.results.FacetResult;
import org.apache.lucene.facet.search.results.FacetResultNode;
-import org.apache.lucene.facet.taxonomy.CategoryPath;
-import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@@ -109,30 +96,6 @@ public class FacetTestUtils {
return pairs;
}
- public static Collector[] search(IndexSearcher searcher, TaxonomyReader taxonomyReader, FacetIndexingParams iParams,
- int k, String... facetNames) throws IOException {
-
- Collector[] collectors = new Collector[2];
-
- List<FacetRequest> fRequests = new ArrayList<FacetRequest>();
- for (String facetName : facetNames) {
- CategoryPath cp = new CategoryPath(facetName);
- FacetRequest fq = new CountFacetRequest(cp, k);
- fRequests.add(fq);
- }
- FacetSearchParams facetSearchParams = new FacetSearchParams(fRequests, iParams);
-
- TopScoreDocCollector topDocsCollector = TopScoreDocCollector.create(searcher.getIndexReader().maxDoc(), true);
- FacetsCollector facetsCollector = FacetsCollector.create(facetSearchParams, searcher.getIndexReader(), taxonomyReader);
- Collector mColl = MultiCollector.wrap(topDocsCollector, facetsCollector);
-
- collectors[0] = topDocsCollector;
- collectors[1] = facetsCollector;
-
- searcher.search(new MatchAllDocsQuery(), mColl);
- return collectors;
- }
-
public static String toSimpleString(FacetResult fr) {
StringBuilder sb = new StringBuilder();
toSimpleString(0, sb, fr.getFacetResultNode(), "");
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/index/OrdinalMappingReaderTest.java b/lucene/facet/src/test/org/apache/lucene/facet/index/OrdinalMappingReaderTest.java
index 9ec237f424a..8f311f0991f 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/index/OrdinalMappingReaderTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/index/OrdinalMappingReaderTest.java
@@ -7,16 +7,9 @@ import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.store.Directory;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.example.merge.TaxonomyMergeUtils;
+import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.search.params.CountFacetRequest;
import org.apache.lucene.facet.search.params.FacetSearchParams;
@@ -25,6 +18,13 @@ import org.apache.lucene.facet.search.results.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.Directory;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -43,34 +43,35 @@ import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
* limitations under the License.
*/
-public class OrdinalMappingReaderTest extends LuceneTestCase {
+public class OrdinalMappingReaderTest extends FacetTestCase {
private static final int NUM_DOCS = 100;
@Test
public void testTaxonomyMergeUtils() throws Exception {
Directory dir = newDirectory();
- Directory taxDir = newDirectory();
- buildIndexWithFacets(dir, taxDir, true);
+ Directory taxDir = newDirectory();
+ FacetIndexingParams fip = new FacetIndexingParams(randomCategoryListParams());
+ buildIndexWithFacets(dir, taxDir, true, fip);
Directory dir1 = newDirectory();
Directory taxDir1 = newDirectory();
- buildIndexWithFacets(dir1, taxDir1, false);
+ buildIndexWithFacets(dir1, taxDir1, false, fip);
- TaxonomyMergeUtils.merge(dir, taxDir, dir1, taxDir1);
+ TaxonomyMergeUtils.merge(dir, taxDir, dir1, taxDir1, fip);
- verifyResults(dir1, taxDir1);
+ verifyResults(dir1, taxDir1, fip);
dir1.close();
taxDir1.close();
dir.close();
taxDir.close();
}
- private void verifyResults(Directory dir, Directory taxDir) throws IOException {
+ private void verifyResults(Directory dir, Directory taxDir, FacetIndexingParams fip) throws IOException {
DirectoryReader reader1 = DirectoryReader.open(dir);
DirectoryTaxonomyReader taxReader = new DirectoryTaxonomyReader(taxDir);
IndexSearcher searcher = newSearcher(reader1);
- FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("tag"), NUM_DOCS));
+ FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(new CategoryPath("tag"), NUM_DOCS));
FacetsCollector collector = FacetsCollector.create(fsp, reader1, taxReader);
searcher.search(new MatchAllDocsQuery(), collector);
FacetResult result = collector.getFacetResults().get(0);
@@ -88,7 +89,7 @@ public class OrdinalMappingReaderTest extends LuceneTestCase {
taxReader.close();
}
- private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc) throws IOException {
+ private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc, FacetIndexingParams fip) throws IOException {
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
@@ -101,7 +102,7 @@ public class OrdinalMappingReaderTest extends LuceneTestCase {
int facetValue = asc? j: NUM_DOCS - j;
categoryPaths.add(new CategoryPath("tag", Integer.toString(facetValue)));
}
- FacetFields facetFields = new FacetFields(taxonomyWriter);
+ FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
facetFields.addFields(doc, categoryPaths);
writer.addDocument(doc);
}
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/index/TestFacetsPayloadMigrationReader.java b/lucene/facet/src/test/org/apache/lucene/facet/index/TestFacetsPayloadMigrationReader.java
index c9b14593d2e..f45aba5c4f3 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/index/TestFacetsPayloadMigrationReader.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/index/TestFacetsPayloadMigrationReader.java
@@ -21,6 +21,7 @@ import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.index.params.PerDimensionIndexingParams;
@@ -57,11 +58,11 @@ import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -82,7 +83,7 @@ import org.junit.Test;
*/
/** Tests facets index migration from payload to DocValues.*/
-public class TestFacetsPayloadMigrationReader extends LuceneTestCase {
+public class TestFacetsPayloadMigrationReader extends FacetTestCase {
private static class PayloadFacetFields extends FacetFields {
@@ -284,7 +285,7 @@ public class TestFacetsPayloadMigrationReader extends LuceneTestCase {
for (String dim : expectedCounts.keySet()) {
CategoryPath drillDownCP = new CategoryPath(dim);
FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(drillDownCP, 10));
- Query drillDown = DrillDown.query(fsp, new MatchAllDocsQuery(), drillDownCP);
+ Query drillDown = DrillDown.query(fsp, new MatchAllDocsQuery(), Occur.MUST, drillDownCP);
TotalHitCountCollector total = new TotalHitCountCollector();
FacetsCollector fc = FacetsCollector.create(fsp, indexReader, taxoReader);
searcher.search(drillDown, MultiCollector.wrap(fc, total));
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/index/params/CategoryListParamsTest.java b/lucene/facet/src/test/org/apache/lucene/facet/index/params/CategoryListParamsTest.java
index 4534b89ecba..49378e40672 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/index/params/CategoryListParamsTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/index/params/CategoryListParamsTest.java
@@ -1,6 +1,6 @@
package org.apache.lucene.facet.index.params;
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.util.encoding.DGapVInt8IntEncoder;
import org.apache.lucene.util.encoding.IntDecoder;
import org.apache.lucene.util.encoding.IntEncoder;
@@ -25,7 +25,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class CategoryListParamsTest extends LuceneTestCase {
+public class CategoryListParamsTest extends FacetTestCase {
@Test
public void testDefaultSettings() {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/index/params/FacetIndexingParamsTest.java b/lucene/facet/src/test/org/apache/lucene/facet/index/params/FacetIndexingParamsTest.java
index 5ab8d2f032e..f24fec84283 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/index/params/FacetIndexingParamsTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/index/params/FacetIndexingParamsTest.java
@@ -1,10 +1,10 @@
package org.apache.lucene.facet.index.params;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.search.DrillDown;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.util.PartitionsUtils;
import org.apache.lucene.index.Term;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -24,7 +24,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class FacetIndexingParamsTest extends LuceneTestCase {
+public class FacetIndexingParamsTest extends FacetTestCase {
@Test
public void testDefaultSettings() {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/index/params/PerDimensionIndexingParamsTest.java b/lucene/facet/src/test/org/apache/lucene/facet/index/params/PerDimensionIndexingParamsTest.java
index 6db5e22b262..ef8ec97b336 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/index/params/PerDimensionIndexingParamsTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/index/params/PerDimensionIndexingParamsTest.java
@@ -2,11 +2,11 @@ package org.apache.lucene.facet.index.params;
import java.util.Collections;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.search.DrillDown;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.util.PartitionsUtils;
import org.apache.lucene.index.Term;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -26,7 +26,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class PerDimensionIndexingParamsTest extends LuceneTestCase {
+public class PerDimensionIndexingParamsTest extends FacetTestCase {
@Test
public void testTopLevelSettings() {
@@ -41,7 +41,6 @@ public class PerDimensionIndexingParamsTest extends LuceneTestCase {
assertEquals("3 characters should be written", 3, numchars);
assertEquals("wrong drill-down term text", expectedDDText, new String(buf, 0, numchars));
- CategoryListParams clParams = ifip.getCategoryListParams(null);
assertEquals("partition for all ordinals is the first", "", PartitionsUtils.partitionNameByOrdinal(ifip, 250));
assertEquals("for partition 0, the same name should be returned", "", PartitionsUtils.partitionName(0));
assertEquals("for any other, it's the concatenation of name + partition", PartitionsUtils.PART_NAME_PREFIX + "1", PartitionsUtils.partitionName(1));
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java
index 26f1cd50507..a8d9d8bceef 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java
@@ -7,13 +7,13 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.encoding.DGapIntEncoder;
import org.apache.lucene.util.encoding.IntEncoder;
import org.apache.lucene.util.encoding.SortingIntEncoder;
@@ -38,7 +38,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class CategoryListIteratorTest extends LuceneTestCase {
+public class CategoryListIteratorTest extends FacetTestCase {
static final IntsRef[] data = new IntsRef[] {
new IntsRef(new int[] { 1, 2 }, 0, 2),
@@ -48,9 +48,9 @@ public class CategoryListIteratorTest extends LuceneTestCase {
};
@Test
- public void testPayloadCategoryListIteraor() throws Exception {
+ public void test() throws Exception {
Directory dir = newDirectory();
- final IntEncoder encoder = new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder())));
+ final IntEncoder encoder = randomCategoryListParams().createEncoder();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy()));
BytesRef buf = new BytesRef();
@@ -89,7 +89,7 @@ public class CategoryListIteratorTest extends LuceneTestCase {
}
@Test
- public void testPayloadIteratorWithInvalidDoc() throws Exception {
+ public void testEmptyDocuments() throws Exception {
Directory dir = newDirectory();
final IntEncoder encoder = new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder())));
// NOTE: test is wired to LogMP... because test relies on certain docids having payloads
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsCollectorTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsCollectorTest.java
index 8ec20d38a29..3904483751e 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsCollectorTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsCollectorTest.java
@@ -13,6 +13,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -40,7 +41,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.collections.ObjectToIntMap;
import org.apache.lucene.util.encoding.IntEncoder;
import org.apache.lucene.util.encoding.VInt8IntEncoder;
@@ -65,7 +65,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class CountingFacetsCollectorTest extends LuceneTestCase {
+public class CountingFacetsCollectorTest extends FacetTestCase {
private static final Term A = new Term("f", "a");
private static final CategoryPath CP_A = new CategoryPath("A"), CP_B = new CategoryPath("B");
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownTest.java
index 9a881cbad20..16846d64842 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownTest.java
@@ -10,6 +10,7 @@ import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -26,8 +27,8 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -49,9 +50,9 @@ import org.junit.Test;
* limitations under the License.
*/
-public class DrillDownTest extends LuceneTestCase {
+public class DrillDownTest extends FacetTestCase {
- private FacetIndexingParams defaultParams = FacetIndexingParams.ALL_PARENTS;
+ private FacetIndexingParams defaultParams;
private PerDimensionIndexingParams nonDefaultParams;
private static IndexReader reader;
private static DirectoryTaxonomyReader taxo;
@@ -60,9 +61,10 @@ public class DrillDownTest extends LuceneTestCase {
public DrillDownTest() {
Map<CategoryPath, CategoryListParams> paramsMap = new HashMap<CategoryPath, CategoryListParams>();
- paramsMap.put(new CategoryPath("a"), new CategoryListParams("testing_facets_a"));
- paramsMap.put(new CategoryPath("b"), new CategoryListParams("testing_facets_b"));
+ paramsMap.put(new CategoryPath("a"), randomCategoryListParams("testing_facets_a"));
+ paramsMap.put(new CategoryPath("b"), randomCategoryListParams("testing_facets_b"));
nonDefaultParams = new PerDimensionIndexingParams(paramsMap);
+ defaultParams = new FacetIndexingParams(randomCategoryListParams(CategoryListParams.DEFAULT_FIELD));
}
@BeforeClass
@@ -128,25 +130,25 @@ public class DrillDownTest extends LuceneTestCase {
IndexSearcher searcher = newSearcher(reader);
// Making sure the query yields 25 documents with the facet "a"
- Query q = DrillDown.query(defaultParams, null, new CategoryPath("a"));
+ Query q = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"));
TopDocs docs = searcher.search(q, 100);
assertEquals(25, docs.totalHits);
// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
- Query q2 = DrillDown.query(defaultParams, q, new CategoryPath("b"));
+ Query q2 = DrillDown.query(defaultParams, q, Occur.MUST, new CategoryPath("b"));
docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
// Making sure that a query of both facet "a" and facet "b" yields 5 results
- Query q3 = DrillDown.query(defaultParams, null, new CategoryPath("a"), new CategoryPath("b"));
+ Query q3 = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"), new CategoryPath("b"));
docs = searcher.search(q3, 100);
assertEquals(5, docs.totalHits);
// Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
- Query q4 = DrillDown.query(defaultParams, fooQuery, new CategoryPath("b"));
+ Query q4 = DrillDown.query(defaultParams, fooQuery, Occur.MUST, new CategoryPath("b"));
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
}
@@ -156,18 +158,18 @@ public class DrillDownTest extends LuceneTestCase {
IndexSearcher searcher = newSearcher(reader);
// Create the base query to start with
- Query q = DrillDown.query(defaultParams, null, new CategoryPath("a"));
+ Query q = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"));
// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
- Query q2 = DrillDown.query(defaultParams, q, new CategoryPath("b"));
+ Query q2 = DrillDown.query(defaultParams, q, Occur.MUST, new CategoryPath("b"));
TopDocs docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
// Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
- Query q4 = DrillDown.query(defaultParams, fooQuery, new CategoryPath("b"));
+ Query q4 = DrillDown.query(defaultParams, fooQuery, Occur.MUST, new CategoryPath("b"));
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
}
@@ -202,7 +204,7 @@ public class DrillDownTest extends LuceneTestCase {
}
// create a drill-down query with category "a", scores should not change
- q = DrillDown.query(defaultParams, q, new CategoryPath("a"));
+ q = DrillDown.query(defaultParams, q, Occur.MUST, new CategoryPath("a"));
docs = searcher.search(q, reader.maxDoc()); // fetch all available docs to this query
for (ScoreDoc sd : docs.scoreDocs) {
assertEquals("score of doc=" + sd.doc + " modified", scores[sd.doc], sd.score, 0f);
@@ -214,11 +216,21 @@ public class DrillDownTest extends LuceneTestCase {
// verify that drill-down queries (with no base query) returns 0.0 score
IndexSearcher searcher = newSearcher(reader);
- Query q = DrillDown.query(defaultParams, null, new CategoryPath("a"));
+ Query q = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"));
TopDocs docs = searcher.search(q, reader.maxDoc()); // fetch all available docs to this query
for (ScoreDoc sd : docs.scoreDocs) {
assertEquals(0f, sd.score, 0f);
}
}
+
+ @Test
+ public void testOrQuery() throws Exception {
+ IndexSearcher searcher = newSearcher(reader);
+
+    // Making sure that a query of facet "a" OR facet "b" yields 40 results:
+    // 25 docs match "a", 20 match "b", and 5 match both (25 + 20 - 5 = 40)
+ Query q = DrillDown.query(defaultParams, null, Occur.SHOULD, new CategoryPath("a"), new CategoryPath("b"));
+ TopDocs docs = searcher.search(q, 100);
+ assertEquals(40, docs.totalHits);
+ }
}
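The Occur argument decides how the drill-down clauses combine: MUST intersects the category constraints (hence 25 hits for "a" alone and 5 for "a" AND "b" above), while SHOULD unions them (25 + 20 - 5 = 40 in testOrQuery). A minimal sketch of that composition, assuming ordinary 4.x BooleanQuery semantics; termForPath() is a hypothetical stand-in for DrillDown's internal CategoryPath-to-Term translation, not the shipped implementation:

    // Sketch only -- illustrates the clause structure the tests above exercise.
    static Query drillDownSketch(Query baseQuery, Occur occur, CategoryPath... paths) {
      BooleanQuery clauses = new BooleanQuery(true); // coord disabled; drill-down scores are constant
      for (CategoryPath cp : paths) {
        clauses.add(new TermQuery(termForPath(cp)), occur); // MUST = AND, SHOULD = OR
      }
      if (baseQuery == null) {
        return clauses; // pure drill-down; matches score 0.0, as the scoring test above asserts
      }
      BooleanQuery combined = new BooleanQuery(true);
      combined.add(baseQuery, Occur.MUST); // the base query always constrains the result
      combined.add(clauses, Occur.MUST);
      return combined;
    }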
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java
index 0331c1607ee..a4f53e7da0a 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.FacetTestUtils;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.search.params.CountFacetRequest;
@@ -39,14 +40,14 @@ import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-public class TestDemoFacets extends LuceneTestCase {
+public class TestDemoFacets extends FacetTestCase {
private DirectoryTaxonomyWriter taxoWriter;
private RandomIndexWriter writer;
- private FacetFields docBuilder;
+ private FacetFields facetFields;
private void add(String ... categoryPaths) throws IOException {
Document doc = new Document();
@@ -55,7 +56,7 @@ public class TestDemoFacets extends LuceneTestCase {
for(String categoryPath : categoryPaths) {
paths.add(new CategoryPath(categoryPath, '/'));
}
- docBuilder.addFields(doc, paths);
+ facetFields.addFields(doc, paths);
writer.addDocument(doc);
}
@@ -70,7 +71,7 @@ public class TestDemoFacets extends LuceneTestCase {
// Reused across documents, to add the necessary facet
// fields:
- docBuilder = new FacetFields(taxoWriter);
+ facetFields = new FacetFields(taxoWriter);
add("Author/Bob", "Publish Date/2010/10/15");
add("Author/Lisa", "Publish Date/2010/10/20");
@@ -111,7 +112,7 @@ public class TestDemoFacets extends LuceneTestCase {
// Now user drills down on Publish Date/2010:
fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Author"), 10));
- Query q2 = DrillDown.query(fsp, new MatchAllDocsQuery(), new CategoryPath("Publish Date/2010", '/'));
+ Query q2 = DrillDown.query(fsp, new MatchAllDocsQuery(), Occur.MUST, new CategoryPath("Publish Date/2010", '/'));
c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
searcher.search(q2, c);
results = c.getFacetResults();
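The drill-down category above is built with the char-delimiter constructor, which splits one string into path components. A small illustration of its equivalence to the varargs form used elsewhere in these tests:

    // "Publish Date/2010/10/15" with '/' splits into {"Publish Date", "2010", "10", "15"}:
    CategoryPath viaDelimiter = new CategoryPath("Publish Date/2010/10/15", '/');
    CategoryPath viaComponents = new CategoryPath("Publish Date", "2010", "10", "15"); // same path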
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetArrays.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetArrays.java
index 05c19a807b8..8aab807a6d2 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetArrays.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetArrays.java
@@ -1,6 +1,6 @@
package org.apache.lucene.facet.search;
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
/*
@@ -20,7 +20,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestFacetArrays extends LuceneTestCase {
+public class TestFacetArrays extends FacetTestCase {
@Test
public void testFacetArrays() {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsAccumulatorWithComplement.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsAccumulatorWithComplement.java
index ea7eec066e9..13fc3d18dcf 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsAccumulatorWithComplement.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsAccumulatorWithComplement.java
@@ -123,15 +123,10 @@ public class TestFacetsAccumulatorWithComplement extends FacetTestBase {
}
- private FacetSearchParams getFacetSearchParams() {
- return new FacetSearchParams(new CountFacetRequest(new CategoryPath("root","a"), 10));
- }
-
/** compute facets with certain facet requests and docs */
private List findFacets(ScoredDocIDs sDocids, boolean withComplement) throws IOException {
-
- FacetsAccumulator fAccumulator =
- new StandardFacetsAccumulator(getFacetSearchParams(), indexReader, taxoReader);
+ FacetSearchParams fsp = new FacetSearchParams(getFacetIndexingParams(Integer.MAX_VALUE), new CountFacetRequest(new CategoryPath("root","a"), 10));
+ FacetsAccumulator fAccumulator = new StandardFacetsAccumulator(fsp, indexReader, taxoReader);
fAccumulator.setComplementThreshold(
withComplement ?
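For context on what the complement threshold toggles: once the ratio of matching documents crosses it, the accumulator counts the documents that do NOT match and derives per-category counts by subtracting from precomputed totals, which is cheaper for very broad queries. A hedged sketch; the threshold value here is hypothetical:

    // Sketch: enable complement counting once a query matches "most" of the index.
    FacetsAccumulator acc = new StandardFacetsAccumulator(fsp, indexReader, taxoReader);
    acc.setComplementThreshold(0.6); // hypothetical ratio; above it, count non-matches instead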
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java
index 4a1e83d0db0..5c48be09b85 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java
@@ -7,6 +7,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.search.params.FacetSearchParams;
import org.apache.lucene.facet.search.params.ScoreFacetRequest;
@@ -24,7 +25,6 @@ import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -44,7 +44,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestFacetsCollector extends LuceneTestCase {
+public class TestFacetsCollector extends FacetTestCase {
@Test
public void testFacetsWithDocScore() throws Exception {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestMultipleCategoryLists.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestMultipleCategoryLists.java
index a9e5dc84435..a0da35d6616 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestMultipleCategoryLists.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestMultipleCategoryLists.java
@@ -13,6 +13,7 @@ import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.FacetTestUtils;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
@@ -41,7 +42,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -61,7 +61,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestMultipleCategoryLists extends LuceneTestCase {
+public class TestMultipleCategoryLists extends FacetTestCase {
private static final CategoryPath[] CATEGORIES = new CategoryPath[] {
new CategoryPath("Author", "Mark Twain"),
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestScoredDocIdCollector.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestScoredDocIdCollector.java
index 0fc8b5522e7..bac10b19659 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestScoredDocIdCollector.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestScoredDocIdCollector.java
@@ -4,24 +4,18 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.junit.Before;
-import org.junit.Test;
-
import org.apache.lucene.facet.FacetTestBase;
-import org.apache.lucene.facet.search.FacetsAccumulator;
-import org.apache.lucene.facet.search.ScoredDocIDs;
-import org.apache.lucene.facet.search.ScoredDocIDsIterator;
-import org.apache.lucene.facet.search.ScoredDocIdCollector;
-import org.apache.lucene.facet.search.StandardFacetsAccumulator;
import org.apache.lucene.facet.search.params.CountFacetRequest;
import org.apache.lucene.facet.search.params.FacetSearchParams;
import org.apache.lucene.facet.search.params.ScoreFacetRequest;
import org.apache.lucene.facet.search.results.FacetResult;
import org.apache.lucene.facet.search.results.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.junit.Before;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -65,8 +59,7 @@ public class TestScoredDocIdCollector extends FacetTestBase {
System.out.println("Query: " + q);
}
float constScore = 17.0f;
- ScoredDocIdCollector dCollector = ScoredDocIdCollector.create(indexReader
- .maxDoc(), false); // scoring is disabled
+ ScoredDocIdCollector dCollector = ScoredDocIdCollector.create(indexReader.maxDoc(), false); // scoring is disabled
dCollector.setDefaultScore(constScore);
searcher.search(q, dCollector);
@@ -75,13 +68,16 @@ public class TestScoredDocIdCollector extends FacetTestBase {
assertEquals("Wrong number of matching documents!", 2, scoredDocIDs.size());
ScoredDocIDsIterator docItr = scoredDocIDs.iterator();
while (docItr.next()) {
- assertEquals("Wrong score for doc " + docItr.getDocID(), constScore,
- docItr.getScore(), Double.MIN_VALUE);
+ assertEquals("Wrong score for doc " + docItr.getDocID(), constScore, docItr.getScore(), Double.MIN_VALUE);
}
// verify by facet values
- List countRes = findFacets(scoredDocIDs, getFacetSearchParams());
- List scoreRes = findFacets(scoredDocIDs, sumScoreSearchParams());
+ CategoryPath cp = new CategoryPath("root","a");
+ FacetSearchParams countFSP = new FacetSearchParams(getFacetIndexingParams(Integer.MAX_VALUE), new CountFacetRequest(cp, 10));
+ FacetSearchParams scoreFSP = new FacetSearchParams(getFacetIndexingParams(Integer.MAX_VALUE), new ScoreFacetRequest(cp, 10));
+
+ List countRes = findFacets(scoredDocIDs, countFSP);
+ List scoreRes = findFacets(scoredDocIDs, scoreFSP);
assertEquals("Wrong number of facet count results!", 1, countRes.size());
assertEquals("Wrong number of facet score results!", 1, scoreRes.size());
@@ -151,14 +147,4 @@ public class TestScoredDocIdCollector extends FacetTestBase {
}
}
- /* use a scoring aggregator */
- private FacetSearchParams sumScoreSearchParams() {
- // this will use default faceted indexing params, not altering anything about indexing
- return new FacetSearchParams(new ScoreFacetRequest(new CategoryPath("root", "a"), 10));
- }
-
- private FacetSearchParams getFacetSearchParams() {
- return new FacetSearchParams(new CountFacetRequest(new CategoryPath("root","a"), 10));
- }
-
-}
\ No newline at end of file
+}
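Because the collector above is created with scoring disabled, every hit reports the injected default score. A short usage sketch; getScoredDocIDs() is assumed to be the accessor behind the scoredDocIDs variable above, and the query term is illustrative:

    // Collect matching doc IDs without scoring; all hits report the default score.
    ScoredDocIdCollector collector = ScoredDocIdCollector.create(indexReader.maxDoc(), false);
    collector.setDefaultScore(17.0f);
    Query q = new TermQuery(new Term("content", "white")); // illustrative query
    searcher.search(q, collector);
    ScoredDocIDsIterator it = collector.getScoredDocIDs().iterator();
    while (it.next()) {
      float score = it.getScore(); // always 17.0f here, since scoring was disabled
    }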
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java
index 50f1d171610..06e09480458 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java
@@ -8,6 +8,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -32,7 +33,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -52,7 +52,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestStandardFacetsAccumulator extends LuceneTestCase {
+public class TestStandardFacetsAccumulator extends FacetTestCase {
private void indexTwoDocs(IndexWriter indexWriter, FacetFields facetFields, boolean withContent) throws Exception {
for (int i = 0; i < 2; i++) {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestTopKInEachNodeResultHandler.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestTopKInEachNodeResultHandler.java
index 7a3f3af2897..856e36f94a4 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestTopKInEachNodeResultHandler.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestTopKInEachNodeResultHandler.java
@@ -9,6 +9,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.params.CountFacetRequest;
@@ -30,7 +31,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -50,7 +50,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
+public class TestTopKInEachNodeResultHandler extends FacetTestCase {
//TODO (Facet): Move to extend BaseTestTopK and separate to several smaller test cases (methods) - see TestTopKResultsHandler
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCounts.java
index 7dfd8c6c0de..82388b7fabb 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCounts.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCounts.java
@@ -4,13 +4,13 @@ import java.io.File;
import java.io.IOException;
import java.util.Arrays;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.FacetTestUtils;
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyReaderPair;
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyWriterPair;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.junit.Test;
@@ -31,7 +31,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestTotalFacetCounts extends LuceneTestCase {
+public class TestTotalFacetCounts extends FacetTestCase {
private static void initCache(int numEntries) {
TotalFacetCountsCache.getSingleton().clear();
@@ -53,8 +53,7 @@ public class TestTotalFacetCounts extends LuceneTestCase {
// Create temporary RAMDirectories
Directory[][] dirs = FacetTestUtils.createIndexTaxonomyDirs(1);
// Create our index/taxonomy writers
- IndexTaxonomyWriterPair[] writers = FacetTestUtils
- .createIndexTaxonomyWriterPair(dirs);
+ IndexTaxonomyWriterPair[] writers = FacetTestUtils.createIndexTaxonomyWriterPair(dirs);
FacetIndexingParams iParams = new FacetIndexingParams() {
@Override
public int getPartitionSize() {
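The anonymous subclass above is the configuration point for partitioning: getPartitionSize() caps how many category ordinals share one counting partition, and a small value forces the multi-partition code paths this test exercises. A sketch with an illustrative value (the concrete size used by the test is not shown here):

    FacetIndexingParams smallPartitions = new FacetIndexingParams() {
      @Override
      public int getPartitionSize() {
        return 2; // tiny partitions -> many partitions -> multi-partition paths get covered
      }
    };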
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCountsCache.java b/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCountsCache.java
index 5ed5a1612f1..a767a8d06dd 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCountsCache.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/TestTotalFacetCountsCache.java
@@ -8,6 +8,7 @@ import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.FacetTestUtils;
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyReaderPair;
import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyWriterPair;
@@ -32,7 +33,6 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.SlowRAMDirectory;
import org.apache.lucene.util._TestUtil;
import org.junit.Before;
@@ -55,7 +55,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestTotalFacetCountsCache extends LuceneTestCase {
+public class TestTotalFacetCountsCache extends FacetTestCase {
static final TotalFacetCountsCache TFC = TotalFacetCountsCache.getSingleton();
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/associations/AssociationsFacetRequestTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/associations/AssociationsFacetRequestTest.java
index c8eeed3b829..d35d31d00c2 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/associations/AssociationsFacetRequestTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/associations/AssociationsFacetRequestTest.java
@@ -5,6 +5,7 @@ import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.associations.AssociationsFacetFields;
import org.apache.lucene.facet.associations.CategoryAssociationsContainer;
import org.apache.lucene.facet.associations.CategoryFloatAssociation;
@@ -47,7 +48,7 @@ import org.junit.Test;
*/
/** Test for associations */
-public class AssociationsFacetRequestTest extends LuceneTestCase {
+public class AssociationsFacetRequestTest extends FacetTestCase {
private static Directory dir;
private static IndexReader reader;
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetRequestTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetRequestTest.java
index e9db167f06e..87ade2c1798 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetRequestTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetRequestTest.java
@@ -1,15 +1,14 @@
package org.apache.lucene.facet.search.params;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.Directory;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.search.FacetResultsHandler;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.Directory;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -28,7 +27,7 @@ import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
* limitations under the License.
*/
-public class FacetRequestTest extends LuceneTestCase {
+public class FacetRequestTest extends FacetTestCase {
@Test(expected=IllegalArgumentException.class)
public void testIllegalNumResults() throws Exception {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java
index 7d6253f3814..e75d1766ba6 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java
@@ -1,6 +1,6 @@
package org.apache.lucene.facet.search.params;
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
/*
@@ -20,7 +20,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class FacetSearchParamsTest extends LuceneTestCase {
+public class FacetSearchParamsTest extends FacetTestCase {
@Test
public void testSearchParamsWithNullRequest() throws Exception {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/params/MultiCategoryListIteratorTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/params/MultiCategoryListIteratorTest.java
index 88a1d90e897..861e607a9df 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/params/MultiCategoryListIteratorTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/params/MultiCategoryListIteratorTest.java
@@ -5,6 +5,7 @@ import java.util.HashMap;
import java.util.Random;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.PerDimensionIndexingParams;
@@ -22,7 +23,6 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.encoding.IntDecoder;
import org.junit.Test;
@@ -43,7 +43,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class MultiCategoryListIteratorTest extends LuceneTestCase {
+public class MultiCategoryListIteratorTest extends FacetTestCase {
@Test
public void testMultipleCategoryLists() throws Exception {
@@ -58,7 +58,7 @@ public class MultiCategoryListIteratorTest extends LuceneTestCase {
HashMap clps = new HashMap();
for (String dim : dimensions) {
CategoryPath cp = new CategoryPath(dim);
- CategoryListParams clp = new CategoryListParams("$" + dim);
+ CategoryListParams clp = randomCategoryListParams("$" + dim);
clps.put(cp, clp);
}
PerDimensionIndexingParams indexingParams = new PerDimensionIndexingParams(clps);
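Mapping each dimension to its own CategoryListParams (here via the "$" + dimension field-name convention) stores each dimension's ordinals in a separate index field, which is what forces the multi-category-list iterator to merge several streams. A compact sketch of the mapping, with illustrative dimension names:

    // One category list (index field) per dimension:
    HashMap<CategoryPath, CategoryListParams> perDim = new HashMap<CategoryPath, CategoryListParams>();
    for (String dim : new String[] { "author", "date" }) { // names are illustrative
      perDim.put(new CategoryPath(dim), new CategoryListParams("$" + dim)); // fields "$author", "$date"
    }
    PerDimensionIndexingParams params = new PerDimensionIndexingParams(perDim);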
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/search/sampling/OversampleWithDepthTest.java b/lucene/facet/src/test/org/apache/lucene/facet/search/sampling/OversampleWithDepthTest.java
index ffec1971c96..1117fcf1d73 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/search/sampling/OversampleWithDepthTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/search/sampling/OversampleWithDepthTest.java
@@ -5,7 +5,9 @@ import java.util.Collections;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
+import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetsAccumulator;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.search.StandardFacetsCollector;
@@ -20,7 +22,6 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
-import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@@ -28,9 +29,7 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -50,16 +49,18 @@ import org.junit.Test;
* limitations under the License.
*/
-public class OversampleWithDepthTest extends LuceneTestCase {
+public class OversampleWithDepthTest extends FacetTestCase {
@Test
public void testCountWithdepthUsingSampling() throws Exception, IOException {
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();
+ FacetIndexingParams fip = new FacetIndexingParams(randomCategoryListParams());
+
// index 100 docs, each with one category: ["root", docnum/10, docnum]
// e.g. root/8/87
- index100Docs(indexDir, taxoDir);
+ index100Docs(indexDir, taxoDir, fip);
DirectoryReader r = DirectoryReader.open(indexDir);
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
@@ -69,7 +70,7 @@ public class OversampleWithDepthTest extends LuceneTestCase {
facetRequest.setDepth(2);
facetRequest.setResultMode(ResultMode.PER_NODE_IN_TREE);
- FacetSearchParams fsp = new FacetSearchParams(facetRequest);
+ FacetSearchParams fsp = new FacetSearchParams(fip, facetRequest);
// Craft sampling params to enforce sampling
final SamplingParams params = new SamplingParams();
@@ -93,13 +94,12 @@ public class OversampleWithDepthTest extends LuceneTestCase {
IOUtils.close(r, tr, indexDir, taxoDir);
}
- private void index100Docs(Directory indexDir, Directory taxoDir)
- throws CorruptIndexException, LockObtainFailedException, IOException {
+ private void index100Docs(Directory indexDir, Directory taxoDir, FacetIndexingParams fip) throws IOException {
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer());
IndexWriter w = new IndexWriter(indexDir, iwc);
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
- FacetFields facetFields = new FacetFields(tw);
+ FacetFields facetFields = new FacetFields(tw, fip);
for (int i = 0; i < 100; i++) {
Document doc = new Document();
CategoryPath cp = new CategoryPath("root",Integer.toString(i / 10), Integer.toString(i));
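The path arithmetic in the comment works out as follows: document i is filed under root / (i/10) / i using integer division, so root gets ten children with ten leaves each, which is what makes the depth-2 oversampling request meaningful. As a worked line:

    // i = 87: 87 / 10 = 8 (integer division) -> CategoryPath("root", "8", "87"), i.e. root/8/87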
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCategoryPath.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCategoryPath.java
index ce3f29f7634..b690a643489 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCategoryPath.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCategoryPath.java
@@ -1,6 +1,6 @@
package org.apache.lucene.facet.taxonomy;
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
/*
@@ -20,7 +20,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestCategoryPath extends LuceneTestCase {
+public class TestCategoryPath extends FacetTestCase {
@Test
public void testBasic() {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
index 2225f96ade3..cddee220b51 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
@@ -7,13 +7,13 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.SlowRAMDirectory;
import org.junit.Test;
@@ -37,7 +37,7 @@ import org.junit.Test;
// TODO: remove this suppress if we fix the TaxoWriter Codec to a non-default (see todo in DirTW)
@SuppressCodecs("SimpleText")
-public class TestTaxonomyCombined extends LuceneTestCase {
+public class TestTaxonomyCombined extends FacetTestCase {
/** The following categories will be added to the taxonomy by
fillTaxonomy(), and tested by all tests below:
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
index 3086d1aece6..925e857ec79 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
@@ -5,13 +5,13 @@ import java.util.HashSet;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.DiskOrdinalMap;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.OrdinalMap;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
/*
@@ -31,7 +31,7 @@ import org.apache.lucene.util._TestUtil;
* limitations under the License.
*/
-public class TestAddTaxonomy extends LuceneTestCase {
+public class TestAddTaxonomy extends FacetTestCase {
private void dotest(int ncats, final int range) throws Exception {
final AtomicInteger numCats = new AtomicInteger(ncats);
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java
index e120c18a435..a5b76581de3 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java
@@ -8,6 +8,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
@@ -17,7 +18,6 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -37,7 +37,7 @@ import org.apache.lucene.util.LuceneTestCase;
*/
/** Tests concurrent indexing with facets. */
-public class TestConcurrentFacetedIndexing extends LuceneTestCase {
+public class TestConcurrentFacetedIndexing extends FacetTestCase {
// A No-Op TaxonomyWriterCache which always discards all given categories, and
// always returns true in put(), to indicate some cache entries were cleared.
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
index 301ef294310..2ae22a13c1c 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
@@ -4,19 +4,19 @@ import java.io.IOException;
import java.util.Random;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -36,7 +36,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestDirectoryTaxonomyReader extends LuceneTestCase {
+public class TestDirectoryTaxonomyReader extends FacetTestCase {
@Test
public void testCloseAfterIncRef() throws Exception {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java
index bed2e60f948..8c10ffba7c0 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java
@@ -7,6 +7,7 @@ import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
@@ -21,7 +22,6 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -41,7 +41,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestDirectoryTaxonomyWriter extends LuceneTestCase {
+public class TestDirectoryTaxonomyWriter extends FacetTestCase {
// A No-Op TaxonomyWriterCache which always discards all given categories, and
// always returns true in put(), to indicate some cache entries were cleared.
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCharBlockArray.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCharBlockArray.java
index b6ff0219b82..747acfe1df3 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCharBlockArray.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCharBlockArray.java
@@ -9,11 +9,9 @@ import java.nio.ByteBuffer;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CodingErrorAction;
-import org.junit.Test;
-
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.facet.taxonomy.writercache.cl2o.CharBlockArray;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -32,7 +30,7 @@ import org.apache.lucene.facet.taxonomy.writercache.cl2o.CharBlockArray;
* limitations under the License.
*/
-public class TestCharBlockArray extends LuceneTestCase {
+public class TestCharBlockArray extends FacetTestCase {
@Test public void testArray() throws Exception {
CharBlockArray array = new CharBlockArray();
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCompactLabelToOrdinal.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCompactLabelToOrdinal.java
index 74e1743a1d1..f9fab9708f4 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCompactLabelToOrdinal.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/writercache/cl2o/TestCompactLabelToOrdinal.java
@@ -8,14 +8,11 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Random;
-import org.junit.Test;
-
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
-import org.apache.lucene.facet.taxonomy.writercache.cl2o.CompactLabelToOrdinal;
-import org.apache.lucene.facet.taxonomy.writercache.cl2o.LabelToOrdinal;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util._TestUtil;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -34,7 +31,7 @@ import org.apache.lucene.facet.taxonomy.writercache.cl2o.LabelToOrdinal;
* limitations under the License.
*/
-public class TestCompactLabelToOrdinal extends LuceneTestCase {
+public class TestCompactLabelToOrdinal extends FacetTestCase {
@Test
public void testL2O() throws Exception {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/util/TestScoredDocIDsUtils.java b/lucene/facet/src/test/org/apache/lucene/facet/util/TestScoredDocIDsUtils.java
index 3ae661521d9..cb1d7eac9fc 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/util/TestScoredDocIDsUtils.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/util/TestScoredDocIDsUtils.java
@@ -9,6 +9,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.search.ScoredDocIDs;
import org.apache.lucene.facet.search.ScoredDocIDsIterator;
import org.apache.lucene.facet.search.ScoredDocIdCollector;
@@ -25,7 +26,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
@@ -45,7 +45,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class TestScoredDocIDsUtils extends LuceneTestCase {
+public class TestScoredDocIDsUtils extends FacetTestCase {
@Test
public void testComplementIterator() throws Exception {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayInputStreamTest.java b/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayInputStreamTest.java
index 2c5dc76490b..e88bd22895f 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayInputStreamTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayInputStreamTest.java
@@ -3,11 +3,9 @@ package org.apache.lucene.util;
import java.io.IOException;
import java.util.Arrays;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.UnsafeByteArrayInputStream;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -25,7 +23,7 @@ import org.apache.lucene.util.UnsafeByteArrayInputStream;
* limitations under the License.
*/
-public class UnsafeByteArrayInputStreamTest extends LuceneTestCase {
+public class UnsafeByteArrayInputStreamTest extends FacetTestCase {
@Test
public void testSimple() throws IOException {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayOutputStreamTest.java b/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayOutputStreamTest.java
index b3bad7987b5..b7af76aa2f6 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayOutputStreamTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/UnsafeByteArrayOutputStreamTest.java
@@ -2,11 +2,9 @@ package org.apache.lucene.util;
import java.io.IOException;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.UnsafeByteArrayOutputStream;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -24,7 +22,7 @@ import org.apache.lucene.util.UnsafeByteArrayOutputStream;
* limitations under the License.
*/
-public class UnsafeByteArrayOutputStreamTest extends LuceneTestCase {
+public class UnsafeByteArrayOutputStreamTest extends FacetTestCase {
@Test
public void testSimpleWrite() throws IOException {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/ArrayHashMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/ArrayHashMapTest.java
index 2a79064fc09..4b0c3ba90fe 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/ArrayHashMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/ArrayHashMapTest.java
@@ -4,11 +4,9 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.ArrayHashMap;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -26,7 +24,7 @@ import org.apache.lucene.util.collections.ArrayHashMap;
* limitations under the License.
*/
-public class ArrayHashMapTest extends LuceneTestCase {
+public class ArrayHashMapTest extends FacetTestCase {
public static final int RANDOM_TEST_NUM_ITERATIONS = 100; // set to 100,000 for deeper test
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/FloatToObjectMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/FloatToObjectMapTest.java
index 5b4d1a3443a..a627ea1e423 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/FloatToObjectMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/FloatToObjectMapTest.java
@@ -1,13 +1,11 @@
package org.apache.lucene.util.collections;
-import org.junit.Test;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.FloatIterator;
-import org.apache.lucene.util.collections.FloatToObjectMap;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -26,7 +24,7 @@ import org.apache.lucene.util.collections.FloatToObjectMap;
* limitations under the License.
*/
-public class FloatToObjectMapTest extends LuceneTestCase {
+public class FloatToObjectMapTest extends FacetTestCase {
@Test
public void test0() {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/IntArrayTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/IntArrayTest.java
index 87843c180ef..73b33465622 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/IntArrayTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/IntArrayTest.java
@@ -1,10 +1,8 @@
package org.apache.lucene.util.collections;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntArray;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -22,7 +20,7 @@ import org.apache.lucene.util.collections.IntArray;
* limitations under the License.
*/
-public class IntArrayTest extends LuceneTestCase {
+public class IntArrayTest extends FacetTestCase {
@Test
public void test0() {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/IntHashSetTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/IntHashSetTest.java
index 94692f293f2..1440925f23a 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/IntHashSetTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/IntHashSetTest.java
@@ -2,11 +2,9 @@ package org.apache.lucene.util.collections;
import java.util.HashSet;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntHashSet;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -24,7 +22,7 @@ import org.apache.lucene.util.collections.IntHashSet;
* limitations under the License.
*/
-public class IntHashSetTest extends LuceneTestCase {
+public class IntHashSetTest extends FacetTestCase {
@Test
public void test0() {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToDoubleMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToDoubleMapTest.java
index 46d9d1645e2..9380cc1e444 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToDoubleMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToDoubleMapTest.java
@@ -1,15 +1,11 @@
package org.apache.lucene.util.collections;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.DoubleIterator;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToDoubleMap;
-
import java.util.HashSet;
import java.util.Random;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +23,8 @@ import java.util.Random;
* limitations under the License.
*/
-public class IntToDoubleMapTest extends LuceneTestCase {
+public class IntToDoubleMapTest extends FacetTestCase {
+
private static void assertGround(double value) {
assertEquals(IntToDoubleMap.GROUND, value, Double.MAX_VALUE);
}
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToFloatMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToFloatMapTest.java
index 034e31d17d6..4c9fc88b635 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToFloatMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToFloatMapTest.java
@@ -1,15 +1,11 @@
package org.apache.lucene.util.collections;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.FloatIterator;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToFloatMap;
-
import java.util.HashSet;
import java.util.Random;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +23,8 @@ import java.util.Random;
* limitations under the License.
*/
-public class IntToFloatMapTest extends LuceneTestCase {
+public class IntToFloatMapTest extends FacetTestCase {
+
private static void assertGround(float value) {
assertEquals(IntToFloatMap.GROUND, value, Float.MAX_VALUE);
}
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToIntMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToIntMapTest.java
index dbd40d21e70..6877a89997b 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToIntMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToIntMapTest.java
@@ -1,12 +1,10 @@
package org.apache.lucene.util.collections;
-import org.junit.Test;
import java.util.HashSet;
import java.util.Random;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToIntMap;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -25,7 +23,7 @@ import org.apache.lucene.util.collections.IntToIntMap;
* limitations under the License.
*/
-public class IntToIntMapTest extends LuceneTestCase {
+public class IntToIntMapTest extends FacetTestCase {
private static void assertGround(int value) {
assertEquals(IntToIntMap.GROUD, value);
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToObjectMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToObjectMapTest.java
index 7cb8b8b02fa..91337451b68 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/IntToObjectMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/IntToObjectMapTest.java
@@ -4,12 +4,9 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToObjectMap;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +24,7 @@ import org.apache.lucene.util.collections.IntToObjectMap;
* limitations under the License.
*/
-public class IntToObjectMapTest extends LuceneTestCase {
+public class IntToObjectMapTest extends FacetTestCase {
@Test
public void test0() {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToFloatMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToFloatMapTest.java
index d1c4f27b970..7d00a16b39a 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToFloatMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToFloatMapTest.java
@@ -1,15 +1,12 @@
package org.apache.lucene.util.collections;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.FloatIterator;
-import org.apache.lucene.util.collections.ObjectToFloatMap;
-
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +24,7 @@ import java.util.Random;
* limitations under the License.
*/
-public class ObjectToFloatMapTest extends LuceneTestCase {
+public class ObjectToFloatMapTest extends FacetTestCase {
@Test
public void test0() {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToIntMapTest.java b/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToIntMapTest.java
index a5629a3dfe3..94d74ffd7a0 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToIntMapTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/ObjectToIntMapTest.java
@@ -6,6 +6,7 @@ import java.util.Random;
import org.junit.Test;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.collections.IntIterator;
import org.apache.lucene.util.collections.ObjectToIntMap;
@@ -27,7 +28,7 @@ import org.apache.lucene.util.collections.ObjectToIntMap;
* limitations under the License.
*/
-public class ObjectToIntMapTest extends LuceneTestCase {
+public class ObjectToIntMapTest extends FacetTestCase {
@Test
public void test0() {
diff --git a/lucene/facet/src/test/org/apache/lucene/util/collections/TestLRUHashMap.java b/lucene/facet/src/test/org/apache/lucene/util/collections/TestLRUHashMap.java
index 01f028b8876..ddf3301eeda 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/collections/TestLRUHashMap.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/collections/TestLRUHashMap.java
@@ -1,10 +1,8 @@
package org.apache.lucene.util.collections;
+import org.apache.lucene.facet.FacetTestCase;
import org.junit.Test;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.LRUHashMap;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -22,7 +20,7 @@ import org.apache.lucene.util.collections.LRUHashMap;
* limitations under the License.
*/
-public class TestLRUHashMap extends LuceneTestCase {
+public class TestLRUHashMap extends FacetTestCase {
// testLRU() tests that the specified size limit is indeed honored, and
// the remaining objects in the map are indeed those that have been most
// recently used
diff --git a/lucene/facet/src/test/org/apache/lucene/util/encoding/EncodingTest.java b/lucene/facet/src/test/org/apache/lucene/util/encoding/EncodingTest.java
index 411b9aec79e..6383beaec0b 100644
--- a/lucene/facet/src/test/org/apache/lucene/util/encoding/EncodingTest.java
+++ b/lucene/facet/src/test/org/apache/lucene/util/encoding/EncodingTest.java
@@ -3,9 +3,9 @@ package org.apache.lucene.util.encoding;
import java.io.IOException;
import java.util.Arrays;
+import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -26,7 +26,7 @@ import org.junit.Test;
* limitations under the License.
*/
-public class EncodingTest extends LuceneTestCase {
+public class EncodingTest extends FacetTestCase {
private static IntsRef uniqueSortedData, data;
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 03f39709944..9ea9b2fd4da 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -63,6 +63,9 @@ Detailed Change List
New Features
----------------------
+* SOLR-4043: Add ability to get success/failure responses from Collections API.
+ (Raintung Li, Mark Miller)
+
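As an illustrative sketch only (not part of this patch): with SOLR-4043, a
SolrJ client calling the Collections API now gets back a NamedList holding
one entry per shard, carrying either the shard's response or the exception
that failed it. The base URL and collection name below are hypothetical.

    import org.apache.solr.client.solrj.impl.HttpSolrServer;
    import org.apache.solr.client.solrj.request.QueryRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.util.NamedList;

    public class CollectionsApiResponseExample {
      public static void main(String[] args) throws Exception {
        HttpSolrServer server = new HttpSolrServer("http://localhost:8983/solr");
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("action", "CREATE");
        params.set("name", "mycollection"); // hypothetical collection name
        params.set("numShards", 2);
        QueryRequest request = new QueryRequest(params);
        request.setPath("/admin/collections");
        // The call now blocks until the Overseer reports back, so the
        // NamedList contains the per-shard results (or exceptions).
        NamedList<Object> response = server.request(request);
        System.out.println(response);
        server.shutdown();
      }
    }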
Bug Fixes
----------------------
@@ -80,6 +83,16 @@ Bug Fixes
* SOLR-4349: Admin UI - Query Interface does not work in IE
(steffkes)
+* SOLR-4359: The RecentUpdates#update method should treat a problem reading the
+ next record the same as a problem parsing the record - log the exception and
+ break. (Mark Miller)
+
+* SOLR-4225: Term info page under schema browser shows incorrect count of terms
+ (steffkes)
+
+* SOLR-3926: Solr should support a better way of finding active sorts (Eirik Lygre via
+ Erick Erickson)
+
Optimizations
----------------------
@@ -100,6 +113,8 @@ Other Changes
* SOLR-4348: Make the lock type configurable by system property by default.
(Mark Miller)
+* SOLR-4353: Renamed example jetty context file to reduce confusion (hossman)
+
================== 4.1.0 ==================
Versions of Major Components
@@ -607,6 +622,9 @@ Bug Fixes
* SOLR-4266: HttpSolrServer does not release connection properly on exception
when no response parser is used. (Steve Molloy via Mark Miller)
+* SOLR-2298: Updated JavaDoc for SolrDocument.addField and SolrInputDocument.addField
+ to have more information on name and value parameters. (Siva Natarajan)
+
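For reference, a minimal sketch of the addField behavior that the updated
JavaDoc describes (illustrative only; the field names are made up): repeating
a name appends another value, and a Collection value adds all of its elements.

    import java.util.Arrays;
    import org.apache.solr.common.SolrInputDocument;

    public class AddFieldExample {
      public static void main(String[] args) {
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "book-1");                  // single-valued field
        doc.addField("cat", "fiction");                // first value
        doc.addField("cat", "classics");               // same name appends a value
        doc.addField("tags", Arrays.asList("a", "b")); // a Collection adds all values
        System.out.println(doc);
      }
    }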
Other Changes
----------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestBuiltInEvaluators.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestBuiltInEvaluators.java
index 3c669d90e8e..f07d4abacab 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestBuiltInEvaluators.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestBuiltInEvaluators.java
@@ -17,7 +17,6 @@
package org.apache.solr.handler.dataimport;
import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import java.net.URLEncoder;
@@ -109,58 +108,63 @@ public class TestBuiltInEvaluators extends AbstractDataImportHandlerTestCase {
}
- private Date getNow() {
- Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT"),
- Locale.ROOT);
+ private Date twoDaysAgo(Locale l, TimeZone tz) {
+ Calendar calendar = Calendar.getInstance(tz, l);
calendar.add(Calendar.DAY_OF_YEAR, -2);
return calendar.getTime();
}
@Test
- @Ignore("fails if somewhere on earth is a DST change")
public void testDateFormatEvaluator() {
Evaluator dateFormatEval = new DateFormatEvaluator();
ContextImpl context = new ContextImpl(null, resolver, null,
Context.FULL_DUMP, Collections.<String,Object>emptyMap(), null, null);
- String currentLocale = Locale.getDefault().toString();
+
+ Locale rootLocale = Locale.ROOT;
+ Locale defaultLocale = Locale.getDefault();
+ TimeZone defaultTz = TimeZone.getDefault();
+
{
- {
- SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", Locale.ROOT);
- String sdf = sdfDate.format(getNow());
- String dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH'", context);
- assertEquals(sdf,dfe);
- }
- {
- SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", Locale.getDefault());
- String sdf = sdfDate.format(getNow());
- String dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH','"+ currentLocale + "'", context);
- assertEquals(sdf,dfe);
- for(String tz : TimeZone.getAvailableIDs()) {
- sdfDate.setTimeZone(TimeZone.getTimeZone(tz));
- sdf = sdfDate.format(getNow());
- dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH','" + currentLocale + "','" + tz + "'", context);
- assertEquals(sdf,dfe);
- }
+ SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", rootLocale);
+ String sdf = sdfDate.format(twoDaysAgo(rootLocale, defaultTz));
+ String dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH'", context);
+ assertEquals(sdf,dfe);
+ }
+ {
+ SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", defaultLocale);
+ String sdf = sdfDate.format(twoDaysAgo(defaultLocale, defaultTz));
+ String dfe = dateFormatEval.evaluate(
+ "'NOW-2DAYS','yyyy-MM-dd HH','" + defaultLocale + "'", context);
+ assertEquals(sdf,dfe);
+ for(String tzStr : TimeZone.getAvailableIDs()) {
+ TimeZone tz = TimeZone.getTimeZone(tzStr);
+ sdfDate.setTimeZone(tz);
+ sdf = sdfDate.format(twoDaysAgo(defaultLocale, tz));
+ dfe = dateFormatEval.evaluate(
+ "'NOW-2DAYS','yyyy-MM-dd HH','" + defaultLocale + "','" + tzStr + "'", context);
+ assertEquals(sdf,dfe);
}
}
+
Date d = new Date();
Map<String,Object> map = new HashMap<String,Object>();
map.put("key", d);
resolver.addNamespace("A", map);
-
+
assertEquals(
- new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.ROOT).format(d),
+ new SimpleDateFormat("yyyy-MM-dd HH:mm", rootLocale).format(d),
dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm'", context));
assertEquals(
- new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.getDefault()).format(d),
- dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm','" + currentLocale
- + "'", context));
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.getDefault());
- for(String tz : TimeZone.getAvailableIDs()) {
- sdf.setTimeZone(TimeZone.getTimeZone(tz));
+ new SimpleDateFormat("yyyy-MM-dd HH:mm", defaultLocale).format(d),
+ dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm','" + defaultLocale + "'", context));
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm", defaultLocale);
+ for(String tzStr : TimeZone.getAvailableIDs()) {
+ TimeZone tz = TimeZone.getTimeZone(tzStr);
+ sdf.setTimeZone(tz);
assertEquals(
sdf.format(d),
- dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm','" + currentLocale + "', '" + tz + "'", context));
+ dateFormatEval.evaluate(
+ "A.key, 'yyyy-MM-dd HH:mm','" + defaultLocale + "', '" + tzStr + "'", context));
}
diff --git a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
index 61fd31ddb6f..30f8008afad 100644
--- a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
@@ -48,6 +48,8 @@ public class DistributedQueue {
private final String prefix = "qn-";
+ private final String response_prefix = "qnr-";
+
public DistributedQueue(SolrZkClient zookeeper, String dir, List<ACL> acl) {
this.dir = dir;
@@ -100,7 +102,7 @@ public class DistributedQueue {
*
* @return the QueueEvent at the head of the queue.
*/
- public byte[] element() throws NoSuchElementException, KeeperException,
+ private QueueEvent element() throws NoSuchElementException, KeeperException,
InterruptedException {
TreeMap<Long,String> orderedChildren;
@@ -122,7 +124,7 @@ public class DistributedQueue {
for (String headNode : orderedChildren.values()) {
if (headNode != null) {
try {
- return zookeeper.getData(dir + "/" + headNode, null, null, true);
+ return new QueueEvent(dir + "/" + headNode, zookeeper.getData(dir + "/" + headNode, null, null, true), null);
} catch (KeeperException.NoNodeException e) {
// Another client removed the node first, try next
}
@@ -162,17 +164,41 @@ public class DistributedQueue {
}
}
+ /**
+ * Removes the head event from the queue and, if a matching response node
+ * exists, stores the event's bytes there as the response.
+ */
+ public byte[] remove(QueueEvent event) throws KeeperException,
+ InterruptedException {
+ String path = event.getId();
+ String responsePath = dir + "/" + response_prefix
+ + path.substring(path.lastIndexOf("-") + 1);
+ if (zookeeper.exists(responsePath, true)) {
+ zookeeper.setData(responsePath, event.getBytes(), true);
+ }
+ byte[] data = zookeeper.getData(path, null, null, true);
+ zookeeper.delete(path, -1, true);
+ return data;
+ }
+
+
private class LatchChildWatcher implements Watcher {
Object lock = new Object();
+ private WatchedEvent event = null;
public LatchChildWatcher() {}
+ public LatchChildWatcher(Object lock) {
+ this.lock = lock;
+ }
+
@Override
public void process(WatchedEvent event) {
LOG.info("Watcher fired on path: " + event.getPath() + " state: "
+ event.getState() + " type " + event.getType());
synchronized (lock) {
+ this.event = event;
lock.notifyAll();
}
}
@@ -182,6 +208,10 @@ public class DistributedQueue {
lock.wait(timeout);
}
}
+
+ public WatchedEvent getWatchedEvent() {
+ return event;
+ }
}
/**
@@ -225,22 +255,51 @@ public class DistributedQueue {
*/
public boolean offer(byte[] data) throws KeeperException,
InterruptedException {
+ return createData(dir + "/" + prefix, data,
+ CreateMode.PERSISTENT_SEQUENTIAL) != null;
+ }
+
+ /**
+ * Inserts data into zookeeper.
+ *
+ * @return the path of the created node
+ */
+ private String createData(String path, byte[] data, CreateMode mode)
+ throws KeeperException, InterruptedException {
for (;;) {
try {
- zookeeper.create(dir + "/" + prefix, data, acl,
- CreateMode.PERSISTENT_SEQUENTIAL, true);
- return true;
+ return zookeeper.create(path, data, acl, mode, true);
} catch (KeeperException.NoNodeException e) {
try {
zookeeper.create(dir, new byte[0], acl, CreateMode.PERSISTENT, true);
} catch (KeeperException.NodeExistsException ne) {
- //someone created it
+ // someone created it
}
}
}
-
-
-
+ }
+
+ /**
+ * Offers the data and waits, up to the given timeout, for a response to be
+ * written to the corresponding response node.
+ */
+ public QueueEvent offer(byte[] data, long timeout) throws KeeperException,
+ InterruptedException {
+ String path = createData(dir + "/" + prefix, data,
+ CreateMode.PERSISTENT_SEQUENTIAL);
+ String watchID = createData(
+ dir + "/" + response_prefix + path.substring(path.lastIndexOf("-") + 1),
+ null, CreateMode.EPHEMERAL);
+ Object lock = new Object();
+ LatchChildWatcher watcher = new LatchChildWatcher(lock);
+ synchronized (lock) {
+ if (zookeeper.exists(watchID, watcher, true) != null) {
+ watcher.await(timeout);
+ }
+ }
+ byte[] bytes = zookeeper.getData(watchID, null, null, true);
+ zookeeper.delete(watchID, -1, true);
+ return new QueueEvent(watchID, bytes, watcher.getWatchedEvent());
}
/**
@@ -251,21 +310,74 @@ public class DistributedQueue {
*/
public byte[] peek() throws KeeperException, InterruptedException {
try {
- return element();
+ return element().getBytes();
} catch (NoSuchElementException e) {
return null;
}
}
+ public static class QueueEvent {
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((id == null) ? 0 : id.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null) return false;
+ if (getClass() != obj.getClass()) return false;
+ QueueEvent other = (QueueEvent) obj;
+ if (id == null) {
+ if (other.id != null) return false;
+ } else if (!id.equals(other.id)) return false;
+ return true;
+ }
+
+ private WatchedEvent event = null;
+ private String id;
+ private byte[] bytes;
+
+ QueueEvent(String id, byte[] bytes, WatchedEvent event) {
+ this.id = id;
+ this.bytes = bytes;
+ this.event = event;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setBytes(byte[] bytes) {
+ this.bytes = bytes;
+ }
+
+ public byte[] getBytes() {
+ return bytes;
+ }
+
+ public WatchedEvent getWatchedEvent() {
+ return event;
+ }
+
+ }
+
/**
* Returns the element at the head of the queue, or null if the queue is
* empty. If block is true, waits until an element arrives.
*
* @return the QueueEvent at the head of the queue, or null.
*/
- public byte[] peek(boolean block) throws KeeperException, InterruptedException {
+ public QueueEvent peek(boolean block) throws KeeperException, InterruptedException {
if (!block) {
- return peek();
+ return element();
}
TreeMap<Long,String> orderedChildren;
@@ -286,7 +398,7 @@ public class DistributedQueue {
String path = dir + "/" + headNode;
try {
byte[] data = zookeeper.getData(path, null, null, true);
- return data;
+ return new QueueEvent(path, data, childWatcher.getWatchedEvent());
} catch (KeeperException.NoNodeException e) {
// Another client deleted the node first.
}
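Taken together, these changes turn DistributedQueue into a simple
request/response channel over ZooKeeper: offer(data, timeout) creates the
"qn-" request node plus an ephemeral "qnr-" response node and blocks on a
watch, while the consumer's remove(event) writes the serialized answer back
into that "qnr-" node. A minimal sketch of the two sides, assuming an
existing SolrZkClient (the queue path below is illustrative):

    import org.apache.solr.cloud.DistributedQueue;
    import org.apache.solr.cloud.DistributedQueue.QueueEvent;
    import org.apache.solr.common.cloud.SolrZkClient;
    import org.apache.zookeeper.ZooDefs;

    public class QueueRoundTripSketch {

      // Producer side: submit work and wait up to 15s for the consumer's answer.
      static byte[] submit(SolrZkClient zkClient, byte[] requestBytes) throws Exception {
        DistributedQueue queue = new DistributedQueue(zkClient,
            "/overseer/collection-queue-work", ZooDefs.Ids.OPEN_ACL_UNSAFE);
        QueueEvent answer = queue.offer(requestBytes, 15000);
        return answer.getBytes(); // null if no response arrived within the timeout
      }

      // Consumer side: block for the head of the queue, attach the response
      // bytes, and let remove() store them in the matching "qnr-" node.
      static void serve(SolrZkClient zkClient, byte[] responseBytes) throws Exception {
        DistributedQueue queue = new DistributedQueue(zkClient,
            "/overseer/collection-queue-work", ZooDefs.Ids.OPEN_ACL_UNSAFE);
        QueueEvent head = queue.peek(true);
        head.setBytes(responseBytes);
        queue.remove(head);
      }
    }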
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
index cbde272695f..ae09ff48157 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
@@ -23,6 +23,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.cloud.DistributedQueue.QueueEvent;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ClosableThread;
@@ -36,6 +38,7 @@ import org.apache.solr.common.cloud.ZooKeeperException;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.handler.component.ShardHandler;
import org.apache.solr.handler.component.ShardRequest;
@@ -94,47 +97,33 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
@Override
public void run() {
- log.info("Process current queue of collection messages");
- while (amILeader() && !isClosed) {
- try {
- byte[] head = workQueue.peek(true);
-
- //if (head != null) { // should not happen since we block above
- final ZkNodeProps message = ZkNodeProps.load(head);
- final String operation = message.getStr(QUEUE_OPERATION);
- try {
- boolean success = processMessage(message, operation);
- if (!success) {
- // TODO: what to do on failure / partial failure
- // if we fail, do we clean up then ?
- SolrException.log(log,
- "Collection " + operation + " of " + message.getStr("name")
- + " failed");
- }
- } catch(Throwable t) {
- SolrException.log(log,
- "Collection " + operation + " of " + message.getStr("name")
- + " failed", t);
- }
- //}
-
-
- workQueue.poll();
-
- } catch (KeeperException e) {
- if (e.code() == KeeperException.Code.SESSIONEXPIRED
- || e.code() == KeeperException.Code.CONNECTIONLOSS) {
- log.warn("Overseer cannot talk to ZK");
- return;
- }
- SolrException.log(log, "", e);
- throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "",
- e);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- return;
- }
- }
+ log.info("Process current queue of collection creations");
+ while (amILeader() && !isClosed) {
+ try {
+ QueueEvent head = workQueue.peek(true);
+ final ZkNodeProps message = ZkNodeProps.load(head.getBytes());
+ log.info("Overseer Collection Processor: Get the message id:" + head.getId() + " message:" + message.toString());
+ final String operation = message.getStr(QUEUE_OPERATION);
+ SolrResponse response = processMessage(message, operation);
+ head.setBytes(SolrResponse.serializable(response));
+ workQueue.remove(head);
+ log.info("Overseer Collection Processor: Message id:" + head.getId() + " complete, response:"+ response.getResponse().toString());
+ } catch (KeeperException e) {
+ if (e.code() == KeeperException.Code.SESSIONEXPIRED
+ || e.code() == KeeperException.Code.CONNECTIONLOSS) {
+ log.warn("Overseer cannot talk to ZK");
+ return;
+ }
+ SolrException.log(log, "", e);
+ throw new ZooKeeperException(
+ SolrException.ErrorCode.SERVER_ERROR, "", e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ return;
+ } catch (Throwable e) {
+ SolrException.log(log, "", e);
+ }
+ }
}
public void close() {
@@ -157,21 +146,49 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
return false;
}
- protected boolean processMessage(ZkNodeProps message, String operation) {
- if (CREATECOLLECTION.equals(operation)) {
- return createCollection(zkStateReader.getClusterState(), message);
- } else if (DELETECOLLECTION.equals(operation)) {
- ModifiableSolrParams params = new ModifiableSolrParams();
- params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
- params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
- return collectionCmd(zkStateReader.getClusterState(), message, params);
- } else if (RELOADCOLLECTION.equals(operation)) {
- ModifiableSolrParams params = new ModifiableSolrParams();
- params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
- return collectionCmd(zkStateReader.getClusterState(), message, params);
+
+ protected SolrResponse processMessage(ZkNodeProps message, String operation) {
+
+ NamedList results = new NamedList();
+ try {
+ if (CREATECOLLECTION.equals(operation)) {
+ createCollection(zkStateReader.getClusterState(), message);
+ } else if (DELETECOLLECTION.equals(operation)) {
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
+ params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
+ collectionCmd(zkStateReader.getClusterState(), message, params);
+ } else if (RELOADCOLLECTION.equals(operation)) {
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
+ collectionCmd(zkStateReader.getClusterState(), message, params);
+ } else {
+ throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown operation: "
+ + operation);
+ }
+ int failed = 0;
+ ShardResponse srsp;
+
+ do {
+ srsp = shardHandler.takeCompletedIncludingErrors();
+ if (srsp != null) {
+ Throwable e = srsp.getException();
+ if (e != null) {
+ failed++;
+ log.error("Error talking to shard: " + srsp.getShard(), e);
+ results.add(srsp.getShard(), e);
+ } else {
+ results.add(srsp.getShard(), srsp.getSolrResponse().getResponse());
+ }
+ }
+ } while (srsp != null);
+ } catch (SolrException ex) {
+ SolrException.log(log, "Collection " + operation + " of " + operation
+ + " failed");
+ results.add("Operation " + operation + " cause exception:", ex);
+ } finally {
+ return new OverseerSolrResponse(results);
}
- // unknown command, toss it from our queue
- return true;
}
private boolean createCollection(ClusterState clusterState, ZkNodeProps message) {
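On the other end of the queue, the API handler can now wait for and
deserialize the Overseer's answer. A hedged sketch, assuming
SolrResponse.deserialize(byte[]) is the counterpart of the
SolrResponse.serializable(...) call used in the loop above (the timeout
value is illustrative):

    import org.apache.solr.client.solrj.SolrResponse;
    import org.apache.solr.cloud.DistributedQueue;
    import org.apache.solr.cloud.DistributedQueue.QueueEvent;
    import org.apache.solr.common.SolrException;
    import org.apache.solr.common.SolrException.ErrorCode;
    import org.apache.solr.common.cloud.ZkNodeProps;
    import org.apache.solr.common.cloud.ZkStateReader;
    import org.apache.solr.common.util.NamedList;

    class CollectionsHandlerSketch {
      // Offers a message to the Overseer work queue and blocks for its response.
      static NamedList<Object> sendToOverseer(DistributedQueue workQueue,
          ZkNodeProps props) throws Exception {
        QueueEvent event = workQueue.offer(ZkStateReader.toJSON(props), 60 * 1000);
        if (event.getBytes() == null) {
          throw new SolrException(ErrorCode.SERVER_ERROR,
              "Timed out waiting for the collection operation to complete");
        }
        // Assumed deserialize counterpart to SolrResponse.serializable(...).
        SolrResponse response = SolrResponse.deserialize(event.getBytes());
        return response.getResponse();
      }
    }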
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java b/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java
new file mode 100644
index 00000000000..9dfc0773c02
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java
@@ -0,0 +1,47 @@
+package org.apache.solr.cloud;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.common.util.NamedList;
+
+public class OverseerSolrResponse extends SolrResponse {
+
+ NamedList responseList = null;
+
+ public OverseerSolrResponse(NamedList list) {
+ responseList = list;
+ }
+
+ @Override
+ public long getElapsedTime() {
+ // elapsed time is not tracked for Overseer responses
+ return 0;
+ }
+
+ @Override
+ public void setResponse(NamedList