mirror of https://github.com/apache/lucene.git
synced 2025-02-17 15:35:20 +00:00
Merged /lucene/dev/trunk:r1438171-1439161
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1439164 13f79535-47bb-0310-9956-ffa450edef68
commit a02bffb253
CHANGES.txt:

@@ -58,6 +58,9 @@ Optimizations
 * LUCENE-3298: FST can now be larger than 2.1 GB / 2.1 B nodes.
   (James Dyer, Mike McCandless)

+* LUCENE-4690: Performance improvements and non-hashing versions
+  of NumericUtils.*ToPrefixCoded() (yonik)
+
 New Features

 * LUCENE-4686: New specialized DGapVInt8IntEncoder for facets (now the
@@ -70,9 +73,23 @@ New Features
   compresses term vectors into chunks of documents similarly to
   CompressingStoredFieldsFormat. (Adrien Grand)

+* LUCENE-4695: Added LiveFieldValues utility class, for getting the
+  current (live, real-time) value for any indexed doc/field. The
+  class buffers recently indexed doc/field values until a new
+  near-real-time reader is opened that contains those changes.
+  (Robert Muir, Mike McCandless)
+
 API Changes

 * LUCENE-4709: FacetResultNode no longer has a residue field. (Shai Erera)

+* LUCENE-4716: DrillDown.query now takes Occur, allowing to specify if
+  categories should be OR'ed or AND'ed. (Shai Erera)
+
+* LUCENE-4695: ReferenceManager.RefreshListener.afterRefresh now takes
+  a boolean indicating whether a new reference was in fact opened, and
+  a new beforeRefresh method notifies you when a refresh attempt is
+  starting. (Robert Muir, Mike McCandless)
+
 Bug Fixes

@@ -414,6 +431,13 @@ Changes in Runtime Behavior
   This only affects requests with depth>1. If you execute such requests and
   rely on the facet results being returned flat (i.e. no hierarchy), you should
   set the ResultMode to GLOBAL_FLAT. (Shai Erera, Gilad Barkai)

+* LUCENE-1822: Improves the text window selection by recalculating the starting margin
+  once all phrases in the fragment have been identified in FastVectorHighlighter. This
+  way if a single word is matched in a fragment, it will appear in the middle of the highlight,
+  instead of 6 characters from the beginning. This way one can also guarantee that
+  the entirety of short texts are represented in a fragment by specifying a large
+  enough fragCharSize.
+
 Optimizations

CompressionMode.java:

@@ -45,7 +45,7 @@ public abstract class CompressionMode {

     @Override
     public Compressor newCompressor() {
-      return LZ4_FAST_COMPRESSOR;
+      return new LZ4FastCompressor();
     }

     @Override
@@ -95,7 +95,7 @@ public abstract class CompressionMode {

     @Override
     public Compressor newCompressor() {
-      return LZ4_HIGH_COMPRESSOR;
+      return new LZ4HighCompressor();
     }

     @Override
@@ -147,25 +147,37 @@ public abstract class CompressionMode {

   };

-  private static final Compressor LZ4_FAST_COMPRESSOR = new Compressor() {
+  private static final class LZ4FastCompressor extends Compressor {
+
+    private final LZ4.HashTable ht;
+
+    LZ4FastCompressor() {
+      ht = new LZ4.HashTable();
+    }

     @Override
     public void compress(byte[] bytes, int off, int len, DataOutput out)
        throws IOException {
-      LZ4.compress(bytes, off, len, out);
+      LZ4.compress(bytes, off, len, out, ht);
     }

-  };
+  }

-  private static final Compressor LZ4_HIGH_COMPRESSOR = new Compressor() {
+  private static final class LZ4HighCompressor extends Compressor {
+
+    private final LZ4.HCHashTable ht;
+
+    LZ4HighCompressor() {
+      ht = new LZ4.HCHashTable();
+    }

     @Override
     public void compress(byte[] bytes, int off, int len, DataOutput out)
        throws IOException {
-      LZ4.compressHC(bytes, off, len, out);
+      LZ4.compressHC(bytes, off, len, out, ht);
     }

-  };
+  }

   private static final class DeflateDecompressor extends Decompressor {

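The refactoring above moves the LZ4 scratch hash table out of each compress() call and into a field of a dedicated Compressor subclass, so steady-state compression no longer allocates. Because each instance now carries mutable state, newCompressor() returns a fresh object instead of a shared constant. A minimal sketch of the same reuse pattern, using hypothetical Scratch and BlockCompressor names in place of LZ4.HashTable and Compressor:

import java.util.Arrays;

// Hypothetical stand-ins for LZ4.HashTable / Compressor; a sketch of the
// grow-or-clear reuse pattern, not the commit's actual implementation.
final class Scratch {
  private int[] table = new int[0];

  void reset(int needed) {
    if (table.length < needed) {
      table = new int[needed];            // allocate only when too small
    } else {
      Arrays.fill(table, 0, needed, 0);   // cheap clear on the reuse path
    }
  }
}

final class BlockCompressor {
  private final Scratch scratch = new Scratch();  // per-instance, not per-call

  void compress(byte[] bytes, int off, int len) {
    scratch.reset(len);  // steady state: no allocation per block
    // ... compression would use scratch.table here ...
  }
}

Since the scratch state makes instances stateful, a BlockCompressor (like the new LZ4FastCompressor) must not be shared across threads, which the updated LZ4 javadoc below calls out explicitly.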
LZ4.java:

@@ -30,7 +30,7 @@ import org.apache.lucene.util.packed.PackedInts;
 * http://code.google.com/p/lz4/
 * http://fastcompression.blogspot.fr/p/lz4.html
 */
-class LZ4 {
+final class LZ4 {

   private LZ4() {}

@@ -181,11 +181,29 @@ class LZ4 {
     }
   }

+  static final class HashTable {
+    private int hashLog;
+    private PackedInts.Mutable hashTable;
+
+    void reset(int len) {
+      final int bitsPerOffset = PackedInts.bitsRequired(len - LAST_LITERALS);
+      final int bitsPerOffsetLog = 32 - Integer.numberOfLeadingZeros(bitsPerOffset - 1);
+      hashLog = MEMORY_USAGE + 3 - bitsPerOffsetLog;
+      if (hashTable == null || hashTable.size() < 1 << hashLog || hashTable.getBitsPerValue() < bitsPerOffset) {
+        hashTable = PackedInts.getMutable(1 << hashLog, bitsPerOffset, PackedInts.DEFAULT);
+      } else {
+        hashTable.clear();
+      }
+    }
+
+  }
+
   /**
    * Compress <code>bytes[off:off+len]</code> into <code>out</code> using
-   * at most 16KB of memory.
+   * at most 16KB of memory. <code>ht</code> shouldn't be shared across threads
+   * but can safely be reused.
    */
-  public static void compress(byte[] bytes, int off, int len, DataOutput out) throws IOException {
+  public static void compress(byte[] bytes, int off, int len, DataOutput out, HashTable ht) throws IOException {

     final int base = off;
     final int end = off + len;
@@ -196,11 +214,9 @@ class LZ4 {

     final int limit = end - LAST_LITERALS;
     final int matchLimit = limit - MIN_MATCH;
-    final int bitsPerOffset = PackedInts.bitsRequired(len - LAST_LITERALS);
-    final int bitsPerOffsetLog = 32 - Integer.numberOfLeadingZeros(bitsPerOffset - 1);
-    final int hashLog = MEMORY_USAGE + 3 - bitsPerOffsetLog;
-    final PackedInts.Mutable hashTable = PackedInts.getMutable(1 << hashLog, bitsPerOffset, PackedInts.DEFAULT);
+    ht.reset(len);
+    final int hashLog = ht.hashLog;
+    final PackedInts.Mutable hashTable = ht.hashTable;

     main:
     while (off < limit) {
@@ -256,20 +272,24 @@ class LZ4 {
       m2.ref = m1.ref;
     }
   }

-  private static class HashTable {
+  static final class HCHashTable {
     static final int MAX_ATTEMPTS = 256;
     static final int MASK = MAX_DISTANCE - 1;
     int nextToUpdate;
-    private final int base;
+    private int base;
     private final int[] hashTable;
     private final short[] chainTable;

-    HashTable(int base) {
+    HCHashTable() {
+      hashTable = new int[HASH_TABLE_SIZE_HC];
+      chainTable = new short[MAX_DISTANCE];
+    }
+
+    private void reset(int base) {
       this.base = base;
       nextToUpdate = base;
-      hashTable = new int[HASH_TABLE_SIZE_HC];
       Arrays.fill(hashTable, -1);
-      chainTable = new short[MAX_DISTANCE];
+      Arrays.fill(chainTable, (short) 0);
     }

     private int hashPointer(byte[] bytes, int off) {
@@ -355,12 +375,14 @@ class LZ4 {

   /**
    * Compress <code>bytes[off:off+len]</code> into <code>out</code>. Compared to
-   * {@link LZ4#compress(byte[], int, int, DataOutput)}, this method is slower,
-   * uses more memory (~ 256KB), but should provide better compression ratios
-   * (especially on large inputs) because it chooses the best match among up to
-   * 256 candidates and then performs trade-offs to fix overlapping matches.
+   * {@link LZ4#compress(byte[], int, int, DataOutput, HashTable)}, this method
+   * is slower and uses more memory (~ 256KB per thread) but should provide
+   * better compression ratios (especially on large inputs) because it chooses
+   * the best match among up to 256 candidates and then performs trade-offs to
+   * fix overlapping matches. <code>ht</code> shouldn't be shared across threads
+   * but can safely be reused.
    */
-  public static void compressHC(byte[] src, int srcOff, int srcLen, DataOutput out) throws IOException {
+  public static void compressHC(byte[] src, int srcOff, int srcLen, DataOutput out, HCHashTable ht) throws IOException {

     final int srcEnd = srcOff + srcLen;
     final int matchLimit = srcEnd - LAST_LITERALS;
@@ -368,7 +390,7 @@ class LZ4 {
     int sOff = srcOff;
     int anchor = sOff++;

-    final HashTable ht = new HashTable(srcOff);
+    ht.reset(srcOff);
     final Match match0 = new Match();
     final Match match1 = new Match();
     final Match match2 = new Match();
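HashTable.reset(len) sizes the packed table so each slot is just wide enough to hold an offset into the input, and shrinks the slot count as slots widen so total memory stays near a fixed budget. A worked check of that arithmetic, assuming MEMORY_USAGE = 14 (matching the "at most 16KB" javadoc) and LAST_LITERALS = 5; both constants are assumptions for illustration, not values quoted by this diff:

public class HashSizingCheck {
  static final int MEMORY_USAGE = 14;   // assumed: 2^14 bytes = 16KB budget
  static final int LAST_LITERALS = 5;   // assumed trailing-literals count

  public static void main(String[] args) {
    int len = 64 * 1024;  // a 64KB input block
    // Same result as PackedInts.bitsRequired(len - LAST_LITERALS): 16 bits
    int bitsPerOffset = 64 - Long.numberOfLeadingZeros(len - LAST_LITERALS);
    // ceil(log2(bitsPerOffset)) = 4
    int bitsPerOffsetLog = 32 - Integer.numberOfLeadingZeros(bitsPerOffset - 1);
    int hashLog = MEMORY_USAGE + 3 - bitsPerOffsetLog;  // 14 + 3 - 4 = 13
    System.out.println("slots: " + (1 << hashLog));                        // 8192
    System.out.println("bytes: " + ((1 << hashLog) * bitsPerOffset / 8));  // 16384
  }
}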
LiveFieldValues.java (new file):

@@ -0,0 +1,133 @@
package org.apache.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.Closeable;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/** Tracks live field values across NRT reader reopens.
 *  This holds a map for all updated ids since
 *  the last reader reopen. Once the NRT reader is reopened,
 *  it prunes the map. This means you must reopen your NRT
 *  reader periodically otherwise the RAM consumption of
 *  this class will grow unbounded!
 *
 *  <p>NOTE: you must ensure the same id is never updated at
 *  the same time by two threads, because in this case you
 *  cannot in general know which thread "won". */

public abstract class LiveFieldValues<T> implements ReferenceManager.RefreshListener, Closeable {

  private volatile Map<String,T> current = new ConcurrentHashMap<String,T>();
  private volatile Map<String,T> old = new ConcurrentHashMap<String,T>();
  private final ReferenceManager<IndexSearcher> mgr;
  private final T missingValue;

  public LiveFieldValues(ReferenceManager<IndexSearcher> mgr, T missingValue) {
    this.missingValue = missingValue;
    this.mgr = mgr;
    mgr.addListener(this);
  }

  @Override
  public void close() {
    mgr.removeListener(this);
  }

  @Override
  public void beforeRefresh() throws IOException {
    old = current;
    // Start sending all updates after this point to the new
    // map. While reopen is running, any lookup will first
    // try this new map, then fallback to old, then to the
    // current searcher:
    current = new ConcurrentHashMap<String,T>();
  }

  @Override
  public void afterRefresh(boolean didRefresh) throws IOException {
    // Now drop all the old values because they are now
    // visible via the searcher that was just opened; if
    // didRefresh is false, it's possible old has some
    // entries in it, which is fine: it means they were
    // actually already included in the previously opened
    // reader. So we can safely clear old here:
    old = new ConcurrentHashMap<String,T>();
  }

  /** Call this after you've successfully added a document
   *  to the index, to record what value you just set the
   *  field to. */
  public void add(String id, T value) {
    current.put(id, value);
  }

  /** Call this after you've successfully deleted a document
   *  from the index. */
  public void delete(String id) {
    current.put(id, missingValue);
  }

  /** Returns the [approximate] number of id/value pairs
   *  buffered in RAM. */
  public int size() {
    return current.size() + old.size();
  }

  /** Returns the current value for this id, or null if the
   *  id isn't in the index or was deleted. */
  public T get(String id) throws IOException {
    // First try to get the "live" value:
    T value = current.get(id);
    if (value == missingValue) {
      // Deleted but the deletion is not yet reflected in
      // the reader:
      return null;
    } else if (value != null) {
      return value;
    } else {
      value = old.get(id);
      if (value == missingValue) {
        // Deleted but the deletion is not yet reflected in
        // the reader:
        return null;
      } else if (value != null) {
        return value;
      } else {
        // It either does not exist in the index, or, it was
        // already flushed & NRT reader was opened on the
        // segment, so fallback to current searcher:
        IndexSearcher s = mgr.acquire();
        try {
          return lookupFromSearcher(s, id);
        } finally {
          mgr.release(s);
        }
      }
    }
  }

  /** This is called when the id/value was already flushed & opened
   *  in an NRT IndexSearcher. You must implement this to
   *  go look up the value (eg, via doc values, field cache,
   *  stored fields, etc.). */
  protected abstract T lookupFromSearcher(IndexSearcher s, String id) throws IOException;

}
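A hedged sketch of how a concrete subclass plugs in; the "price" field and long values are illustrative choices, and mgr is assumed to be the ReferenceManager<IndexSearcher> passed to the constructor (TestLiveFieldValues, added later in this commit, does the same with an int field):

LiveFieldValues<Long> prices = new LiveFieldValues<Long>(mgr, -1L /* missingValue */) {
  @Override
  protected Long lookupFromSearcher(IndexSearcher s, String id) throws IOException {
    // Fallback path: the value has already been flushed and is visible
    // through the NRT searcher, so resolve it from stored fields:
    TopDocs hits = s.search(new TermQuery(new Term("id", id)), 1);
    if (hits.totalHits == 0) {
      return null;  // never indexed, or the delete is already visible
    }
    StoredDocument doc = s.doc(hits.scoreDocs[0].doc);
    return doc.getField("price").numericValue().longValue();
  }
};

// Mirror every successful indexing operation into the live map:
//   writer.updateDocument(new Term("id", id), doc);  prices.add(id, price);
//   writer.deleteDocuments(new Term("id", id));      prices.delete(id);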
ReferenceManager.java:

@@ -151,6 +151,7 @@ public abstract class ReferenceManager<G> implements Closeable {
     try {
       final G reference = acquire();
       try {
+        notifyRefreshListenersBefore();
         G newReference = refreshIfNeeded(reference);
         if (newReference != null) {
           assert newReference != reference : "refreshIfNeeded should return null if refresh wasn't needed";
@@ -165,11 +166,9 @@ public abstract class ReferenceManager<G> implements Closeable {
         }
       } finally {
         release(reference);
+        notifyRefreshListenersRefreshed(refreshed);
       }
       afterMaybeRefresh();
-      if (refreshed) {
-        notifyRefreshListeners();
-      }
     } finally {
       refreshLock.unlock();
     }
@@ -254,9 +253,15 @@ public abstract class ReferenceManager<G> implements Closeable {
     decRef(reference);
   }

-  private void notifyRefreshListeners() {
+  private void notifyRefreshListenersBefore() throws IOException {
     for (RefreshListener refreshListener : refreshListeners) {
-      refreshListener.afterRefresh();
+      refreshListener.beforeRefresh();
+    }
+  }
+
+  private void notifyRefreshListenersRefreshed(boolean didRefresh) throws IOException {
+    for (RefreshListener refreshListener : refreshListeners) {
+      refreshListener.afterRefresh(didRefresh);
     }
   }

@@ -284,9 +289,13 @@ public abstract class ReferenceManager<G> implements Closeable {
    * finished. See {@link #addListener}. */
   public interface RefreshListener {

-    /**
-     * Called after a successful refresh and a new reference has been installed. When this is called {@link #acquire()} is guaranteed to return a new instance.
-     */
-    void afterRefresh();
+    /** Called right before a refresh attempt starts. */
+    void beforeRefresh() throws IOException;
+
+    /** Called after the attempted refresh; if the refresh
+     *  did open a new reference then didRefresh will be true
+     *  and {@link #acquire()} is guaranteed to return the new
+     *  reference. */
+    void afterRefresh(boolean didRefresh) throws IOException;
   }
 }
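The listener contract is now two-phase: beforeRefresh() fires as an attempt starts, and afterRefresh(boolean) always fires afterwards, with the flag saying whether a new reference was actually opened. A small hedged sketch that uses the pair to time refreshes, assuming an existing ReferenceManager mgr:

mgr.addListener(new ReferenceManager.RefreshListener() {
  private volatile long startNS;

  @Override
  public void beforeRefresh() {
    startNS = System.nanoTime();  // refresh attempt is starting
  }

  @Override
  public void afterRefresh(boolean didRefresh) {
    long elapsedMS = (System.nanoTime() - startNS) / 1000000;
    // didRefresh == true guarantees acquire() now returns the new reference
    System.out.println("refresh " + (didRefresh ? "opened a new reference" : "was a no-op")
        + " in " + elapsedMS + " ms");
  }
});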
NumericUtils.java:

@@ -82,7 +82,7 @@ public final class NumericUtils {
   /**
    * The maximum term length (used for <code>byte[]</code> buffer size)
    * for encoding <code>long</code> values.
-   * @see #longToPrefixCoded(long,int,BytesRef)
+   * @see #longToPrefixCodedBytes
    */
   public static final int BUF_SIZE_LONG = 63/7 + 2;

@@ -95,7 +95,7 @@ public final class NumericUtils {
   /**
    * The maximum term length (used for <code>byte[]</code> buffer size)
    * for encoding <code>int</code> values.
-   * @see #intToPrefixCoded(int,int,BytesRef)
+   * @see #intToPrefixCodedBytes
    */
   public static final int BUF_SIZE_INT = 31/7 + 2;

@@ -109,15 +109,42 @@ public final class NumericUtils {
    * @return the hash code for indexing (TermsHash)
    */
   public static int longToPrefixCoded(final long val, final int shift, final BytesRef bytes) {
-    if (shift>63 || shift<0)
+    longToPrefixCodedBytes(val, shift, bytes);
+    return bytes.hashCode();
+  }
+
+  /**
+   * Returns prefix coded bits after reducing the precision by <code>shift</code> bits.
+   * This is method is used by {@link NumericTokenStream}.
+   * After encoding, {@code bytes.offset} will always be 0.
+   * @param val the numeric value
+   * @param shift how many bits to strip from the right
+   * @param bytes will contain the encoded value
+   * @return the hash code for indexing (TermsHash)
+   */
+  public static int intToPrefixCoded(final int val, final int shift, final BytesRef bytes) {
+    intToPrefixCodedBytes(val, shift, bytes);
+    return bytes.hashCode();
+  }
+
+  /**
+   * Returns prefix coded bits after reducing the precision by <code>shift</code> bits.
+   * This is method is used by {@link NumericTokenStream}.
+   * After encoding, {@code bytes.offset} will always be 0.
+   * @param val the numeric value
+   * @param shift how many bits to strip from the right
+   * @param bytes will contain the encoded value
+   */
+  public static void longToPrefixCodedBytes(final long val, final int shift, final BytesRef bytes) {
+    if ((shift & ~0x3f) != 0)  // ensure shift is 0..63
       throw new IllegalArgumentException("Illegal shift value, must be 0..63");
-    int hash, nChars = (63-shift)/7 + 1;
+    int nChars = (((63-shift)*37)>>8) + 1;    // i/7 is the same as (i*37)>>8 for i in 0..63
     bytes.offset = 0;
-    bytes.length = nChars+1;
+    bytes.length = nChars+1;   // one extra for the byte that contains the shift info
     if (bytes.bytes.length < bytes.length) {
-      bytes.grow(NumericUtils.BUF_SIZE_LONG);
+      bytes.bytes = new byte[NumericUtils.BUF_SIZE_LONG];  // use the max
     }
-    bytes.bytes[0] = (byte) (hash = (SHIFT_START_LONG + shift));
+    bytes.bytes[0] = (byte)(SHIFT_START_LONG + shift);
     long sortableBits = val ^ 0x8000000000000000L;
     sortableBits >>>= shift;
     while (nChars > 0) {
@@ -126,13 +153,9 @@ public final class NumericUtils {
       bytes.bytes[nChars--] = (byte)(sortableBits & 0x7f);
       sortableBits >>>= 7;
     }
-    // calculate hash
-    for (int i = 1; i < bytes.length; i++) {
-      hash = 31*hash + bytes.bytes[i];
-    }
-    return hash;
   }

   /**
    * Returns prefix coded bits after reducing the precision by <code>shift</code> bits.
    * This is method is used by {@link NumericTokenStream}.
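The rewritten nChars computation relies on the identity stated in its comment: (i*37)>>8 equals i/7 for the shift range used here, because 37/256 is only slightly above 1/7 and the excess never pushes the product past a floor boundary while i stays at or below 63. A one-loop check of the claim:

// Verifies the comment above: i/7 == (i*37)>>8 for every i in 0..63.
public class Div7Check {
  public static void main(String[] args) {
    for (int i = 0; i <= 63; i++) {
      if (((i * 37) >> 8) != i / 7) {
        throw new AssertionError("identity breaks at i=" + i);
      }
    }
    System.out.println("(i*37)>>8 == i/7 holds for all i in 0..63");
  }
}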
@@ -140,18 +163,17 @@ public final class NumericUtils {
    * @param val the numeric value
    * @param shift how many bits to strip from the right
    * @param bytes will contain the encoded value
-   * @return the hash code for indexing (TermsHash)
    */
-  public static int intToPrefixCoded(final int val, final int shift, final BytesRef bytes) {
-    if (shift>31 || shift<0)
+  public static void intToPrefixCodedBytes(final int val, final int shift, final BytesRef bytes) {
+    if ((shift & ~0x1f) != 0)  // ensure shift is 0..31
       throw new IllegalArgumentException("Illegal shift value, must be 0..31");
-    int hash, nChars = (31-shift)/7 + 1;
+    int nChars = (((31-shift)*37)>>8) + 1;    // i/7 is the same as (i*37)>>8 for i in 0..63
     bytes.offset = 0;
-    bytes.length = nChars+1;
+    bytes.length = nChars+1;   // one extra for the byte that contains the shift info
     if (bytes.bytes.length < bytes.length) {
-      bytes.grow(NumericUtils.BUF_SIZE_INT);
+      bytes.bytes = new byte[NumericUtils.BUF_SIZE_LONG];  // use the max
     }
-    bytes.bytes[0] = (byte) (hash = (SHIFT_START_INT + shift));
+    bytes.bytes[0] = (byte)(SHIFT_START_INT + shift);
     int sortableBits = val ^ 0x80000000;
     sortableBits >>>= shift;
     while (nChars > 0) {
@@ -160,13 +182,9 @@ public final class NumericUtils {
       bytes.bytes[nChars--] = (byte)(sortableBits & 0x7f);
       sortableBits >>>= 7;
     }
-    // calculate hash
-    for (int i = 1; i < bytes.length; i++) {
-      hash = 31*hash + bytes.bytes[i];
-    }
-    return hash;
   }

   /**
    * Returns the shift value from a prefix encoded {@code long}.
    * @throws NumberFormatException if the supplied {@link BytesRef} is
@@ -197,7 +215,7 @@ public final class NumericUtils {
    * This method can be used to decode a term's value.
    * @throws NumberFormatException if the supplied {@link BytesRef} is
    * not correctly prefix encoded.
-   * @see #longToPrefixCoded(long,int,BytesRef)
+   * @see #longToPrefixCodedBytes
    */
   public static long prefixCodedToLong(final BytesRef val) {
     long sortableBits = 0L;
@@ -221,7 +239,7 @@ public final class NumericUtils {
    * This method can be used to decode a term's value.
    * @throws NumberFormatException if the supplied {@link BytesRef} is
    * not correctly prefix encoded.
-   * @see #intToPrefixCoded(int,int,BytesRef)
+   * @see #intToPrefixCodedBytes
    */
   public static int prefixCodedToInt(final BytesRef val) {
     int sortableBits = 0;
@@ -402,8 +420,8 @@ public final class NumericUtils {
    */
   public void addRange(final long min, final long max, final int shift) {
     final BytesRef minBytes = new BytesRef(BUF_SIZE_LONG), maxBytes = new BytesRef(BUF_SIZE_LONG);
-    longToPrefixCoded(min, shift, minBytes);
-    longToPrefixCoded(max, shift, maxBytes);
+    longToPrefixCodedBytes(min, shift, minBytes);
+    longToPrefixCodedBytes(max, shift, maxBytes);
     addRange(minBytes, maxBytes);
   }

@@ -431,8 +449,8 @@ public final class NumericUtils {
    */
   public void addRange(final int min, final int max, final int shift) {
     final BytesRef minBytes = new BytesRef(BUF_SIZE_INT), maxBytes = new BytesRef(BUF_SIZE_INT);
-    intToPrefixCoded(min, shift, minBytes);
-    intToPrefixCoded(max, shift, maxBytes);
+    intToPrefixCodedBytes(min, shift, minBytes);
+    intToPrefixCodedBytes(max, shift, maxBytes);
     addRange(minBytes, maxBytes);
   }

TestLiveFieldValues.java (new file):

@@ -0,0 +1,180 @@
package org.apache.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.NRTManager.TrackingIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

public class TestLiveFieldValues extends LuceneTestCase {
  public void test() throws Exception {

    Directory dir = newFSDirectory(_TestUtil.getTempDir("livefieldupdates"));
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));

    final IndexWriter _w = new IndexWriter(dir, iwc);
    final TrackingIndexWriter w = new TrackingIndexWriter(_w);

    final NRTManager mgr = new NRTManager(w, new SearcherFactory() {
        @Override
        public IndexSearcher newSearcher(IndexReader r) {
          return new IndexSearcher(r);
        }
      });

    final Integer missing = -1;

    final LiveFieldValues<Integer> rt = new LiveFieldValues<Integer>(mgr, missing) {
        @Override
        protected Integer lookupFromSearcher(IndexSearcher s, String id) throws IOException {
          TermQuery tq = new TermQuery(new Term("id", id));
          TopDocs hits = s.search(tq, 1);
          assertTrue(hits.totalHits <= 1);
          if (hits.totalHits == 0) {
            return null;
          } else {
            StoredDocument doc = s.doc(hits.scoreDocs[0].doc);
            return (Integer) doc.getField("field").numericValue();
          }
        }
      };

    int numThreads = _TestUtil.nextInt(random(), 2, 5);
    if (VERBOSE) {
      System.out.println(numThreads + " threads");
    }

    final CountDownLatch startingGun = new CountDownLatch(1);
    List<Thread> threads = new ArrayList<Thread>();

    final int iters = atLeast(1000);
    final int idCount = _TestUtil.nextInt(random(), 100, 10000);

    final double reopenChance = random().nextDouble()*0.01;
    final double deleteChance = random().nextDouble()*0.25;
    final double addChance = random().nextDouble()*0.5;

    for(int t=0;t<numThreads;t++) {
      final int threadID = t;
      final Random threadRandom = new Random(random().nextLong());
      Thread thread = new Thread() {

          @Override
          public void run() {
            try {
              Map<String,Integer> values = new HashMap<String,Integer>();
              List<String> allIDs = Collections.synchronizedList(new ArrayList<String>());

              startingGun.await();
              for(int iter=0; iter<iters;iter++) {
                // Add/update a document
                Document doc = new Document();
                // Threads must not update the same id at the
                // same time:
                if (threadRandom.nextDouble() <= addChance) {
                  String id = String.format(Locale.ROOT, "%d_%04x", threadID, threadRandom.nextInt(idCount));
                  Integer field = threadRandom.nextInt(Integer.MAX_VALUE);
                  doc.add(new StringField("id", id, Field.Store.YES));
                  doc.add(new IntField("field", field.intValue(), Field.Store.YES));
                  w.updateDocument(new Term("id", id), doc);
                  rt.add(id, field);
                  if (values.put(id, field) == null) {
                    allIDs.add(id);
                  }
                }

                if (allIDs.size() > 0 && threadRandom.nextDouble() <= deleteChance) {
                  String randomID = allIDs.get(threadRandom.nextInt(allIDs.size()));
                  w.deleteDocuments(new Term("id", randomID));
                  rt.delete(randomID);
                  values.put(randomID, missing);
                }

                if (threadRandom.nextDouble() <= reopenChance || rt.size() > 10000) {
                  //System.out.println("refresh @ " + rt.size());
                  mgr.maybeRefresh();
                  if (VERBOSE) {
                    IndexSearcher s = mgr.acquire();
                    try {
                      System.out.println("TEST: reopen " + s);
                    } finally {
                      mgr.release(s);
                    }
                    System.out.println("TEST: " + values.size() + " values");
                  }
                }

                if (threadRandom.nextInt(10) == 7) {
                  assertEquals(null, rt.get("foo"));
                }

                if (allIDs.size() > 0) {
                  String randomID = allIDs.get(threadRandom.nextInt(allIDs.size()));
                  Integer expected = values.get(randomID);
                  if (expected == missing) {
                    expected = null;
                  }
                  assertEquals("id=" + randomID, expected, rt.get(randomID));
                }
              }
            } catch (Throwable t) {
              throw new RuntimeException(t);
            }
          }
        };
      threads.add(thread);
      thread.start();
    }

    startingGun.countDown();

    for(Thread thread : threads) {
      thread.join();
    }
    mgr.maybeRefresh();
    assertEquals(0, rt.size());

    rt.close();
    mgr.close();
    _w.close();
    dir.close();
  }
}
TestNRTManager.java:

@@ -423,8 +423,13 @@ public class TestNRTManager extends ThreadedIndexingAndSearchingTestCase {
     NRTManager sm = new NRTManager(new NRTManager.TrackingIndexWriter(iw),new SearcherFactory());
     sm.addListener(new ReferenceManager.RefreshListener() {
       @Override
-      public void afterRefresh() {
-        afterRefreshCalled.set(true);
+      public void beforeRefresh() {
+      }
+      @Override
+      public void afterRefresh(boolean didRefresh) {
+        if (didRefresh) {
+          afterRefreshCalled.set(true);
+        }
       }
     });
     iw.addDocument(new Document());
TestNumericRangeQuery32.java:

@@ -380,8 +380,8 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
       int a=lower; lower=upper; upper=a;
     }
     final BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_INT), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
-    NumericUtils.intToPrefixCoded(lower, 0, lowerBytes);
-    NumericUtils.intToPrefixCoded(upper, 0, upperBytes);
+    NumericUtils.intToPrefixCodedBytes(lower, 0, lowerBytes);
+    NumericUtils.intToPrefixCodedBytes(upper, 0, upperBytes);

     // test inclusive range
     NumericRangeQuery<Integer> tq=NumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);

TestNumericRangeQuery64.java:

@@ -405,8 +405,8 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
       long a=lower; lower=upper; upper=a;
     }
     final BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG);
-    NumericUtils.longToPrefixCoded(lower, 0, lowerBytes);
-    NumericUtils.longToPrefixCoded(upper, 0, upperBytes);
+    NumericUtils.longToPrefixCodedBytes(lower, 0, lowerBytes);
+    NumericUtils.longToPrefixCodedBytes(upper, 0, upperBytes);

     // test inclusive range
     NumericRangeQuery<Long> tq=NumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
TestSearcherManager.java:

@@ -331,8 +331,13 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
     SearcherManager sm = new SearcherManager(iw, false, new SearcherFactory());
     sm.addListener(new ReferenceManager.RefreshListener() {
       @Override
-      public void afterRefresh() {
-        afterRefreshCalled.set(true);
+      public void beforeRefresh() {
+      }
+      @Override
+      public void afterRefresh(boolean didRefresh) {
+        if (didRefresh) {
+          afterRefreshCalled.set(true);
+        }
       }
     });
     iw.addDocument(new Document());
TestNumericUtils.java:

@@ -28,7 +28,7 @@ public class TestNumericUtils extends LuceneTestCase {
     // generate a series of encoded longs, each numerical one bigger than the one before
     BytesRef last=null, act=new BytesRef(NumericUtils.BUF_SIZE_LONG);
     for (long l=-100000L; l<100000L; l++) {
-      NumericUtils.longToPrefixCoded(l, 0, act);
+      NumericUtils.longToPrefixCodedBytes(l, 0, act);
       if (last!=null) {
         // test if smaller
         assertTrue("actual bigger than last (BytesRef)", last.compareTo(act) < 0 );
@@ -46,7 +46,7 @@ public class TestNumericUtils extends LuceneTestCase {
     // generate a series of encoded ints, each numerical one bigger than the one before
     BytesRef last=null, act=new BytesRef(NumericUtils.BUF_SIZE_INT);
     for (int i=-100000; i<100000; i++) {
-      NumericUtils.intToPrefixCoded(i, 0, act);
+      NumericUtils.intToPrefixCodedBytes(i, 0, act);
       if (last!=null) {
         // test if smaller
         assertTrue("actual bigger than last (BytesRef)", last.compareTo(act) < 0 );
@@ -69,7 +69,7 @@ public class TestNumericUtils extends LuceneTestCase {

     for (int i=0; i<vals.length; i++) {
       prefixVals[i] = new BytesRef(NumericUtils.BUF_SIZE_LONG);
-      NumericUtils.longToPrefixCoded(vals[i], 0, prefixVals[i]);
+      NumericUtils.longToPrefixCodedBytes(vals[i], 0, prefixVals[i]);

       // check forward and back conversion
       assertEquals( "forward and back conversion should generate same long", vals[i], NumericUtils.prefixCodedToLong(prefixVals[i]) );
@@ -92,7 +92,7 @@ public class TestNumericUtils extends LuceneTestCase {
     final BytesRef ref = new BytesRef(NumericUtils.BUF_SIZE_LONG);
     for (int i=0; i<vals.length; i++) {
       for (int j=0; j<64; j++) {
-        NumericUtils.longToPrefixCoded(vals[i], j, ref);
+        NumericUtils.longToPrefixCodedBytes(vals[i], j, ref);
         long prefixVal=NumericUtils.prefixCodedToLong(ref);
         long mask=(1L << j) - 1L;
         assertEquals( "difference between prefix val and original value for "+vals[i]+" with shift="+j, vals[i] & mask, vals[i]-prefixVal );
@@ -109,7 +109,7 @@ public class TestNumericUtils extends LuceneTestCase {

     for (int i=0; i<vals.length; i++) {
       prefixVals[i] = new BytesRef(NumericUtils.BUF_SIZE_INT);
-      NumericUtils.intToPrefixCoded(vals[i], 0, prefixVals[i]);
+      NumericUtils.intToPrefixCodedBytes(vals[i], 0, prefixVals[i]);

       // check forward and back conversion
       assertEquals( "forward and back conversion should generate same int", vals[i], NumericUtils.prefixCodedToInt(prefixVals[i]) );
@@ -132,7 +132,7 @@ public class TestNumericUtils extends LuceneTestCase {
     final BytesRef ref = new BytesRef(NumericUtils.BUF_SIZE_LONG);
     for (int i=0; i<vals.length; i++) {
       for (int j=0; j<32; j++) {
-        NumericUtils.intToPrefixCoded(vals[i], j, ref);
+        NumericUtils.intToPrefixCodedBytes(vals[i], j, ref);
         int prefixVal=NumericUtils.prefixCodedToInt(ref);
         int mask=(1 << j) - 1;
         assertEquals( "difference between prefix val and original value for "+vals[i]+" with shift="+j, vals[i] & mask, vals[i]-prefixVal );
TaxonomyMergeUtils.java:

@@ -45,17 +45,17 @@ public class TaxonomyMergeUtils {
    * opens {@link DirectoryTaxonomyWriter} and {@link IndexWriter} on the
    * respective destination indexes. Therefore if you have a writer open on any
    * of them, it should be closed, or you should use
-   * {@link #merge(Directory, Directory, IndexWriter, DirectoryTaxonomyWriter)}
+   * {@link #merge(Directory, Directory, IndexWriter, DirectoryTaxonomyWriter, FacetIndexingParams)}
    * instead.
    *
-   * @see #merge(Directory, Directory, IndexWriter, DirectoryTaxonomyWriter)
+   * @see #merge(Directory, Directory, IndexWriter, DirectoryTaxonomyWriter, FacetIndexingParams)
    */
-  public static void merge(Directory srcIndexDir, Directory srcTaxDir,
-      Directory destIndexDir, Directory destTaxDir) throws IOException {
+  public static void merge(Directory srcIndexDir, Directory srcTaxDir, Directory destIndexDir, Directory destTaxDir,
+      FacetIndexingParams params) throws IOException {
     IndexWriter destIndexWriter = new IndexWriter(destIndexDir,
         new IndexWriterConfig(ExampleUtils.EXAMPLE_VER, null));
     DirectoryTaxonomyWriter destTaxWriter = new DirectoryTaxonomyWriter(destTaxDir);
-    merge(srcIndexDir, srcTaxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter);
+    merge(srcIndexDir, srcTaxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter, params);
     destTaxWriter.close();
     destIndexWriter.close();
   }
@@ -64,30 +64,26 @@ public class TaxonomyMergeUtils {
    * Merges the given taxonomy and index directories and commits the changes to
    * the given writers. This method uses {@link MemoryOrdinalMap} to store the
    * mapped ordinals. If you cannot afford the memory, you can use
-   * {@link #merge(Directory, Directory, DirectoryTaxonomyWriter.OrdinalMap, IndexWriter, DirectoryTaxonomyWriter)}
+   * {@link #merge(Directory, Directory, DirectoryTaxonomyWriter.OrdinalMap, IndexWriter, DirectoryTaxonomyWriter, FacetIndexingParams)}
    * by passing {@link DiskOrdinalMap}.
    *
-   * @see #merge(Directory, Directory, DirectoryTaxonomyWriter.OrdinalMap, IndexWriter, DirectoryTaxonomyWriter)
+   * @see #merge(Directory, Directory, DirectoryTaxonomyWriter.OrdinalMap,
+   *      IndexWriter, DirectoryTaxonomyWriter, FacetIndexingParams)
    */
-  public static void merge(Directory srcIndexDir, Directory srcTaxDir,
-      IndexWriter destIndexWriter,
-      DirectoryTaxonomyWriter destTaxWriter) throws IOException {
-    merge(srcIndexDir, srcTaxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter);
+  public static void merge(Directory srcIndexDir, Directory srcTaxDir, IndexWriter destIndexWriter,
+      DirectoryTaxonomyWriter destTaxWriter, FacetIndexingParams params) throws IOException {
+    merge(srcIndexDir, srcTaxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter, params);
   }

   /**
    * Merges the given taxonomy and index directories and commits the changes to
    * the given writers.
    */
-  public static void merge(Directory srcIndexDir, Directory srcTaxDir,
-      OrdinalMap map, IndexWriter destIndexWriter,
-      DirectoryTaxonomyWriter destTaxWriter) throws IOException {
+  public static void merge(Directory srcIndexDir, Directory srcTaxDir, OrdinalMap map, IndexWriter destIndexWriter,
+      DirectoryTaxonomyWriter destTaxWriter, FacetIndexingParams params) throws IOException {
     // merge the taxonomies
     destTaxWriter.addTaxonomy(srcTaxDir, map);

     int ordinalMap[] = map.getMap();
-    FacetIndexingParams params = FacetIndexingParams.ALL_PARENTS;

     DirectoryReader reader = DirectoryReader.open(srcIndexDir, -1);
     List<AtomicReaderContext> leaves = reader.leaves();
     int numReaders = leaves.size();
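With the hardcoded FacetIndexingParams.ALL_PARENTS removed from the method body, the caller now decides; a hedged sketch of the updated call, where the four Directory variables are assumed to already exist:

// Reproduces the old default explicitly at the call site:
TaxonomyMergeUtils.merge(srcIndexDir, srcTaxDir, destIndexDir, destTaxDir,
    FacetIndexingParams.ALL_PARENTS);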
SimpleSearcher.java:

@@ -1,6 +1,5 @@
 package org.apache.lucene.facet.example.simple;

-import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;

@@ -17,6 +16,7 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
@@ -153,7 +153,7 @@ public class SimpleSearcher {
     CategoryPath categoryOfInterest = resIterator.next().label;

     // drill-down preparation: turn the base query into a drill-down query for the category of interest
-    Query q2 = DrillDown.query(indexingParams, baseQuery, categoryOfInterest);
+    Query q2 = DrillDown.query(indexingParams, baseQuery, Occur.MUST, categoryOfInterest);

     // that's it - search with the new query and we're done!
     // only documents both matching the base query AND containing the
CategoryListParams.java:

@@ -1,7 +1,6 @@
 package org.apache.lucene.facet.index.params;

 import java.io.IOException;
-import java.io.Serializable;

 import org.apache.lucene.facet.search.CategoryListIterator;
 import org.apache.lucene.facet.search.DocValuesCategoryListIterator;
@@ -34,7 +33,7 @@ import org.apache.lucene.util.encoding.UniqueValuesIntEncoder;
  *
  * @lucene.experimental
  */
-public class CategoryListParams implements Serializable {
+public class CategoryListParams {

   /** OrdinalPolicy defines which ordinals are encoded for every document. */
   public static enum OrdinalPolicy {
@@ -143,4 +142,9 @@ public class CategoryListParams {
     return DEFAULT_ORDINAL_POLICY;
   }

+  @Override
+  public String toString() {
+    return "field=" + field + " encoder=" + createEncoder() + " ordinalPolicy=" + getOrdinalPolicy();
+  }
+
 }
@@ -59,18 +59,23 @@ public final class DrillDown {
   }
 
   /**
-   * Wraps a given {@link Query} as a drill-down query over the given
-   * categories, assuming all are required (e.g. {@code AND}). You can construct
-   * a query with different modes (such as {@code OR} or {@code AND} of
-   * {@code ORs}) by creating a {@link BooleanQuery} and call this method
-   * several times. Make sure to wrap the query in that case by
-   * {@link ConstantScoreQuery} and set the boost to 0.0f, so that it doesn't
-   * affect scoring.
+   * Wraps a given {@link Query} by a drill-down query over the given
+   * categories. {@link Occur} defines the relationship between the categories
+   * (e.g. {@code OR} or {@code AND}). If you need to construct a more
+   * complicated relationship (e.g. {@code AND} of {@code ORs}), call this
+   * method with every group of categories with the same relationship and then
+   * construct a {@link BooleanQuery} which will wrap all returned queries. It
+   * is advised to construct that boolean query with coord disabled, and also
+   * wrap the final query with {@link ConstantScoreQuery} and set its boost to
+   * {@code 0.0f}.
+   * <p>
+   * <b>NOTE:</b> {@link Occur} only makes sense when there is more than one
+   * {@link CategoryPath} given.
    * <p>
    * <b>NOTE:</b> {@code baseQuery} can be {@code null}, in which case only the
-   * {@link Query} over the categories will is returned.
+   * {@link Query} over the categories will be returned.
    */
-  public static final Query query(FacetIndexingParams iParams, Query baseQuery, CategoryPath... paths) {
+  public static final Query query(FacetIndexingParams iParams, Query baseQuery, Occur occur, CategoryPath... paths) {
     if (paths == null || paths.length == 0) {
       throw new IllegalArgumentException("Empty category path not allowed for drill down query!");
     }
@@ -81,7 +86,7 @@ public final class DrillDown {
     } else {
       BooleanQuery bq = new BooleanQuery(true); // disable coord
       for (CategoryPath cp : paths) {
-        bq.add(new TermQuery(term(iParams, cp)), Occur.MUST);
+        bq.add(new TermQuery(term(iParams, cp)), occur);
       }
       q = bq;
     }
@@ -100,10 +105,10 @@ public final class DrillDown {
   }
 
   /**
-   * @see #query(FacetIndexingParams, Query, CategoryPath...)
+   * @see #query
    */
-  public static final Query query(FacetSearchParams sParams, Query baseQuery, CategoryPath... paths) {
-    return query(sParams.indexingParams, baseQuery, paths);
+  public static final Query query(FacetSearchParams sParams, Query baseQuery, Occur occur, CategoryPath... paths) {
+    return query(sParams.indexingParams, baseQuery, occur, paths);
   }
 
 }
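The reworked javadoc above describes the new call pattern. A minimal sketch of an AND-of-ORs drill-down under the new signature; the category values and the iParams choice here are illustrative, the calls themselves are the ones this patch introduces:

    // Assumes DrillDown, FacetIndexingParams, CategoryPath, BooleanQuery,
    // ConstantScoreQuery, Query and BooleanClause.Occur are imported.
    FacetIndexingParams iParams = FacetIndexingParams.ALL_PARENTS;

    // One query(...) call per group of categories sharing a relationship:
    Query authors = DrillDown.query(iParams, null, Occur.SHOULD,
        new CategoryPath("Author", "Bob"), new CategoryPath("Author", "Lisa"));
    Query dates = DrillDown.query(iParams, null, Occur.SHOULD,
        new CategoryPath("Publish Date", "2010"), new CategoryPath("Publish Date", "2012"));

    // AND the groups together, with coord disabled as the javadoc advises:
    BooleanQuery groups = new BooleanQuery(true);
    groups.add(authors, Occur.MUST);
    groups.add(dates, Occur.MUST);

    // Wrap the final query so the drill-down part does not affect scoring:
    Query drillDown = new ConstantScoreQuery(groups);
    drillDown.setBoost(0.0f);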
@@ -30,6 +30,7 @@ import org.apache.lucene.store.FSDirectory;
 
 /** Prints how many ords are under each dimension. */
 
+// java -cp ../build/core/classes/java:../build/facet/classes/java org.apache.lucene.facet.util.PrintTaxonomyStats -printTree /s2/scratch/indices/wikibig.trunk.noparents.facets.Lucene41.nd1M/facets
 public class PrintTaxonomyStats {
 
   public static void main(String[] args) throws IOException {
@@ -86,7 +86,7 @@ public class FourFlagsIntDecoder extends IntDecoder {
 
   @Override
   public String toString() {
-    return "FourFlags(VInt8)";
+    return "FourFlags(VInt)";
   }
 
 }
@@ -42,7 +42,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util._TestUtil;
 import org.junit.AfterClass;
@@ -66,7 +65,7 @@ import org.junit.BeforeClass;
  */
 
 @SuppressCodecs({"SimpleText"})
-public abstract class FacetTestBase extends LuceneTestCase {
+public abstract class FacetTestBase extends FacetTestCase {
 
   /** Holds a search and taxonomy Directories pair. */
   private static final class SearchTaxoDirPair {
@@ -92,7 +91,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
   @BeforeClass
   public static void beforeClassFacetTestBase() {
     TEST_DIR = _TestUtil.getTempDir("facets");
     dirsPerPartitionSize = new HashMap<Integer, FacetTestBase.SearchTaxoDirPair>();
   }
 
   @AfterClass
@@ -181,8 +180,10 @@ public abstract class FacetTestBase extends LuceneTestCase {
     return newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
   }
 
-  /** Returns a default facet indexing params */
+  /** Returns a {@link FacetIndexingParams} per the given partition size. */
   protected FacetIndexingParams getFacetIndexingParams(final int partSize) {
+    // several of our encoders don't support the value 0,
+    // which is one of the values encoded when dealing w/ partitions.
     return new FacetIndexingParams() {
       @Override
       public int getPartitionSize() {
@@ -0,0 +1,64 @@
+package org.apache.lucene.facet;
+
+import java.util.Random;
+
+import org.apache.lucene.facet.index.params.CategoryListParams;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.encoding.DGapIntEncoder;
+import org.apache.lucene.util.encoding.DGapVInt8IntEncoder;
+import org.apache.lucene.util.encoding.EightFlagsIntEncoder;
+import org.apache.lucene.util.encoding.FourFlagsIntEncoder;
+import org.apache.lucene.util.encoding.IntEncoder;
+import org.apache.lucene.util.encoding.NOnesIntEncoder;
+import org.apache.lucene.util.encoding.SortingIntEncoder;
+import org.apache.lucene.util.encoding.UniqueValuesIntEncoder;
+import org.apache.lucene.util.encoding.VInt8IntEncoder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class FacetTestCase extends LuceneTestCase {
+
+  private static final IntEncoder[] ENCODERS = new IntEncoder[] {
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new VInt8IntEncoder())),
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))),
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapVInt8IntEncoder())),
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new EightFlagsIntEncoder()))),
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new FourFlagsIntEncoder()))),
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new NOnesIntEncoder(3)))),
+    new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new NOnesIntEncoder(4)))),
+  };
+
+  /** Returns a {@link CategoryListParams} with random {@link IntEncoder} and field. */
+  public static CategoryListParams randomCategoryListParams() {
+    final String field = CategoryListParams.DEFAULT_FIELD + "$" + random().nextInt();
+    return randomCategoryListParams(field);
+  }
+
+  /** Returns a {@link CategoryListParams} with random {@link IntEncoder}. */
+  public static CategoryListParams randomCategoryListParams(String field) {
+    Random random = random();
+    final IntEncoder encoder = ENCODERS[random.nextInt(ENCODERS.length)];
+    return new CategoryListParams(field) {
+      @Override
+      public IntEncoder createEncoder() {
+        return encoder;
+      }
+    };
+  }
+
+}
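The ENCODERS chains above compose small transformations: sort the ordinals, drop duplicates, d-gap them, then byte-encode. A rough round-trip sketch, assuming the bulk IntsRef/BytesRef encode/decode API this module uses (treat the exact method signatures as an assumption, not a reference):

    // Assumes the encoding classes imported above, plus
    // org.apache.lucene.util.IntsRef and org.apache.lucene.util.BytesRef.
    IntEncoder encoder = new SortingIntEncoder(
        new UniqueValuesIntEncoder(new DGapVInt8IntEncoder()));

    IntsRef ordinals = new IntsRef(new int[] { 9, 4, 4, 7 }, 0, 4);
    BytesRef buf = new BytesRef();
    encoder.encode(ordinals, buf);  // sorts to 4,7,9, dedupes, writes 4 then gaps 3 and 2

    IntDecoder decoder = encoder.createMatchingDecoder();
    IntsRef decoded = new IntsRef();
    decoder.decode(buf, decoded);   // restores { 4, 7, 9 }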
@@ -1,30 +1,17 @@
 package org.apache.lucene.facet;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.facet.index.params.FacetIndexingParams;
-import org.apache.lucene.facet.search.FacetsCollector;
-import org.apache.lucene.facet.search.params.CountFacetRequest;
-import org.apache.lucene.facet.search.params.FacetRequest;
-import org.apache.lucene.facet.search.params.FacetSearchParams;
 import org.apache.lucene.facet.search.results.FacetResult;
 import org.apache.lucene.facet.search.results.FacetResultNode;
-import org.apache.lucene.facet.taxonomy.CategoryPath;
-import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -109,30 +96,6 @@ public class FacetTestUtils {
     return pairs;
   }
 
-  public static Collector[] search(IndexSearcher searcher, TaxonomyReader taxonomyReader, FacetIndexingParams iParams,
-      int k, String... facetNames) throws IOException {
-
-    Collector[] collectors = new Collector[2];
-
-    List<FacetRequest> fRequests = new ArrayList<FacetRequest>();
-    for (String facetName : facetNames) {
-      CategoryPath cp = new CategoryPath(facetName);
-      FacetRequest fq = new CountFacetRequest(cp, k);
-      fRequests.add(fq);
-    }
-    FacetSearchParams facetSearchParams = new FacetSearchParams(fRequests, iParams);
-
-    TopScoreDocCollector topDocsCollector = TopScoreDocCollector.create(searcher.getIndexReader().maxDoc(), true);
-    FacetsCollector facetsCollector = FacetsCollector.create(facetSearchParams, searcher.getIndexReader(), taxonomyReader);
-    Collector mColl = MultiCollector.wrap(topDocsCollector, facetsCollector);
-
-    collectors[0] = topDocsCollector;
-    collectors[1] = facetsCollector;
-
-    searcher.search(new MatchAllDocsQuery(), mColl);
-    return collectors;
-  }
-
   public static String toSimpleString(FacetResult fr) {
     StringBuilder sb = new StringBuilder();
     toSimpleString(0, sb, fr.getFacetResultNode(), "");
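With the search(...) helper above removed, call sites inline the same few steps. Roughly, using only the calls the removed method itself made (the searcher and taxonomyReader variables are illustrative):

    FacetSearchParams fsp = new FacetSearchParams(
        new CountFacetRequest(new CategoryPath("Author"), 10));
    TopScoreDocCollector topDocs =
        TopScoreDocCollector.create(searcher.getIndexReader().maxDoc(), true);
    FacetsCollector facets =
        FacetsCollector.create(fsp, searcher.getIndexReader(), taxonomyReader);
    searcher.search(new MatchAllDocsQuery(), MultiCollector.wrap(topDocs, facets));
    // topDocs.topDocs() holds the hits; facets.getFacetResults() the facet counts.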
@@ -7,16 +7,9 @@ import java.util.List;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.store.Directory;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.example.merge.TaxonomyMergeUtils;
+import org.apache.lucene.facet.index.params.FacetIndexingParams;
 import org.apache.lucene.facet.search.FacetsCollector;
 import org.apache.lucene.facet.search.params.CountFacetRequest;
 import org.apache.lucene.facet.search.params.FacetSearchParams;
@@ -25,6 +18,13 @@ import org.apache.lucene.facet.search.results.FacetResultNode;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.Directory;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -43,34 +43,35 @@ import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
  * limitations under the License.
  */
 
-public class OrdinalMappingReaderTest extends LuceneTestCase {
+public class OrdinalMappingReaderTest extends FacetTestCase {
 
   private static final int NUM_DOCS = 100;
 
   @Test
   public void testTaxonomyMergeUtils() throws Exception {
     Directory dir = newDirectory();
     Directory taxDir = newDirectory();
-    buildIndexWithFacets(dir, taxDir, true);
+    FacetIndexingParams fip = new FacetIndexingParams(randomCategoryListParams());
+    buildIndexWithFacets(dir, taxDir, true, fip);
 
     Directory dir1 = newDirectory();
     Directory taxDir1 = newDirectory();
-    buildIndexWithFacets(dir1, taxDir1, false);
+    buildIndexWithFacets(dir1, taxDir1, false, fip);
 
-    TaxonomyMergeUtils.merge(dir, taxDir, dir1, taxDir1);
+    TaxonomyMergeUtils.merge(dir, taxDir, dir1, taxDir1, fip);
 
-    verifyResults(dir1, taxDir1);
+    verifyResults(dir1, taxDir1, fip);
     dir1.close();
     taxDir1.close();
     dir.close();
     taxDir.close();
   }
 
-  private void verifyResults(Directory dir, Directory taxDir) throws IOException {
+  private void verifyResults(Directory dir, Directory taxDir, FacetIndexingParams fip) throws IOException {
     DirectoryReader reader1 = DirectoryReader.open(dir);
     DirectoryTaxonomyReader taxReader = new DirectoryTaxonomyReader(taxDir);
     IndexSearcher searcher = newSearcher(reader1);
-    FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("tag"), NUM_DOCS));
+    FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(new CategoryPath("tag"), NUM_DOCS));
     FacetsCollector collector = FacetsCollector.create(fsp, reader1, taxReader);
     searcher.search(new MatchAllDocsQuery(), collector);
     FacetResult result = collector.getFacetResults().get(0);
@@ -88,7 +89,7 @@ public class OrdinalMappingReaderTest extends LuceneTestCase {
     taxReader.close();
   }
 
-  private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc) throws IOException {
+  private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc, FacetIndexingParams fip) throws IOException {
     IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
@@ -101,7 +102,7 @@ public class OrdinalMappingReaderTest extends LuceneTestCase {
       int facetValue = asc? j: NUM_DOCS - j;
       categoryPaths.add(new CategoryPath("tag", Integer.toString(facetValue)));
     }
-    FacetFields facetFields = new FacetFields(taxonomyWriter);
+    FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
     facetFields.addFields(doc, categoryPaths);
     writer.addDocument(doc);
   }
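The pattern this test now follows is worth calling out: build one FacetIndexingParams and hand the same instance to both the indexing and the search side, since the search side must decode what the (possibly randomized) encoder wrote. In outline (identifiers are illustrative):

    FacetIndexingParams fip = new FacetIndexingParams(randomCategoryListParams());

    // Index side: FacetFields encodes category ordinals with fip...
    FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
    facetFields.addFields(doc, categoryPaths);

    // ...and the search side decodes with the very same params:
    FacetSearchParams fsp = new FacetSearchParams(fip,
        new CountFacetRequest(new CategoryPath("tag"), 10));
    FacetsCollector collector = FacetsCollector.create(fsp, indexReader, taxoReader);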
@@ -21,6 +21,7 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
 import org.apache.lucene.facet.index.params.PerDimensionIndexingParams;
@@ -57,11 +58,11 @@ import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -82,7 +83,7 @@ import org.junit.Test;
  */
 
 /** Tests facets index migration from payload to DocValues.*/
-public class TestFacetsPayloadMigrationReader extends LuceneTestCase {
+public class TestFacetsPayloadMigrationReader extends FacetTestCase {
 
   private static class PayloadFacetFields extends FacetFields {
 
@@ -284,7 +285,7 @@ public class TestFacetsPayloadMigrationReader extends FacetTestCase {
     for (String dim : expectedCounts.keySet()) {
       CategoryPath drillDownCP = new CategoryPath(dim);
       FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(drillDownCP, 10));
-      Query drillDown = DrillDown.query(fsp, new MatchAllDocsQuery(), drillDownCP);
+      Query drillDown = DrillDown.query(fsp, new MatchAllDocsQuery(), Occur.MUST, drillDownCP);
       TotalHitCountCollector total = new TotalHitCountCollector();
       FacetsCollector fc = FacetsCollector.create(fsp, indexReader, taxoReader);
       searcher.search(drillDown, MultiCollector.wrap(fc, total));
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.index.params;
 
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.util.encoding.DGapVInt8IntEncoder;
 import org.apache.lucene.util.encoding.IntDecoder;
 import org.apache.lucene.util.encoding.IntEncoder;
@@ -25,7 +25,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class CategoryListParamsTest extends LuceneTestCase {
+public class CategoryListParamsTest extends FacetTestCase {
 
   @Test
   public void testDefaultSettings() {
@@ -1,10 +1,10 @@
 package org.apache.lucene.facet.index.params;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.search.DrillDown;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.util.PartitionsUtils;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -24,7 +24,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class FacetIndexingParamsTest extends LuceneTestCase {
+public class FacetIndexingParamsTest extends FacetTestCase {
 
   @Test
   public void testDefaultSettings() {
@@ -2,11 +2,11 @@ package org.apache.lucene.facet.index.params;
 
 import java.util.Collections;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.search.DrillDown;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.util.PartitionsUtils;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -26,7 +26,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class PerDimensionIndexingParamsTest extends LuceneTestCase {
+public class PerDimensionIndexingParamsTest extends FacetTestCase {
 
   @Test
   public void testTopLevelSettings() {
@@ -41,7 +41,6 @@ public class PerDimensionIndexingParamsTest extends FacetTestCase {
     assertEquals("3 characters should be written", 3, numchars);
     assertEquals("wrong drill-down term text", expectedDDText, new String(buf, 0, numchars));
 
-    CategoryListParams clParams = ifip.getCategoryListParams(null);
     assertEquals("partition for all ordinals is the first", "", PartitionsUtils.partitionNameByOrdinal(ifip, 250));
     assertEquals("for partition 0, the same name should be returned", "", PartitionsUtils.partitionName(0));
     assertEquals("for any other, it's the concatenation of name + partition", PartitionsUtils.PART_NAME_PREFIX + "1", PartitionsUtils.partitionName(1));
@@ -7,13 +7,13 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.encoding.DGapIntEncoder;
 import org.apache.lucene.util.encoding.IntEncoder;
 import org.apache.lucene.util.encoding.SortingIntEncoder;
@@ -38,7 +38,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class CategoryListIteratorTest extends LuceneTestCase {
+public class CategoryListIteratorTest extends FacetTestCase {
 
   static final IntsRef[] data = new IntsRef[] {
     new IntsRef(new int[] { 1, 2 }, 0, 2),
@@ -48,9 +48,9 @@ public class CategoryListIteratorTest extends FacetTestCase {
   };
 
   @Test
-  public void testPayloadCategoryListIteraor() throws Exception {
+  public void test() throws Exception {
     Directory dir = newDirectory();
-    final IntEncoder encoder = new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder())));
+    final IntEncoder encoder = randomCategoryListParams().createEncoder();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy()));
     BytesRef buf = new BytesRef();
@@ -89,7 +89,7 @@ public class CategoryListIteratorTest extends FacetTestCase {
   }
 
   @Test
-  public void testPayloadIteratorWithInvalidDoc() throws Exception {
+  public void testEmptyDocuments() throws Exception {
     Directory dir = newDirectory();
     final IntEncoder encoder = new SortingIntEncoder(new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder())));
     // NOTE: test is wired to LogMP... because test relies on certain docids having payloads
@@ -13,6 +13,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -40,7 +41,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.collections.ObjectToIntMap;
 import org.apache.lucene.util.encoding.IntEncoder;
 import org.apache.lucene.util.encoding.VInt8IntEncoder;
@@ -65,7 +65,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class CountingFacetsCollectorTest extends LuceneTestCase {
+public class CountingFacetsCollectorTest extends FacetTestCase {
 
   private static final Term A = new Term("f", "a");
   private static final CategoryPath CP_A = new CategoryPath("A"), CP_B = new CategoryPath("B");
@@ -10,6 +10,7 @@ import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -26,8 +27,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -49,9 +50,9 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class DrillDownTest extends LuceneTestCase {
+public class DrillDownTest extends FacetTestCase {
 
-  private FacetIndexingParams defaultParams = FacetIndexingParams.ALL_PARENTS;
+  private FacetIndexingParams defaultParams;
   private PerDimensionIndexingParams nonDefaultParams;
   private static IndexReader reader;
   private static DirectoryTaxonomyReader taxo;
@@ -60,9 +61,10 @@ public class DrillDownTest extends FacetTestCase {
 
   public DrillDownTest() {
     Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
-    paramsMap.put(new CategoryPath("a"), new CategoryListParams("testing_facets_a"));
-    paramsMap.put(new CategoryPath("b"), new CategoryListParams("testing_facets_b"));
+    paramsMap.put(new CategoryPath("a"), randomCategoryListParams("testing_facets_a"));
+    paramsMap.put(new CategoryPath("b"), randomCategoryListParams("testing_facets_b"));
     nonDefaultParams = new PerDimensionIndexingParams(paramsMap);
+    defaultParams = new FacetIndexingParams(randomCategoryListParams(CategoryListParams.DEFAULT_FIELD));
   }
 
   @BeforeClass
@@ -128,25 +130,25 @@ public class DrillDownTest extends FacetTestCase {
     IndexSearcher searcher = newSearcher(reader);
 
     // Making sure the query yields 25 documents with the facet "a"
-    Query q = DrillDown.query(defaultParams, null, new CategoryPath("a"));
+    Query q = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"));
     TopDocs docs = searcher.search(q, 100);
     assertEquals(25, docs.totalHits);
 
     // Making sure the query yields 5 documents with the facet "b" and the
     // previous (facet "a") query as a base query
-    Query q2 = DrillDown.query(defaultParams, q, new CategoryPath("b"));
+    Query q2 = DrillDown.query(defaultParams, q, Occur.MUST, new CategoryPath("b"));
    docs = searcher.search(q2, 100);
     assertEquals(5, docs.totalHits);
 
     // Making sure that a query of both facet "a" and facet "b" yields 5 results
-    Query q3 = DrillDown.query(defaultParams, null, new CategoryPath("a"), new CategoryPath("b"));
+    Query q3 = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"), new CategoryPath("b"));
     docs = searcher.search(q3, 100);
     assertEquals(5, docs.totalHits);
 
     // Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
     // would gather together 10 results (10%..)
     Query fooQuery = new TermQuery(new Term("content", "foo"));
-    Query q4 = DrillDown.query(defaultParams, fooQuery, new CategoryPath("b"));
+    Query q4 = DrillDown.query(defaultParams, fooQuery, Occur.MUST, new CategoryPath("b"));
     docs = searcher.search(q4, 100);
     assertEquals(10, docs.totalHits);
   }
@@ -156,18 +158,18 @@ public class DrillDownTest extends FacetTestCase {
     IndexSearcher searcher = newSearcher(reader);
 
     // Create the base query to start with
-    Query q = DrillDown.query(defaultParams, null, new CategoryPath("a"));
+    Query q = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"));
 
     // Making sure the query yields 5 documents with the facet "b" and the
     // previous (facet "a") query as a base query
-    Query q2 = DrillDown.query(defaultParams, q, new CategoryPath("b"));
+    Query q2 = DrillDown.query(defaultParams, q, Occur.MUST, new CategoryPath("b"));
     TopDocs docs = searcher.search(q2, 100);
     assertEquals(5, docs.totalHits);
 
     // Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
     // would gather together 10 results (10%..)
     Query fooQuery = new TermQuery(new Term("content", "foo"));
-    Query q4 = DrillDown.query(defaultParams, fooQuery, new CategoryPath("b"));
+    Query q4 = DrillDown.query(defaultParams, fooQuery, Occur.MUST, new CategoryPath("b"));
     docs = searcher.search(q4, 100);
     assertEquals(10, docs.totalHits);
   }
@@ -202,7 +204,7 @@ public class DrillDownTest extends FacetTestCase {
     }
 
     // create a drill-down query with category "a", scores should not change
-    q = DrillDown.query(defaultParams, q, new CategoryPath("a"));
+    q = DrillDown.query(defaultParams, q, Occur.MUST, new CategoryPath("a"));
     docs = searcher.search(q, reader.maxDoc()); // fetch all available docs to this query
     for (ScoreDoc sd : docs.scoreDocs) {
       assertEquals("score of doc=" + sd.doc + " modified", scores[sd.doc], sd.score, 0f);
@@ -214,11 +216,21 @@ public class DrillDownTest extends FacetTestCase {
     // verify that drill-down queries (with no base query) returns 0.0 score
     IndexSearcher searcher = newSearcher(reader);
 
-    Query q = DrillDown.query(defaultParams, null, new CategoryPath("a"));
+    Query q = DrillDown.query(defaultParams, null, Occur.MUST, new CategoryPath("a"));
     TopDocs docs = searcher.search(q, reader.maxDoc()); // fetch all available docs to this query
     for (ScoreDoc sd : docs.scoreDocs) {
       assertEquals(0f, sd.score, 0f);
     }
   }
+
+  @Test
+  public void testOrQuery() throws Exception {
+    IndexSearcher searcher = newSearcher(reader);
+
+    // Making sure that a query of facet "a" or facet "b" yields 40 results
+    Query q = DrillDown.query(defaultParams, null, Occur.SHOULD, new CategoryPath("a"), new CategoryPath("b"));
+    TopDocs docs = searcher.search(q, 100);
+    assertEquals(40, docs.totalHits);
+  }
+
 }
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.FacetTestUtils;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.search.params.CountFacetRequest;
@@ -39,14 +40,14 @@ import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
 
-public class TestDemoFacets extends LuceneTestCase {
+public class TestDemoFacets extends FacetTestCase {
 
   private DirectoryTaxonomyWriter taxoWriter;
   private RandomIndexWriter writer;
-  private FacetFields docBuilder;
+  private FacetFields facetFields;
 
   private void add(String ... categoryPaths) throws IOException {
     Document doc = new Document();
@@ -55,7 +56,7 @@ public class TestDemoFacets extends FacetTestCase {
     for(String categoryPath : categoryPaths) {
       paths.add(new CategoryPath(categoryPath, '/'));
     }
-    docBuilder.addFields(doc, paths);
+    facetFields.addFields(doc, paths);
     writer.addDocument(doc);
   }
 
@@ -70,7 +71,7 @@ public class TestDemoFacets extends FacetTestCase {
 
     // Reused across documents, to add the necessary facet
     // fields:
-    docBuilder = new FacetFields(taxoWriter);
+    facetFields = new FacetFields(taxoWriter);
 
     add("Author/Bob", "Publish Date/2010/10/15");
     add("Author/Lisa", "Publish Date/2010/10/20");
@@ -111,7 +112,7 @@ public class TestDemoFacets extends FacetTestCase {
 
     // Now user drills down on Publish Date/2010:
     fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Author"), 10));
-    Query q2 = DrillDown.query(fsp, new MatchAllDocsQuery(), new CategoryPath("Publish Date/2010", '/'));
+    Query q2 = DrillDown.query(fsp, new MatchAllDocsQuery(), Occur.MUST, new CategoryPath("Publish Date/2010", '/'));
     c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
     searcher.search(q2, c);
     results = c.getFacetResults();
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.search;
 
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
 /*
@@ -20,7 +20,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestFacetArrays extends LuceneTestCase {
+public class TestFacetArrays extends FacetTestCase {
 
   @Test
   public void testFacetArrays() {
@@ -123,15 +123,10 @@ public class TestFacetsAccumulatorWithComplement extends FacetTestBase {
 
   }
 
-  private FacetSearchParams getFacetSearchParams() {
-    return new FacetSearchParams(new CountFacetRequest(new CategoryPath("root","a"), 10));
-  }
-
   /** compute facets with certain facet requests and docs */
   private List<FacetResult> findFacets(ScoredDocIDs sDocids, boolean withComplement) throws IOException {
-    FacetsAccumulator fAccumulator =
-        new StandardFacetsAccumulator(getFacetSearchParams(), indexReader, taxoReader);
+    FacetSearchParams fsp = new FacetSearchParams(getFacetIndexingParams(Integer.MAX_VALUE), new CountFacetRequest(new CategoryPath("root","a"), 10));
+    FacetsAccumulator fAccumulator = new StandardFacetsAccumulator(fsp, indexReader, taxoReader);
 
     fAccumulator.setComplementThreshold(
         withComplement ?
@@ -7,6 +7,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.search.params.FacetSearchParams;
 import org.apache.lucene.facet.search.params.ScoreFacetRequest;
@@ -24,7 +25,6 @@ import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -44,7 +44,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestFacetsCollector extends LuceneTestCase {
+public class TestFacetsCollector extends FacetTestCase {
 
   @Test
   public void testFacetsWithDocScore() throws Exception {
@@ -13,6 +13,7 @@ import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.FacetTestUtils;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.index.params.CategoryListParams;
@@ -41,7 +42,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -61,7 +61,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestMultipleCategoryLists extends LuceneTestCase {
+public class TestMultipleCategoryLists extends FacetTestCase {
 
   private static final CategoryPath[] CATEGORIES = new CategoryPath[] {
     new CategoryPath("Author", "Mark Twain"),
@ -4,24 +4,18 @@ import java.io.IOException;
|
|||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import org.apache.lucene.index.Term;
|
|
||||||
import org.apache.lucene.search.Query;
|
|
||||||
import org.apache.lucene.search.TermQuery;
|
|
||||||
import org.junit.Before;
|
|
||||||
import org.junit.Test;
|
|
||||||
|
|
||||||
import org.apache.lucene.facet.FacetTestBase;
|
import org.apache.lucene.facet.FacetTestBase;
|
||||||
-import org.apache.lucene.facet.search.FacetsAccumulator;
-import org.apache.lucene.facet.search.ScoredDocIDs;
-import org.apache.lucene.facet.search.ScoredDocIDsIterator;
-import org.apache.lucene.facet.search.ScoredDocIdCollector;
-import org.apache.lucene.facet.search.StandardFacetsAccumulator;
 import org.apache.lucene.facet.search.params.CountFacetRequest;
 import org.apache.lucene.facet.search.params.FacetSearchParams;
 import org.apache.lucene.facet.search.params.ScoreFacetRequest;
 import org.apache.lucene.facet.search.results.FacetResult;
 import org.apache.lucene.facet.search.results.FacetResultNode;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.junit.Before;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -65,8 +59,7 @@ public class TestScoredDocIdCollector extends FacetTestBase {
       System.out.println("Query: " + q);
     }
     float constScore = 17.0f;
-    ScoredDocIdCollector dCollector = ScoredDocIdCollector.create(indexReader
-        .maxDoc(), false); // scoring is disabled
+    ScoredDocIdCollector dCollector = ScoredDocIdCollector.create(indexReader.maxDoc(), false); // scoring is disabled
     dCollector.setDefaultScore(constScore);
     searcher.search(q, dCollector);
 
@@ -75,13 +68,16 @@ public class TestScoredDocIdCollector extends FacetTestBase {
     assertEquals("Wrong number of matching documents!", 2, scoredDocIDs.size());
     ScoredDocIDsIterator docItr = scoredDocIDs.iterator();
     while (docItr.next()) {
-      assertEquals("Wrong score for doc " + docItr.getDocID(), constScore,
-          docItr.getScore(), Double.MIN_VALUE);
+      assertEquals("Wrong score for doc " + docItr.getDocID(), constScore, docItr.getScore(), Double.MIN_VALUE);
     }
 
     // verify by facet values
-    List<FacetResult> countRes = findFacets(scoredDocIDs, getFacetSearchParams());
-    List<FacetResult> scoreRes = findFacets(scoredDocIDs, sumScoreSearchParams());
+    CategoryPath cp = new CategoryPath("root","a");
+    FacetSearchParams countFSP = new FacetSearchParams(getFacetIndexingParams(Integer.MAX_VALUE), new CountFacetRequest(cp, 10));
+    FacetSearchParams scoreFSP = new FacetSearchParams(getFacetIndexingParams(Integer.MAX_VALUE), new ScoreFacetRequest(cp, 10));
+
+    List<FacetResult> countRes = findFacets(scoredDocIDs, countFSP);
+    List<FacetResult> scoreRes = findFacets(scoredDocIDs, scoreFSP);
 
     assertEquals("Wrong number of facet count results!", 1, countRes.size());
     assertEquals("Wrong number of facet score results!", 1, scoreRes.size());
@@ -151,14 +147,4 @@ public class TestScoredDocIdCollector extends FacetTestBase {
     }
   }
 
-  /* use a scoring aggregator */
-  private FacetSearchParams sumScoreSearchParams() {
-    // this will use default faceted indexing params, not altering anything about indexing
-    return new FacetSearchParams(new ScoreFacetRequest(new CategoryPath("root", "a"), 10));
-  }
-
-  private FacetSearchParams getFacetSearchParams() {
-    return new FacetSearchParams(new CountFacetRequest(new CategoryPath("root","a"), 10));
-  }
-
 }
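A note on the collapsed collector call above: ScoredDocIdCollector.create(maxDoc, false) builds the scoring-disabled variant, in which every collected document reports the collector's default score. A minimal sketch of that usage, assuming the 4.1-era facet search API used in this test; the getScoredDocIDs() accessor is an assumption about how callers outside FacetTestBase obtain the collected IDs:

import org.apache.lucene.facet.search.ScoredDocIDsIterator;
import org.apache.lucene.facet.search.ScoredDocIdCollector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;

class ScoredDocIdCollectorSketch {
  // Sketch: scoring disabled (second argument false), so every collected
  // doc reports the constant default score rather than a computed one.
  void collectWithConstantScore(IndexSearcher searcher, IndexReader reader, Query q) throws Exception {
    ScoredDocIdCollector collector = ScoredDocIdCollector.create(reader.maxDoc(), false);
    collector.setDefaultScore(17.0f);
    searcher.search(q, collector);
    ScoredDocIDsIterator it = collector.getScoredDocIDs().iterator(); // assumed accessor
    while (it.next()) {
      // it.getDocID() is a matching doc; it.getScore() is 17.0f here
    }
  }
}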
@@ -8,6 +8,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -32,7 +33,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -52,7 +52,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestStandardFacetsAccumulator extends LuceneTestCase {
+public class TestStandardFacetsAccumulator extends FacetTestCase {
 
   private void indexTwoDocs(IndexWriter indexWriter, FacetFields facetFields, boolean withContent) throws Exception {
     for (int i = 0; i < 2; i++) {
@@ -9,6 +9,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
 import org.apache.lucene.facet.search.params.CountFacetRequest;
@@ -30,7 +31,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -50,7 +50,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
+public class TestTopKInEachNodeResultHandler extends FacetTestCase {
 
   //TODO (Facet): Move to extend BaseTestTopK and separate to several smaller test cases (methods) - see TestTopKResultsHandler
 
@@ -4,13 +4,13 @@ import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.FacetTestUtils;
 import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyReaderPair;
 import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyWriterPair;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 import org.junit.Test;
 
@@ -31,7 +31,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestTotalFacetCounts extends LuceneTestCase {
+public class TestTotalFacetCounts extends FacetTestCase {
 
   private static void initCache(int numEntries) {
     TotalFacetCountsCache.getSingleton().clear();
@@ -53,8 +53,7 @@ public class TestTotalFacetCounts extends LuceneTestCase {
     // Create temporary RAMDirectories
     Directory[][] dirs = FacetTestUtils.createIndexTaxonomyDirs(1);
     // Create our index/taxonomy writers
-    IndexTaxonomyWriterPair[] writers = FacetTestUtils
-        .createIndexTaxonomyWriterPair(dirs);
+    IndexTaxonomyWriterPair[] writers = FacetTestUtils.createIndexTaxonomyWriterPair(dirs);
     FacetIndexingParams iParams = new FacetIndexingParams() {
       @Override
       public int getPartitionSize() {
@@ -8,6 +8,7 @@ import java.util.List;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.FacetTestUtils;
 import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyReaderPair;
 import org.apache.lucene.facet.FacetTestUtils.IndexTaxonomyWriterPair;
@@ -32,7 +33,6 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.SlowRAMDirectory;
 import org.apache.lucene.util._TestUtil;
 import org.junit.Before;
@@ -55,7 +55,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestTotalFacetCountsCache extends LuceneTestCase {
+public class TestTotalFacetCountsCache extends FacetTestCase {
 
   static final TotalFacetCountsCache TFC = TotalFacetCountsCache.getSingleton();
 
@@ -5,6 +5,7 @@ import java.util.List;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.associations.AssociationsFacetFields;
 import org.apache.lucene.facet.associations.CategoryAssociationsContainer;
 import org.apache.lucene.facet.associations.CategoryFloatAssociation;
@@ -47,7 +48,7 @@ import org.junit.Test;
  */
 
 /** Test for associations */
-public class AssociationsFacetRequestTest extends LuceneTestCase {
+public class AssociationsFacetRequestTest extends FacetTestCase {
 
   private static Directory dir;
   private static IndexReader reader;
@@ -1,15 +1,14 @@
 package org.apache.lucene.facet.search.params;
 
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.Directory;
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.search.FacetResultsHandler;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.Directory;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -28,7 +27,7 @@ import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
  * limitations under the License.
  */
 
-public class FacetRequestTest extends LuceneTestCase {
+public class FacetRequestTest extends FacetTestCase {
 
   @Test(expected=IllegalArgumentException.class)
   public void testIllegalNumResults() throws Exception {
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.search.params;
 
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
 /*
@@ -20,7 +20,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class FacetSearchParamsTest extends LuceneTestCase {
+public class FacetSearchParamsTest extends FacetTestCase {
 
   @Test
   public void testSearchParamsWithNullRequest() throws Exception {
@@ -5,6 +5,7 @@ import java.util.HashMap;
 import java.util.Random;
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.PerDimensionIndexingParams;
@@ -22,7 +23,6 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.encoding.IntDecoder;
 import org.junit.Test;
 
@@ -43,7 +43,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class MultiCategoryListIteratorTest extends LuceneTestCase {
+public class MultiCategoryListIteratorTest extends FacetTestCase {
 
   @Test
   public void testMultipleCategoryLists() throws Exception {
@@ -58,7 +58,7 @@ public class MultiCategoryListIteratorTest extends LuceneTestCase {
     HashMap<CategoryPath,CategoryListParams> clps = new HashMap<CategoryPath,CategoryListParams>();
     for (String dim : dimensions) {
       CategoryPath cp = new CategoryPath(dim);
-      CategoryListParams clp = new CategoryListParams("$" + dim);
+      CategoryListParams clp = randomCategoryListParams("$" + dim);
       clps.put(cp, clp);
     }
     PerDimensionIndexingParams indexingParams = new PerDimensionIndexingParams(clps);
@@ -5,7 +5,9 @@ import java.util.Collections;
 
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
+import org.apache.lucene.facet.index.params.FacetIndexingParams;
 import org.apache.lucene.facet.search.FacetsAccumulator;
 import org.apache.lucene.facet.search.FacetsCollector;
 import org.apache.lucene.facet.search.StandardFacetsCollector;
@@ -20,7 +22,6 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
-import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
@@ -28,9 +29,7 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -50,16 +49,18 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class OversampleWithDepthTest extends LuceneTestCase {
+public class OversampleWithDepthTest extends FacetTestCase {
 
   @Test
   public void testCountWithdepthUsingSampling() throws Exception, IOException {
     Directory indexDir = newDirectory();
     Directory taxoDir = newDirectory();
 
+    FacetIndexingParams fip = new FacetIndexingParams(randomCategoryListParams());
+
     // index 100 docs, each with one category: ["root", docnum/10, docnum]
     // e.g. root/8/87
-    index100Docs(indexDir, taxoDir);
+    index100Docs(indexDir, taxoDir, fip);
 
     DirectoryReader r = DirectoryReader.open(indexDir);
     TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
@@ -69,7 +70,7 @@ public class OversampleWithDepthTest extends LuceneTestCase {
     facetRequest.setDepth(2);
     facetRequest.setResultMode(ResultMode.PER_NODE_IN_TREE);
 
-    FacetSearchParams fsp = new FacetSearchParams(facetRequest);
+    FacetSearchParams fsp = new FacetSearchParams(fip, facetRequest);
 
     // Craft sampling params to enforce sampling
     final SamplingParams params = new SamplingParams();
@@ -93,13 +94,12 @@ public class OversampleWithDepthTest extends LuceneTestCase {
     IOUtils.close(r, tr, indexDir, taxoDir);
   }
 
-  private void index100Docs(Directory indexDir, Directory taxoDir)
-      throws CorruptIndexException, LockObtainFailedException, IOException {
+  private void index100Docs(Directory indexDir, Directory taxoDir, FacetIndexingParams fip) throws IOException {
     IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer());
     IndexWriter w = new IndexWriter(indexDir, iwc);
     TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
 
-    FacetFields facetFields = new FacetFields(tw);
+    FacetFields facetFields = new FacetFields(tw, fip);
     for (int i = 0; i < 100; i++) {
       Document doc = new Document();
       CategoryPath cp = new CategoryPath("root",Integer.toString(i / 10), Integer.toString(i));
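The OversampleWithDepthTest change above threads one FacetIndexingParams instance through both indexing (FacetFields) and search (FacetSearchParams). A minimal sketch of that pairing, using only constructors that appear in this diff; a default CategoryListParams stands in for the randomCategoryListParams() test helper, which is an assumption here:

import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.index.params.CategoryListParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.params.CountFacetRequest;
import org.apache.lucene.facet.search.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;

class SharedFacetParamsSketch {
  // Sketch: the params describe how category list terms are written, so
  // sharing one instance keeps index time and search time consistent;
  // mismatched params would make facet counting silently come up empty.
  FacetSearchParams setup(TaxonomyWriter taxoWriter) {
    FacetIndexingParams fip = new FacetIndexingParams(new CategoryListParams());
    FacetFields facetFields = new FacetFields(taxoWriter, fip); // index side
    return new FacetSearchParams(fip,                           // search side
        new CountFacetRequest(new CategoryPath("root"), 10));
  }
}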
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.taxonomy;
 
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
 /*
@@ -20,7 +20,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestCategoryPath extends LuceneTestCase {
+public class TestCategoryPath extends FacetTestCase {
 
   @Test
   public void testBasic() {
@@ -7,13 +7,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.SlowRAMDirectory;
 import org.junit.Test;
@@ -37,7 +37,7 @@ import org.junit.Test;
 
 // TODO: remove this suppress if we fix the TaxoWriter Codec to a non-default (see todo in DirTW)
 @SuppressCodecs("SimpleText")
-public class TestTaxonomyCombined extends LuceneTestCase {
+public class TestTaxonomyCombined extends FacetTestCase {
 
   /** The following categories will be added to the taxonomy by
       fillTaxonomy(), and tested by all tests below:
@@ -5,13 +5,13 @@ import java.util.HashSet;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.DiskOrdinalMap;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.OrdinalMap;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 
 /*
@@ -31,7 +31,7 @@ import org.apache.lucene.util._TestUtil;
  * limitations under the License.
  */
 
-public class TestAddTaxonomy extends LuceneTestCase {
+public class TestAddTaxonomy extends FacetTestCase {
 
   private void dotest(int ncats, final int range) throws Exception {
     final AtomicInteger numCats = new AtomicInteger(ncats);
@@ -8,6 +8,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.index.FacetFields;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
@@ -17,7 +18,6 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -37,7 +37,7 @@ import org.apache.lucene.util.LuceneTestCase;
 */
 
 /** Tests concurrent indexing with facets. */
-public class TestConcurrentFacetedIndexing extends LuceneTestCase {
+public class TestConcurrentFacetedIndexing extends FacetTestCase {
 
   // A No-Op TaxonomyWriterCache which always discards all given categories, and
   // always returns true in put(), to indicate some cache entries were cleared.
@@ -4,19 +4,19 @@ import java.io.IOException;
 import java.util.Random;
 
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -36,7 +36,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestDirectoryTaxonomyReader extends LuceneTestCase {
+public class TestDirectoryTaxonomyReader extends FacetTestCase {
 
   @Test
   public void testCloseAfterIncRef() throws Exception {
@@ -7,6 +7,7 @@ import java.util.Random;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
@@ -21,7 +22,6 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -41,7 +41,7 @@ import org.junit.Test;
  * limitations under the License.
  */
 
-public class TestDirectoryTaxonomyWriter extends LuceneTestCase {
+public class TestDirectoryTaxonomyWriter extends FacetTestCase {
 
   // A No-Op TaxonomyWriterCache which always discards all given categories, and
   // always returns true in put(), to indicate some cache entries were cleared.
@@ -9,11 +9,9 @@ import java.nio.ByteBuffer;
 import java.nio.charset.CharsetDecoder;
 import java.nio.charset.CodingErrorAction;
 
-import org.junit.Test;
-
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.facet.taxonomy.writercache.cl2o.CharBlockArray;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -32,7 +30,7 @@ import org.apache.lucene.facet.taxonomy.writercache.cl2o.CharBlockArray;
  * limitations under the License.
  */
 
-public class TestCharBlockArray extends LuceneTestCase {
+public class TestCharBlockArray extends FacetTestCase {
 
   @Test public void testArray() throws Exception {
     CharBlockArray array = new CharBlockArray();
@@ -8,14 +8,11 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
-import org.junit.Test;
-
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
-import org.apache.lucene.facet.taxonomy.writercache.cl2o.CompactLabelToOrdinal;
-import org.apache.lucene.facet.taxonomy.writercache.cl2o.LabelToOrdinal;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util._TestUtil;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -34,7 +31,7 @@ import org.apache.lucene.facet.taxonomy.writercache.cl2o.LabelToOrdinal;
  * limitations under the License.
 */
 
-public class TestCompactLabelToOrdinal extends LuceneTestCase {
+public class TestCompactLabelToOrdinal extends FacetTestCase {
 
   @Test
   public void testL2O() throws Exception {
@@ -9,6 +9,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.StringField;
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.search.ScoredDocIDs;
 import org.apache.lucene.facet.search.ScoredDocIDsIterator;
 import org.apache.lucene.facet.search.ScoredDocIdCollector;
@@ -25,7 +26,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
 /*
@@ -45,7 +45,7 @@ import org.junit.Test;
  * limitations under the License.
 */
 
-public class TestScoredDocIDsUtils extends LuceneTestCase {
+public class TestScoredDocIDsUtils extends FacetTestCase {
 
   @Test
   public void testComplementIterator() throws Exception {
@@ -3,11 +3,9 @@ package org.apache.lucene.util;
 import java.io.IOException;
 import java.util.Arrays;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.UnsafeByteArrayInputStream;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -25,7 +23,7 @@ import org.apache.lucene.util.UnsafeByteArrayInputStream;
  * limitations under the License.
 */
 
-public class UnsafeByteArrayInputStreamTest extends LuceneTestCase {
+public class UnsafeByteArrayInputStreamTest extends FacetTestCase {
 
   @Test
   public void testSimple() throws IOException {
@@ -2,11 +2,9 @@ package org.apache.lucene.util;
 
 import java.io.IOException;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.UnsafeByteArrayOutputStream;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -24,7 +22,7 @@ import org.apache.lucene.util.UnsafeByteArrayOutputStream;
  * limitations under the License.
 */
 
-public class UnsafeByteArrayOutputStreamTest extends LuceneTestCase {
+public class UnsafeByteArrayOutputStreamTest extends FacetTestCase {
 
   @Test
   public void testSimpleWrite() throws IOException {
@@ -4,11 +4,9 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Random;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.ArrayHashMap;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -26,7 +24,7 @@ import org.apache.lucene.util.collections.ArrayHashMap;
  * limitations under the License.
 */
 
-public class ArrayHashMapTest extends LuceneTestCase {
+public class ArrayHashMapTest extends FacetTestCase {
 
   public static final int RANDOM_TEST_NUM_ITERATIONS = 100; // set to 100,000 for deeper test
 
@@ -1,13 +1,11 @@
 package org.apache.lucene.util.collections;
 
-import org.junit.Test;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Random;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.FloatIterator;
-import org.apache.lucene.util.collections.FloatToObjectMap;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -26,7 +24,7 @@ import org.apache.lucene.util.collections.FloatToObjectMap;
  * limitations under the License.
 */
 
-public class FloatToObjectMapTest extends LuceneTestCase {
+public class FloatToObjectMapTest extends FacetTestCase {
 
   @Test
   public void test0() {
@@ -1,10 +1,8 @@
 package org.apache.lucene.util.collections;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntArray;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -22,7 +20,7 @@ import org.apache.lucene.util.collections.IntArray;
  * limitations under the License.
 */
 
-public class IntArrayTest extends LuceneTestCase {
+public class IntArrayTest extends FacetTestCase {
 
   @Test
   public void test0() {
@@ -2,11 +2,9 @@ package org.apache.lucene.util.collections;
 
 import java.util.HashSet;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntHashSet;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -24,7 +22,7 @@ import org.apache.lucene.util.collections.IntHashSet;
  * limitations under the License.
 */
 
-public class IntHashSetTest extends LuceneTestCase {
+public class IntHashSetTest extends FacetTestCase {
 
   @Test
   public void test0() {
@@ -1,15 +1,11 @@
 package org.apache.lucene.util.collections;
 
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.DoubleIterator;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToDoubleMap;
-
 import java.util.HashSet;
 import java.util.Random;
 
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +23,8 @@ import java.util.Random;
  * limitations under the License.
 */
 
-public class IntToDoubleMapTest extends LuceneTestCase {
+public class IntToDoubleMapTest extends FacetTestCase {
 
   private static void assertGround(double value) {
     assertEquals(IntToDoubleMap.GROUND, value, Double.MAX_VALUE);
   }
@@ -1,15 +1,11 @@
 package org.apache.lucene.util.collections;
 
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.FloatIterator;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToFloatMap;
-
 import java.util.HashSet;
 import java.util.Random;
 
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +23,8 @@ import java.util.Random;
  * limitations under the License.
 */
 
-public class IntToFloatMapTest extends LuceneTestCase {
+public class IntToFloatMapTest extends FacetTestCase {
 
   private static void assertGround(float value) {
     assertEquals(IntToFloatMap.GROUND, value, Float.MAX_VALUE);
   }
@@ -1,12 +1,10 @@
 package org.apache.lucene.util.collections;
 
-import org.junit.Test;
 import java.util.HashSet;
 import java.util.Random;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToIntMap;
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -25,7 +23,7 @@ import org.apache.lucene.util.collections.IntToIntMap;
  * limitations under the License.
 */
 
-public class IntToIntMapTest extends LuceneTestCase {
+public class IntToIntMapTest extends FacetTestCase {
 
   private static void assertGround(int value) {
     assertEquals(IntToIntMap.GROUD, value);
@@ -4,12 +4,9 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Random;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.IntIterator;
-import org.apache.lucene.util.collections.IntToObjectMap;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +24,7 @@ import org.apache.lucene.util.collections.IntToObjectMap;
  * limitations under the License.
 */
 
-public class IntToObjectMapTest extends LuceneTestCase {
+public class IntToObjectMapTest extends FacetTestCase {
 
   @Test
   public void test0() {
@@ -1,15 +1,12 @@
 package org.apache.lucene.util.collections;
 
-import org.junit.Test;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.FloatIterator;
-import org.apache.lucene.util.collections.ObjectToFloatMap;
-
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Random;
 
+import org.apache.lucene.facet.FacetTestCase;
+import org.junit.Test;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -27,7 +24,7 @@ import java.util.Random;
  * limitations under the License.
 */
 
-public class ObjectToFloatMapTest extends LuceneTestCase {
+public class ObjectToFloatMapTest extends FacetTestCase {
 
   @Test
   public void test0() {
@@ -6,6 +6,7 @@ import java.util.Random;
 
 import org.junit.Test;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.collections.IntIterator;
 import org.apache.lucene.util.collections.ObjectToIntMap;
@@ -27,7 +28,7 @@ import org.apache.lucene.util.collections.ObjectToIntMap;
  * limitations under the License.
 */
 
-public class ObjectToIntMapTest extends LuceneTestCase {
+public class ObjectToIntMapTest extends FacetTestCase {
 
   @Test
   public void test0() {
@@ -1,10 +1,8 @@
 package org.apache.lucene.util.collections;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.junit.Test;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.collections.LRUHashMap;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -22,7 +20,7 @@ import org.apache.lucene.util.collections.LRUHashMap;
  * limitations under the License.
 */
 
-public class TestLRUHashMap extends LuceneTestCase {
+public class TestLRUHashMap extends FacetTestCase {
   // testLRU() tests that the specified size limit is indeed honored, and
   // the remaining objects in the map are indeed those that have been most
   // recently used
@@ -3,9 +3,9 @@ package org.apache.lucene.util.encoding;
 import java.io.IOException;
 import java.util.Arrays;
 
+import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.LuceneTestCase;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -26,7 +26,7 @@ import org.junit.Test;
  * limitations under the License.
 */
 
-public class EncodingTest extends LuceneTestCase {
+public class EncodingTest extends FacetTestCase {
 
   private static IntsRef uniqueSortedData, data;
 
@@ -63,6 +63,9 @@ Detailed Change List
 New Features
 ----------------------
 
+* SOLR-4043: Add ability to get success/failure responses from Collections API.
+  (Raintung Li, Mark Miller)
+
 Bug Fixes
 ----------------------
 
@@ -80,6 +83,16 @@ Bug Fixes
 * SOLR-4349 : Admin UI - Query Interface does not work in IE
   (steffkes)
 
+* SOLR-4359: The RecentUpdates#update method should treat a problem reading the
+  next record the same as a problem parsing the record - log the exception and
+  break. (Mark Miller)
+
+* SOLR-4225: Term info page under schema browser shows incorrect count of terms
+  (steffkes)
+
+* SOLR-3926: Solr should support better way of finding active sorts (Eirik Lygre via
+  Erick Erickson)
+
 Optimizations
 ----------------------
 
@@ -100,6 +113,8 @@ Other Changes
 * SOLR-4348: Make the lock type configurable by system property by default.
   (Mark Miller)
 
+* SOLR-4353: Renamed example jetty context file to reduce confusion (hossman)
+
 ================== 4.1.0 ==================
 
 Versions of Major Components
@@ -607,6 +622,9 @@ Bug Fixes
 * SOLR-4266: HttpSolrServer does not release connection properly on exception
   when no response parser is used. (Steve Molloy via Mark Miller)
 
+* SOLR-2298: Updated JavaDoc for SolrDocument.addField and SolrInputDocument.addField
+  to have more information on name and value parameters. (Siva Natarajan)
+
 Other Changes
 ----------------------
 
|
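The SOLR-4043 entry above is the user-visible side of the DistributedQueue and overseer changes later in this commit: Collections API calls now block until the overseer reports success or failure. A minimal client-side sketch of observing that payload; the URL, collection name, and shard count below are illustrative, not part of this commit:

import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;

class CollectionsApiSketch {
  public static void main(String[] args) throws Exception {
    HttpSolrServer server = new HttpSolrServer("http://localhost:8983/solr");

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("action", "CREATE");
    params.set("name", "mycollection");
    params.set("numShards", "2");

    QueryRequest request = new QueryRequest(params);
    request.setPath("/admin/collections");

    // With this commit the call blocks until the overseer answers (or the
    // 60s default timeout elapses), and the response carries per-shard
    // success/failure results instead of returning immediately.
    NamedList<Object> response = server.request(request);
    System.out.println(response);
  }
}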
@@ -17,7 +17,6 @@
 package org.apache.solr.handler.dataimport;
 
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import java.net.URLEncoder;
@@ -109,58 +108,63 @@ public class TestBuiltInEvaluators extends AbstractDataImportHandlerTestCase {
 
   }
 
-  private Date getNow() {
-    Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT"),
-        Locale.ROOT);
+  private Date twoDaysAgo(Locale l, TimeZone tz) {
+    Calendar calendar = Calendar.getInstance(tz, l);
     calendar.add(Calendar.DAY_OF_YEAR, -2);
     return calendar.getTime();
   }
 
   @Test
-  @Ignore("fails if somewhere on earth is a DST change")
   public void testDateFormatEvaluator() {
     Evaluator dateFormatEval = new DateFormatEvaluator();
     ContextImpl context = new ContextImpl(null, resolver, null,
         Context.FULL_DUMP, Collections.<String,Object> emptyMap(), null, null);
-    String currentLocale = Locale.getDefault().toString();
+
+    Locale rootLocale = Locale.ROOT;
+    Locale defaultLocale = Locale.getDefault();
+    TimeZone defaultTz = TimeZone.getDefault();
 
     {
-      {
-        SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", Locale.ROOT);
-        String sdf = sdfDate.format(getNow());
-        String dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH'", context);
-        assertEquals(sdf,dfe);
-      }
-      {
-        SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", Locale.getDefault());
-        String sdf = sdfDate.format(getNow());
-        String dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH','"+ currentLocale + "'", context);
-        assertEquals(sdf,dfe);
-        for(String tz : TimeZone.getAvailableIDs()) {
-          sdfDate.setTimeZone(TimeZone.getTimeZone(tz));
-          sdf = sdfDate.format(getNow());
-          dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH','" + currentLocale + "','" + tz + "'", context);
-          assertEquals(sdf,dfe);
-        }
-      }
+      SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", rootLocale);
+      String sdf = sdfDate.format(twoDaysAgo(rootLocale, defaultTz));
+      String dfe = dateFormatEval.evaluate("'NOW-2DAYS','yyyy-MM-dd HH'", context);
+      assertEquals(sdf,dfe);
+    }
+    {
+      SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH", defaultLocale);
+      String sdf = sdfDate.format(twoDaysAgo(defaultLocale, TimeZone.getDefault()));
+      String dfe = dateFormatEval.evaluate(
+          "'NOW-2DAYS','yyyy-MM-dd HH','" + defaultLocale + "'", context);
+      assertEquals(sdf,dfe);
+      for(String tzStr : TimeZone.getAvailableIDs()) {
+        TimeZone tz = TimeZone.getTimeZone(tzStr);
+        sdfDate.setTimeZone(tz);
+        sdf = sdfDate.format(twoDaysAgo(defaultLocale, tz));
+        dfe = dateFormatEval.evaluate(
+            "'NOW-2DAYS','yyyy-MM-dd HH','" + defaultLocale + "','" + tzStr + "'", context);
+        assertEquals(sdf,dfe);
+      }
     }
 
     Date d = new Date();
     Map<String,Object> map = new HashMap<String,Object>();
     map.put("key", d);
     resolver.addNamespace("A", map);
 
     assertEquals(
-        new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.ROOT).format(d),
+        new SimpleDateFormat("yyyy-MM-dd HH:mm", rootLocale).format(d),
         dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm'", context));
     assertEquals(
-        new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.getDefault()).format(d),
-        dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm','" + currentLocale
-            + "'", context));
-    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.getDefault());
-    for(String tz : TimeZone.getAvailableIDs()) {
-      sdf.setTimeZone(TimeZone.getTimeZone(tz));
+        new SimpleDateFormat("yyyy-MM-dd HH:mm", defaultLocale).format(d),
+        dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm','" + defaultLocale + "'", context));
+    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm", defaultLocale);
+    for(String tzStr : TimeZone.getAvailableIDs()) {
+      TimeZone tz = TimeZone.getTimeZone(tzStr);
+      sdf.setTimeZone(tz);
       assertEquals(
           sdf.format(d),
-          dateFormatEval.evaluate("A.key, 'yyyy-MM-dd HH:mm','" + currentLocale + "', '" + tz + "'", context));
+          dateFormatEval.evaluate(
+              "A.key, 'yyyy-MM-dd HH:mm','" + defaultLocale + "', '" + tzStr + "'", context));
 
     }
 
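The rewrite above removes the @Ignore by making the expected value and the evaluator agree on locale and time zone, instead of mixing a GMT calendar with default-zone formatting. The underlying pitfall in isolation, as a sketch (the zone ID is arbitrary):

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;

class DateMathPitfall {
  public static void main(String[] args) {
    TimeZone tz = TimeZone.getTimeZone("America/New_York");
    Locale locale = Locale.ROOT;

    // Fragile: date math in GMT, formatting in another zone. Around a DST
    // change the two disagree by an hour, so "-2 days at hour granularity"
    // no longer matches what a formatter in tz produces.
    Calendar gmt = Calendar.getInstance(TimeZone.getTimeZone("GMT"), locale);
    gmt.add(Calendar.DAY_OF_YEAR, -2);

    // Robust: do the math in the same zone/locale the formatter will use.
    Calendar local = Calendar.getInstance(tz, locale);
    local.add(Calendar.DAY_OF_YEAR, -2);

    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH", locale);
    fmt.setTimeZone(tz);
    System.out.println(fmt.format(gmt.getTime()));
    System.out.println(fmt.format(local.getTime()));
  }
}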
@@ -48,6 +48,8 @@ public class DistributedQueue {
 
   private final String prefix = "qn-";
 
+  private final String response_prefix = "qnr-";
+
   public DistributedQueue(SolrZkClient zookeeper, String dir, List<ACL> acl) {
     this.dir = dir;
 
@@ -100,7 +102,7 @@ public class DistributedQueue {
    *
    * @return the data at the head of the queue.
    */
-  public byte[] element() throws NoSuchElementException, KeeperException,
+  private QueueEvent element() throws NoSuchElementException, KeeperException,
       InterruptedException {
     TreeMap<Long,String> orderedChildren;
 
@@ -122,7 +124,7 @@ public class DistributedQueue {
     for (String headNode : orderedChildren.values()) {
       if (headNode != null) {
         try {
-          return zookeeper.getData(dir + "/" + headNode, null, null, true);
+          return new QueueEvent(dir + "/" + headNode, zookeeper.getData(dir + "/" + headNode, null, null, true), null);
         } catch (KeeperException.NoNodeException e) {
           // Another client removed the node first, try next
         }
@@ -162,17 +164,41 @@ public class DistributedQueue {
     }
   }
 
+  /**
+   * Remove the event and save the response into the other path.
+   */
+  public byte[] remove(QueueEvent event) throws KeeperException,
+      InterruptedException {
+    String path = event.getId();
+    String responsePath = dir + "/" + response_prefix
+        + path.substring(path.lastIndexOf("-") + 1);
+    if (zookeeper.exists(responsePath, true)) {
+      zookeeper.setData(responsePath, event.getBytes(), true);
+    }
+    byte[] data = zookeeper.getData(path, null, null, true);
+    zookeeper.delete(path, -1, true);
+    return data;
+  }
+
+
   private class LatchChildWatcher implements Watcher {
 
     Object lock = new Object();
+    private WatchedEvent event = null;
 
     public LatchChildWatcher() {}
 
+    public LatchChildWatcher(Object lock) {
+      this.lock = lock;
+    }
+
     @Override
     public void process(WatchedEvent event) {
       LOG.info("Watcher fired on path: " + event.getPath() + " state: "
           + event.getState() + " type " + event.getType());
       synchronized (lock) {
+        this.event = event;
         lock.notifyAll();
       }
     }
@@ -182,6 +208,10 @@ public class DistributedQueue {
         lock.wait(timeout);
       }
     }
+
+    public WatchedEvent getWatchedEvent() {
+      return event;
+    }
   }
 
   /**
@@ -225,22 +255,51 @@ public class DistributedQueue {
    */
   public boolean offer(byte[] data) throws KeeperException,
       InterruptedException {
+    return createData(dir + "/" + prefix, data,
+        CreateMode.PERSISTENT_SEQUENTIAL) != null;
+  }
+
+  /**
+   * Inserts data into zookeeper.
+   *
+   * @return true if data was successfully added
+   */
+  private String createData(String path, byte[] data, CreateMode mode)
+      throws KeeperException, InterruptedException {
     for (;;) {
      try {
-        zookeeper.create(dir + "/" + prefix, data, acl,
-            CreateMode.PERSISTENT_SEQUENTIAL, true);
-        return true;
+        return zookeeper.create(path, data, acl, mode, true);
      } catch (KeeperException.NoNodeException e) {
        try {
          zookeeper.create(dir, new byte[0], acl, CreateMode.PERSISTENT, true);
        } catch (KeeperException.NodeExistsException ne) {
-          //someone created it
+          // someone created it
        }
      }
    }
+  }
+
+  /**
+   * Offer the data and wait for the response.
+   */
+  public QueueEvent offer(byte[] data, long timeout) throws KeeperException,
+      InterruptedException {
+    String path = createData(dir + "/" + prefix, data,
+        CreateMode.PERSISTENT_SEQUENTIAL);
+    String watchID = createData(
+        dir + "/" + response_prefix + path.substring(path.lastIndexOf("-") + 1),
+        null, CreateMode.EPHEMERAL);
+    Object lock = new Object();
+    LatchChildWatcher watcher = new LatchChildWatcher(lock);
+    synchronized (lock) {
+      if (zookeeper.exists(watchID, watcher, true) != null) {
+        watcher.await(timeout);
+      }
+    }
+    byte[] bytes = zookeeper.getData(watchID, null, null, true);
+    zookeeper.delete(watchID, -1, true);
+    return new QueueEvent(watchID, bytes, watcher.getWatchedEvent());
   }
 
   /**
@@ -251,21 +310,74 @@ public class DistributedQueue {
    */
   public byte[] peek() throws KeeperException, InterruptedException {
     try {
-      return element();
+      return element().getBytes();
     } catch (NoSuchElementException e) {
       return null;
     }
   }
 
+  public static class QueueEvent {
+    @Override
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + ((id == null) ? 0 : id.hashCode());
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) return true;
+      if (obj == null) return false;
+      if (getClass() != obj.getClass()) return false;
+      QueueEvent other = (QueueEvent) obj;
+      if (id == null) {
+        if (other.id != null) return false;
+      } else if (!id.equals(other.id)) return false;
+      return true;
+    }
+
+    private WatchedEvent event = null;
+    private String id;
+    private byte[] bytes;
+
+    QueueEvent(String id, byte[] bytes, WatchedEvent event) {
+      this.id = id;
+      this.bytes = bytes;
+      this.event = event;
+    }
+
+    public void setId(String id) {
+      this.id = id;
+    }
+
+    public String getId() {
+      return id;
+    }
+
+    public void setBytes(byte[] bytes) {
+      this.bytes = bytes;
+    }
+
+    public byte[] getBytes() {
+      return bytes;
+    }
+
+    public WatchedEvent getWatchedEvent() {
+      return event;
+    }
+
+  }
+
   /**
    * Returns the data at the first element of the queue, or null if the queue is
    * empty.
    *
    * @return data at the first element of the queue, or null.
    */
-  public byte[] peek(boolean block) throws KeeperException, InterruptedException {
+  public QueueEvent peek(boolean block) throws KeeperException, InterruptedException {
     if (!block) {
-      return peek();
+      return element();
     }
 
     TreeMap<Long,String> orderedChildren;
@@ -286,7 +398,7 @@ public class DistributedQueue {
         String path = dir + "/" + headNode;
         try {
           byte[] data = zookeeper.getData(path, null, null, true);
-          return data;
+          return new QueueEvent(path, data, childWatcher.getWatchedEvent());
         } catch (KeeperException.NoNodeException e) {
           // Another client deleted the node first.
         }
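Taken together, the changes above turn DistributedQueue into a small request/response channel: offer(byte[], long) creates a persistent qn- request node plus an ephemeral qnr- response node, blocks on a LatchChildWatcher, and returns the consumer's answer as a QueueEvent, while remove(QueueEvent) lets the consumer publish that answer before deleting the request. A sketch of the producer side under those assumptions; the queue path and timeout are illustrative:

import java.util.List;
import org.apache.solr.cloud.DistributedQueue;
import org.apache.solr.cloud.DistributedQueue.QueueEvent;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.zookeeper.data.ACL;

class QueueRoundTripSketch {
  // Submit a request and block until the consumer answers or the timeout
  // elapses. Assumes an established SolrZkClient; error handling elided.
  static byte[] submit(SolrZkClient zkClient, List<ACL> acl, byte[] request)
      throws Exception {
    DistributedQueue queue =
        new DistributedQueue(zkClient, "/overseer/collection-queue-work", acl);
    QueueEvent event = queue.offer(request, 60 * 1000L);
    // getBytes() is non-null only if the consumer wrote a response into
    // the ephemeral qnr- node before the watch timed out.
    return event.getBytes();
  }
}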
@@ -23,6 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.cloud.DistributedQueue.QueueEvent;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClosableThread;
@@ -36,6 +38,7 @@ import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.handler.component.ShardRequest;
@@ -94,47 +97,33 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
 
   @Override
   public void run() {
-    log.info("Process current queue of collection messages");
+    log.info("Process current queue of collection creations");
     while (amILeader() && !isClosed) {
       try {
-        byte[] head = workQueue.peek(true);
-        //if (head != null) { // should not happen since we block above
-          final ZkNodeProps message = ZkNodeProps.load(head);
-          final String operation = message.getStr(QUEUE_OPERATION);
-          try {
-            boolean success = processMessage(message, operation);
-            if (!success) {
-              // TODO: what to do on failure / partial failure
-              // if we fail, do we clean up then ?
-              SolrException.log(log,
-                  "Collection " + operation + " of " + message.getStr("name")
-                      + " failed");
-            }
-          } catch(Throwable t) {
-            SolrException.log(log,
-                "Collection " + operation + " of " + message.getStr("name")
-                    + " failed", t);
-          }
-        //}
-
-        workQueue.poll();
+        QueueEvent head = workQueue.peek(true);
+        final ZkNodeProps message = ZkNodeProps.load(head.getBytes());
+        log.info("Overseer Collection Processor: Get the message id:" + head.getId() + " message:" + message.toString());
+        final String operation = message.getStr(QUEUE_OPERATION);
+        SolrResponse response = processMessage(message, operation);
+        head.setBytes(SolrResponse.serializable(response));
+        workQueue.remove(head);
+        log.info("Overseer Collection Processor: Message id:" + head.getId() + " complete, response:" + response.getResponse().toString());
       } catch (KeeperException e) {
         if (e.code() == KeeperException.Code.SESSIONEXPIRED
             || e.code() == KeeperException.Code.CONNECTIONLOSS) {
           log.warn("Overseer cannot talk to ZK");
           return;
         }
         SolrException.log(log, "", e);
-        throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "",
-            e);
+        throw new ZooKeeperException(
+            SolrException.ErrorCode.SERVER_ERROR, "", e);
       } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
         return;
+      } catch (Throwable e) {
+        SolrException.log(log, "", e);
       }
     }
   }
 
   public void close() {
@@ -157,21 +146,49 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
     return false;
   }
 
-  protected boolean processMessage(ZkNodeProps message, String operation) {
-    if (CREATECOLLECTION.equals(operation)) {
-      return createCollection(zkStateReader.getClusterState(), message);
-    } else if (DELETECOLLECTION.equals(operation)) {
-      ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
-      params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
-      return collectionCmd(zkStateReader.getClusterState(), message, params);
-    } else if (RELOADCOLLECTION.equals(operation)) {
-      ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
-      return collectionCmd(zkStateReader.getClusterState(), message, params);
+  protected SolrResponse processMessage(ZkNodeProps message, String operation) {
+
+    NamedList results = new NamedList();
+    try {
+      if (CREATECOLLECTION.equals(operation)) {
+        createCollection(zkStateReader.getClusterState(), message);
+      } else if (DELETECOLLECTION.equals(operation)) {
+        ModifiableSolrParams params = new ModifiableSolrParams();
+        params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
+        params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
+        collectionCmd(zkStateReader.getClusterState(), message, params);
+      } else if (RELOADCOLLECTION.equals(operation)) {
+        ModifiableSolrParams params = new ModifiableSolrParams();
+        params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
+        collectionCmd(zkStateReader.getClusterState(), message, params);
+      } else {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown operation:"
+            + operation);
+      }
+      int failed = 0;
+      ShardResponse srsp;
+
+      do {
+        srsp = shardHandler.takeCompletedIncludingErrors();
+        if (srsp != null) {
+          Throwable e = srsp.getException();
+          if (e != null) {
+            failed++;
+            log.error("Error talking to shard: " + srsp.getShard(), e);
+            results.add(srsp.getShard(), e);
+          } else {
+            results.add(srsp.getShard(), srsp.getSolrResponse().getResponse());
+          }
+        }
+      } while (srsp != null);
+    } catch (SolrException ex) {
+      SolrException.log(log, "Collection " + operation + " of "
+          + message.getStr("name") + " failed");
+      results.add("Operation " + operation + " caused exception:", ex);
+    } finally {
+      return new OverseerSolrResponse(results);
     }
-    // unknown command, toss it from our queue
-    return true;
   }
 
   private boolean createCollection(ClusterState clusterState, ZkNodeProps message) {
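On the overseer side the run() loop above is the whole consumer protocol: block in peek(true), compute a SolrResponse, attach its serialized bytes to the event, and remove(head) to publish them back through the qnr- node. The same shape in isolation, as a sketch; handle() is a hypothetical stand-in for processMessage():

import org.apache.solr.cloud.DistributedQueue;
import org.apache.solr.cloud.DistributedQueue.QueueEvent;

class ConsumerSketch {
  // One iteration of a queue consumer, reduced to its essentials:
  // block for a request, compute an answer, send it back on the same event.
  static void serveOne(DistributedQueue workQueue) throws Exception {
    QueueEvent head = workQueue.peek(true);   // blocks for the next qn- node
    byte[] answer = handle(head.getBytes());
    head.setBytes(answer);                    // the response rides on the event
    workQueue.remove(head);                   // deletes qn-, fills qnr-
  }

  static byte[] handle(byte[] request) {
    return request;                           // echo; illustrative only
  }
}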
@@ -0,0 +1,47 @@
+package org.apache.solr.cloud;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.common.util.NamedList;
+
+public class OverseerSolrResponse extends SolrResponse {
+
+  NamedList responseList = null;
+
+  public OverseerSolrResponse(NamedList list) {
+    responseList = list;
+  }
+
+  @Override
+  public long getElapsedTime() {
+    // TODO Auto-generated method stub
+    return 0;
+  }
+
+  @Override
+  public void setResponse(NamedList<Object> rsp) {
+    this.responseList = rsp;
+  }
+
+  @Override
+  public NamedList<Object> getResponse() {
+    return responseList;
+  }
+
+}
@@ -21,10 +21,12 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.RequestSyncShard;
+import org.apache.solr.cloud.DistributedQueue.QueueEvent;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.cloud.OverseerCollectionProcessor;
 import org.apache.solr.common.SolrException;
@@ -127,7 +129,35 @@ public class CollectionsHandler extends RequestHandlerBase {
 
     rsp.setHttpCaching(false);
   }
+
+  public static long DEFAULT_ZK_TIMEOUT = 60*1000;
+
+  private void handleResponse(String operation, ZkNodeProps m,
+      SolrQueryResponse rsp) throws KeeperException, InterruptedException {
+    long time = System.currentTimeMillis();
+    QueueEvent event = coreContainer.getZkController()
+        .getOverseerCollectionQueue()
+        .offer(ZkStateReader.toJSON(m), DEFAULT_ZK_TIMEOUT);
+    if (event.getBytes() != null) {
+      SolrResponse response = SolrResponse.deserialize(event.getBytes());
+      rsp.getValues().addAll(response.getResponse());
+    } else {
+      if (System.currentTimeMillis() - time >= DEFAULT_ZK_TIMEOUT) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, operation
+            + " the collection time out:" + DEFAULT_ZK_TIMEOUT / 1000 + "s");
+      } else if (event.getWatchedEvent() != null) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, operation
+            + " the collection error [Watcher fired on path: "
+            + event.getWatchedEvent().getPath() + " state: "
+            + event.getWatchedEvent().getState() + " type "
+            + event.getWatchedEvent().getType() + "]");
+      } else {
+        throw new SolrException(ErrorCode.SERVER_ERROR, operation
+            + " the collection unknown case");
+      }
+    }
+  }
 
   private void handleReloadAction(SolrQueryRequest req, SolrQueryResponse rsp) throws KeeperException, InterruptedException {
     log.info("Reloading Collection : " + req.getParamString());
     String name = req.getParams().required().get("name");
@@ -135,8 +165,7 @@ public class CollectionsHandler extends RequestHandlerBase {
     ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION,
         OverseerCollectionProcessor.RELOADCOLLECTION, "name", name);
 
-    // TODO: what if you want to block until the collection is available?
-    coreContainer.getZkController().getOverseerCollectionQueue().offer(ZkStateReader.toJSON(m));
+    handleResponse(OverseerCollectionProcessor.RELOADCOLLECTION, m, rsp);
   }
 
   private void handleSyncShardAction(SolrQueryRequest req, SolrQueryResponse rsp) throws KeeperException, InterruptedException, SolrServerException, IOException {
@@ -168,8 +197,7 @@ public class CollectionsHandler extends RequestHandlerBase {
     ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION,
         OverseerCollectionProcessor.DELETECOLLECTION, "name", name);
 
-    // TODO: what if you want to block until the collection is available?
-    coreContainer.getZkController().getOverseerCollectionQueue().offer(ZkStateReader.toJSON(m));
+    handleResponse(OverseerCollectionProcessor.DELETECOLLECTION, m, rsp);
   }
 
 
@@ -208,8 +236,7 @@ public class CollectionsHandler extends RequestHandlerBase {
 
     ZkNodeProps m = new ZkNodeProps(props);
 
-    // TODO: what if you want to block until the collection is available?
-    coreContainer.getZkController().getOverseerCollectionQueue().offer(ZkStateReader.toJSON(m));
+    handleResponse(OverseerCollectionProcessor.CREATECOLLECTION, m, rsp);
   }
 
   public static ModifiableSolrParams params(String... params) {
@@ -29,8 +29,6 @@ import org.apache.lucene.document.FieldType.NumericType;
 import org.apache.lucene.document.FloatField;
 import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.LongField;
-import org.apache.lucene.index.GeneralField;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.StorableField;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
@@ -48,7 +46,6 @@ import org.apache.solr.analysis.TrieTokenizerFactory;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.response.TextResponseWriter;
 import org.apache.solr.search.QParser;
-import org.apache.solr.search.function.*;
 
 /**
  * Provides field types to support for Lucene's {@link
@@ -311,19 +308,19 @@ public class TrieField extends PrimitiveFieldType {
     String s = val.toString();
     switch (type) {
       case INTEGER:
-        NumericUtils.intToPrefixCoded(Integer.parseInt(s), 0, result);
+        NumericUtils.intToPrefixCodedBytes(Integer.parseInt(s), 0, result);
         break;
       case FLOAT:
-        NumericUtils.intToPrefixCoded(NumericUtils.floatToSortableInt(Float.parseFloat(s)), 0, result);
+        NumericUtils.intToPrefixCodedBytes(NumericUtils.floatToSortableInt(Float.parseFloat(s)), 0, result);
         break;
       case LONG:
-        NumericUtils.longToPrefixCoded(Long.parseLong(s), 0, result);
+        NumericUtils.longToPrefixCodedBytes(Long.parseLong(s), 0, result);
         break;
       case DOUBLE:
-        NumericUtils.longToPrefixCoded(NumericUtils.doubleToSortableLong(Double.parseDouble(s)), 0, result);
+        NumericUtils.longToPrefixCodedBytes(NumericUtils.doubleToSortableLong(Double.parseDouble(s)), 0, result);
        break;
       case DATE:
-        NumericUtils.longToPrefixCoded(dateField.parseMath(null, s).getTime(), 0, result);
+        NumericUtils.longToPrefixCodedBytes(dateField.parseMath(null, s).getTime(), 0, result);
         break;
       default:
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
@@ -419,17 +416,17 @@ public class TrieField extends PrimitiveFieldType {
     if (val != null) {
       switch (type) {
         case INTEGER:
-          NumericUtils.intToPrefixCoded(val.intValue(), 0, bytes);
+          NumericUtils.intToPrefixCodedBytes(val.intValue(), 0, bytes);
          break;
         case FLOAT:
-          NumericUtils.intToPrefixCoded(NumericUtils.floatToSortableInt(val.floatValue()), 0, bytes);
+          NumericUtils.intToPrefixCodedBytes(NumericUtils.floatToSortableInt(val.floatValue()), 0, bytes);
          break;
         case LONG: //fallthrough!
         case DATE:
-          NumericUtils.longToPrefixCoded(val.longValue(), 0, bytes);
+          NumericUtils.longToPrefixCodedBytes(val.longValue(), 0, bytes);
          break;
         case DOUBLE:
-          NumericUtils.longToPrefixCoded(NumericUtils.doubleToSortableLong(val.doubleValue()), 0, bytes);
+          NumericUtils.longToPrefixCodedBytes(NumericUtils.doubleToSortableLong(val.doubleValue()), 0, bytes);
          break;
         default:
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + f.name());
@@ -441,7 +438,7 @@ public class TrieField extends PrimitiveFieldType {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid field contents: "+f.name());
     switch (type) {
       case INTEGER:
-        NumericUtils.intToPrefixCoded(toInt(bytesRef.bytes, bytesRef.offset), 0, bytes);
+        NumericUtils.intToPrefixCodedBytes(toInt(bytesRef.bytes, bytesRef.offset), 0, bytes);
        break;
       case FLOAT: {
         // WARNING: Code Duplication! Keep in sync with o.a.l.util.NumericUtils!
@@ -449,12 +446,12 @@ public class TrieField extends PrimitiveFieldType {
         // code in next 2 lines is identical to: int v = NumericUtils.floatToSortableInt(Float.intBitsToFloat(toInt(arr)));
         int v = toInt(bytesRef.bytes, bytesRef.offset);
         if (v<0) v ^= 0x7fffffff;
-        NumericUtils.intToPrefixCoded(v, 0, bytes);
+        NumericUtils.intToPrefixCodedBytes(v, 0, bytes);
         break;
       }
       case LONG: //fallthrough!
       case DATE:
-        NumericUtils.longToPrefixCoded(toLong(bytesRef.bytes, bytesRef.offset), 0, bytes);
+        NumericUtils.longToPrefixCodedBytes(toLong(bytesRef.bytes, bytesRef.offset), 0, bytes);
        break;
       case DOUBLE: {
         // WARNING: Code Duplication! Keep in sync with o.a.l.util.NumericUtils!
@@ -462,7 +459,7 @@ public class TrieField extends PrimitiveFieldType {
         // code in next 2 lines is identical to: long v = NumericUtils.doubleToSortableLong(Double.longBitsToDouble(toLong(arr)));
         long v = toLong(bytesRef.bytes, bytesRef.offset);
         if (v<0) v ^= 0x7fffffffffffffffL;
-        NumericUtils.longToPrefixCoded(v, 0, bytes);
+        NumericUtils.longToPrefixCodedBytes(v, 0, bytes);
         break;
       }
       default:
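Every call site above moves from NumericUtils.*ToPrefixCoded() to the new *ToPrefixCodedBytes() variants, which fill a caller-supplied BytesRef directly. A minimal sketch of the pattern, assuming the NumericUtils buffer-size constants from Lucene's util package:

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;

class PrefixCodedSketch {
  // Encode an int and a long at shift 0 (full precision) into reusable
  // BytesRef buffers, as TrieField now does for INTEGER/FLOAT and
  // LONG/DOUBLE/DATE respectively.
  static void encode(int intVal, long longVal) {
    BytesRef intBytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
    NumericUtils.intToPrefixCodedBytes(intVal, 0, intBytes);

    BytesRef longBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG);
    NumericUtils.longToPrefixCodedBytes(longVal, 0, longBytes);
  }
}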
@@ -917,10 +917,11 @@ public class UpdateLog implements PluginInfoInitialized {
         reader = oldLog.getReverseReader();
 
         while (numUpdates < numRecordsToKeep) {
-          Object o = reader.next();
-          if (o==null) break;
+          Object o = null;
           try {
+            o = reader.next();
+            if (o==null) break;
+
             // should currently be a List<Oper,Ver,Doc/Id>
             List entry = (List)o;
 
@@ -19,10 +19,11 @@ package org.apache.solr.cloud;
 
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
 
 import java.util.ArrayList;
@@ -36,6 +37,8 @@ import java.util.Queue;
 import java.util.Set;
 
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.cloud.DistributedQueue.QueueEvent;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
@@ -43,11 +46,13 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.handler.component.ShardResponse;
 import org.easymock.Capture;
+import org.easymock.EasyMock;
 import org.easymock.IAnswer;
 import org.eclipse.jetty.util.BlockingArrayQueue;
 import org.junit.After;
@@ -71,12 +76,12 @@ public class OverseerCollectionProcessorTest extends SolrTestCaseJ4 {
   private OverseerCollectionProcessorToBeTested underTest;
 
   private Thread thread;
-  private Queue<byte[]> queue = new BlockingArrayQueue<byte[]>();
+  private Queue<QueueEvent> queue = new BlockingArrayQueue<QueueEvent>();
 
   private class OverseerCollectionProcessorToBeTested extends
       OverseerCollectionProcessor {
 
-    private boolean lastProcessMessageResult = true;
+    private SolrResponse lastProcessMessageResult;
 
     public OverseerCollectionProcessorToBeTested(ZkStateReader zkStateReader,
         String myId, ShardHandler shardHandler, String adminPath,
@@ -85,7 +90,7 @@ public class OverseerCollectionProcessorTest extends SolrTestCaseJ4 {
     }
 
     @Override
-    protected boolean processMessage(ZkNodeProps message, String operation) {
+    protected SolrResponse processMessage(ZkNodeProps message, String operation) {
       lastProcessMessageResult = super.processMessage(message, operation);
       return lastProcessMessageResult;
     }
@@ -147,11 +152,12 @@ public class OverseerCollectionProcessorTest extends SolrTestCaseJ4 {
       }
     }).anyTimes();
 
-    workQueueMock.remove();
+    workQueueMock.remove(anyObject(QueueEvent.class));
     expectLastCall().andAnswer(new IAnswer<Object>() {
       @Override
       public Object answer() throws Throwable {
-        return queue.poll();
+        queue.remove((QueueEvent)EasyMock.getCurrentArguments()[0]);
+        return null;
      }
    }).anyTimes();
 
@@ -273,7 +279,8 @@ public class OverseerCollectionProcessorTest extends SolrTestCaseJ4 {
           OverseerCollectionProcessor.MAX_SHARDS_PER_NODE,
           maxShardsPerNode.toString());
     }
-    queue.add(ZkStateReader.toJSON(props));
+    QueueEvent qe = new QueueEvent("id", ZkStateReader.toJSON(props), null);
+    queue.add(qe);
   }
 
   protected void verifySubmitCaptures(List<SubmitCapture> submitCaptures,
@@ -443,7 +450,9 @@ public class OverseerCollectionProcessorTest extends SolrTestCaseJ4 {
 
     waitForEmptyQueue(10000);
 
-    assertEquals(collectionExceptedToBeCreated, underTest.lastProcessMessageResult);
+    if (collectionExceptedToBeCreated) {
+      assertNotNull(underTest.lastProcessMessageResult.getResponse().toString(), underTest.lastProcessMessageResult);
+    }
     verify(shardHandlerMock);
 
     if (collectionExceptedToBeCreated) {
@ -25,7 +25,10 @@ import org.apache.solr.common.params.StatsParams;
|
|||||||
import org.apache.solr.common.params.TermsParams;
|
import org.apache.solr.common.params.TermsParams;
|
||||||
import org.apache.solr.common.util.DateUtil;
|
import org.apache.solr.common.util.DateUtil;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Collections;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
@ -44,6 +47,9 @@ public class SolrQuery extends ModifiableSolrParams
|
|||||||
return (this == asc) ? desc : asc;
|
return (this == asc) ? desc : asc;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Maintains a map of current sorts */
|
||||||
|
private List<SortClause> sortClauses;
|
||||||
|
|
||||||
public SolrQuery() {
|
public SolrQuery() {
|
||||||
super();
|
super();
|
||||||
@ -529,38 +535,230 @@ public class SolrQuery extends ModifiableSolrParams
|
|||||||
return this.get(HighlightParams.SIMPLE_POST, "");
|
return this.get(HighlightParams.SIMPLE_POST, "");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces the sort string with a single sort field.
|
||||||
|
* @deprecated Use {@link #setSort(SortClause)} instead, which is part
|
||||||
|
* of an api handling a wider range of sort specifications.
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
public SolrQuery setSortField(String field, ORDER order) {
|
public SolrQuery setSortField(String field, ORDER order) {
|
||||||
this.remove(CommonParams.SORT);
|
this.remove(CommonParams.SORT);
|
||||||
addValueToParam(CommonParams.SORT, toSortString(field, order));
|
addValueToParam(CommonParams.SORT, toSortString(field, order));
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a sort field to the end of the sort string.
|
||||||
|
* @deprecated Use {@link #addSort(SortClause)} instead, which is part
|
||||||
|
* of an api handling a wider range of sort specifications.
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
public SolrQuery addSortField(String field, ORDER order) {
|
public SolrQuery addSortField(String field, ORDER order) {
|
||||||
return addValueToParam(CommonParams.SORT, toSortString(field, order));
|
return addValueToParam(CommonParams.SORT, toSortString(field, order));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes a sort field to the end of the sort string.
|
||||||
|
* @deprecated Use {@link #removeSort(SortClause)} instead, which is part
|
||||||
|
* of an api handling a wider range of sort specifications.
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
public SolrQuery removeSortField(String field, ORDER order) {
|
public SolrQuery removeSortField(String field, ORDER order) {
|
||||||
String s = this.get(CommonParams.SORT);
|
String[] sorts = getSortFields();
|
||||||
String removeSort = toSortString(field, order);
|
if (sorts != null) {
|
||||||
if (s != null) {
|
String removeSort = toSortString(field, order);
|
||||||
String[] sorts = s.split(",");
|
String s = join(sorts, ",", removeSort);
|
||||||
s = join(sorts, ", ", removeSort);
|
|
||||||
if (s.length()==0) s=null;
|
if (s.length()==0) s=null;
|
||||||
this.set(CommonParams.SORT, s);
|
this.set(CommonParams.SORT, s);
|
||||||
}
|
}
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets an array of sort specifications.
|
||||||
|
* @deprecated Use {@link #getSorts()} instead, which is part
|
||||||
|
* of an api handling a wider range of sort specifications.
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
public String[] getSortFields() {
|
public String[] getSortFields() {
|
||||||
String s = getSortField();
|
String s = getSortField();
|
||||||
if (s==null) return null;
|
if (s==null) return null;
|
||||||
return s.split(",");
|
return s.trim().split(", *");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the raw sort field, as it will be sent to Solr.
|
||||||
|
* <p>
|
||||||
|
* The returned sort field will always contain a serialized version
|
||||||
|
* of the sort string built using {@link #setSort(SortClause)},
|
||||||
|
* {@link #addSort(SortClause)}, {@link #addOrUpdateSort(SortClause)},
|
||||||
|
* {@link #removeSort(SortClause)}, {@link #clearSorts()} and
|
||||||
|
* {@link #setSorts(List)}.
|
||||||
|
*/
|
||||||
public String getSortField() {
|
public String getSortField() {
|
||||||
return this.get(CommonParams.SORT);
|
return this.get(CommonParams.SORT);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clears current sort information.
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery clearSorts() {
|
||||||
|
sortClauses = null;
|
||||||
|
serializeSorts();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces the current sort information.
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery setSorts(List<SortClause> value) {
|
||||||
|
sortClauses = new ArrayList<SortClause>(value);
|
||||||
|
serializeSorts();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets an a list of current sort clauses.
|
||||||
|
*
|
||||||
|
* @return an immutable list of current sort clauses
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public List<SortClause> getSorts() {
|
||||||
|
if (sortClauses == null) return Collections.emptyList();
|
||||||
|
else return Collections.unmodifiableList(sortClauses);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces the current sort information with a single sort clause
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery setSort(String field, ORDER order) {
|
||||||
|
return setSort(new SortClause(field, order));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces the current sort information with a single sort clause
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery setSort(SortClause sortClause) {
|
||||||
|
clearSorts();
|
||||||
|
return addSort(sortClause);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a single sort clause to the end of the current sort information.
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery addSort(String field, ORDER order) {
|
||||||
|
return addSort(new SortClause(field, order));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a single sort clause to the end of the query.
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery addSort(SortClause sortClause) {
|
||||||
|
if (sortClauses == null) sortClauses = new ArrayList<SortClause>();
|
||||||
|
sortClauses.add(sortClause);
|
||||||
|
serializeSorts();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates or adds a single sort clause to the query.
|
||||||
|
* If the field is already used for sorting, the order
|
||||||
|
* of the existing field is modified; otherwise, it is
|
||||||
|
* added to the end.
|
||||||
|
* <p>
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery addOrUpdateSort(String field, ORDER order) {
|
||||||
|
return addOrUpdateSort(new SortClause(field, order));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates or adds a single sort field specification to the current sort
|
||||||
|
* information. If the sort field already exist in the sort information map,
|
||||||
|
* it's position is unchanged and the sort order is set; if it does not exist,
|
||||||
|
* it is appended at the end with the specified order..
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery addOrUpdateSort(SortClause sortClause) {
|
||||||
|
if (sortClauses != null) {
|
||||||
|
for (int index=0 ; index<sortClauses.size() ; index++) {
|
||||||
|
SortClause existing = sortClauses.get(index);
|
||||||
|
if (existing.getItem().equals(sortClause.getItem())) {
|
||||||
|
sortClauses.set(index, sortClause);
|
||||||
|
serializeSorts();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return addSort(sortClause);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes a single sort field from the current sort information.
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery removeSort(SortClause sortClause) {
|
||||||
|
return removeSort(sortClause.getItem());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes a single sort field from the current sort information.
|
||||||
|
*
|
||||||
|
* @return the modified SolrQuery object, for easy chaining
|
||||||
|
* @since 4.2
|
||||||
|
*/
|
||||||
|
public SolrQuery removeSort(String itemName) {
|
||||||
|
if (sortClauses != null) {
|
||||||
|
for (SortClause existing : sortClauses) {
|
||||||
|
if (existing.getItem().equals(itemName)) {
|
||||||
|
sortClauses.remove(existing);
|
||||||
|
if (sortClauses.isEmpty()) sortClauses = null;
|
||||||
|
serializeSorts();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void serializeSorts() {
|
||||||
|
if (sortClauses == null || sortClauses.isEmpty()) {
|
||||||
|
remove(CommonParams.SORT);
|
||||||
|
} else {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
for (SortClause sortClause : sortClauses) {
|
||||||
|
if (sb.length() > 0) sb.append(",");
|
||||||
|
sb.append(sortClause.getItem());
|
||||||
|
sb.append(" ");
|
||||||
|
sb.append(sortClause.getOrder());
|
||||||
|
}
|
||||||
|
set(CommonParams.SORT, sb.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public void setGetFieldStatistics( boolean v )
|
public void setGetFieldStatistics( boolean v )
|
||||||
{
|
{
|
||||||
this.set( StatsParams.STATS, v );
|
this.set( StatsParams.STATS, v );
|
||||||
@ -823,13 +1021,126 @@ public class SolrQuery extends ModifiableSolrParams
|
|||||||
private String join(String[] vals, String sep, String removeVal) {
|
private String join(String[] vals, String sep, String removeVal) {
|
||||||
StringBuilder sb = new StringBuilder();
|
StringBuilder sb = new StringBuilder();
|
||||||
for (int i=0; i<vals.length; i++) {
|
for (int i=0; i<vals.length; i++) {
|
||||||
if (removeVal==null || !vals[i].equals(removeVal)) {
|
if (!vals[i].equals(removeVal)) {
|
||||||
sb.append(vals[i]);
|
if (sb.length() > 0) {
|
||||||
if (i<vals.length-1) {
|
|
||||||
sb.append(sep);
|
sb.append(sep);
|
||||||
}
|
}
|
||||||
|
sb.append(vals[i]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return sb.toString().trim();
|
return sb.toString().trim();
|
||||||
}
|
}
|

+  /**
+   * A single sort clause, encapsulating what to sort and the sort order.
+   * <p>
+   * The item specified can be "anything sortable" by solr; some examples
+   * include a simple field name, the constant string {@code score}, and functions
+   * such as {@code sum(x_f, y_f)}.
+   * <p>
+   * A SortClause can be created through different mechanisms:
+   * <PRE><code>
+   * new SortClause("product", SolrQuery.ORDER.asc);
+   * new SortClause("product", "asc");
+   * SortClause.asc("product");
+   * SortClause.desc("product");
+   * </code></PRE>
+   */
+  public static class SortClause implements java.io.Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private final String item;
+    private final ORDER order;
+
+    /**
+     * Creates a SortClause based on item and order
+     * @param item item to sort on
+     * @param order direction to sort
+     */
+    public SortClause(String item, ORDER order) {
+      this.item = item;
+      this.order = order;
+    }
+
+    /**
+     * Creates a SortClause based on item and order
+     * @param item item to sort on
+     * @param order string value for direction to sort
+     */
+    public SortClause(String item, String order) {
+      this(item, ORDER.valueOf(order));
+    }
+
+    /**
+     * Creates a SortClause based on item and order
+     * @param item item to sort on
+     * @param order direction to sort
+     */
+    public static SortClause create(String item, ORDER order) {
+      return new SortClause(item, order);
+    }
+
+    /**
+     * Creates a SortClause based on item and order
+     * @param item item to sort on
+     * @param order string value for direction to sort
+     */
+    public static SortClause create(String item, String order) {
+      return new SortClause(item, ORDER.valueOf(order));
+    }
+
+    /**
+     * Creates an ascending SortClause for an item
+     * @param item item to sort on
+     */
+    public static SortClause asc(String item) {
+      return new SortClause(item, ORDER.asc);
+    }
+
+    /**
+     * Creates a descending SortClause for an item
+     * @param item item to sort on
+     */
+    public static SortClause desc(String item) {
+      return new SortClause(item, ORDER.desc);
+    }
+
+    /**
+     * Gets the item to sort, typically a function or a fieldname
+     * @return item to sort
+     */
+    public String getItem() {
+      return item;
+    }
+
+    /**
+     * Gets the order to sort
+     * @return order to sort
+     */
+    public ORDER getOrder() {
+      return order;
+    }
+
+    public boolean equals(Object other) {
+      if (this == other) return true;
+      if (!(other instanceof SortClause)) return false;
+      final SortClause that = (SortClause) other;
+      return this.getItem().equals(that.getItem()) && this.getOrder().equals(that.getOrder());
+    }
+
+    public int hashCode() {
+      return this.getItem().hashCode();
+    }
+
+    /**
+     * Gets a human readable description of the sort clause.
+     * <p>
+     * The returned string is not suitable for passing to Solr,
+     * but may be useful in debug output and the like.
+     * @return a description of the current sort clause
+     */
+    public String toString() {
+      return "[" + getClass().getSimpleName() + ": item=" + getItem() + "; order=" + getOrder() + "]";
+    }
+  }
 }
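Together with the new SolrQuery methods, SortClause enables the itemized sort API exercised by the tests further down. A small sketch, using only calls that appear in this patch; the field names are illustrative:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.SolrQuery.SortClause;

    public class SortClauseSketch {
      public static void main(String[] args) {
        SolrQuery q = new SolrQuery("dog");
        q.setSort(SortClause.asc("price"));           // replaces any existing sort
        q.addSort(SortClause.desc("date"));           // appends a clause
        q.addOrUpdateSort(SortClause.desc("price"));  // flips "price" in place
        System.out.println(q.get("sort"));            // prints: price desc,date desc
      }
    }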
@@ -17,19 +17,47 @@
 package org.apache.solr.client.solrj;

+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
 import java.io.Serializable;

+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.NamedList;

 /**
  *
  *
  * @since solr 1.3
  */
-public abstract class SolrResponse implements Serializable
-{
+public abstract class SolrResponse implements Serializable {
+
   public abstract long getElapsedTime();
-  public abstract void setResponse( NamedList<Object> rsp );
+
+  public abstract void setResponse(NamedList<Object> rsp);
+
   public abstract NamedList<Object> getResponse();
+
+  public static byte[] serializable(SolrResponse response) {
+    try {
+      ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+      ObjectOutputStream outputStream = new ObjectOutputStream(byteStream);
+      outputStream.writeObject(response);
+      return byteStream.toByteArray();
+    } catch (Exception e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, e);
+    }
+  }
+
+  public static SolrResponse deserialize(byte[] bytes) {
+    try {
+      ByteArrayInputStream byteStream = new ByteArrayInputStream(bytes);
+      ObjectInputStream inputStream = new ObjectInputStream(byteStream);
+      return (SolrResponse) inputStream.readObject();
+    } catch (Exception e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, e);
+    }
+  }
 }
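The two static helpers give SolrResponse a byte[] round trip over plain Java object serialization; note the serializer is named serializable (sic) as committed. A hedged fragment, assuming rsp is a concrete SolrResponse (for example a QueryResponse) obtained elsewhere in the program:

    // rsp is assumed to come from e.g. a SolrServer query elsewhere.
    byte[] wire = SolrResponse.serializable(rsp);        // ObjectOutputStream under the hood
    SolrResponse copy = SolrResponse.deserialize(wire);  // ObjectInputStream + cast
    NamedList<Object> payload = copy.getResponse();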
@@ -108,7 +108,13 @@ public class SolrDocument implements Map<String,Object>, Iterable<Map.Entry<Stri
   /**
    * This will add a field to the document. If fields already exist with this
    * name it will append value to the collection. If the value is a Collection,
    * each value will be added independently.
+   *
+   * The class type of value and the name parameter should match schema.xml.
+   * schema.xml can be found in conf directory under the solr home by default.
+   *
+   * @param name Name of the field, should match one of the field names defined under "fields" tag in schema.xml.
+   * @param value Value of the field, should be of same class type as defined by "type" attribute of the corresponding field in schema.xml.
    */
   @SuppressWarnings("unchecked")
   public void addField(String name, Object value)
@@ -63,9 +63,12 @@ public class SolrInputDocument implements Map<String,SolrInputField>, Iterable<S
   /**
    * Add a field with implied null value for boost.
    *
+   * The class type of value and the name parameter should match schema.xml.
+   * schema.xml can be found in conf directory under the solr home by default.
+   *
+   * @param name Name of the field, should match one of the field names defined under "fields" tag in schema.xml.
+   * @param value Value of the field, should be of same class type as defined by "type" attribute of the corresponding field in schema.xml.
    * @see #addField(String, Object, float)
-   * @param name name of the field to add
-   * @param value value of the field
    */
   public void addField(String name, Object value)
   {
|
|||||||
* field, with the new boost. If the value is a collection, then each of its
|
* field, with the new boost. If the value is a collection, then each of its
|
||||||
* values will be added to the field.
|
* values will be added to the field.
|
||||||
*
|
*
|
||||||
* @param name Name of the field to add
|
* The class type of value and the name parameter should match schema.xml.
|
||||||
* @param value Value of the field
|
* schema.xml can be found in conf directory under the solr home by default.
|
||||||
|
*
|
||||||
|
* @param name Name of the field, should match one of the field names defined under "fields" tag in schema.xml.
|
||||||
|
* @param value Value of the field, should be of same class type as defined by "type" attribute of the corresponding field in schema.xml.
|
||||||
* @param boost Boost value for the field
|
* @param boost Boost value for the field
|
||||||
*/
|
*/
|
||||||
public void addField(String name, Object value, float boost )
|
public void addField(String name, Object value, float boost )
|
||||||
|
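The expanded javadoc stresses that both the field name and the value's Java class must line up with schema.xml. A short sketch; the id/price/author fields and their types are assumptions about one particular schema, not something this patch defines:

    import org.apache.solr.common.SolrInputDocument;

    public class AddFieldSketch {
      public static void main(String[] args) {
        // Hypothetical schema: <field name="id" type="string"/>,
        // <field name="price" type="float"/>, <field name="author" type="text"/>.
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "book-1");               // String matches a string-typed field
        doc.addField("price", 12.5f);               // Float matches a float-typed field
        doc.addField("author", "A. Writer", 2.0f);  // same, with an explicit index-time boost
      }
    }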
@@ -18,11 +18,17 @@
 package org.apache.solr.client.solrj;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.SolrQuery.SortClause;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.FacetParams;

 import junit.framework.Assert;
 import org.apache.solr.common.util.DateUtil;

+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.Locale;
@@ -98,6 +104,169 @@ public class SolrQueryTest extends LuceneTestCase {
     // System.out.println(q);
   }

+  /*
+   * Verifies that the old (deprecated) sort methods
+   * allow mix-and-match between the raw field and
+   * the itemized apis.
+   */
+  public void testSortFieldRawStringAndMethods() {
+    SolrQuery q = new SolrQuery("dog");
+    q.set("sort", "price asc,date desc,qty desc");
+    q.removeSortField("date", SolrQuery.ORDER.desc);
+    Assert.assertEquals(2, q.getSortFields().length);
+    q.set("sort", "price asc, date desc, qty desc");
+    q.removeSortField("date", SolrQuery.ORDER.desc);
+    Assert.assertEquals(2, q.getSortFields().length);
+  }
+
+  /*
+   * Verifies that you can use removeSortField() twice, which
+   * did not work in 4.0
+   */
+  public void testSortFieldRemoveAfterRemove() {
+    SolrQuery q = new SolrQuery("dog");
+    q.addSortField("price", SolrQuery.ORDER.asc);
+    q.addSortField("date", SolrQuery.ORDER.desc);
+    q.addSortField("qty", SolrQuery.ORDER.desc);
+    q.removeSortField("date", SolrQuery.ORDER.desc);
+    Assert.assertEquals(2, q.getSortFields().length);
+    q.removeSortField("qty", SolrQuery.ORDER.desc);
+    Assert.assertEquals(1, q.getSortFields().length);
+  }
+
+  /*
+   * Verifies that you can remove the last sort field, which
+   * did not work in 4.0
+   */
+  public void testSortFieldRemoveLast() {
+    SolrQuery q = new SolrQuery("dog");
+    q.addSortField("date", SolrQuery.ORDER.desc);
+    q.addSortField("qty", SolrQuery.ORDER.desc);
+    q.removeSortField("qty", SolrQuery.ORDER.desc);
+    Assert.assertEquals("date desc", q.getSortField());
+  }
+
+  /*
+   * Verifies that getSorts() returns an immutable list,
+   * for both empty and non-empty situations
+   */
+  public void testGetSortImmutable() {
+    SolrQuery q = new SolrQuery("dog");
+
+    try {
+      q.getSorts().add(new SortClause("price", SolrQuery.ORDER.asc));
+      fail("The returned (empty) list should be immutable; add() should fail!");
+    } catch (UnsupportedOperationException uoe) {
+      // pass
+    }
+
+    q.addSort("qty", SolrQuery.ORDER.desc);
+    try {
+      q.getSorts().add(new SortClause("price", SolrQuery.ORDER.asc));
+      fail("The returned (non-empty) list should be immutable; add() should fail!");
+    } catch (UnsupportedOperationException uoe) {
+      // pass
+    }
+
+    // Should work even when setSorts passes an immutable list
+    q.setSorts(Arrays.asList(new SortClause("price", SolrQuery.ORDER.asc)));
+    q.addSort(new SortClause("price", SolrQuery.ORDER.asc));
+  }
+
+  public void testSortClause() {
+    new SolrQuery.SortClause("rating", SolrQuery.ORDER.desc);
+    new SolrQuery.SortClause("rating", SolrQuery.ORDER.valueOf("desc"));
+    new SolrQuery.SortClause("rating", SolrQuery.ORDER.valueOf("desc"));
+    SolrQuery.SortClause.create("rating", SolrQuery.ORDER.desc);
+    SolrQuery.SortClause.create("rating", SolrQuery.ORDER.desc);
+    SolrQuery.SortClause.create("rating", SolrQuery.ORDER.desc);
+
+    SolrQuery.SortClause sc1a = SolrQuery.SortClause.asc("sc1");
+    SolrQuery.SortClause sc1b = SolrQuery.SortClause.asc("sc1");
+    Assert.assertEquals(sc1a, sc1b);
+    Assert.assertEquals(sc1a.hashCode(), sc1b.hashCode());
+
+    SolrQuery.SortClause sc2a = SolrQuery.SortClause.asc("sc2");
+    SolrQuery.SortClause sc2b = SolrQuery.SortClause.desc("sc2");
+    Assert.assertFalse(sc2a.equals(sc2b));
+
+    SolrQuery.SortClause sc3a = SolrQuery.SortClause.asc("sc2");
+    SolrQuery.SortClause sc3b = SolrQuery.SortClause.asc("not sc2");
+    Assert.assertFalse(sc3a.equals(sc3b));
+  }
+
+  /*
+   * Verifies the symbolic sort operations
+   */
+  public void testSort() throws IOException {
+
+    SolrQuery q = new SolrQuery("dog");
+
+    // Simple adds
+    q.addSort("price", SolrQuery.ORDER.asc);
+    q.addSort("date", SolrQuery.ORDER.desc);
+    q.addSort("qty", SolrQuery.ORDER.desc);
+    Assert.assertEquals(3, q.getSorts().size());
+    Assert.assertEquals("price asc,date desc,qty desc", q.get(CommonParams.SORT));
+
+    // Remove one (middle)
+    q.removeSort("date");
+    Assert.assertEquals(2, q.getSorts().size());
+    Assert.assertEquals("price asc,qty desc", q.get(CommonParams.SORT));
+
+    // Remove remaining (last, first)
+    q.removeSort("price");
+    q.removeSort("qty");
+    Assert.assertTrue(q.getSorts().isEmpty());
+    Assert.assertNull(q.get(CommonParams.SORT));
+
+    // Clear sort
+    q.addSort("price", SolrQuery.ORDER.asc);
+    q.clearSorts();
+    Assert.assertTrue(q.getSorts().isEmpty());
+    Assert.assertNull(q.get(CommonParams.SORT));
+
+    // Add vs update
+    q.clearSorts();
+    q.addSort("1", SolrQuery.ORDER.asc);
+    q.addSort("2", SolrQuery.ORDER.asc);
+    q.addSort("3", SolrQuery.ORDER.asc);
+    q.addOrUpdateSort("2", SolrQuery.ORDER.desc);
+    q.addOrUpdateSort("4", SolrQuery.ORDER.desc);
+    Assert.assertEquals("1 asc,2 desc,3 asc,4 desc", q.get(CommonParams.SORT));
+
+    // Using SortClause
+    q.clearSorts();
+    q.addSort(new SortClause("1", SolrQuery.ORDER.asc));
+    q.addSort(new SortClause("2", SolrQuery.ORDER.asc));
+    q.addSort(new SortClause("3", SolrQuery.ORDER.asc));
+    q.addOrUpdateSort(SortClause.desc("2"));
+    q.addOrUpdateSort(SortClause.asc("4"));
+    Assert.assertEquals("1 asc,2 desc,3 asc,4 asc", q.get(CommonParams.SORT));
+
+    q.setSort(SortClause.asc("A"));
+    q.addSort(SortClause.asc("B"));
+    q.addSort(SortClause.asc("C"));
+    q.addSort(SortClause.asc("D"));
+    Assert.assertEquals("A asc,B asc,C asc,D asc", q.get(CommonParams.SORT));
+
+    // removeSort should ignore the ORDER
+    q.setSort(SortClause.asc("A"));
+    q.addSort(SortClause.asc("B"));
+    q.addSort(SortClause.asc("C"));
+    q.addSort(SortClause.asc("D"));
+    q.removeSort("A");
+    q.removeSort(SortClause.asc("C"));
+    q.removeSort(SortClause.desc("B"));
+    Assert.assertEquals("D asc", q.get(CommonParams.SORT));
+
+    // Verify that a query containing a SortClause is serializable
+    q.clearSorts();
+    q.addSort("1", SolrQuery.ORDER.asc);
+    ObjectOutputStream out = new ObjectOutputStream(new ByteArrayOutputStream());
+    out.writeObject(q);
+    out.close();
+  }
+
   public void testFacetSort() {
     SolrQuery q = new SolrQuery("dog");
     assertEquals("count", q.getFacetSortString());
@@ -390,7 +390,8 @@
   color: #333;
 }

-#content #schema-browser #data #field .topterms-holder
+#content #schema-browser #data #field .topterms-holder,
+#content #schema-browser #data #field .histogram-holder
 {
   border-left: 1px solid #f0f0f0;
   display: none;
@@ -448,12 +449,12 @@
   margin-bottom: 5px;
 }

+/* possible overwrite with inline style */
 #content #schema-browser .topterms-holder li p
 {
   background-color: #999;
   color: #fff;
   float: left;
-  width: 25px;
 }

 #content #schema-browser .topterms-holder li p span
@@ -463,6 +464,7 @@
   text-align: right;
 }

+/* possible overwrite with inline style */
 #content #schema-browser .topterms-holder li ul
 {
   margin-left: 30px;
@@ -492,51 +494,65 @@
   background-color: #c0c0c0;
 }

-#content #schema-browser #data #field .histogram-holder
-{
-  border-left: 1px solid #f0f0f0;
-  display: none;
-  float: left;
-  padding-left: 20px;
-  padding-right: 20px;
-}
-
-#content #schema-browser #data #field .histogram-holder .histogram
-{
-  height: 150px;
-}
-
-#content #schema-browser #data #field .histogram-holder .histogram.single canvas
-{
-  background-color: #c0c0c0;
-}
-
-#content #schema-browser #data #field .histogram-holder dt,
-#content #schema-browser #data #field .histogram-holder dd
-{
-  float: left;
-  font-size: 10px;
-  text-align: center;
-}
-
-#content #schema-browser #data #field .histogram-holder span
-{
-  background-color: #f0f0f0;
-  display: block;
-  width: 20px;
-}
-
-#content #schema-browser #data #field .histogram-holder dt
-{
-  padding-right: 1px;
-}
-
-#content #schema-browser #data #field .histogram-holder dd
-{
-  padding-right: 3px;
-}
-
-#content #schema-browser #data #field .histogram-holder dd span
-{
-  width: 25px;
-}
+#content #schema-browser #data #field .histogram-holder ul
+{
+  margin-left: 25px;
+}
+
+#content #schema-browser #data #field .histogram-holder li
+{
+  margin-bottom: 2px;
+  position: relative;
+  width: 150px;
+}
+
+#content #schema-browser #data #field .histogram-holder li.odd
+{
+  background-color: #f0f0f0;
+}
+
+#content #schema-browser #data #field .histogram-holder li dl,
+#content #schema-browser #data #field .histogram-holder li dt
+{
+  padding-top: 1px;
+  padding-bottom: 1px;
+}
+
+#content #schema-browser #data #field .histogram-holder li dl
+{
+  background-color: #c0c0c0;
+  min-width: 1px;
+}
+
+#content #schema-browser #data #field .histogram-holder li dt
+{
+  color: #a0a0a0;
+  position: absolute;
+  overflow: hidden;
+  left: -25px;
+  top: 0px;
+}
+
+#content #schema-browser #data #field .histogram-holder li dt span
+{
+  display: block;
+  padding-right: 4px;
+  text-align: right;
+}
+
+#content #schema-browser #data #field .histogram-holder li dd
+{
+  clear: left;
+  float: left;
+  margin-left: 2px;
+}
+
+#content #schema-browser #data #field .histogram-holder li:hover dl
+{
+  background-color: #b0b0b0;
+}
+
+#content #schema-browser #data #field .histogram-holder li:hover dt
+{
+  color: #333;
+}
(File diff suppressed because it is too large.)
@@ -25,7 +25,6 @@ require
   'lib/order!lib/jquery.form',
   'lib/order!lib/jquery.jstree',
   'lib/order!lib/jquery.sammy',
-  'lib/order!lib/jquery.sparkline',
   'lib/order!lib/jquery.timeago',
   'lib/order!lib/jquery.blockUI',
   'lib/order!lib/highlight',
@@ -468,6 +468,11 @@ var solr_admin = function( app_config )
     return json_str;
   };

+  this.format_number = function format_number( number )
+  {
+    return ( number || 0 ).toString().replace( /\B(?=(\d{3})+(?!\d))/g, ' ' );
+  };
+
 };

 var app = new solr_admin( app_config );
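The shared format_number helper groups digits in threes by inserting a space at every non-boundary position followed by an exact multiple of three digits; the dataimport page previously carried its own copy using an apostrophe separator, removed in the next hunk. The same lookahead works in Java's regex dialect; a hypothetical analogue for illustration, not part of the patch:

    // Hypothetical Java analogue of the admin UI's format_number helper.
    public class FormatNumberSketch {
      static String formatNumber(long number) {
        // \B(?=(\d{3})+(?!\d)) matches each intra-number position that is
        // followed by one or more complete groups of three digits.
        return Long.toString(number).replaceAll("\\B(?=(\\d{3})+(?!\\d))", " ");
      }
      public static void main(String[] args) {
        System.out.println(formatNumber(1234567));  // prints: 1 234 567
      }
    }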
@@ -440,11 +440,6 @@ sammy.get
   var messages_count = 0;
   for( var key in messages ) { messages_count++; }

-  var format_number = function format_number( number )
-  {
-    return ( number || 0 ).toString().replace( /\B(?=(\d{3})+(?!\d))/g, '\'' );
-  };
-
   function dataimport_compute_details( response, details_element, elapsed_seconds )
   {
     details_element
@@ -464,10 +459,10 @@ sammy.get
   {
     var value = parseInt( response.statusMessages[document_config[key]], 10 );

-    var detail = '<abbr title="' + document_config[key].esc() + '">' + key.esc() + '</abbr>: ' + format_number( value ).esc();
+    var detail = '<abbr title="' + document_config[key].esc() + '">' + key.esc() + '</abbr>: ' + app.format_number( value ).esc();
     if( elapsed_seconds && 'skipped' !== key.toLowerCase() )
     {
-      detail += ' <span>(' + format_number( Math.round( value / elapsed_seconds ) ).esc() + '/s)</span>'
+      detail += ' <span>(' + app.format_number( Math.round( value / elapsed_seconds ) ).esc() + '/s)</span>'
     }

     document_details.push( detail );
@@ -523,7 +518,7 @@ sammy.get
   /\d{4,}/g,
   function( match, position, string )
   {
-    return format_number( parseInt( match, 10 ) );
+    return app.format_number( parseInt( match, 10 ) );
   }
 );
@@ -66,9 +66,7 @@ var load_terminfo = function( trigger_element, core_basepath, field, data_elemen
   },
   success : function( response, text_status, xhr )
   {
-    $( 'span', trigger_element )
-      .removeClass( 'loader' );
+    var additional_styles = [];

     var field_data = response.fields[field];

     if( !field_data || !( field_data.topTerms && field_data.histogram ) )
@@ -79,6 +77,11 @@ var load_terminfo = function( trigger_element, core_basepath, field, data_elemen
       return false;
     }

+    var get_width = function get_width()
+    {
+      return $( this ).width();
+    }
+
     var topterms_holder_element = $( '.topterms-holder', data_element );
     var histogram_holder_element = $( '.histogram-holder', data_element );

@@ -111,7 +114,7 @@ var load_terminfo = function( trigger_element, core_basepath, field, data_elemen

         topterms_frq_last = topterms[i+1];
         topterms_content += '<li class="clearfix">'
-                         + '<p><span>' + topterms_frq_last.esc() + '</span></p>' + "\n"
+                         + '<p><span>' + app.format_number( topterms_frq_last ) + '</span></p>' + "\n"
                          + '<ul>' + "\n";
       }

@@ -129,6 +132,13 @@ var load_terminfo = function( trigger_element, core_basepath, field, data_elemen
       topterms_table_element
         .html( topterms_content );

+      var max_width = 10 + Math.max.apply( Math, $( 'p', topterms_table_element ).map( get_width ).get() );
+      additional_styles.push
+      (
+        topterms_table_element.selector + ' p { width: ' + max_width + 'px !important; }' + "\n" +
+        topterms_table_element.selector + ' ul { margin-left: ' + ( max_width + 5 ) + 'px !important; }'
+      );
+
       topterms_count_element
         .val( topterms_count );

@@ -152,52 +162,57 @@ var load_terminfo = function( trigger_element, core_basepath, field, data_elemen
       histogram_holder_element
         .show();

-      var histogram_element = $( '.histogram', histogram_holder_element );
-
       var histogram_values = luke_array_to_hash( field_data.histogram );
-      var histogram_legend = '';
+      var histogram_entries = [];

-      histogram_holder_element
-        .show();
+      var histogram_max = null;
+      for( var key in histogram_values )
+      {
+        histogram_max = Math.max( histogram_max, histogram_values[key] );
+      }

       for( var key in histogram_values )
       {
-        histogram_legend += '<dt><span>' + key + '</span></dt>' + "\n" +
-                            '<dd title="' + key + '">' +
-                            '<span>' + histogram_values[key] + '</span>' +
-                            '</dd>' + "\n";
+        histogram_entries.push
+        (
+          '<li>' + "\n" +
+          '  <dl class="clearfix" style="width: ' + ( ( histogram_values[key] / histogram_max ) * 100 ) + '%;">' + "\n" +
+          '    <dt><span>' + app.format_number( key ) + '</span></dt>' + "\n" +
+          '    <dd><span>' + app.format_number( histogram_values[key] ) + '</span></dd>' + "\n" +
+          '  </dl>' + "\n" +
+          '</li>'
+        );
       }

-      $( 'dl', histogram_holder_element )
-        .html( histogram_legend );
-
-      var histogram_values = luke_array_to_struct( field_data.histogram ).values;
-
-      histogram_element
-        .sparkline
-        (
-          histogram_values,
-          {
-            type : 'bar',
-            barColor : '#c0c0c0',
-            zeroColor : '#000000',
-            height : histogram_element.height(),
-            barWidth : 46,
-            barSpacing : 3
-          }
-        );
-
-      1 === histogram_values.length
-        ? histogram_element.addClass( 'single' )
-        : histogram_element.removeClass( 'single' );
+      $( 'ul', histogram_holder_element )
+        .html( histogram_entries.join( "\n" ) );
+
+      $( 'ul li:even', histogram_holder_element )
+        .addClass( 'odd' );
+
+      var max_width = 10 + Math.max.apply( Math, $( 'dt', histogram_holder_element ).map( get_width ).get() );
+      additional_styles.push
+      (
+        histogram_holder_element.selector + ' ul { margin-left: ' + max_width + 'px !important; }' + "\n" +
+        histogram_holder_element.selector + ' li dt { left: ' + ( max_width * -1 ) + 'px !important; width: ' + max_width + 'px !important; }'
+      );
     }

+    if( additional_styles )
+    {
+      terminfo_element
+        .prepend( '<style type="text/css">' + additional_styles.join( "\n" ) + '</style>' );
+    }
   },
   error : function( xhr, text_status, error_thrown)
   {
+    terminfo_element
+      .addClass( 'disabled' );
   },
   complete : function( xhr, text_status )
   {
+    $( 'span', trigger_element )
+      .removeClass( 'loader' );
   }
 }
 );
@@ -158,12 +158,8 @@ limitations under the License.

 <p class="head">Histogram:</p>

-<div class="histogram"></div>
-
-<dl class="clearfix">
-
-</dl>
+<ul></ul>

 </div>

 </div>