Terms API: Support numbers/dates, closes #78.

kimchy 2010-03-22 02:34:42 +02:00
parent bc03d89c00
commit 93e025325e
19 changed files with 660 additions and 349 deletions
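What this buys in practice: terms of numeric and date fields now come back typed (TermFreq.term() returns an Object such as an Integer, Long or formatted date) instead of Lucene's prefix-coded term text, with TermFreq.termAsString() available for display. A minimal usage sketch follows; it is hedged: it assumes the Requests.termsRequest(...) helper and the Client.terms(...)/fields(...) calls exercised by the updated TermsActionTests below, and the index/field names are illustrative only.

    // Sketch only -- "test" and "int" are hypothetical index/field names.
    TermsResponse response = client.terms(
            termsRequest("test").fields("int").sortType(TermsRequest.SortType.TERM)).actionGet();
    for (TermFreq termFreq : response.field("int").termsFreqs()) {
        // term() is now an Object (e.g. Integer), no longer a prefix-coded string
        System.out.println(termFreq.termAsString() + " -> " + termFreq.docFreq());
    }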

FieldTermsFreq.java

@@ -41,7 +41,7 @@ public class FieldTermsFreq implements Streamable, Iterable<TermFreq> {
     private TermFreq[] termsFreqs;
-    private transient ExtTObjectIntHasMap<String> termsFreqMap;
+    private transient ExtTObjectIntHasMap<Object> termsFreqMap;
     private FieldTermsFreq() {
@@ -69,15 +69,16 @@ public class FieldTermsFreq implements Streamable, Iterable<TermFreq> {
     /**
      * Returns the document frequency of a term, <tt>-1</tt> if the term does not exists.
      */
-    public int docFreq(String term) {
+    public int docFreq(Object term) {
+        // we use "toString" on the term so we get hits when we the termValue is Long, and we lookup with int
         if (termsFreqMap == null) {
-            ExtTObjectIntHasMap<String> termsFreqMap = new ExtTObjectIntHasMap<String>().defaultReturnValue(-1);
+            ExtTObjectIntHasMap<Object> termsFreqMap = new ExtTObjectIntHasMap<Object>().defaultReturnValue(-1);
             for (TermFreq termFreq : termsFreqs) {
-                termsFreqMap.put(termFreq.term(), termFreq.docFreq());
+                termsFreqMap.put(termFreq.term().toString(), termFreq.docFreq());
             }
             this.termsFreqMap = termsFreqMap;
         }
-        return termsFreqMap.get(term);
+        return termsFreqMap.get(term.toString());
     }
     @Override public Iterator<TermFreq> iterator() {

ShardTermsRequest.java

@@ -46,8 +46,6 @@ class ShardTermsRequest extends BroadcastShardOperationRequest {
     private int size = 10;
-    private boolean convert = true;
     private TermsRequest.SortType sortType;
     private boolean exact = false;
@@ -65,7 +63,6 @@ class ShardTermsRequest extends BroadcastShardOperationRequest {
         this.prefix = request.prefix();
         this.regexp = request.regexp();
         this.size = request.size();
-        this.convert = request.convert();
         this.sortType = request.sortType();
         this.exact = request.exact();
     }
@@ -102,10 +99,6 @@ class ShardTermsRequest extends BroadcastShardOperationRequest {
         return size;
     }
-    public boolean convert() {
-        return convert;
-    }
     public TermsRequest.SortType sortType() {
         return sortType;
     }
@@ -135,7 +128,6 @@ class ShardTermsRequest extends BroadcastShardOperationRequest {
             regexp = in.readUTF();
         }
         size = in.readVInt();
-        convert = in.readBoolean();
         sortType = TermsRequest.SortType.fromValue(in.readByte());
         exact = in.readBoolean();
     }
@@ -173,7 +165,6 @@ class ShardTermsRequest extends BroadcastShardOperationRequest {
             out.writeUTF(regexp);
         }
         out.writeVInt(size);
-        out.writeBoolean(convert);
         out.writeByte(sortType.value());
         out.writeBoolean(exact);
     }

ShardTermsResponse.java

@@ -24,6 +24,7 @@ import org.elasticsearch.util.gnu.trove.TObjectIntHashMap;
 import org.elasticsearch.util.gnu.trove.TObjectIntIterator;
 import org.elasticsearch.util.io.stream.StreamInput;
 import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.lucene.Lucene;
 import java.io.IOException;
 import java.util.HashMap;
@@ -34,7 +35,7 @@ import java.util.Map;
  */
 class ShardTermsResponse extends BroadcastShardOperationResponse {
-    private Map<String, TObjectIntHashMap<String>> fieldsTermsFreqs = new HashMap<String, TObjectIntHashMap<String>>();
+    private Map<String, TObjectIntHashMap<Object>> fieldsTermsFreqs = new HashMap<String, TObjectIntHashMap<Object>>();
     private int numDocs;
@@ -64,11 +65,11 @@ class ShardTermsResponse extends BroadcastShardOperationResponse {
         return this.numDeletedDocs;
     }
-    void put(String fieldName, TObjectIntHashMap<String> termsFreqs) {
+    void put(String fieldName, TObjectIntHashMap<Object> termsFreqs) {
         fieldsTermsFreqs.put(fieldName, termsFreqs);
     }
-    Map<String, TObjectIntHashMap<String>> fieldsTermsFreqs() {
+    Map<String, TObjectIntHashMap<Object>> fieldsTermsFreqs() {
         return fieldsTermsFreqs;
     }
@@ -81,10 +82,10 @@ class ShardTermsResponse extends BroadcastShardOperationResponse {
         for (int i = 0; i < size; i++) {
             String fieldName = in.readUTF();
-            TObjectIntHashMap<String> termsFreq = new TObjectIntHashMap<String>();
+            TObjectIntHashMap<Object> termsFreq = new TObjectIntHashMap<Object>();
             int size1 = in.readVInt();
             for (int j = 0; j < size1; j++) {
-                termsFreq.put(in.readUTF(), in.readVInt());
+                termsFreq.put(Lucene.readFieldValue(in), in.readVInt());
             }
             fieldsTermsFreqs.put(fieldName, termsFreq);
@@ -97,12 +98,12 @@ class ShardTermsResponse extends BroadcastShardOperationResponse {
         out.writeVInt(maxDoc);
         out.writeVInt(numDeletedDocs);
         out.writeVInt(fieldsTermsFreqs.size());
-        for (Map.Entry<String, TObjectIntHashMap<String>> entry : fieldsTermsFreqs.entrySet()) {
+        for (Map.Entry<String, TObjectIntHashMap<Object>> entry : fieldsTermsFreqs.entrySet()) {
             out.writeUTF(entry.getKey());
             out.writeVInt(entry.getValue().size());
-            for (TObjectIntIterator<String> it = entry.getValue().iterator(); it.hasNext();) {
+            for (TObjectIntIterator<Object> it = entry.getValue().iterator(); it.hasNext();) {
                 it.advance();
-                out.writeUTF(it.key());
+                Lucene.writeFieldValue(out, it.key());
                 out.writeVInt(it.value());
             }
         }

TermFreq.java

@@ -22,6 +22,7 @@ package org.elasticsearch.action.terms;
 import org.elasticsearch.util.io.stream.StreamInput;
 import org.elasticsearch.util.io.stream.StreamOutput;
 import org.elasticsearch.util.io.stream.Streamable;
+import org.elasticsearch.util.lucene.Lucene;
 import java.io.IOException;
 import java.util.Comparator;
@@ -40,7 +41,7 @@ public class TermFreq implements Streamable {
         @Override public int compare(TermFreq o1, TermFreq o2) {
             int i = o2.docFreq() - o1.docFreq();
             if (i == 0) {
-                i = o1.term().compareTo(o2.term());
+                i = ((Comparable) o1.term()).compareTo(o2.term());
             }
             return i;
         }
@@ -51,7 +52,7 @@ public class TermFreq implements Streamable {
     */
    private static final Comparator<TermFreq> termComparator = new Comparator<TermFreq>() {
        @Override public int compare(TermFreq o1, TermFreq o2) {
-            int i = o1.term().compareTo(o2.term());
+            int i = ((Comparable) o1.term()).compareTo(o2.term());
            if (i == 0) {
                i = o1.docFreq() - o2.docFreq();
            }
@@ -73,7 +74,7 @@ public class TermFreq implements Streamable {
        return termComparator;
    }
-    private String term;
+    private Object term;
    private int docFreq;
@@ -87,7 +88,7 @@ public class TermFreq implements Streamable {
     * @param term The term
     * @param docFreq The document frequency
     */
-    TermFreq(String term, int docFreq) {
+    TermFreq(Object term, int docFreq) {
        this.term = term;
        this.docFreq = docFreq;
    }
@@ -95,10 +96,14 @@ public class TermFreq implements Streamable {
    /**
     * The term.
     */
-    public String term() {
+    public Object term() {
        return term;
    }
+    public String termAsString() {
+        return term.toString();
+    }
    /**
     * The document frequency of the term (in how many documents this term exists).
     */
@@ -113,12 +118,12 @@ public class TermFreq implements Streamable {
    }
    @Override public void readFrom(StreamInput in) throws IOException {
-        term = in.readUTF();
+        term = Lucene.readFieldValue(in);
        docFreq = in.readVInt();
    }
    @Override public void writeTo(StreamOutput out) throws IOException {
-        out.writeUTF(term);
+        Lucene.writeFieldValue(out, term);
        out.writeVInt(docFreq);
    }
 }

TermsRequest.java

@@ -179,13 +179,13 @@ public class TermsRequest extends BroadcastOperationRequest {
      * The lower bound (lex) term from which the iteration will start. Defaults to start from the
      * first.
      */
-    public TermsRequest from(String from) {
-        this.from = from;
+    public TermsRequest from(Object from) {
+        this.from = from.toString();
         return this;
     }
     /**
-     * Should the first from (if set using {@link #from(String)} be inclusive or not. Defaults
+     * Should the first from (if set using {@link #from(Object)} be inclusive or not. Defaults
      * to <tt>false</tt> (not inclusive / exclusive).
      */
     public boolean fromInclusive() {
@@ -193,7 +193,7 @@ public class TermsRequest extends BroadcastOperationRequest {
     }
     /**
-     * Should the first from (if set using {@link #from(String)} be inclusive or not. Defaults
+     * Should the first from (if set using {@link #from(Object)} be inclusive or not. Defaults
      * to <tt>false</tt> (not inclusive / exclusive).
      */
     public TermsRequest fromInclusive(boolean fromInclusive) {
@@ -211,13 +211,13 @@ public class TermsRequest extends BroadcastOperationRequest {
     /**
      * The upper bound (lex) term to which the iteration will end. Defaults to unbound (<tt>null</tt>).
      */
-    public TermsRequest to(String to) {
-        this.to = to;
+    public TermsRequest to(Object to) {
+        this.to = to.toString();
         return this;
     }
     /**
-     * Should the last to (if set using {@link #to(String)} be inclusive or not. Defaults to
+     * Should the last to (if set using {@link #to(Object)} be inclusive or not. Defaults to
      * <tt>true</tt>.
      */
     public boolean toInclusive() {
@@ -225,7 +225,7 @@ public class TermsRequest extends BroadcastOperationRequest {
     }
     /**
-     * Should the last to (if set using {@link #to(String)} be inclusive or not. Defaults to
+     * Should the last to (if set using {@link #to(Object)} be inclusive or not. Defaults to
      * <tt>true</tt>.
     */
     public TermsRequest toInclusive(boolean toInclusive) {
@@ -309,23 +309,6 @@ public class TermsRequest extends BroadcastOperationRequest {
         return this;
     }
-    /**
-     * Should an attempt be made to convert the {@link #to(String)} and {@link #from(String)}.
-     * Defaults to <tt>true</tt>.
-     */
-    public boolean convert() {
-        return convert;
-    }
-    /**
-     * Should an attempt be made to convert the {@link #to(String)} and {@link #from(String)}.
-     * Defaults to <tt>true</tt>.
-     */
-    public TermsRequest convert(boolean convert) {
-        this.convert = convert;
-        return this;
-    }
     /**
      * The type of sorting for term / doc freq. Can either sort on term (lex) or doc frequency. Defaults to
      * {@link TermsRequest.SortType#TERM}.

TransportTermsAction.java

@@ -19,10 +19,12 @@
 package org.elasticsearch.action.terms;
+import com.google.common.collect.Maps;
 import com.google.inject.Inject;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.index.TermEnum;
+import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.StringHelper;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.action.ShardOperationFailedException;
@@ -42,6 +44,7 @@ import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.util.BoundedTreeSet;
+import org.elasticsearch.util.Nullable;
 import org.elasticsearch.util.gnu.trove.TObjectIntHashMap;
 import org.elasticsearch.util.gnu.trove.TObjectIntIterator;
 import org.elasticsearch.util.settings.Settings;
@@ -70,7 +73,7 @@ public class TransportTermsAction extends TransportBroadcastOperationAction<Term
         long maxDoc = 0;
         long numDeletedDocs = 0;
         List<ShardOperationFailedException> shardFailures = null;
-        ShardTermsResponse aggregator = null;
+        Map<String, TObjectIntHashMap<Object>> aggregator = Maps.newHashMap();
         for (int i = 0; i < shardsResponses.length(); i++) {
             Object shardResponse = shardsResponses.get(i);
             if (shardResponse == null) {
@@ -83,20 +86,27 @@
                 shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
             } else {
                 ShardTermsResponse shardTermsResponse = (ShardTermsResponse) shardResponse;
-                if (aggregator == null) {
-                    aggregator = shardTermsResponse;
-                } else {
-                    for (Map.Entry<String, TObjectIntHashMap<String>> entry : shardTermsResponse.fieldsTermsFreqs().entrySet()) {
-                        String fieldName = entry.getKey();
-                        TObjectIntHashMap<String> termsFreqs = aggregator.fieldsTermsFreqs().get(fieldName);
-                        if (termsFreqs == null) {
-                            termsFreqs = new TObjectIntHashMap<String>();
-                            aggregator.put(fieldName, termsFreqs);
-                        }
-                        for (TObjectIntIterator<String> it = entry.getValue().iterator(); it.hasNext();) {
-                            it.advance();
-                            termsFreqs.adjustOrPutValue(it.key(), it.value(), it.value());
-                        }
-                    }
-                }
+                IndexService indexService = indicesService.indexServiceSafe(shardTermsResponse.index());
+                for (Map.Entry<String, TObjectIntHashMap<Object>> entry : shardTermsResponse.fieldsTermsFreqs().entrySet()) {
+                    String fieldName = entry.getKey();
+                    FieldMapper fieldMapper = indexService.mapperService().smartNameFieldMapper(fieldName);
+                    TObjectIntHashMap<Object> termsFreqs = aggregator.get(fieldName);
+                    if (termsFreqs == null) {
+                        termsFreqs = new TObjectIntHashMap<Object>();
+                        aggregator.put(fieldName, termsFreqs);
+                    }
+                    for (TObjectIntIterator<Object> it = entry.getValue().iterator(); it.hasNext();) {
+                        it.advance();
+                        Object termValue = it.key();
+                        int freq = it.value();
+                        if (fieldMapper != null) {
+                            termValue = fieldMapper.valueForSearch(termValue);
+                        }
+                        termsFreqs.adjustOrPutValue(termValue, freq, freq);
+                    }
+                }
                 numDocs += shardTermsResponse.numDocs();
@@ -108,7 +118,7 @@ public class TransportTermsAction extends TransportBroadcastOperationAction<Term
         Map<String, NavigableSet<TermFreq>> fieldTermsFreqs = new HashMap<String, NavigableSet<TermFreq>>();
         if (aggregator != null) {
-            for (Map.Entry<String, TObjectIntHashMap<String>> entry : aggregator.fieldsTermsFreqs().entrySet()) {
+            for (Map.Entry<String, TObjectIntHashMap<Object>> entry : aggregator.entrySet()) {
                 String fieldName = entry.getKey();
                 NavigableSet<TermFreq> sortedFreqs = fieldTermsFreqs.get(fieldName);
                 if (sortedFreqs == null) {
@@ -116,7 +126,7 @@ public class TransportTermsAction extends TransportBroadcastOperationAction<Term
                     sortedFreqs = new BoundedTreeSet<TermFreq>(comparator, request.size());
                     fieldTermsFreqs.put(fieldName, sortedFreqs);
                 }
-                for (TObjectIntIterator<String> it = entry.getValue().iterator(); it.hasNext();) {
+                for (TObjectIntIterator<Object> it = entry.getValue().iterator(); it.hasNext();) {
                     it.advance();
                     if (it.value() >= request.minFreq() && it.value() <= request.maxFreq()) {
                         sortedFreqs.add(new TermFreq(it.key(), it.value()));
@@ -148,8 +158,6 @@ public class TransportTermsAction extends TransportBroadcastOperationAction<Term
             regexpPattern = Pattern.compile(request.regexp(), Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
         }
         for (String fieldName : request.fields()) {
-            TObjectIntHashMap<String> termsFreqs = new TObjectIntHashMap<String>();
             FieldMapper fieldMapper = indexService.mapperService().smartNameFieldMapper(fieldName);
             String indexFieldName = fieldName;
             if (fieldMapper != null) {
@@ -157,165 +165,19 @@ public class TransportTermsAction extends TransportBroadcastOperationAction<Term
             }
             indexFieldName = StringHelper.intern(indexFieldName);
-            // setup the to and from
-            String from = request.from();
-            if (from == null) {
-                from = request.prefix();
-            } else {
-                if (request.convert()) {
-                    if (fieldMapper != null) {
-                        from = fieldMapper.indexedValue(from);
-                    }
-                }
-            }
-            if (from == null) {
-                from = "";
-            }
-            Term fromTerm = new Term(indexFieldName, from);
-            String to = request.to();
-            if (to != null && request.convert() && fieldMapper != null) {
-                to = fieldMapper.indexedValue(to);
-            }
-            Term toTerm = to == null ? null : new Term(indexFieldName, to);
-            TermEnum termEnum = null;
+            // if we are sorting by term, and the field mapper sorting type is STRING, then do plain term extraction (which is faster)
             try {
+                ExecuteTermResult executeTermResult;
+                if (request.sortType() == TermsRequest.SortType.TERM && fieldMapper != null && (fieldMapper.sortType() == SortField.STRING || fieldMapper.sortType() == SortField.STRING_VAL)) {
+                    executeTermResult = executeTermSortedStringTerm(request, indexFieldName, searcher, regexpPattern, fieldMapper, termDocs);
+                } else {
+                    executeTermResult = executeTerms(request, indexFieldName, searcher, regexpPattern, fieldMapper, termDocs);
+                }
+                termDocs = executeTermResult.termDocs;
+                response.put(fieldName, executeTermResult.termsFreqs);
-                termEnum = searcher.reader().terms(fromTerm);
-                // skip the first if we are not inclusive on from
-                if (!request.fromInclusive() && request.from() != null) {
-                    termEnum.next();
-                }
-                if (request.sortType() == TermsRequest.SortType.TERM) {
int counter = 0;
while (counter < request.size()) {
Term term = termEnum.term();
// have we reached the end?
if (term == null || indexFieldName != term.field()) { // StirngHelper.intern
break;
}
// convert to actual term text
if (fieldMapper != null && fieldMapper.requiresStringToStringConversion()) {
// valueAsString returns null indicating that this is not interesting
term = term.createTerm(fieldMapper.valueAsString(term.text()));
// if we need to break on this term enumeration, bail
if (fieldMapper.shouldBreakTermEnumeration(term.text())) {
break;
}
if (term.text() == null) {
continue;
}
}
// does it match on the prefix?
if (request.prefix() != null && !term.text().startsWith(request.prefix())) {
break;
}
// does it match on regexp?
if (regexpPattern != null && !regexpPattern.matcher(term.text()).matches()) {
termEnum.next();
continue;
}
// check on the to term
if (toTerm != null) {
int toCompareResult = term.compareTo(toTerm);
if (toCompareResult > 0 || (toCompareResult == 0 && !request.toInclusive())) {
break;
}
}
int docFreq = termEnum.docFreq();
if (request.exact()) {
if (termDocs == null) {
termDocs = searcher.reader().termDocs();
}
termDocs.seek(termEnum);
docFreq = 0;
while (termDocs.next()) {
if (!searcher.reader().isDeleted(termDocs.doc())) {
docFreq++;
}
}
}
termsFreqs.put(term.text(), docFreq);
if (!termEnum.next()) {
break;
}
counter++;
}
} else if (request.sortType() == TermsRequest.SortType.FREQ) {
BoundedTreeSet<TermFreq> sortedFreq = new BoundedTreeSet<TermFreq>(TermFreq.freqComparator(), request.size());
while (true) {
Term term = termEnum.term();
// have we reached the end?
if (term == null || indexFieldName != term.field()) { // StirngHelper.intern
break;
}
// convert to actual term text
if (fieldMapper != null && fieldMapper.requiresStringToStringConversion()) {
// valueAsString returns null indicating that this is not interesting
term = term.createTerm(fieldMapper.valueAsString(term.text()));
// if we need to break on this term enumeration, bail
if (fieldMapper.shouldBreakTermEnumeration(term.text())) {
break;
}
if (term.text() == null) {
continue;
}
}
// does it match on the prefix?
if (request.prefix() != null && !term.text().startsWith(request.prefix())) {
break;
}
// does it match on regexp?
if (regexpPattern != null && !regexpPattern.matcher(term.text()).matches()) {
termEnum.next();
continue;
}
// check on the to term
if (toTerm != null) {
int toCompareResult = term.compareTo(toTerm);
if (toCompareResult > 0 || (toCompareResult == 0 && !request.toInclusive())) {
break;
}
}
int docFreq = termEnum.docFreq();
if (request.exact()) {
if (termDocs == null) {
termDocs = searcher.reader().termDocs();
}
termDocs.seek(termEnum);
docFreq = 0;
while (termDocs.next()) {
if (!searcher.reader().isDeleted(termDocs.doc())) {
docFreq++;
}
}
}
sortedFreq.add(new TermFreq(term.text(), docFreq));
if (!termEnum.next()) {
break;
}
}
for (TermFreq termFreq : sortedFreq) {
termsFreqs.put(termFreq.term(), termFreq.docFreq());
}
}
-                response.put(fieldName, termsFreqs);
             } catch (Exception e) {
-                logger.debug("Failed to get term enum from term [" + fromTerm + "]", e);
-            } finally {
-                if (termEnum != null) {
-                    try {
-                        termEnum.close();
-                    } catch (IOException e) {
-                        // ignore
-                    }
-                }
+                // currently, just log
+                logger.warn("Failed to fetch terms for field [" + fieldName + "]", e);
             }
         }
         return response;
@@ -331,6 +193,214 @@ public class TransportTermsAction extends TransportBroadcastOperationAction<Term
         }
     }
static class ExecuteTermResult {
public TObjectIntHashMap<Object> termsFreqs;
public TermDocs termDocs;
ExecuteTermResult(TObjectIntHashMap<Object> termsFreqs, TermDocs termDocs) {
this.termsFreqs = termsFreqs;
this.termDocs = termDocs;
}
}
private ExecuteTermResult executeTerms(ShardTermsRequest request, String indexFieldName, Engine.Searcher searcher,
@Nullable Pattern regexpPattern, @Nullable FieldMapper fieldMapper, @Nullable TermDocs termDocs) throws IOException {
TObjectIntHashMap<Object> termsFreqs = new TObjectIntHashMap<Object>();
String sFrom = request.from();
if (sFrom == null) {
// really, only make sense for strings
sFrom = request.prefix();
}
Object from = sFrom;
if (from != null && fieldMapper != null) {
from = fieldMapper.valueFromString(sFrom);
}
String sTo = request.to();
Object to = sTo;
if (to != null && fieldMapper != null) {
to = fieldMapper.valueFromString(sTo);
}
TermEnum termEnum = null;
Comparator<TermFreq> comparator = request.sortType() == TermsRequest.SortType.TERM ? TermFreq.termComparator() : TermFreq.freqComparator();
BoundedTreeSet<TermFreq> sortedFreq = new BoundedTreeSet<TermFreq>(comparator, request.size());
try {
termEnum = searcher.reader().terms(new Term(indexFieldName, ""));
while (true) {
Term term = termEnum.term();
// have we reached the end?
if (term == null || indexFieldName != term.field()) { // StirngHelper.intern
break;
}
Object termValue = term.text();
if (fieldMapper != null) {
termValue = fieldMapper.valueFromTerm(term.text());
if (fieldMapper.shouldBreakTermEnumeration(termValue)) {
break;
}
if (termValue == null) {
continue;
}
}
// check on the from term
if (from != null) {
int fromCompareResult = ((Comparable) termValue).compareTo(from);
if (fromCompareResult < 0 || (fromCompareResult == 0 && !request.fromInclusive())) {
termEnum.next();
continue;
}
}
// does it match on the prefix?
if (request.prefix() != null && !term.text().startsWith(request.prefix())) {
break;
}
// does it match on regexp?
if (regexpPattern != null && !regexpPattern.matcher(term.text()).matches()) {
termEnum.next();
continue;
}
// check on the to term
if (to != null) {
int toCompareResult = ((Comparable) termValue).compareTo(to);
if (toCompareResult > 0 || (toCompareResult == 0 && !request.toInclusive())) {
break;
}
}
int docFreq = termEnum.docFreq();
if (request.exact()) {
if (termDocs == null) {
termDocs = searcher.reader().termDocs();
}
termDocs.seek(termEnum);
docFreq = 0;
while (termDocs.next()) {
if (!searcher.reader().isDeleted(termDocs.doc())) {
docFreq++;
}
}
}
sortedFreq.add(new TermFreq(termValue, docFreq));
if (!termEnum.next()) {
break;
}
}
} finally {
if (termEnum != null) {
try {
termEnum.close();
} catch (IOException e) {
// ignore
}
}
}
for (TermFreq termFreq : sortedFreq) {
termsFreqs.put(termFreq.term(), termFreq.docFreq());
}
return new ExecuteTermResult(termsFreqs, termDocs);
}
private ExecuteTermResult executeTermSortedStringTerm(ShardTermsRequest request, String indexFieldName, Engine.Searcher searcher,
@Nullable Pattern regexpPattern, @Nullable FieldMapper fieldMapper, @Nullable TermDocs termDocs) throws IOException {
TObjectIntHashMap<Object> termsFreqs = new TObjectIntHashMap<Object>();
String from = request.from();
if (from == null) {
from = request.prefix();
}
if (from == null) {
from = "";
}
Term fromTerm = new Term(indexFieldName, from);
String to = request.to();
if (to != null && fieldMapper != null) {
to = fieldMapper.indexedValue(to);
}
Term toTerm = to == null ? null : new Term(indexFieldName, to);
TermEnum termEnum = null;
try {
termEnum = searcher.reader().terms(fromTerm);
// skip the first if we are not inclusive on from
if (!request.fromInclusive() && request.from() != null) {
Term term = termEnum.term();
if (term != null && indexFieldName == term.field() && term.text().equals(request.from())) {
termEnum.next();
}
}
if (request.sortType() == TermsRequest.SortType.TERM) {
int counter = 0;
while (counter < request.size()) {
Term term = termEnum.term();
// have we reached the end?
if (term == null || indexFieldName != term.field()) { // StirngHelper.intern
break;
}
// convert to actual term text
if (fieldMapper != null) {
// valueAsString returns null indicating that this is not interesting
Object termObj = fieldMapper.valueFromTerm(term.text());
// if we need to break on this term enumeration, bail
if (fieldMapper.shouldBreakTermEnumeration(termObj)) {
break;
}
if (termObj == null) {
termEnum.next();
continue;
}
}
// does it match on the prefix?
if (request.prefix() != null && !term.text().startsWith(request.prefix())) {
break;
}
// does it match on regexp?
if (regexpPattern != null && !regexpPattern.matcher(term.text()).matches()) {
termEnum.next();
continue;
}
// check on the to term
if (toTerm != null) {
int toCompareResult = term.compareTo(toTerm);
if (toCompareResult > 0 || (toCompareResult == 0 && !request.toInclusive())) {
break;
}
}
int docFreq = termEnum.docFreq();
if (request.exact()) {
if (termDocs == null) {
termDocs = searcher.reader().termDocs();
}
termDocs.seek(termEnum);
docFreq = 0;
while (termDocs.next()) {
if (!searcher.reader().isDeleted(termDocs.doc())) {
docFreq++;
}
}
}
termsFreqs.put(term.text(), docFreq);
if (!termEnum.next()) {
break;
}
counter++;
}
}
} finally {
if (termEnum != null) {
try {
termEnum.close();
} catch (IOException e) {
// ignore
}
}
}
return new ExecuteTermResult(termsFreqs, termDocs);
}
     @Override protected String transportAction() {
         return TransportActions.TERMS;
     }

FieldMapper.java

@@ -118,6 +118,11 @@ public interface FieldMapper<T> {
      */
     Object valueForSearch(Fieldable field);
+    /**
+     * Returns the value that will be returned to the user (similar to {@link #valueForSearch(org.apache.lucene.document.Fieldable)}).
+     */
+    Object valueForSearch(Object value);
     /**
      * Returns the actual value of the field.
      */
@@ -129,23 +134,22 @@ public interface FieldMapper<T> {
     String valueAsString(Fieldable field);
     /**
-     * Returns <tt>true</tt> if {@link #valueAsString(String)} is required to convert
-     * from text value to text value.
+     * Parses the string back into the type of the field (should be comparable!) in a similar
+     * manner {@link #valueForSearch(org.apache.lucene.document.Fieldable)} does with fields.
      */
-    boolean requiresStringToStringConversion();
+    Object valueFromTerm(String term);
     /**
-     * Converts from the internal/indexed (term) text to the actual string representation.
-     * Can return <tt>null</tt> indicating that this is "uninteresting" value (for example, with
-     * numbers). Useful for example when enumerating terms. See {@link #shouldBreakTermEnumeration(String)}.
+     * Parses a string that represents the field into its value. For example, with numbers,
+     * it parses "1" to 1.
      */
-    String valueAsString(String text);
+    Object valueFromString(String text);
     /**
      * Return <tt>true</tt> if this term value indicates breaking out of term enumeration on this
-     * field. The term text passed is the one returned from {@link #valueAsString(String)}.
+     * field. The term text passed is the one returned from {@link #valueFromTerm(String)}.
      */
-    boolean shouldBreakTermEnumeration(String text);
+    boolean shouldBreakTermEnumeration(Object text);
     /**
      * Returns the indexed value.

JsonBoostFieldMapper.java

@@ -110,12 +110,16 @@ public class JsonBoostFieldMapper extends JsonNumberFieldMapper<Float> implement
         return NumericUtils.floatToPrefixCoded(value);
     }
-    @Override public String valueAsString(String text) {
-        final int shift = text.charAt(0) - NumericUtils.SHIFT_START_INT;
+    @Override public Object valueFromTerm(String term) {
+        final int shift = term.charAt(0) - NumericUtils.SHIFT_START_INT;
         if (shift > 0 && shift <= 31) {
             return null;
         }
-        return Float.toString(NumericUtils.prefixCodedToFloat(text));
+        return NumericUtils.prefixCodedToFloat(term);
+    }
+    @Override public Object valueFromString(String text) {
+        return Float.parseFloat(text);
     }
     @Override public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper) {

JsonDateFieldMapper.java

@@ -103,6 +103,17 @@ public class JsonDateFieldMapper extends JsonNumberFieldMapper<Long> {
         return Numbers.bytesToLong(value);
     }
+    /**
+     * Dates should return as a string, delegates to {@link #valueAsString(org.apache.lucene.document.Fieldable)}.
+     */
+    @Override public Object valueForSearch(Fieldable field) {
+        return valueAsString(field);
+    }
+    @Override public Object valueForSearch(Object value) {
+        return dateTimeFormatter.printer().print((Long) value);
+    }
     @Override public String valueAsString(Fieldable field) {
         return dateTimeFormatter.printer().print(value(field));
     }
@@ -115,12 +126,16 @@
         return NumericUtils.longToPrefixCoded(value);
     }
-    @Override public String valueAsString(String text) {
-        final int shift = text.charAt(0) - NumericUtils.SHIFT_START_LONG;
+    @Override public Object valueFromTerm(String term) {
+        final int shift = term.charAt(0) - NumericUtils.SHIFT_START_LONG;
         if (shift > 0 && shift <= 63) {
             return null;
         }
-        return dateTimeFormatter.printer().print(NumericUtils.prefixCodedToLong(text));
+        return NumericUtils.prefixCodedToLong(term);
+    }
+    @Override public Object valueFromString(String text) {
+        return dateTimeFormatter.parser().parseMillis(text);
     }
     @Override public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper) {

JsonDoubleFieldMapper.java

@@ -99,12 +99,16 @@ public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
         return NumericUtils.doubleToPrefixCoded(value);
     }
-    @Override public String valueAsString(String text) {
-        final int shift = text.charAt(0) - NumericUtils.SHIFT_START_LONG;
+    @Override public Object valueFromTerm(String term) {
+        final int shift = term.charAt(0) - NumericUtils.SHIFT_START_LONG;
         if (shift > 0 && shift <= 63) {
             return null;
         }
-        return Double.toString(NumericUtils.prefixCodedToDouble(text));
+        return NumericUtils.prefixCodedToDouble(term);
+    }
+    @Override public Object valueFromString(String text) {
+        return Double.parseDouble(text);
     }
     @Override public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper) {

JsonFieldMapper.java

@@ -292,24 +292,25 @@ public abstract class JsonFieldMapper<T> implements FieldMapper<T>, JsonMapper {
         return valueAsString(field);
     }
-    /**
-     * Default base does not require stringToString conversion.
-     */
-    @Override public boolean requiresStringToStringConversion() {
-        return false;
+    @Override public Object valueForSearch(Object value) {
+        return value;
     }
     /**
      * Simply returns the same string.
      */
-    @Override public String valueAsString(String text) {
+    @Override public Object valueFromTerm(String term) {
+        return term;
+    }
+    @Override public Object valueFromString(String text) {
         return text;
     }
     /**
      * Never break on this term enumeration value.
      */
-    @Override public boolean shouldBreakTermEnumeration(String text) {
+    @Override public boolean shouldBreakTermEnumeration(Object text) {
         return false;
     }

JsonFloatFieldMapper.java

@@ -99,12 +99,16 @@ public class JsonFloatFieldMapper extends JsonNumberFieldMapper<Float> {
         return NumericUtils.floatToPrefixCoded(value);
     }
-    @Override public String valueAsString(String text) {
-        final int shift = text.charAt(0) - NumericUtils.SHIFT_START_INT;
+    @Override public Object valueFromTerm(String term) {
+        final int shift = term.charAt(0) - NumericUtils.SHIFT_START_INT;
         if (shift > 0 && shift <= 31) {
             return null;
         }
-        return Float.toString(NumericUtils.prefixCodedToFloat(text));
+        return NumericUtils.prefixCodedToFloat(term);
+    }
+    @Override public Object valueFromString(String text) {
+        return Float.parseFloat(text);
     }
     @Override public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper) {

JsonIntegerFieldMapper.java

@@ -98,8 +98,16 @@ public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
         return NumericUtils.intToPrefixCoded(value);
     }
-    @Override public String valueAsString(String text) {
-        return Integer.toString(NumericUtils.prefixCodedToInt(text));
+    @Override public Object valueFromTerm(String term) {
+        final int shift = term.charAt(0) - NumericUtils.SHIFT_START_INT;
+        if (shift > 0 && shift <= 31) {
+            return null;
+        }
+        return NumericUtils.prefixCodedToInt(term);
+    }
+    @Override public Object valueFromString(String text) {
+        return Integer.parseInt(text);
     }
     @Override public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper) {

JsonLongFieldMapper.java

@@ -98,12 +98,16 @@ public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
         return NumericUtils.longToPrefixCoded(value);
     }
-    @Override public String valueAsString(String text) {
-        final int shift = text.charAt(0) - NumericUtils.SHIFT_START_LONG;
+    @Override public Object valueFromTerm(String term) {
+        final int shift = term.charAt(0) - NumericUtils.SHIFT_START_LONG;
         if (shift > 0 && shift <= 63) {
             return null;
         }
-        return Long.toString(NumericUtils.prefixCodedToLong(text));
+        return NumericUtils.prefixCodedToLong(term);
+    }
+    @Override public Object valueFromString(String text) {
+        return Long.parseLong(text);
     }
     @Override public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper) {

JsonNumberFieldMapper.java

@@ -129,19 +129,14 @@ public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldM
         return value(field).toString();
     }
-    /**
-     * Numbers require string conversion.
-     */
-    @Override public boolean requiresStringToStringConversion() {
-        return true;
-    }
+    @Override public abstract Object valueFromTerm(String term);
-    @Override public abstract String valueAsString(String text);
+    @Override public abstract Object valueFromString(String text);
     /**
      * Breaks on this text if its <tt>null</tt>.
      */
-    @Override public boolean shouldBreakTermEnumeration(String text) {
+    @Override public boolean shouldBreakTermEnumeration(Object text) {
         return text == null;
     }

RestTermsAction.java

@@ -94,7 +94,6 @@ public class RestTermsAction extends BaseRestHandler {
         termsRequest.minFreq(request.paramAsInt("minFreq", termsRequest.minFreq()));
         termsRequest.maxFreq(request.paramAsInt("maxFreq", termsRequest.maxFreq()));
         termsRequest.size(request.paramAsInt("size", termsRequest.size()));
-        termsRequest.convert(request.paramAsBoolean("convert", termsRequest.convert()));
         termsRequest.prefix(request.param("prefix"));
         termsRequest.regexp(request.param("regexp"));
         termsRequest.sortType(TermsRequest.SortType.fromString(request.param("sort"), termsRequest.sortType()));
@@ -130,7 +129,7 @@ public class RestTermsAction extends BaseRestHandler {
         if (!termsAsArray) {
             builder.startObject("terms");
             for (TermFreq termFreq : fieldTermsFreq.termsFreqs()) {
-                builder.startObject(termFreq.term());
+                builder.startObject(termFreq.termAsString());
                 builder.field("docFreq", termFreq.docFreq());
                 builder.endObject();
             }

InternalSearchHitField.java

@@ -22,6 +22,7 @@ package org.elasticsearch.search.internal;
 import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.util.io.stream.StreamInput;
 import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.lucene.Lucene;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -69,61 +70,15 @@ public class InternalSearchHitField implements SearchHitField {
         int size = in.readVInt();
         values = new ArrayList<Object>(size);
         for (int i = 0; i < size; i++) {
+            values.add(Lucene.readFieldValue(in));
-            Object value;
byte type = in.readByte();
if (type == 0) {
value = in.readUTF();
} else if (type == 1) {
value = in.readInt();
} else if (type == 2) {
value = in.readLong();
} else if (type == 3) {
value = in.readFloat();
} else if (type == 4) {
value = in.readDouble();
} else if (type == 5) {
value = in.readBoolean();
} else if (type == 6) {
int bytesSize = in.readVInt();
value = new byte[bytesSize];
in.readFully(((byte[]) value));
} else {
throw new IOException("Can't read unknown type [" + type + "]");
}
values.add(value);
         }
     }
     @Override public void writeTo(StreamOutput out) throws IOException {
         out.writeUTF(name);
         out.writeVInt(values.size());
+        for (Object value : values) {
+            Lucene.writeFieldValue(out, value);
-        for (Object obj : values) {
-            Class type = obj.getClass();
if (type == String.class) {
out.writeByte((byte) 0);
out.writeUTF((String) obj);
} else if (type == Integer.class) {
out.writeByte((byte) 1);
out.writeInt((Integer) obj);
} else if (type == Long.class) {
out.writeByte((byte) 2);
out.writeLong((Long) obj);
} else if (type == Float.class) {
out.writeByte((byte) 3);
out.writeFloat((Float) obj);
} else if (type == Double.class) {
out.writeByte((byte) 4);
out.writeDouble((Double) obj);
} else if (type == Boolean.class) {
out.writeByte((byte) 5);
out.writeBoolean((Boolean) obj);
} else if (type == byte[].class) {
out.writeByte((byte) 6);
out.writeVInt(((byte[]) obj).length);
out.writeBytes(((byte[]) obj));
} else {
throw new IOException("Can't write type [" + type + "]");
}
         }
     }
 }

Lucene.java

@@ -256,6 +256,59 @@ public class Lucene {
         }
     }
public static Object readFieldValue(StreamInput in) throws IOException {
byte type = in.readByte();
if (type == 0) {
return in.readUTF();
} else if (type == 1) {
return in.readInt();
} else if (type == 2) {
return in.readLong();
} else if (type == 3) {
return in.readFloat();
} else if (type == 4) {
return in.readDouble();
} else if (type == 5) {
return in.readBoolean();
} else if (type == 6) {
int bytesSize = in.readVInt();
byte[] value = new byte[bytesSize];
in.readFully(value);
return value;
} else {
throw new IOException("Can't read unknown type [" + type + "]");
}
}
public static void writeFieldValue(StreamOutput out, Object value) throws IOException {
Class type = value.getClass();
if (type == String.class) {
out.writeByte((byte) 0);
out.writeUTF((String) value);
} else if (type == Integer.class) {
out.writeByte((byte) 1);
out.writeInt((Integer) value);
} else if (type == Long.class) {
out.writeByte((byte) 2);
out.writeLong((Long) value);
} else if (type == Float.class) {
out.writeByte((byte) 3);
out.writeFloat((Float) value);
} else if (type == Double.class) {
out.writeByte((byte) 4);
out.writeDouble((Double) value);
} else if (type == Boolean.class) {
out.writeByte((byte) 5);
out.writeBoolean((Boolean) value);
} else if (type == byte[].class) {
out.writeByte((byte) 6);
out.writeVInt(((byte[]) value).length);
out.writeBytes(((byte[]) value));
} else {
throw new IOException("Can't write type [" + type + "]");
}
}
     public static class CountCollector extends Collector {
         private final float minScore;

TermsActionTests.java

@@ -23,14 +23,17 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse;
 import org.elasticsearch.action.admin.indices.status.IndexStatus;
+import org.elasticsearch.action.terms.TermsRequest;
 import org.elasticsearch.action.terms.TermsResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.test.integration.AbstractServersTests;
 import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 import static org.elasticsearch.action.terms.TermsRequest.SortType.*;
 import static org.elasticsearch.client.Requests.*;
+import static org.elasticsearch.util.MapBuilder.*;
 import static org.elasticsearch.util.json.JsonBuilder.*;
 import static org.hamcrest.MatcherAssert.*;
 import static org.hamcrest.Matchers.*;
@@ -41,26 +44,13 @@
 @Test
 public class TermsActionTests extends AbstractServersTests {
-    @AfterMethod public void closeServers() {
-        closeAllServers();
-    }
-    @Test public void testTermsAction() throws Exception {
+    private Client client;
+    @BeforeMethod public void createServersAndClient() throws Exception {
         startServer("server1");
         startServer("server2");
-        Client client = getClient();
-        try {
-            verifyTermsActions(client);
-        } finally {
-            client.close();
-        }
-    }
-    protected Client getClient() {
-        return client("server2");
-    }
-    protected void verifyTermsActions(Client client) throws Exception {
+        client = getClient();
         logger.info("Creating index test");
         client.admin().indices().create(createIndexRequest("test")).actionGet();
         logger.info("Running Cluster Health");
@@ -68,7 +58,18 @@
         logger.info("Done Cluster Health, status " + clusterHealth.status());
         assertThat(clusterHealth.timedOut(), equalTo(false));
         assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
+    }
+    @AfterMethod public void closeServers() {
+        client.close();
+        closeAllServers();
+    }
+    protected Client getClient() {
+        return client("server2");
+    }
+    @Test public void testSimpleStringTerms() throws Exception {
         IndexStatus indexStatus = client.admin().indices().status(indicesStatus("test")).actionGet().index("test");
         // verify no freqs
@@ -139,9 +140,9 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(2));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(2));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("aaa"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("aaa"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[1].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[1].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[1].docFreq(), equalTo(2));
         logger.info("Verify freqs (sort gy freq)");
@@ -153,9 +154,9 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(2));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(2));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(2));
-        assertThat(termsResponse.field("value").termsFreqs()[1].term(), equalTo("aaa"));
+        assertThat(termsResponse.field("value").termsFreqs()[1].termAsString(), equalTo("aaa"));
         assertThat(termsResponse.field("value").termsFreqs()[1].docFreq(), equalTo(1));
         logger.info("Verify freq (size and sort by freq)");
@@ -167,7 +168,7 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(2));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(2));
         logger.info("Verify freq (minFreq with sort by freq)");
@@ -179,7 +180,7 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(2));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(2));
         logger.info("Verify freq (prefix with sort by freq)");
@@ -191,7 +192,7 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(2));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(2));
         // test deleting the last doc
@@ -212,9 +213,9 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(2));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(2));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("aaa"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("aaa"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[1].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[1].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[1].docFreq(), equalTo(2));
         logger.info("Verify freq (with exact, should see the delete)");
@@ -226,9 +227,9 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(1));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(2));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("aaa"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("aaa"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[1].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[1].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[1].docFreq(), equalTo(1));
         logger.info("Optimize (onlyExpungeDeletes with refresh)");
@@ -246,9 +247,222 @@
         assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(1));
         // check the order
         assertThat(termsResponse.field("value").termsFreqs().length, equalTo(2));
-        assertThat(termsResponse.field("value").termsFreqs()[0].term(), equalTo("aaa"));
+        assertThat(termsResponse.field("value").termsFreqs()[0].termAsString(), equalTo("aaa"));
         assertThat(termsResponse.field("value").termsFreqs()[0].docFreq(), equalTo(1));
-        assertThat(termsResponse.field("value").termsFreqs()[1].term(), equalTo("bbb"));
+        assertThat(termsResponse.field("value").termsFreqs()[1].termAsString(), equalTo("bbb"));
         assertThat(termsResponse.field("value").termsFreqs()[1].docFreq(), equalTo(1));
     }
@Test public void testNumberedTerms() throws Exception {
IndexStatus indexStatus = client.admin().indices().status(indicesStatus("test")).actionGet().index("test");
logger.info("Index ...");
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 1).put("fl", 2.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 1).put("fl", 2.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 1).put("fl", 2.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 1).put("fl", 2.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 2).put("fl", 3.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 2).put("fl", 3.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 2).put("fl", 3.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 3).put("fl", 4.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 11).put("fl", 12.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 11).put("fl", 12.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 12).put("fl", 13.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 12).put("fl", 13.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 12).put("fl", 13.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 13).put("fl", 14.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 13).put("fl", 14.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 21).put("fl", 20.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 22).put("fl", 21.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 22).put("fl", 21.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 22).put("fl", 21.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 22).put("fl", 21.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 22).put("fl", 21.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 22).put("fl", 21.0f).map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("int", 23).put("fl", 22.0f).map())).actionGet();
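// 23 docs total across 9 distinct "int" terms, giving doc freqs: 1->4, 2->3, 3->1, 11->2, 12->3, 13->2, 21->1, 22->6, 23->1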
logger.info("Refresh");
client.admin().indices().refresh(refreshRequest()).actionGet();
logger.info("Verify int with sort on term");
TermsResponse termsResponse = client.terms(termsRequest("test").fields("int").sortType(TermsRequest.SortType.TERM)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(23l));
assertThat(termsResponse.maxDoc(), equalTo(23l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("int").docFreq(1), equalTo(4));
assertThat(termsResponse.field("int").docFreq(2), equalTo(3));
// check the order
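// numeric fields should come back ordered by numeric value (1, 2, 3, 11, ...), not lexicographically ("1", "11", "12", ...)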
assertThat(termsResponse.field("int").termsFreqs().length, equalTo(9));
assertThat(termsResponse.field("int").termsFreqs()[0].termAsString(), equalTo("1"));
assertThat(termsResponse.field("int").termsFreqs()[0].docFreq(), equalTo(4));
assertThat(termsResponse.field("int").termsFreqs()[1].termAsString(), equalTo("2"));
assertThat(termsResponse.field("int").termsFreqs()[1].docFreq(), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[2].termAsString(), equalTo("3"));
assertThat(termsResponse.field("int").termsFreqs()[2].docFreq(), equalTo(1));
assertThat(termsResponse.field("int").termsFreqs()[3].termAsString(), equalTo("11"));
assertThat(termsResponse.field("int").termsFreqs()[3].docFreq(), equalTo(2));
assertThat(termsResponse.field("int").termsFreqs()[4].termAsString(), equalTo("12"));
assertThat(termsResponse.field("int").termsFreqs()[4].docFreq(), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[5].termAsString(), equalTo("13"));
assertThat(termsResponse.field("int").termsFreqs()[5].docFreq(), equalTo(2));
assertThat(termsResponse.field("int").termsFreqs()[6].termAsString(), equalTo("21"));
assertThat(termsResponse.field("int").termsFreqs()[6].docFreq(), equalTo(1));
logger.info("Verify int with sort on freq");
termsResponse = client.terms(termsRequest("test").fields("int").sortType(TermsRequest.SortType.FREQ)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(23l));
assertThat(termsResponse.maxDoc(), equalTo(23l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("int").docFreq(1), equalTo(4));
assertThat(termsResponse.field("int").docFreq(2), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs().length, equalTo(9));
assertThat(termsResponse.field("int").termsFreqs()[0].termAsString(), equalTo("22"));
assertThat(termsResponse.field("int").termsFreqs()[0].docFreq(), equalTo(6));
assertThat(termsResponse.field("int").termsFreqs()[1].termAsString(), equalTo("1"));
assertThat(termsResponse.field("int").termsFreqs()[1].docFreq(), equalTo(4));
logger.info("Verify int with sort on freq and from 2 to 11");
termsResponse = client.terms(termsRequest("test").fields("int").sortType(TermsRequest.SortType.FREQ).from(2).to(11)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(23l));
assertThat(termsResponse.maxDoc(), equalTo(23l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("int").docFreq(1), equalTo(-1));
assertThat(termsResponse.field("int").docFreq(2), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs().length, equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[0].termAsString(), equalTo("2"));
assertThat(termsResponse.field("int").termsFreqs()[0].docFreq(), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[1].termAsString(), equalTo("11"));
assertThat(termsResponse.field("int").termsFreqs()[1].docFreq(), equalTo(2));
assertThat(termsResponse.field("int").termsFreqs()[2].termAsString(), equalTo("3"));
assertThat(termsResponse.field("int").termsFreqs()[2].docFreq(), equalTo(1));
logger.info("Verify int with sort on term and from 2 to 11");
termsResponse = client.terms(termsRequest("test").fields("int").sortType(TermsRequest.SortType.TERM).from(2).to(11)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(23l));
assertThat(termsResponse.maxDoc(), equalTo(23l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("int").docFreq(1), equalTo(-1));
assertThat(termsResponse.field("int").docFreq(2), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs().length, equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[0].termAsString(), equalTo("2"));
assertThat(termsResponse.field("int").termsFreqs()[0].docFreq(), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[1].termAsString(), equalTo("3"));
assertThat(termsResponse.field("int").termsFreqs()[1].docFreq(), equalTo(1));
assertThat(termsResponse.field("int").termsFreqs()[2].termAsString(), equalTo("11"));
assertThat(termsResponse.field("int").termsFreqs()[2].docFreq(), equalTo(2));
logger.info("Verify int with sort on term and from 2 to 11, fromInclusive=false");
termsResponse = client.terms(termsRequest("test").fields("int").sortType(TermsRequest.SortType.TERM).from(2).to(11).fromInclusive(false)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(23l));
assertThat(termsResponse.maxDoc(), equalTo(23l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("int").docFreq(1), equalTo(-1));
assertThat(termsResponse.field("int").docFreq(3), equalTo(1));
assertThat(termsResponse.field("int").termsFreqs().length, equalTo(2));
assertThat(termsResponse.field("int").termsFreqs()[0].termAsString(), equalTo("3"));
assertThat(termsResponse.field("int").termsFreqs()[0].docFreq(), equalTo(1));
assertThat(termsResponse.field("int").termsFreqs()[1].termAsString(), equalTo("11"));
assertThat(termsResponse.field("int").termsFreqs()[1].docFreq(), equalTo(2));
logger.info("Verify int with sort on term and from 2 to 11, toInclusive=false");
termsResponse = client.terms(termsRequest("test").fields("int").sortType(TermsRequest.SortType.TERM).from(2).to(11).toInclusive(false)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(23l));
assertThat(termsResponse.maxDoc(), equalTo(23l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("int").docFreq(1), equalTo(-1));
assertThat(termsResponse.field("int").docFreq(2), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs().length, equalTo(2));
assertThat(termsResponse.field("int").termsFreqs()[0].termAsString(), equalTo("2"));
assertThat(termsResponse.field("int").termsFreqs()[0].docFreq(), equalTo(3));
assertThat(termsResponse.field("int").termsFreqs()[1].termAsString(), equalTo("3"));
assertThat(termsResponse.field("int").termsFreqs()[1].docFreq(), equalTo(1));
}
@Test public void testDateTerms() throws Exception {
IndexStatus indexStatus = client.admin().indices().status(indicesStatus("test")).actionGet().index("test");
logger.info("Index ...");
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("date", "2003-01-01").map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("date", "2003-01-01").map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("date", "2003-01-02").map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("date", "2003-01-03").map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("date", "2003-01-03").map())).actionGet();
client.index(indexRequest("test").type("type1").source(newMapBuilder().put("date", "2003-01-03").map())).actionGet();
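// 6 docs across 3 distinct dates: 2003-01-01 -> 2, 2003-01-02 -> 1, 2003-01-03 -> 3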
logger.info("Refresh");
client.admin().indices().refresh(refreshRequest()).actionGet();
logger.info("Verify int with sort on term");
TermsResponse termsResponse = client.terms(termsRequest("test").fields("date").sortType(TermsRequest.SortType.TERM)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(6l));
assertThat(termsResponse.maxDoc(), equalTo(6l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
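// date terms come back in their full UTC date-time form, even though they were indexed as plain dates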
assertThat(termsResponse.field("date").docFreq("2003-01-01T00:00:00.000Z"), equalTo(2));
assertThat(termsResponse.field("date").docFreq("2003-01-02T00:00:00.000Z"), equalTo(1));
assertThat(termsResponse.field("date").docFreq("2003-01-03T00:00:00.000Z"), equalTo(3));
assertThat(termsResponse.field("date").termsFreqs().length, equalTo(3));
assertThat(termsResponse.field("date").termsFreqs()[0].termAsString(), equalTo("2003-01-01T00:00:00.000Z"));
assertThat(termsResponse.field("date").termsFreqs()[0].docFreq(), equalTo(2));
assertThat(termsResponse.field("date").termsFreqs()[1].termAsString(), equalTo("2003-01-02T00:00:00.000Z"));
assertThat(termsResponse.field("date").termsFreqs()[1].docFreq(), equalTo(1));
assertThat(termsResponse.field("date").termsFreqs()[2].termAsString(), equalTo("2003-01-03T00:00:00.000Z"));
assertThat(termsResponse.field("date").termsFreqs()[2].docFreq(), equalTo(3));
logger.info("Verify int with sort on freq");
termsResponse = client.terms(termsRequest("test").fields("date").sortType(TermsRequest.SortType.FREQ)).actionGet();
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
assertThat(termsResponse.failedShards(), equalTo(0));
assertThat(termsResponse.numDocs(), equalTo(6l));
assertThat(termsResponse.maxDoc(), equalTo(6l));
assertThat(termsResponse.deletedDocs(), equalTo(0l));
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
assertThat(termsResponse.field("date").docFreq("2003-01-01T00:00:00.000Z"), equalTo(2));
assertThat(termsResponse.field("date").docFreq("2003-01-02T00:00:00.000Z"), equalTo(1));
assertThat(termsResponse.field("date").docFreq("2003-01-03T00:00:00.000Z"), equalTo(3));
assertThat(termsResponse.field("date").termsFreqs().length, equalTo(3));
assertThat(termsResponse.field("date").termsFreqs()[0].termAsString(), equalTo("2003-01-03T00:00:00.000Z"));
assertThat(termsResponse.field("date").termsFreqs()[0].docFreq(), equalTo(3));
assertThat(termsResponse.field("date").termsFreqs()[1].termAsString(), equalTo("2003-01-01T00:00:00.000Z"));
assertThat(termsResponse.field("date").termsFreqs()[1].docFreq(), equalTo(2));
assertThat(termsResponse.field("date").termsFreqs()[2].termAsString(), equalTo("2003-01-02T00:00:00.000Z"));
assertThat(termsResponse.field("date").termsFreqs()[2].docFreq(), equalTo(1));
}
}