Search Facets: Field Facets, closes #207

kimchy 2010-06-04 02:31:49 +03:00
parent dbb7df742e
commit 66c9f2f834
90 changed files with 4815 additions and 329 deletions

View File

@ -26,6 +26,7 @@
<w>cpus</w>
<w>datagram</w>
<w>datas</w>
<w>deque</w>
<w>desc</w>
<w>deserialize</w>
<w>docid</w>
@ -46,6 +47,7 @@
<w>inet</w>
<w>infos</w>
<w>intf</w>
<w>ints</w>
<w>iter</w>
<w>iterable</w>
<w>javax</w>
@ -70,12 +72,14 @@
<w>nanos</w>
<w>newcount</w>
<w>ngram</w>
<w>noop</w>
<w>nospawn</w>
<w>param</w>
<w>params</w>
<w>pluggable</w>
<w>plugins</w>
<w>porterstem</w>
<w>proc</w>
<w>publishhost</w>
<w>queryparser</w>
<w>rackspace</w>

View File

@ -254,8 +254,31 @@ public class SearchRequestBuilder {
* @param name The logical name of the facet; it will be returned under this name
* @param query The query to compute the facet count for
*/
public SearchRequestBuilder addFacet(String name, XContentQueryBuilder query) {
facetsBuilder().facet(name, query);
public SearchRequestBuilder addQueryFacet(String name, XContentQueryBuilder query) {
facetsBuilder().queryFacet(name, query);
return this;
}
/**
* Adds a query facet (which results in a count facet returned) with an option to
* be global on the index or bounded by the search query.
*
* @param name The logical name of the facet; it will be returned under this name
* @param query The query to compute the facet count for
* @param global Should the facet be executed globally or not
*/
public SearchRequestBuilder addQueryFacet(String name, XContentQueryBuilder query, boolean global) {
facetsBuilder().queryFacet(name, query, global);
return this;
}
public SearchRequestBuilder addFieldFacet(String name, String fieldName, int size) {
facetsBuilder().fieldFacet(name, fieldName, size);
return this;
}
public SearchRequestBuilder addFieldFacet(String name, String fieldName, int size, boolean global) {
facetsBuilder().fieldFacet(name, fieldName, size, global);
return this;
}
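
A hedged usage sketch of the new facet methods (not part of this diff): it assumes the 0.x-era Client#prepareSearch entry point and the static xcontent query builders, so the import location and helper names are assumptions rather than something this commit confirms.

    // Hypothetical caller; "client" is assumed to be an org.elasticsearch.client.Client.
    import static org.elasticsearch.index.query.xcontent.QueryBuilders.*; // assumed package for termQuery/matchAllQuery

    SearchResponse response = client.prepareSearch("test")
            .setQuery(matchAllQuery())
            .addQueryFacet("wow_docs", termQuery("tag", "wow"))           // count of hits matching this query
            .addQueryFacet("all_green", termQuery("tag", "green"), true)  // global: computed over the whole index
            .addFieldFacet("tags", "tag", 10)                             // top 10 terms of the "tag" field
            .execute().actionGet();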

View File

@ -19,8 +19,11 @@
package org.elasticsearch.index.cache;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.FieldDataCache;
import org.elasticsearch.index.cache.field.none.NoneFieldDataCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.filter.none.NoneFilterCache;
import org.elasticsearch.index.settings.IndexSettings;
@ -36,24 +39,38 @@ public class IndexCache extends AbstractIndexComponent {
private final FilterCache filterCache;
private final FieldDataCache fieldDataCache;
public IndexCache(Index index) {
this(index, EMPTY_SETTINGS, new NoneFilterCache(index, EMPTY_SETTINGS));
this(index, EMPTY_SETTINGS, new NoneFilterCache(index, EMPTY_SETTINGS), new NoneFieldDataCache(index, EMPTY_SETTINGS));
}
@Inject public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache) {
@Inject public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, FieldDataCache fieldDataCache) {
super(index, indexSettings);
this.filterCache = filterCache;
this.fieldDataCache = fieldDataCache;
}
public FilterCache filter() {
return filterCache;
}
public FieldDataCache fieldData() {
return fieldDataCache;
}
public void clear(IndexReader reader) {
filterCache.clear(reader);
fieldDataCache.clear(reader);
}
public void clear() {
filterCache.clear();
fieldDataCache.clear();
}
public void clearUnreferenced() {
filterCache.clearUnreferenced();
fieldDataCache.clearUnreferenced();
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.cache;
import org.elasticsearch.index.cache.field.FieldDataCacheModule;
import org.elasticsearch.index.cache.filter.FilterCacheModule;
import org.elasticsearch.util.inject.AbstractModule;
import org.elasticsearch.util.settings.Settings;
@ -36,6 +37,7 @@ public class IndexCacheModule extends AbstractModule {
@Override protected void configure() {
new FilterCacheModule(settings).configure(binder());
new FieldDataCacheModule(settings).configure(binder());
bind(IndexCache.class).asEagerSingleton();
}

View File

@ -0,0 +1,45 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.field;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.component.CloseableComponent;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public interface FieldDataCache extends CloseableComponent {
<T extends FieldData> T cache(Class<T> type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException;
FieldData cache(FieldData.Type type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException;
String type();
void clear();
void clear(IndexReader reader);
void clearUnreferenced();
}
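
A minimal sketch of a consumer of this interface, assuming an already-open IndexReader and an injected FieldDataCache; the field name and document id are placeholders, and fieldDataOptions() is the static factory introduced below.

    import static org.elasticsearch.index.field.FieldDataOptions.fieldDataOptions;

    StringFieldData tags = fieldDataCache.cache(StringFieldData.class, reader, "tag",
            fieldDataOptions().withFreqs(true));
    if (tags.hasValue(docId)) {
        // the concrete FieldData subclass decides single- vs multi-valued access
    }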

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.field;
import org.elasticsearch.index.cache.field.weak.WeakFieldDataCache;
import org.elasticsearch.util.inject.AbstractModule;
import org.elasticsearch.util.inject.Scopes;
import org.elasticsearch.util.settings.Settings;
/**
* @author kimchy (Shay Banon)
*/
public class FieldDataCacheModule extends AbstractModule {
public static final class FieldDataCacheSettings {
public static final String FIELD_DATA_CACHE_TYPE = "index.cache.field.type";
}
private final Settings settings;
public FieldDataCacheModule(Settings settings) {
this.settings = settings;
}
@Override protected void configure() {
bind(FieldDataCache.class)
.to(settings.getAsClass(FieldDataCacheSettings.FIELD_DATA_CACHE_TYPE, WeakFieldDataCache.class, "org.elasticsearch.index.cache.field.", "FieldDataCache"))
.in(Scopes.SINGLETON);
}
}
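
The short value of index.cache.field.type is expanded with the prefix/suffix passed to getAsClass, so "soft", "weak" and "none" are intended to resolve to the matching package-local implementations, with weak as the default above. A hedged configuration sketch; the ImmutableSettings builder name is assumed from this era of the codebase.

    // "soft" -> org.elasticsearch.index.cache.field.soft.SoftFieldDataCache  (assumed resolution)
    // "weak" -> org.elasticsearch.index.cache.field.weak.WeakFieldDataCache  (default)
    // "none" -> org.elasticsearch.index.cache.field.none.NoneFieldDataCache
    Settings indexSettings = ImmutableSettings.settingsBuilder()
            .put("index.cache.field.type", "soft")
            .build();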

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.field.none;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.FieldDataCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.inject.Inject;
import org.elasticsearch.util.settings.Settings;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class NoneFieldDataCache extends AbstractIndexComponent implements FieldDataCache {
@Inject public NoneFieldDataCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
logger.debug("Using no field cache");
}
@Override public <T extends FieldData> T cache(Class<T> type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException {
return FieldData.load(type, reader, fieldName, options);
}
@Override public FieldData cache(FieldData.Type type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException {
return FieldData.load(type, reader, fieldName, options);
}
@Override public String type() {
return "none";
}
@Override public void clear() {
}
@Override public void clear(IndexReader reader) {
}
@Override public void clearUnreferenced() {
}
@Override public void close() throws ElasticSearchException {
}
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.field.soft;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.support.AbstractConcurrentMapFieldDataCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.collect.MapMaker;
import org.elasticsearch.util.inject.Inject;
import org.elasticsearch.util.settings.Settings;
import java.util.concurrent.ConcurrentMap;
/**
* @author kimchy (Shay Banon)
*/
public class SoftFieldDataCache extends AbstractConcurrentMapFieldDataCache {
@Inject public SoftFieldDataCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings, new MapMaker()
.softKeys()
.<Object, ConcurrentMap<String, FieldData>>makeMap());
}
@Override public String type() {
return "soft";
}
}

View File

@ -0,0 +1,121 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.field.support;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.FieldDataCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.concurrent.ConcurrentCollections;
import org.elasticsearch.util.settings.Settings;
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
/**
* @author kimchy (Shay Banon)
*/
public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexComponent implements FieldDataCache {
private final ConcurrentMap<Object, ConcurrentMap<String, FieldData>> cache;
private final Object creationMutex = new Object();
protected AbstractConcurrentMapFieldDataCache(Index index, @IndexSettings Settings indexSettings,
ConcurrentMap<Object, ConcurrentMap<String, FieldData>> cache) {
super(index, indexSettings);
this.cache = cache;
}
@Override public void close() throws ElasticSearchException {
cache.clear();
}
@Override public void clear() {
cache.clear();
}
@Override public void clear(IndexReader reader) {
cache.remove(reader.getFieldCacheKey());
}
@Override public void clearUnreferenced() {
// nothing to do here...
}
@Override public FieldData cache(FieldData.Type type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException {
return cache(type.fieldDataClass, reader, fieldName, options);
}
@Override public <T extends FieldData> T cache(Class<T> type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException {
ConcurrentMap<String, FieldData> fieldDataCache = cache.get(reader.getFieldCacheKey());
if (fieldDataCache == null) {
synchronized (creationMutex) {
fieldDataCache = cache.get(reader.getFieldCacheKey());
if (fieldDataCache == null) {
fieldDataCache = ConcurrentCollections.newConcurrentMap();
cache.put(reader.getFieldCacheKey(), fieldDataCache); // register the per-reader map so later lookups can hit the cache
}
T fieldData = (T) fieldDataCache.get(fieldName);
if (fieldData != null) {
if (!options.subsetOf(fieldData.options())) {
fieldData = FieldData.load(type, reader, fieldName, options);
fieldDataCache.put(fieldName, fieldData);
}
} else {
fieldData = FieldData.load(type, reader, fieldName, options);
fieldDataCache.put(fieldName, fieldData);
}
return fieldData;
}
}
T fieldData = (T) fieldDataCache.get(fieldName);
if (fieldData == null) {
synchronized (creationMutex) {
fieldData = (T) fieldDataCache.get(fieldName);
if (fieldData == null) {
fieldData = FieldData.load(type, reader, fieldName, options);
fieldDataCache.put(fieldName, fieldData);
} else if (!options.subsetOf(fieldData.options())) {
fieldData = FieldData.load(type, reader, fieldName, options);
fieldDataCache.put(fieldName, fieldData);
}
return fieldData;
}
} else if (!options.subsetOf(fieldData.options())) {
synchronized (creationMutex) {
fieldData = (T) fieldDataCache.get(fieldName);
if (fieldData != null) {
if (!options.subsetOf(fieldData.options())) {
fieldData = FieldData.load(type, reader, fieldName, options);
fieldDataCache.put(fieldName, fieldData);
}
} else {
fieldData = FieldData.load(type, reader, fieldName, options);
fieldDataCache.put(fieldName, fieldData);
}
}
}
return fieldData;
}
}
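
The lookup above is a two-level cache: the outer map is keyed per IndexReader (via getFieldCacheKey()) and the inner map per field name, with double-checked locking on a single mutex around loads. A hedged usage sequence; the reader and field name are placeholders and fieldDataOptions() is the static factory from FieldDataOptions.

    // First request loads from the index and caches the result under the reader's key.
    IntFieldData first = fieldDataCache.cache(IntFieldData.class, reader, "age", fieldDataOptions());
    // Same reader, same field, options already covered by the cached copy -> served from the cache.
    IntFieldData second = fieldDataCache.cache(IntFieldData.class, reader, "age", fieldDataOptions());
    // Asking for freqs when the cached copy was loaded without them forces a reload of that field.
    IntFieldData withFreqs = fieldDataCache.cache(IntFieldData.class, reader, "age", fieldDataOptions().withFreqs(true));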

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.field.weak;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.support.AbstractConcurrentMapFieldDataCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.collect.MapMaker;
import org.elasticsearch.util.inject.Inject;
import org.elasticsearch.util.settings.Settings;
import java.util.concurrent.ConcurrentMap;
/**
* @author kimchy (Shay Banon)
*/
public class WeakFieldDataCache extends AbstractConcurrentMapFieldDataCache {
@Inject public WeakFieldDataCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings, new MapMaker()
.weakKeys()
.<Object, ConcurrentMap<String, FieldData>>makeMap());
}
@Override public String type() {
return "weak";
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.cache.filter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Filter;
import org.elasticsearch.index.IndexComponent;
import org.elasticsearch.util.component.CloseableComponent;
@ -34,6 +35,8 @@ public interface FilterCache extends IndexComponent, CloseableComponent {
boolean isCached(Filter filter);
void clear(IndexReader reader);
void clear();
/**

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.cache.filter.none;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Filter;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
@ -57,6 +58,10 @@ public class NoneFilterCache extends AbstractIndexComponent implements FilterCac
// nothing to do here
}
@Override public void clear(IndexReader reader) {
// nothing to do here
}
@Override public void clearUnreferenced() {
// nothing to do here
}

View File

@ -19,13 +19,13 @@
package org.elasticsearch.index.cache.filter.soft;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.filter.support.AbstractConcurrentMapFilterCache;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.collect.MapMaker;
import org.elasticsearch.util.inject.Inject;
import org.elasticsearch.util.lucene.docset.DocSet;
import org.elasticsearch.util.settings.Settings;
import java.util.concurrent.ConcurrentMap;
@ -38,7 +38,7 @@ import java.util.concurrent.ConcurrentMap;
public class SoftFilterCache extends AbstractConcurrentMapFilterCache {
@Inject public SoftFilterCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings, new MapMaker().softKeys().<Object, ConcurrentMap<Filter, DocIdSet>>makeMap());
super(index, indexSettings, new MapMaker().softKeys().<Object, ConcurrentMap<Filter, DocSet>>makeMap());
}
@Override public String type() {

View File

@ -26,13 +26,14 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.lucene.docset.DocSet;
import org.elasticsearch.util.settings.Settings;
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
import static org.elasticsearch.util.concurrent.ConcurrentCollections.*;
import static org.elasticsearch.util.lucene.docidset.DocIdSets.*;
import static org.elasticsearch.util.lucene.docset.DocSets.*;
/**
* A base concurrent filter cache that accepts the actual cache to use.
@ -41,10 +42,10 @@ import static org.elasticsearch.util.lucene.docidset.DocIdSets.*;
*/
public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComponent implements FilterCache {
private final ConcurrentMap<Object, ConcurrentMap<Filter, DocIdSet>> cache;
private final ConcurrentMap<Object, ConcurrentMap<Filter, DocSet>> cache;
protected AbstractConcurrentMapFilterCache(Index index, @IndexSettings Settings indexSettings,
ConcurrentMap<Object, ConcurrentMap<Filter, DocIdSet>> cache) {
ConcurrentMap<Object, ConcurrentMap<Filter, DocSet>> cache) {
super(index, indexSettings);
this.cache = cache;
}
@ -57,6 +58,10 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
cache.clear();
}
@Override public void clear(IndexReader reader) {
cache.remove(reader.getFieldCacheKey());
}
@Override public void clearUnreferenced() {
// can't do this, since we cache on cacheKey...
// int totalCount = cache.size();
@ -85,7 +90,7 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
return filter instanceof FilterCacheFilterWrapper;
}
protected ConcurrentMap<Filter, DocIdSet> buildFilterMap() {
protected ConcurrentMap<Filter, DocSet> buildFilterMap() {
return newConcurrentMap();
}
@ -102,18 +107,18 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
}
@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
ConcurrentMap<Filter, DocIdSet> cachedFilters = cache.get(reader.getFieldCacheKey());
ConcurrentMap<Filter, DocSet> cachedFilters = cache.get(reader.getFieldCacheKey());
if (cachedFilters == null) {
cachedFilters = buildFilterMap();
cache.putIfAbsent(reader.getFieldCacheKey(), cachedFilters);
}
DocIdSet docIdSet = cachedFilters.get(filter);
if (docIdSet != null) {
return docIdSet;
DocSet docSet = cachedFilters.get(filter);
if (docSet != null) {
return docSet;
}
docIdSet = filter.getDocIdSet(reader);
docIdSet = cacheable(reader, docIdSet);
cachedFilters.putIfAbsent(filter, docIdSet);
DocIdSet docIdSet = filter.getDocIdSet(reader);
docSet = cacheable(reader, docIdSet);
cachedFilters.putIfAbsent(filter, docSet);
return docIdSet;
}

View File

@ -19,13 +19,13 @@
package org.elasticsearch.index.cache.filter.weak;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.filter.support.AbstractConcurrentMapFilterCache;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.collect.MapMaker;
import org.elasticsearch.util.inject.Inject;
import org.elasticsearch.util.lucene.docset.DocSet;
import org.elasticsearch.util.settings.Settings;
import java.util.concurrent.ConcurrentMap;
@ -38,7 +38,7 @@ import java.util.concurrent.ConcurrentMap;
public class WeakFilterCache extends AbstractConcurrentMapFilterCache {
@Inject public WeakFilterCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings, new MapMaker().weakKeys().<Object, ConcurrentMap<Filter, DocIdSet>>makeMap());
super(index, indexSettings, new MapMaker().weakKeys().<Object, ConcurrentMap<Filter, DocSet>>makeMap());
}
@Override public String type() {

View File

@ -0,0 +1,112 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.index.field.doubles.DoubleFieldData;
import org.elasticsearch.index.field.floats.FloatFieldData;
import org.elasticsearch.index.field.ints.IntFieldData;
import org.elasticsearch.index.field.longs.LongFieldData;
import org.elasticsearch.index.field.shorts.ShortFieldData;
import org.elasticsearch.index.field.strings.StringFieldData;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
// General TODOs on FieldData
// TODO Make storing of freqs optional
// TODO Optimize the order (both int[] and int[][] when they are sparse, create an Order abstraction)
public abstract class FieldData {
public static enum Type {
STRING(StringFieldData.class),
SHORT(ShortFieldData.class),
INT(IntFieldData.class),
LONG(LongFieldData.class),
FLOAT(FloatFieldData.class),
DOUBLE(DoubleFieldData.class);
public final Class<? extends FieldData> fieldDataClass;
Type(Class<? extends FieldData> clazz) {
this.fieldDataClass = clazz;
}
}
private final String fieldName;
private final FieldDataOptions options;
protected FieldData(String fieldName, FieldDataOptions options) {
this.fieldName = fieldName;
this.options = options;
}
/**
* The field name of this field data.
*/
public final String fieldName() {
return fieldName;
}
/**
* Is the field data a multi valued one (has multiple values / terms per document id) or not.
*/
public abstract boolean multiValued();
/**
* Is there a value associated with this document id.
*/
public abstract boolean hasValue(int docId);
/**
* The type of this field data.
*/
public abstract Type type();
public FieldDataOptions options() {
return this.options;
}
public static FieldData load(Type type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException {
return load(type.fieldDataClass, reader, fieldName, options);
}
@SuppressWarnings({"unchecked"})
public static <T extends FieldData> T load(Class<T> type, IndexReader reader, String fieldName, FieldDataOptions options) throws IOException {
if (type == StringFieldData.class) {
return (T) StringFieldData.load(reader, fieldName, options);
} else if (type == IntFieldData.class) {
return (T) IntFieldData.load(reader, fieldName, options);
} else if (type == LongFieldData.class) {
return (T) LongFieldData.load(reader, fieldName, options);
} else if (type == FloatFieldData.class) {
return (T) FloatFieldData.load(reader, fieldName, options);
} else if (type == DoubleFieldData.class) {
return (T) DoubleFieldData.load(reader, fieldName, options);
} else if (type == ShortFieldData.class) {
return (T) ShortFieldData.load(reader, fieldName, options);
}
throw new ElasticSearchIllegalArgumentException("No support for type [" + type + "] to load field data");
}
}
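
A hedged example of the two load entry points above; the reader and field name are placeholders, and fieldDataOptions() is the static factory from FieldDataOptions.

    // Typed load: the concrete field data class is known at compile time.
    LongFieldData millis = FieldData.load(LongFieldData.class, reader, "timestamp", fieldDataOptions());

    // Untyped load: the type arrives as an enum value, e.g. resolved from a field mapping.
    FieldData generic = FieldData.load(FieldData.Type.LONG, reader, "timestamp", fieldDataOptions());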

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field;
/**
* @author kimchy (Shay Banon)
*/
public class FieldDataOptions {
public static FieldDataOptions fieldDataOptions() {
return new FieldDataOptions();
}
boolean freqs = false;
public final FieldDataOptions withFreqs(boolean freqs) {
this.freqs = freqs;
return this;
}
public final boolean hasFreqs() {
return freqs;
}
public boolean subsetOf(FieldDataOptions options) {
if (freqs && !options.freqs) {
return false;
}
return true;
}
}
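
The subsetOf check is what lets a cache reuse an already-loaded copy: a request without freqs can be served by a copy loaded with freqs, but not the reverse. A small illustration:

    FieldDataOptions withFreqs = fieldDataOptions().withFreqs(true);
    FieldDataOptions withoutFreqs = fieldDataOptions();

    // withoutFreqs.subsetOf(withFreqs)  -> true,  cached freqs-enabled data can serve the request
    // withFreqs.subsetOf(withoutFreqs)  -> false, the cache has to reload with freqs enabled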

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.doubles;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.field.support.FieldDataLoader;
import org.elasticsearch.util.gnu.trove.TDoubleArrayList;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public abstract class DoubleFieldData extends FieldData {
static final double[] EMPTY_DOUBLE_ARRAY = new double[0];
protected final double[] values;
protected final int[] freqs;
protected DoubleFieldData(String fieldName, FieldDataOptions options, double[] values, int[] freqs) {
super(fieldName, options);
this.values = values;
this.freqs = freqs;
}
abstract public double value(int docId);
abstract public double[] values(int docId);
@Override public Type type() {
return Type.DOUBLE;
}
public void forEachValue(ValueProc proc) {
if (freqs == null) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], -1);
}
} else {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], freqs[i]);
}
}
}
public static interface ValueProc {
void onValue(double value, int freq);
}
public static DoubleFieldData load(IndexReader reader, String field, FieldDataOptions options) throws IOException {
return FieldDataLoader.load(reader, field, options, new DoubleTypeLoader());
}
static class DoubleTypeLoader extends FieldDataLoader.FreqsTypeLoader<DoubleFieldData> {
private final TDoubleArrayList terms = new TDoubleArrayList();
DoubleTypeLoader() {
super();
// the first one indicates null value
terms.add(0);
}
@Override public void collectTerm(String term) {
terms.add(FieldCache.NUMERIC_UTILS_DOUBLE_PARSER.parseDouble(term));
}
@Override public DoubleFieldData buildSingleValue(String field, int[] order) {
return new SingleValueDoubleFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
@Override public DoubleFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueDoubleFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
}
}
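
A hedged sketch of walking all distinct values with the ValueProc callback; freq is -1 when the data was loaded without frequencies, and the field name is a placeholder.

    DoubleFieldData prices = DoubleFieldData.load(reader, "price", fieldDataOptions().withFreqs(true));
    prices.forEachValue(new DoubleFieldData.ValueProc() {
        public void onValue(double value, int freq) {
            // value is one distinct term of the "price" field, freq its document frequency
        }
    });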

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.doubles;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.ThreadLocals;
/**
* @author kimchy (Shay Banon)
*/
public class MultiValueDoubleFieldData extends DoubleFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<double[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
@Override protected ThreadLocals.CleanableValue<double[][]> initialValue() {
double[][] value = new double[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new double[i];
}
return new ThreadLocals.CleanableValue<double[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueDoubleFieldData(String fieldName, FieldDataOptions options, int[][] order, double[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public double value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return 0;
}
return values[docOrders[0]];
}
@Override public double[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return EMPTY_DOUBLE_ARRAY;
}
double[] doubles;
if (docOrders.length < VALUE_CACHE_SIZE) {
doubles = valuesCache.get().get()[docOrders.length];
} else {
doubles = new double[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
doubles[i] = values[docOrders[i]];
}
return doubles;
}
}
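
Note on the values(docId) implementation above: for documents with fewer than VALUE_CACHE_SIZE values it hands back a per-thread pooled array that the next call overwrites, so a caller that needs to keep the result should copy it. A minimal sketch:

    double[] shared = fieldData.values(docId);
    double[] kept = java.util.Arrays.copyOf(shared, shared.length); // copy before the next values(...) call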

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.doubles;
import org.elasticsearch.index.field.FieldDataOptions;
/**
* @author kimchy (Shay Banon)
*/
public class SingleValueDoubleFieldData extends DoubleFieldData {
private static ThreadLocal<double[]> valuesCache = new ThreadLocal<double[]>() {
@Override protected double[] initialValue() {
return new double[1];
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueDoubleFieldData(String fieldName, FieldDataOptions options, int[] order, double[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public double value(int docId) {
return values[order[docId]];
}
@Override public double[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return EMPTY_DOUBLE_ARRAY;
}
double[] ret = valuesCache.get();
ret[0] = values[loc];
return ret;
}
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.floats;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.field.support.FieldDataLoader;
import org.elasticsearch.util.gnu.trove.TFloatArrayList;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public abstract class FloatFieldData extends FieldData {
static final float[] EMPTY_FLOAT_ARRAY = new float[0];
protected final float[] values;
protected final int[] freqs;
protected FloatFieldData(String fieldName, FieldDataOptions options, float[] values, int[] freqs) {
super(fieldName, options);
this.values = values;
this.freqs = freqs;
}
abstract public float value(int docId);
abstract public float[] values(int docId);
@Override public Type type() {
return Type.FLOAT;
}
public void forEachValue(ValueProc proc) {
if (freqs == null) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], -1);
}
} else {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], freqs[i]);
}
}
}
public static interface ValueProc {
void onValue(float value, int freq);
}
public static FloatFieldData load(IndexReader reader, String field, FieldDataOptions options) throws IOException {
return FieldDataLoader.load(reader, field, options, new FloatTypeLoader());
}
static class FloatTypeLoader extends FieldDataLoader.FreqsTypeLoader<FloatFieldData> {
private final TFloatArrayList terms = new TFloatArrayList();
FloatTypeLoader() {
super();
// the first one indicates null value
terms.add(0);
}
@Override public void collectTerm(String term) {
terms.add(FieldCache.NUMERIC_UTILS_FLOAT_PARSER.parseFloat(term));
}
@Override public FloatFieldData buildSingleValue(String field, int[] order) {
return new SingleValueFloatFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
@Override public FloatFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueFloatFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
}
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.floats;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.ThreadLocals;
/**
* @author kimchy (Shay Banon)
*/
public class MultiValueFloatFieldData extends FloatFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<float[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<float[][]>>() {
@Override protected ThreadLocals.CleanableValue<float[][]> initialValue() {
float[][] value = new float[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new float[i];
}
return new ThreadLocals.CleanableValue<float[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueFloatFieldData(String fieldName, FieldDataOptions options, int[][] order, float[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public float value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return 0;
}
return values[docOrders[0]];
}
@Override public float[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return EMPTY_FLOAT_ARRAY;
}
float[] floats;
if (docOrders.length < VALUE_CACHE_SIZE) {
floats = valuesCache.get().get()[docOrders.length];
} else {
floats = new float[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
floats[i] = values[docOrders[i]];
}
return floats;
}
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.floats;
import org.elasticsearch.index.field.FieldDataOptions;
/**
* @author kimchy (Shay Banon)
*/
public class SingleValueFloatFieldData extends FloatFieldData {
private static ThreadLocal<float[]> valuesCache = new ThreadLocal<float[]>() {
@Override protected float[] initialValue() {
return new float[1];
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueFloatFieldData(String fieldName, FieldDataOptions options, int[] order, float[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public float value(int docId) {
return values[order[docId]];
}
@Override public float[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return EMPTY_FLOAT_ARRAY;
}
float[] ret = valuesCache.get();
ret[0] = values[loc];
return ret;
}
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.ints;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.field.support.FieldDataLoader;
import org.elasticsearch.util.gnu.trove.TIntArrayList;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public abstract class IntFieldData extends FieldData {
static final int[] EMPTY_INT_ARRAY = new int[0];
protected final int[] values;
protected final int[] freqs;
protected IntFieldData(String fieldName, FieldDataOptions options, int[] values, int[] freqs) {
super(fieldName, options);
this.values = values;
this.freqs = freqs;
}
abstract public int value(int docId);
abstract public int[] values(int docId);
@Override public Type type() {
return Type.INT;
}
public void forEachValue(ValueProc proc) {
if (freqs == null) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], -1);
}
} else {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], freqs[i]);
}
}
}
public static interface ValueProc {
void onValue(int value, int freq);
}
public static IntFieldData load(IndexReader reader, String field, FieldDataOptions options) throws IOException {
return FieldDataLoader.load(reader, field, options, new IntTypeLoader());
}
static class IntTypeLoader extends FieldDataLoader.FreqsTypeLoader<IntFieldData> {
private final TIntArrayList terms = new TIntArrayList();
IntTypeLoader() {
super();
// the first one indicates null value
terms.add(0);
}
@Override public void collectTerm(String term) {
terms.add(FieldCache.NUMERIC_UTILS_INT_PARSER.parseInt(term));
}
@Override public IntFieldData buildSingleValue(String field, int[] order) {
return new SingleValueIntFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
@Override public IntFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueIntFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
}
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.ints;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.ThreadLocals;
/**
* @author kimchy (Shay Banon)
*/
public class MultiValueIntFieldData extends IntFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<int[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<int[][]>>() {
@Override protected ThreadLocals.CleanableValue<int[][]> initialValue() {
int[][] value = new int[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new int[i];
}
return new ThreadLocals.CleanableValue<int[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueIntFieldData(String fieldName, FieldDataOptions options, int[][] order, int[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public int value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return 0;
}
return values[docOrders[0]];
}
@Override public int[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return EMPTY_INT_ARRAY;
}
int[] ints;
if (docOrders.length < VALUE_CACHE_SIZE) {
ints = valuesCache.get().get()[docOrders.length];
} else {
ints = new int[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
ints[i] = values[docOrders[i]];
}
return ints;
}
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.ints;
import org.elasticsearch.index.field.FieldDataOptions;
/**
* @author kimchy (Shay Banon)
*/
public class SingleValueIntFieldData extends IntFieldData {
private static ThreadLocal<int[]> valuesCache = new ThreadLocal<int[]>() {
@Override protected int[] initialValue() {
return new int[1];
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueIntFieldData(String fieldName, FieldDataOptions options, int[] order, int[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public int value(int docId) {
return values[order[docId]];
}
@Override public int[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return EMPTY_INT_ARRAY;
}
int[] ret = valuesCache.get();
ret[0] = values[loc];
return ret;
}
}

View File

@ -0,0 +1,99 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.longs;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.field.support.FieldDataLoader;
import org.elasticsearch.util.gnu.trove.TLongArrayList;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public abstract class LongFieldData extends FieldData {
static final long[] EMPTY_LONG_ARRAY = new long[0];
protected final long[] values;
protected final int[] freqs;
protected LongFieldData(String fieldName, FieldDataOptions options, long[] values, int[] freqs) {
super(fieldName, options);
this.values = values;
this.freqs = freqs;
}
abstract public long value(int docId);
abstract public long[] values(int docId);
@Override public Type type() {
return Type.LONG;
}
public void forEachValue(ValueProc proc) {
if (freqs == null) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], -1);
}
} else {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], freqs[i]);
}
}
}
public static interface ValueProc {
void onValue(long value, int freq);
}
public static LongFieldData load(IndexReader reader, String field, FieldDataOptions options) throws IOException {
return FieldDataLoader.load(reader, field, options, new LongTypeLoader());
}
static class LongTypeLoader extends FieldDataLoader.FreqsTypeLoader<LongFieldData> {
private final TLongArrayList terms = new TLongArrayList();
LongTypeLoader() {
super();
// the first one indicates null value
terms.add(0);
}
@Override public void collectTerm(String term) {
terms.add(FieldCache.NUMERIC_UTILS_LONG_PARSER.parseLong(term));
}
@Override public LongFieldData buildSingleValue(String field, int[] order) {
return new SingleValueLongFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
@Override public LongFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueLongFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
}
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.longs;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.ThreadLocals;
/**
* @author kimchy (Shay Banon)
*/
public class MultiValueLongFieldData extends LongFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<long[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<long[][]>>() {
@Override protected ThreadLocals.CleanableValue<long[][]> initialValue() {
long[][] value = new long[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new long[i];
}
return new ThreadLocals.CleanableValue<long[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueLongFieldData(String fieldName, FieldDataOptions options, int[][] order, long[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public long value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return 0;
}
return values[docOrders[0]];
}
@Override public long[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return EMPTY_LONG_ARRAY;
}
long[] longs;
if (docOrders.length < VALUE_CACHE_SIZE) {
longs = valuesCache.get().get()[docOrders.length];
} else {
longs = new long[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
longs[i] = values[docOrders[i]];
}
return longs;
}
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.longs;
import org.elasticsearch.index.field.FieldDataOptions;
/**
* @author kimchy (Shay Banon)
*/
public class SingleValueLongFieldData extends LongFieldData {
private static ThreadLocal<long[]> valuesCache = new ThreadLocal<long[]>() {
@Override protected long[] initialValue() {
return new long[1];
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueLongFieldData(String fieldName, FieldDataOptions options, int[] order, long[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public long value(int docId) {
return values[order[docId]];
}
@Override public long[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return EMPTY_LONG_ARRAY;
}
long[] ret = valuesCache.get();
ret[0] = values[loc];
return ret;
}
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.shorts;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.ThreadLocals;
/**
* @author kimchy (Shay Banon)
*/
public class MultiValueShortFieldData extends ShortFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<short[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<short[][]>>() {
@Override protected ThreadLocals.CleanableValue<short[][]> initialValue() {
short[][] value = new short[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new short[i];
}
return new ThreadLocals.CleanableValue<short[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueShortFieldData(String fieldName, FieldDataOptions options, int[][] order, short[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public short value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return 0;
}
return values[docOrders[0]];
}
@Override public short[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return EMPTY_SHORT_ARRAY;
}
short[] shorts;
if (docOrders.length < VALUE_CACHE_SIZE) {
shorts = valuesCache.get().get()[docOrders.length];
} else {
shorts = new short[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
shorts[i] = values[docOrders[i]];
}
return shorts;
}
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.shorts;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.field.support.FieldDataLoader;
import org.elasticsearch.util.gnu.trove.TShortArrayList;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public abstract class ShortFieldData extends FieldData {
static final short[] EMPTY_SHORT_ARRAY = new short[0];
protected final short[] values;
protected final int[] freqs;
protected ShortFieldData(String fieldName, FieldDataOptions options, short[] values, int[] freqs) {
super(fieldName, options);
this.values = values;
this.freqs = freqs;
}
abstract public short value(int docId);
abstract public short[] values(int docId);
@Override public Type type() {
return Type.SHORT;
}
public void forEachValue(ValueProc proc) {
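// values[0] is the reserved "no value" slot added by the loader, so iteration starts at 1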
if (freqs == null) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], -1);
}
} else {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], freqs[i]);
}
}
}
public static interface ValueProc {
void onValue(short value, int freq);
}
public static ShortFieldData load(IndexReader reader, String field, FieldDataOptions options) throws IOException {
return FieldDataLoader.load(reader, field, options, new ShortTypeLoader());
}
static class ShortTypeLoader extends FieldDataLoader.FreqsTypeLoader<ShortFieldData> {
private final TShortArrayList terms = new TShortArrayList();
ShortTypeLoader() {
super();
// the first one indicates null value
terms.add((short) 0);
}
@Override public void collectTerm(String term) {
terms.add((short) FieldCache.NUMERIC_UTILS_INT_PARSER.parseInt(term));
}
@Override public ShortFieldData buildSingleValue(String field, int[] order) {
return new SingleValueShortFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
@Override public ShortFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueShortFieldData(field, options, order, terms.toNativeArray(), buildFreqs());
}
}
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.shorts;
import org.elasticsearch.index.field.FieldDataOptions;
/**
* @author kimchy (Shay Banon)
*/
public class SingleValueShortFieldData extends ShortFieldData {
private static ThreadLocal<short[]> valuesCache = new ThreadLocal<short[]>() {
@Override protected short[] initialValue() {
return new short[1];
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueShortFieldData(String fieldName, FieldDataOptions options, int[] order, short[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public short value(int docId) {
return values[order[docId]];
}
@Override public short[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return EMPTY_SHORT_ARRAY;
}
short[] ret = valuesCache.get();
ret[0] = values[loc];
return ret;
}
}

View File

@ -0,0 +1,83 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.strings;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.Strings;
import org.elasticsearch.util.ThreadLocals;
/**
* @author kimchy (Shay Banon)
*/
public class MultiValueStringFieldData extends StringFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<String[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<String[][]>>() {
@Override protected ThreadLocals.CleanableValue<String[][]> initialValue() {
String[][] value = new String[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new String[i];
}
return new ThreadLocals.CleanableValue<String[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueStringFieldData(String fieldName, FieldDataOptions options, int[][] order, String[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public String value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return null;
}
return values[docOrders[0]];
}
@Override public String[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return Strings.EMPTY_ARRAY;
}
String[] strings;
if (docOrders.length < VALUE_CACHE_SIZE) {
strings = valuesCache.get().get()[docOrders.length];
} else {
strings = new String[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
strings[i] = values[docOrders[i]];
}
return strings;
}
}

View File

@ -0,0 +1,65 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.strings;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.Strings;
/**
* @author kimchy (Shay Banon)
*/
public class SingleValueStringFieldData extends StringFieldData {
private static ThreadLocal<String[]> valuesCache = new ThreadLocal<String[]>() {
@Override protected String[] initialValue() {
return new String[1];
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueStringFieldData(String fieldName, FieldDataOptions options, int[] order, String[] values, int[] freqs) {
super(fieldName, options, values, freqs);
this.order = order;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public String value(int docId) {
return values[order[docId]];
}
@Override public String[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return Strings.EMPTY_ARRAY;
}
String[] ret = valuesCache.get();
ret[0] = values[loc];
return ret;
}
}

View File

@ -0,0 +1,94 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.strings;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.index.field.support.FieldDataLoader;
import java.io.IOException;
import java.util.ArrayList;
/**
* @author kimchy (Shay Banon)
*/
public abstract class StringFieldData extends FieldData {
protected final String[] values;
protected final int[] freqs;
protected StringFieldData(String fieldName, FieldDataOptions options, String[] values, int[] freqs) {
super(fieldName, options);
this.values = values;
this.freqs = freqs;
}
abstract public String value(int docId);
abstract public String[] values(int docId);
@Override public Type type() {
return Type.STRING;
}
public void forEachValue(ValueProc proc) {
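// values[0] is the reserved null entry added by the loader, so iteration starts at 1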
if (freqs == null) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], -1);
}
} else {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i], freqs[i]);
}
}
}
public static interface ValueProc {
void onValue(String value, int freq);
}
public static StringFieldData load(IndexReader reader, String field, FieldDataOptions options) throws IOException {
return FieldDataLoader.load(reader, field, options, new StringTypeLoader());
}
static class StringTypeLoader extends FieldDataLoader.FreqsTypeLoader<StringFieldData> {
private final ArrayList<String> terms = new ArrayList<String>();
StringTypeLoader() {
super();
// the first one indicates null value
terms.add(null);
}
@Override public void collectTerm(String term) {
terms.add(term);
}
@Override public StringFieldData buildSingleValue(String field, int[] order) {
return new SingleValueStringFieldData(field, options, order, terms.toArray(new String[terms.size()]), buildFreqs());
}
@Override public StringFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueStringFieldData(field, options, order, terms.toArray(new String[terms.size()]), buildFreqs());
}
}
}

View File

@ -0,0 +1,146 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.support;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.util.StringHelper;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.field.FieldDataOptions;
import org.elasticsearch.util.gnu.trove.TIntArrayList;
import java.io.IOException;
import java.util.Arrays;
/**
* @author kimchy (Shay Banon)
*/
public class FieldDataLoader {
@SuppressWarnings({"StringEquality"})
public static <T extends FieldData> T load(IndexReader reader, String field, FieldDataOptions options, TypeLoader<T> loader) throws IOException {
loader.init(options);
field = StringHelper.intern(field);
int[][] orders = new int[reader.maxDoc()][];
int t = 1; // current term number
boolean multiValued = false;
TermDocs termDocs = reader.termDocs();
TermEnum termEnum = reader.terms(new Term(field));
try {
do {
Term term = termEnum.term();
if (term == null || term.field() != field) break;
loader.collectTerm(term.text());
termDocs.seek(termEnum);
int df = 0;
while (termDocs.next()) {
df++;
int doc = termDocs.doc();
int[] orderPerDoc = orders[doc];
if (orderPerDoc == null) {
orderPerDoc = new int[1];
orderPerDoc[0] = t;
orders[doc] = orderPerDoc;
} else {
multiValued = true;
orderPerDoc = Arrays.copyOf(orderPerDoc, orderPerDoc.length + 1);
orderPerDoc[orderPerDoc.length - 1] = t;
orders[doc] = orderPerDoc;
}
}
if (options.hasFreqs()) {
loader.collectFreq(df);
}
t++;
} while (termEnum.next());
} catch (RuntimeException e) {
if (e.getClass().getName().endsWith("StopFillCacheException")) {
// all is well, in case numeric parsers are used.
} else {
throw e;
}
} finally {
termDocs.close();
termEnum.close();
}
if (multiValued) {
return loader.buildMultiValue(field, orders);
} else {
// optimize for a single valued
int[] sOrders = new int[reader.maxDoc()];
for (int i = 0; i < orders.length; i++) {
if (orders[i] != null) {
sOrders[i] = orders[i][0];
}
}
return loader.buildSingleValue(field, sOrders);
}
}
public static interface TypeLoader<T extends FieldData> {
void init(FieldDataOptions options);
void collectTerm(String term);
void collectFreq(int freq);
T buildSingleValue(String fieldName, int[] order);
T buildMultiValue(String fieldName, int[][] order);
}
public static abstract class FreqsTypeLoader<T extends FieldData> implements TypeLoader<T> {
protected FieldDataOptions options;
private TIntArrayList freqs;
protected FreqsTypeLoader() {
}
@Override public void init(FieldDataOptions options) {
this.options = options;
if (options.hasFreqs()) {
freqs = new TIntArrayList();
freqs.add(0);
}
}
@Override public void collectFreq(int freq) {
freqs.add(freq);
}
protected int[] buildFreqs() {
if (freqs == null) {
return null;
}
return freqs.toNativeArray();
}
}
}
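To make the ordinal layout concrete, a small worked example; the field, terms and doc ids below are illustrative only:
// Field "tag" with collected terms [<reserved no-value>, "a", "b"]   (term numbers t = 1, 2)
// doc 0 contains "a"          -> orders[0] = {1}
// doc 1 contains "a" and "b"  -> orders[1] = {1, 2}   (marks the field as multi valued)
// doc 2 contains neither      -> orders[2] = null
// Because doc 1 carries two ordinals, buildMultiValue(field, orders) is used; otherwise the
// per-doc arrays collapse into a single int[] where 0 means "no value" (see buildSingleValue above).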

View File

@ -25,6 +25,7 @@ import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.concurrent.Immutable;
import org.elasticsearch.util.concurrent.ThreadSafe;
@ -189,4 +190,6 @@ public interface FieldMapper<T> {
Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper);
int sortType();
FieldData.Type fieldDataType();
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.BoostFieldMapper;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.util.Numbers;
@ -180,6 +181,10 @@ public class XContentBoostFieldMapper extends XContentNumberFieldMapper<Float> i
return SortField.FLOAT;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.FLOAT;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.util.Numbers;
import org.elasticsearch.util.Strings;
@ -217,6 +218,10 @@ public class XContentDateFieldMapper extends XContentNumberFieldMapper<Long> {
return SortField.LONG;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.LONG;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.util.Numbers;
import org.elasticsearch.util.xcontent.XContentParser;
@ -195,6 +196,10 @@ public class XContentDoubleFieldMapper extends XContentNumberFieldMapper<Double>
return SortField.DOUBLE;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.DOUBLE;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -357,6 +358,10 @@ public abstract class XContentFieldMapper<T> implements FieldMapper<T>, XContent
return SortField.STRING;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.STRING;
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
doXContentBody(builder);

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.util.Numbers;
import org.elasticsearch.util.Strings;
@ -194,6 +195,10 @@ public class XContentFloatFieldMapper extends XContentNumberFieldMapper<Float> {
return SortField.FLOAT;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.FLOAT;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.util.Numbers;
import org.elasticsearch.util.Strings;
@ -194,6 +195,10 @@ public class XContentIntegerFieldMapper extends XContentNumberFieldMapper<Intege
return SortField.INT;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.INT;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.util.Numbers;
import org.elasticsearch.util.Strings;
@ -194,6 +195,10 @@ public class XContentLongFieldMapper extends XContentNumberFieldMapper<Long> {
return SortField.LONG;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.LONG;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -27,6 +27,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.ThreadLocals;
import org.elasticsearch.util.gnu.trove.TIntObjectHashMap;
import org.elasticsearch.util.xcontent.builder.XContentBuilder;
@ -171,6 +172,8 @@ public abstract class XContentNumberFieldMapper<T extends Number> extends XConte
@Override public abstract int sortType();
@Override public abstract FieldData.Type fieldDataType();
/**
* Removes a cached numeric token stream. The stream will be returned to the cache once it is used
* since it implements the end method.

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.util.Numbers;
import org.elasticsearch.util.Strings;
@ -194,6 +195,10 @@ public class XContentShortFieldMapper extends XContentNumberFieldMapper<Short> {
return SortField.SHORT;
}
@Override public FieldData.Type fieldDataType() {
return FieldData.Type.SHORT;
}
@Override protected String contentType() {
return CONTENT_TYPE;
}

View File

@ -36,17 +36,8 @@ import static org.elasticsearch.util.collect.Lists.*;
*/
public class SearchSourceFacetsBuilder implements ToXContent {
private String queryExecution;
private List<FacetQuery> queryFacets;
/**
* Controls the type of query facet execution.
*/
public SearchSourceFacetsBuilder queryExecution(String queryExecution) {
this.queryExecution = queryExecution;
return this;
}
private List<QueryFacet> queryFacets;
private List<FieldFacet> fieldFacets;
/**
* Adds a query facet (which results in a count facet returned).
@ -54,12 +45,8 @@ public class SearchSourceFacetsBuilder implements ToXContent {
* @param name The logical name of the facet, it will be returned under the name
* @param query The query facet
*/
public SearchSourceFacetsBuilder facet(String name, XContentQueryBuilder query) {
if (queryFacets == null) {
queryFacets = newArrayListWithCapacity(2);
}
queryFacets.add(new FacetQuery(name, query, null));
return this;
public SearchSourceFacetsBuilder queryFacet(String name, XContentQueryBuilder query) {
return queryFacet(name, query, null);
}
/**
@ -68,47 +55,103 @@ public class SearchSourceFacetsBuilder implements ToXContent {
*
* @param name The logical name of the facet, it will be returned under the name
* @param query The query facet
* @param global Should the facet be executed globally or not
*/
public SearchSourceFacetsBuilder facet(String name, XContentQueryBuilder query, boolean global) {
public SearchSourceFacetsBuilder queryFacet(String name, XContentQueryBuilder query, Boolean global) {
if (queryFacets == null) {
queryFacets = newArrayListWithCapacity(2);
}
queryFacets.add(new FacetQuery(name, query, global));
queryFacets.add(new QueryFacet(name, query, global));
return this;
}
public SearchSourceFacetsBuilder fieldFacet(String name, String fieldName, int size) {
return fieldFacet(name, fieldName, size, null);
}
public SearchSourceFacetsBuilder fieldFacet(String name, String fieldName, int size, Boolean global) {
if (fieldFacets == null) {
fieldFacets = newArrayListWithCapacity(2);
}
fieldFacets.add(new FieldFacet(name, fieldName, size, global));
return this;
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
if (queryExecution == null && queryFacets == null) {
if (queryFacets == null && fieldFacets == null) {
return;
}
builder.field("facets");
builder.startObject();
if (queryExecution != null) {
builder.field("query_execution", queryExecution);
}
if (queryFacets != null) {
for (FacetQuery facetQuery : queryFacets) {
builder.startObject(facetQuery.name());
for (QueryFacet queryFacet : queryFacets) {
builder.startObject(queryFacet.name());
builder.field("query");
facetQuery.queryBuilder().toXContent(builder, params);
if (facetQuery.global() != null) {
builder.field("global", facetQuery.global());
queryFacet.queryBuilder().toXContent(builder, params);
if (queryFacet.global() != null) {
builder.field("global", queryFacet.global());
}
builder.endObject();
}
}
if (fieldFacets != null) {
for (FieldFacet fieldFacet : fieldFacets) {
builder.startObject(fieldFacet.name());
builder.startObject("field");
builder.field("name", fieldFacet.fieldName());
builder.field("size", fieldFacet.size());
builder.endObject();
if (fieldFacet.global() != null) {
builder.field("global", fieldFacet.global());
}
builder.endObject();
}
}
builder.endObject();
}
private static class FacetQuery {
private static class FieldFacet {
private final String name;
private final String fieldName;
private final int size;
private final Boolean global;
private FieldFacet(String name, String fieldName, int size, Boolean global) {
this.name = name;
this.fieldName = fieldName;
this.size = size;
this.global = global;
}
public String name() {
return name;
}
public String fieldName() {
return fieldName;
}
public int size() {
return size;
}
public Boolean global() {
return global;
}
}
private static class QueryFacet {
private final String name;
private final XContentQueryBuilder queryBuilder;
private final Boolean global;
private FacetQuery(String name, XContentQueryBuilder queryBuilder, Boolean global) {
private QueryFacet(String name, XContentQueryBuilder queryBuilder, Boolean global) {
this.name = name;
this.queryBuilder = queryBuilder;
this.global = global;

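A hedged usage sketch of the reworked builder; the termQuery(...) factory and the index field/tag names are assumptions for illustration, not part of this diff:
SearchSourceFacetsBuilder facets = new SearchSourceFacetsBuilder()
.queryFacet("wow_count", termQuery("tag", "wow")) // count facet bounded by the main query
.queryFacet("all_wow_count", termQuery("tag", "wow"), true) // same facet, executed globally
.fieldFacet("top_tags", "tag", 10); // top 10 terms from the "tag" field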
View File

@ -25,13 +25,12 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ShardFieldDocSortedHitQueue;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facets.CountFacet;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.Facets;
import org.elasticsearch.search.facets.internal.InternalFacet;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResultProvider;
import org.elasticsearch.search.internal.InternalSearchHit;
@ -154,24 +153,15 @@ public class SearchPhaseController {
// we rely on the fact that the order of facets is the same on all query results
QuerySearchResult queryResult = queryResults.values().iterator().next().queryResult();
// we assume the facets are in the same order!
if (queryResult.facets() != null && queryResult.facets().facets() != null && !queryResult.facets().facets().isEmpty()) {
List<Facet> mergedFacets = Lists.newArrayListWithCapacity(2);
for (Facet facet : queryResult.facets().facets()) {
if (facet.type() == Facet.Type.COUNT) {
mergedFacets.add(new CountFacet(facet.name(), 0));
} else {
throw new ElasticSearchIllegalStateException("Can't handle type [" + facet.type() + "]");
}
}
List<Facet> allFacets = Lists.newArrayList();
for (QuerySearchResultProvider queryResultProvider : queryResults.values()) {
List<Facet> queryFacets = queryResultProvider.queryResult().facets().facets();
for (int i = 0; i < mergedFacets.size(); i++) {
Facet queryFacet = queryFacets.get(i);
Facet mergedFacet = mergedFacets.get(i);
if (queryFacet.type() == Facet.Type.COUNT) {
((CountFacet) mergedFacet).increment(((CountFacet) queryFacet).count());
}
allFacets.addAll(queryResultProvider.queryResult().facets().facets());
}
List<Facet> mergedFacets = Lists.newArrayList();
for (Facet facet : queryResult.facets().facets()) {
mergedFacets.add(((InternalFacet) facet).aggregate(allFacets));
}
facets = new Facets(mergedFacets);
}

View File

@ -19,86 +19,20 @@
package org.elasticsearch.search.facets;
import org.elasticsearch.util.io.stream.StreamInput;
import org.elasticsearch.util.io.stream.StreamOutput;
import org.elasticsearch.util.xcontent.builder.XContentBuilder;
import java.io.IOException;
/**
* A count facet is a facet that holds a count.
*
* @author kimchy (shay.banon)
*/
public class CountFacet implements Facet {
private String name;
private long count;
private CountFacet() {
}
public CountFacet(String name, long count) {
this.name = name;
this.count = count;
}
@Override public Type type() {
return Type.COUNT;
}
@Override public Type getType() {
return type();
}
/**
* The "logical" name of the facet.
*/
public String name() {
return name;
}
@Override public String getName() {
return name();
}
public interface CountFacet extends Facet {
/**
* The count of the facet.
*/
public long count() {
return count;
}
long count();
/**
* The count of the facet.
*/
public long getCount() {
return count;
}
public void increment(long increment) {
count += increment;
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(name, count);
}
public static CountFacet readCountFacet(StreamInput in) throws IOException {
CountFacet result = new CountFacet();
result.readFrom(in);
return result;
}
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
count = in.readVLong();
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeVLong(count);
}
long getCount();
}

View File

@ -20,21 +20,20 @@
package org.elasticsearch.search.facets;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.util.io.stream.Streamable;
import org.elasticsearch.util.xcontent.ToXContent;
/**
* A search facet.
*
* @author kimchy (shay.banon)
*/
public interface Facet extends Streamable, ToXContent {
public interface Facet {
enum Type {
/**
* Count type facet.
*/
COUNT((byte) 0);
COUNT((byte) 0),
MULTI_COUNT((byte) 1);
byte id;
@ -49,6 +48,8 @@ public interface Facet extends Streamable, ToXContent {
public static Type fromId(byte id) {
if (id == 0) {
return COUNT;
} else if (id == 1) {
return MULTI_COUNT;
} else {
throw new ElasticSearchIllegalArgumentException("No match for id [" + id + "]");
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.facets;
import org.elasticsearch.search.facets.internal.InternalFacet;
import org.elasticsearch.util.collect.ImmutableList;
import org.elasticsearch.util.io.stream.StreamInput;
import org.elasticsearch.util.io.stream.StreamOutput;
@ -31,7 +32,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.search.facets.CountFacet.*;
import static org.elasticsearch.search.facets.internal.InternalCountFacet.*;
import static org.elasticsearch.search.facets.internal.InternalMultiCountFacet.*;
import static org.elasticsearch.util.collect.Lists.*;
import static org.elasticsearch.util.collect.Maps.*;
@ -112,7 +114,7 @@ public class Facets implements Streamable, ToXContent, Iterable<Facet> {
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("facets");
for (Facet facet : facets) {
facet.toXContent(builder, params);
((InternalFacet) facet).toXContent(builder, params);
}
builder.endObject();
}
@ -133,6 +135,8 @@ public class Facets implements Streamable, ToXContent, Iterable<Facet> {
byte id = in.readByte();
if (id == Facet.Type.COUNT.id()) {
facets.add(readCountFacet(in));
} else if (id == Facet.Type.MULTI_COUNT.id()) {
facets.add(readMultiCountFacet(in));
} else {
throw new IOException("Can't handle facet type with id [" + id + "]");
}
@ -144,7 +148,7 @@ public class Facets implements Streamable, ToXContent, Iterable<Facet> {
out.writeVInt(facets.size());
for (Facet facet : facets) {
out.writeByte(facet.type().id());
facet.writeTo(out);
((InternalFacet) facet).writeTo(out);
}
}
}

View File

@ -19,23 +19,35 @@
package org.elasticsearch.search.facets;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.xcontent.XContentIndexQueryParser;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.facets.collector.FacetCollectorParser;
import org.elasticsearch.search.facets.collector.field.FieldFacetCollectorParser;
import org.elasticsearch.search.facets.collector.query.QueryFacetCollectorParser;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.util.MapBuilder;
import org.elasticsearch.util.collect.ImmutableMap;
import org.elasticsearch.util.collect.Lists;
import org.elasticsearch.util.xcontent.XContentParser;
import java.util.List;
import static org.elasticsearch.util.MapBuilder.*;
/**
* <pre>
* facets : {
* query_execution : "collect|idset",
* facet1: {
* query : { ... },
* global : false
* },
* facet2: {
* field : {
* name : "myfield",
* size : 12
* },
* global : false
* }
* }
* </pre>
@ -44,60 +56,56 @@ import java.util.List;
*/
public class FacetsParseElement implements SearchParseElement {
private final ImmutableMap<String, FacetCollectorParser> facetCollectorParsers;
public FacetsParseElement() {
MapBuilder<String, FacetCollectorParser> builder = newMapBuilder();
builder.put("field", new FieldFacetCollectorParser());
builder.put("query", new QueryFacetCollectorParser());
this.facetCollectorParsers = builder.immutableMap();
}
@Override public void parse(XContentParser parser, SearchContext context) throws Exception {
XContentParser.Token token;
SearchContextFacets.QueryExecutionType queryExecutionType = SearchContextFacets.QueryExecutionType.COLLECT;
List<SearchContextFacets.QueryFacet> queryFacets = null;
List<FacetCollector> facetCollectors = null;
String topLevelFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
topLevelFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("query_execution".equals(topLevelFieldName) || "queryExecution".equals(topLevelFieldName)) {
String text = parser.text();
if ("collect".equals(text)) {
queryExecutionType = SearchContextFacets.QueryExecutionType.COLLECT;
} else if ("idset".equals(text)) {
queryExecutionType = SearchContextFacets.QueryExecutionType.IDSET;
} else {
throw new SearchParseException(context, "Unsupported query type [" + text + "]");
}
}
} else if (token == XContentParser.Token.START_OBJECT) {
SearchContextFacets.Facet facet = null;
FacetCollector facet = null;
boolean global = false;
String facetFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
facetFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(facetFieldName)) {
XContentIndexQueryParser indexQueryParser = (XContentIndexQueryParser) context.queryParser();
Query facetQuery = indexQueryParser.parse(parser);
facet = new SearchContextFacets.QueryFacet(topLevelFieldName, facetQuery);
if (queryFacets == null) {
queryFacets = Lists.newArrayListWithCapacity(2);
}
queryFacets.add((SearchContextFacets.QueryFacet) facet);
FacetCollectorParser facetCollectorParser = facetCollectorParsers.get(facetFieldName);
if (facetCollectorParser == null) {
throw new SearchParseException(context, "No facet type for [" + facetFieldName + "]");
}
facet = facetCollectorParser.parser(topLevelFieldName, parser, context);
} else if (token.isValue()) {
if ("global".equals(facetFieldName)) {
global = parser.booleanValue();
}
}
}
if (facet == null) {
throw new SearchParseException(context, "No facet type found for [" + topLevelFieldName + "]");
if (facetCollectors == null) {
facetCollectors = Lists.newArrayList();
}
facetCollectors.add(facet);
if (global) {
context.searcher().addGlobalCollector(facet);
} else {
context.searcher().addCollector(facet);
}
facet.global(global);
}
}
if (queryExecutionType == SearchContextFacets.QueryExecutionType.IDSET) {
// if we are using doc id sets, we need to enable the fact that we accumulate it
context.searcher().enabledDocIdSet();
}
context.facets(new SearchContextFacets(queryExecutionType, queryFacets));
context.facets(new SearchContextFacets(facetCollectors));
}
}

View File

@ -20,15 +20,18 @@
package org.elasticsearch.search.facets;
import org.apache.lucene.search.*;
import org.apache.lucene.util.OpenBitSet;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.query.QueryPhaseExecutionException;
import org.elasticsearch.util.collect.ImmutableMap;
import org.elasticsearch.util.collect.Lists;
import org.elasticsearch.util.lucene.Lucene;
import org.elasticsearch.util.lucene.search.NoopCollector;
import org.elasticsearch.util.lucene.search.Queries;
import org.elasticsearch.util.lucene.search.TermFilter;
import java.io.IOException;
import java.util.List;
@ -51,59 +54,50 @@ public class FacetsPhase implements SearchPhase {
return;
}
if (context.queryResult().facets() != null) {
// no need to compute the facets twice, they should be computed on a per conext basis
// no need to compute the facets twice, they should be computed on a per context basis
return;
}
// run global facets ...
if (context.searcher().globalCollectors() != null) {
Query query = new ConstantScoreQuery(context.filterCache().cache(Queries.MATCH_ALL_FILTER));
if (context.types().length > 0) {
if (context.types().length == 1) {
String type = context.types()[0];
DocumentMapper docMapper = context.mapperService().documentMapper(type);
Filter typeFilter = new TermFilter(docMapper.typeMapper().term(docMapper.type()));
typeFilter = context.filterCache().cache(typeFilter);
query = new FilteredQuery(query, typeFilter);
} else {
BooleanFilter booleanFilter = new BooleanFilter();
for (String type : context.types()) {
DocumentMapper docMapper = context.mapperService().documentMapper(type);
Filter typeFilter = new TermFilter(docMapper.typeMapper().term(docMapper.type()));
typeFilter = context.filterCache().cache(typeFilter);
booleanFilter.add(new FilterClause(typeFilter, BooleanClause.Occur.SHOULD));
}
query = new FilteredQuery(query, booleanFilter);
}
}
context.searcher().useGlobalCollectors(true);
try {
context.searcher().search(query, NoopCollector.NOOP_COLLECTOR);
} catch (IOException e) {
throw new QueryPhaseExecutionException(context, "Failed to execute global facets", e);
} finally {
context.searcher().useGlobalCollectors(false);
}
}
SearchContextFacets contextFacets = context.facets();
List<Facet> facets = Lists.newArrayListWithCapacity(2);
if (contextFacets.queryFacets() != null) {
for (SearchContextFacets.QueryFacet queryFacet : contextFacets.queryFacets()) {
if (queryFacet.global()) {
try {
Query globalQuery = new ConstantScoreQuery(context.filterCache().cache(new QueryWrapperFilter(queryFacet.query())));
long count = Lucene.count(context.searcher(), globalQuery, -1.0f);
facets.add(new CountFacet(queryFacet.name(), count));
} catch (Exception e) {
throw new FacetPhaseExecutionException(queryFacet.name(), "Failed to execute global facet [" + queryFacet.query() + "]", e);
}
} else {
Filter facetFilter = new QueryWrapperFilter(queryFacet.query());
facetFilter = context.filterCache().cache(facetFilter);
long count;
// if we already have the doc id set, then use idset since its faster
if (context.searcher().docIdSet() != null || contextFacets.queryType() == SearchContextFacets.QueryExecutionType.IDSET) {
count = executeQueryIdSetCount(context, queryFacet, facetFilter);
} else if (contextFacets.queryType() == SearchContextFacets.QueryExecutionType.COLLECT) {
count = executeQueryCollectorCount(context, queryFacet, facetFilter);
} else {
throw new ElasticSearchIllegalStateException("No matching for type [" + contextFacets.queryType() + "]");
}
facets.add(new CountFacet(queryFacet.name(), count));
if (contextFacets.facetCollectors() != null) {
for (FacetCollector facetCollector : contextFacets.facetCollectors()) {
facets.add(facetCollector.facet());
}
}
}
context.queryResult().facets(new Facets(facets));
}
private long executeQueryIdSetCount(SearchContext context, SearchContextFacets.QueryFacet queryFacet, Filter facetFilter) {
try {
DocIdSet filterDocIdSet = facetFilter.getDocIdSet(context.searcher().getIndexReader());
return OpenBitSet.intersectionCount(context.searcher().docIdSet(), (OpenBitSet) filterDocIdSet);
} catch (IOException e) {
throw new FacetPhaseExecutionException(queryFacet.name(), "Failed to bitset facets for query [" + queryFacet.query() + "]", e);
}
}
private long executeQueryCollectorCount(SearchContext context, SearchContextFacets.QueryFacet queryFacet, Filter facetFilter) {
Lucene.CountCollector countCollector = new Lucene.CountCollector(-1.0f);
try {
context.searcher().search(context.query(), facetFilter, countCollector);
} catch (IOException e) {
throw new FacetPhaseExecutionException(queryFacet.name(), "Failed to collect facets for query [" + queryFacet.query() + "]", e);
}
return countCollector.count();
}
}

View File

@ -0,0 +1,177 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import java.util.Comparator;
import java.util.List;
/**
* @author kimchy (Shay Banon)
*/
public interface MultiCountFacet<T extends Comparable> extends Facet, Iterable<MultiCountFacet.Entry<T>> {
public static enum ComparatorType {
COUNT((byte) 0, new Comparator<Entry>() {
@Override public int compare(Entry o1, Entry o2) {
int i = o2.count() - o1.count();
if (i == 0) {
i = o2.value().compareTo(o1.value());
if (i == 0) {
i = System.identityHashCode(o2) - System.identityHashCode(o1);
}
}
return i;
}
}),
VALUE((byte) 1, new Comparator<Entry>() {
@Override public int compare(Entry o1, Entry o2) {
int i = o2.value().compareTo(o1.value());
if (i == 0) {
i = o2.count() - o1.count();
if (i == 0) {
i = System.identityHashCode(o2) - System.identityHashCode(o1);
}
}
return i;
}
});
private final byte id;
private final Comparator<Entry> comparator;
ComparatorType(byte id, Comparator<Entry> comparator) {
this.id = id;
this.comparator = comparator;
}
public byte id() {
return this.id;
}
public Comparator<Entry> comparator() {
return comparator;
}
public static ComparatorType fromId(byte id) {
if (id == 0) {
return COUNT;
} else if (id == 1) {
return VALUE;
}
throw new ElasticSearchIllegalArgumentException("No type argument match for multi count comparator [" + id + "]");
}
}
public static enum ValueType {
STRING((byte) 0),
SHORT((byte) 1),
INT((byte) 2),
LONG((byte) 3),
FLOAT((byte) 4),
DOUBLE((byte) 5);
private final byte id;
ValueType(byte id) {
this.id = id;
}
public byte id() {
return id;
}
public static ValueType fromId(byte id) {
if (id == 0) {
return STRING;
} else if (id == 1) {
return SHORT;
} else if (id == 2) {
return INT;
} else if (id == 3) {
return LONG;
} else if (id == 4) {
return FLOAT;
} else if (id == 5) {
return DOUBLE;
}
throw new ElasticSearchIllegalArgumentException("No type argument match for multi count facet [" + id + "]");
}
}
public class Entry<T extends Comparable> {
private T value;
private int count;
public Entry(T value, int count) {
this.value = value;
this.count = count;
}
public T value() {
return value;
}
public T getValue() {
return value;
}
public String valueAsString() {
return value.toString();
}
public String getValueAsString() {
return valueAsString();
}
public Number valueAsNumber() {
if (value instanceof Number) {
return (Number) value;
}
return null;
}
public Number getValueAsNumber() {
return valueAsNumber();
}
public int count() {
return count;
}
public int getCount() {
return count();
}
}
ValueType valueType();
ValueType getValueType();
List<Entry<T>> entries();
List<Entry<T>> getEntries();
}
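Since the interface extends Iterable over its entries, consuming a field facet result can be as simple as the sketch below; the facet instance is assumed to have been obtained from a search response:
void printTopTerms(MultiCountFacet<String> facet) {
for (MultiCountFacet.Entry<String> entry : facet) {
System.out.println(entry.value() + " -> " + entry.count());
}
}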

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.facets;
import org.apache.lucene.search.Query;
import org.elasticsearch.search.facets.collector.FacetCollector;
import java.util.List;
@ -28,58 +28,13 @@ import java.util.List;
*/
public class SearchContextFacets {
public static enum QueryExecutionType {
COLLECT,
IDSET
private final List<FacetCollector> facetCollectors;
public SearchContextFacets(List<FacetCollector> facetCollectors) {
this.facetCollectors = facetCollectors;
}
private final QueryExecutionType queryExecutionType;
private final List<QueryFacet> queryFacets;
public SearchContextFacets(QueryExecutionType queryExecutionType, List<QueryFacet> queryFacets) {
this.queryExecutionType = queryExecutionType;
this.queryFacets = queryFacets;
}
public QueryExecutionType queryType() {
return this.queryExecutionType;
}
public List<QueryFacet> queryFacets() {
return queryFacets;
}
public static abstract class Facet {
private boolean global;
protected Facet() {
}
public boolean global() {
return global;
}
public void global(boolean global) {
this.global = global;
}
}
public static class QueryFacet extends Facet {
private final String name;
private final Query query;
public QueryFacet(String name, Query query) {
this.name = name;
this.query = query;
}
public String name() {
return name;
}
public Query query() {
return query;
}
public List<FacetCollector> facetCollectors() {
return facetCollectors;
}
}

View File

@ -0,0 +1,31 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.collector;
import org.apache.lucene.search.Collector;
import org.elasticsearch.search.facets.Facet;
/**
* @author kimchy (Shay Banon)
*/
public abstract class FacetCollector extends Collector {
public abstract Facet facet();
}

View File

@ -0,0 +1,35 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.collector;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.util.xcontent.XContentParser;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public interface FacetCollectorParser {
String name();
FacetCollector parser(String facetName, XContentParser parser, SearchContext context) throws IOException;
}

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.collector.field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.index.cache.field.FieldDataCache;
import org.elasticsearch.index.field.strings.StringFieldData;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.MultiCountFacet;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.facets.internal.InternalMultiCountFacet;
import org.elasticsearch.util.BoundedTreeSet;
import org.elasticsearch.util.ThreadLocals;
import org.elasticsearch.util.collect.ImmutableList;
import org.elasticsearch.util.gnu.trove.TObjectIntHashMap;
import org.elasticsearch.util.gnu.trove.TObjectIntIterator;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import static org.elasticsearch.index.field.FieldDataOptions.*;
/**
* @author kimchy (Shay Banon)
*/
public class FieldFacetCollector extends FacetCollector {
private static ThreadLocal<ThreadLocals.CleanableValue<Deque<TObjectIntHashMap<String>>>> cache = new ThreadLocal<ThreadLocals.CleanableValue<Deque<TObjectIntHashMap<String>>>>() {
@Override protected ThreadLocals.CleanableValue<Deque<TObjectIntHashMap<String>>> initialValue() {
return new ThreadLocals.CleanableValue<Deque<TObjectIntHashMap<String>>>(new ArrayDeque<TObjectIntHashMap<String>>());
}
};
private final FieldDataCache fieldDataCache;
private final String name;
private final String fieldName;
private final int size;
private StringFieldData fieldData;
private final TObjectIntHashMap<String> facets;
public FieldFacetCollector(String name, String fieldName, FieldDataCache fieldDataCache, int size) {
this.name = name;
this.fieldDataCache = fieldDataCache;
this.fieldName = fieldName;
this.size = size;
facets = popFacets();
}
@Override public void setScorer(Scorer scorer) throws IOException {
// nothing to do here
}
@Override public boolean acceptsDocsOutOfOrder() {
return true;
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
fieldData = fieldDataCache.cache(StringFieldData.class, reader, fieldName, fieldDataOptions().withFreqs(false));
}
@Override public void collect(int doc) throws IOException {
if (fieldData.multiValued()) {
for (String value : fieldData.values(doc)) {
facets.adjustOrPutValue(value, 1, 1);
}
} else {
if (fieldData.hasValue(doc)) {
facets.adjustOrPutValue(fieldData.value(doc), 1, 1);
}
}
}
@Override public Facet facet() {
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalMultiCountFacet<String>(name, MultiCountFacet.ValueType.STRING, MultiCountFacet.ComparatorType.COUNT, size, ImmutableList.<MultiCountFacet.Entry<String>>of());
} else {
BoundedTreeSet<MultiCountFacet.Entry<String>> ordered = new BoundedTreeSet<MultiCountFacet.Entry<String>>(MultiCountFacet.ComparatorType.COUNT.comparator(), size);
for (TObjectIntIterator<String> it = facets.iterator(); it.hasNext();) {
it.advance();
ordered.add(new MultiCountFacet.Entry<String>(it.key(), it.value()));
}
pushFacets(facets);
return new InternalMultiCountFacet<String>(name, MultiCountFacet.ValueType.STRING, MultiCountFacet.ComparatorType.COUNT, size, ordered);
}
}
private TObjectIntHashMap<String> popFacets() {
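// borrow a recycled count map from the per-thread pool (creating one if the pool is empty) and reset it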
Deque<TObjectIntHashMap<String>> deque = cache.get().get();
if (deque.isEmpty()) {
deque.add(new TObjectIntHashMap<String>());
}
TObjectIntHashMap<String> facets = deque.pollFirst();
facets.clear();
return facets;
}
private void pushFacets(TObjectIntHashMap<String> facets) {
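// clear the map and return it to the per-thread pool so later collectors on this thread can reuse it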
facets.clear();
Deque<TObjectIntHashMap<String>> deque = cache.get().get();
if (deque != null) {
deque.add(facets);
}
}
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.collector.field;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.facets.collector.FacetCollectorParser;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.util.xcontent.XContentParser;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class FieldFacetCollectorParser implements FacetCollectorParser {
@Override public String name() {
return "field";
}
@Override public FacetCollector parser(String facetName, XContentParser parser, SearchContext context) throws IOException {
String field = null;
int size = 10;
String termFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
termFieldName = parser.currentName();
} else if (token.isValue()) {
if ("name".equals(termFieldName)) {
field = parser.text();
} else if ("size".equals(termFieldName)) {
size = parser.intValue();
}
}
}
return new FieldFacetCollector(facetName, field, context.fieldDataCache(), size);
}
}

View File

@@ -0,0 +1,77 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.collector.query;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.*;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.facets.internal.InternalCountFacet;
import org.elasticsearch.util.lucene.docset.DocSet;
import org.elasticsearch.util.lucene.docset.DocSets;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class QueryFacetCollector extends FacetCollector {
private final Filter filter;
private final String name;
private DocSet docSet;
private int count = 0;
public QueryFacetCollector(String name, Query query, FilterCache filterCache) {
this.name = name;
this.filter = filterCache.cache(new QueryWrapperFilter(query));
}
@Override public void setScorer(Scorer scorer) throws IOException {
// ignore
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
DocIdSet docIdSet = filter.getDocIdSet(reader);
if (docIdSet instanceof DocSet) {
docSet = (DocSet) docIdSet;
} else {
docSet = DocSets.cacheable(reader, docIdSet);
}
}
@Override public void collect(int doc) throws IOException {
if (docSet.get(doc)) {
count++;
}
}
@Override public boolean acceptsDocsOutOfOrder() {
return true;
}
@Override public Facet facet() {
return new InternalCountFacet(name, count);
}
}

View File

@@ -0,0 +1,43 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.collector.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.xcontent.XContentIndexQueryParser;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.facets.collector.FacetCollectorParser;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.util.xcontent.XContentParser;
/**
* @author kimchy (Shay Banon)
*/
public class QueryFacetCollectorParser implements FacetCollectorParser {
@Override public String name() {
return "query";
}
@Override public FacetCollector parser(String facetName, XContentParser parser, SearchContext context) {
XContentIndexQueryParser indexQueryParser = (XContentIndexQueryParser) context.queryParser();
Query facetQuery = indexQueryParser.parse(parser);
return new QueryFacetCollector(facetName, facetQuery, context.filterCache());
}
}

View File

@@ -0,0 +1,115 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.internal;
import org.elasticsearch.search.facets.CountFacet;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.util.io.stream.StreamInput;
import org.elasticsearch.util.io.stream.StreamOutput;
import org.elasticsearch.util.xcontent.builder.XContentBuilder;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class InternalCountFacet implements CountFacet, InternalFacet {
private String name;
private long count;
private InternalCountFacet() {
}
public InternalCountFacet(String name, long count) {
this.name = name;
this.count = count;
}
@Override public Type type() {
return Type.COUNT;
}
@Override public Type getType() {
return type();
}
/**
* The "logical" name of the facet.
*/
public String name() {
return name;
}
@Override public String getName() {
return name();
}
/**
* The count of the facet.
*/
public long count() {
return count;
}
/**
* The count of the facet.
*/
public long getCount() {
return count;
}
public void increment(long increment) {
count += increment;
}
@Override public Facet aggregate(Iterable<Facet> facets) {
long count = 0;
for (Facet facet : facets) {
if (facet.name().equals(name)) {
count += ((InternalCountFacet) facet).count();
}
}
return new InternalCountFacet(name, count);
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(name, count);
}
public static CountFacet readCountFacet(StreamInput in) throws IOException {
InternalCountFacet result = new InternalCountFacet();
result.readFrom(in);
return result;
}
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
count = in.readVLong();
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeVLong(count);
}
}

View File

@@ -0,0 +1,36 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.internal;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.util.io.stream.Streamable;
import org.elasticsearch.util.xcontent.ToXContent;
/**
* @author kimchy (Shay Banon)
*/
public interface InternalFacet extends Facet, Streamable, ToXContent {
/**
* Aggregates the data of the provided facets and returns the aggregated value. Note, this method
* should handle cases where facets with different names are provided, and should exclude them.
*/
Facet aggregate(Iterable<Facet> facets);
}
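A short usage sketch of the aggregation contract above (values and variable names are illustrative, not from the commit; assumes java.util.Arrays is imported): facets sharing a name are summed, differently named facets are excluded.

InternalCountFacet shard1 = new InternalCountFacet("matched_wow", 10);
InternalCountFacet shard2 = new InternalCountFacet("matched_wow", 32);
InternalCountFacet other = new InternalCountFacet("matched_other", 7);
// only the facets named "matched_wow" contribute; the differently named facet is skipped
Facet merged = shard1.aggregate(Arrays.<Facet>asList(shard1, shard2, other));
// merged is an InternalCountFacet named "matched_wow" with a count of 42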

View File

@@ -0,0 +1,201 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facets.internal;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.MultiCountFacet;
import org.elasticsearch.util.BoundedTreeSet;
import org.elasticsearch.util.ThreadLocals;
import org.elasticsearch.util.collect.ImmutableList;
import org.elasticsearch.util.collect.Lists;
import org.elasticsearch.util.gnu.trove.TObjectIntHashMap;
import org.elasticsearch.util.gnu.trove.TObjectIntIterator;
import org.elasticsearch.util.io.stream.StreamInput;
import org.elasticsearch.util.io.stream.StreamOutput;
import org.elasticsearch.util.xcontent.builder.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
* @author kimchy (Shay Banon)
*/
public class InternalMultiCountFacet<T extends Comparable> implements InternalFacet, MultiCountFacet<T> {
private String name;
private int requiredSize;
private Collection<Entry<T>> entries = ImmutableList.of();
private ValueType valueType;
private ComparatorType comparatorType;
private InternalMultiCountFacet() {
}
public InternalMultiCountFacet(String name, ValueType valueType, ComparatorType comparatorType, int requiredSize, Collection<Entry<T>> entries) {
this.name = name;
this.valueType = valueType;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
}
@Override public String name() {
return this.name;
}
@Override public String getName() {
return this.name;
}
@Override public Type type() {
return Type.MULTI_COUNT;
}
@Override public Type getType() {
return type();
}
public ValueType valueType() {
return valueType;
}
public ValueType getValueType() {
return valueType();
}
@Override public List<Entry<T>> entries() {
return Lists.newArrayList(this);
}
@Override public List<Entry<T>> getEntries() {
return Lists.newArrayList(this);
}
@Override public Iterator<Entry<T>> iterator() {
return entries.iterator();
}
private static ThreadLocal<ThreadLocals.CleanableValue<TObjectIntHashMap<Object>>> aggregateCache = new ThreadLocal<ThreadLocals.CleanableValue<TObjectIntHashMap<Object>>>() {
@Override protected ThreadLocals.CleanableValue<TObjectIntHashMap<Object>> initialValue() {
return new ThreadLocals.CleanableValue<TObjectIntHashMap<Object>>(new TObjectIntHashMap<Object>());
}
};
@Override public Facet aggregate(Iterable<Facet> facets) {
TObjectIntHashMap<Object> aggregated = aggregateCache.get().get();
aggregated.clear();
for (Facet facet : facets) {
if (!facet.name().equals(name)) {
continue;
}
MultiCountFacet<T> mFacet = (MultiCountFacet<T>) facet;
for (Entry<T> entry : mFacet) {
aggregated.adjustOrPutValue(entry.value(), entry.count(), entry.count());
}
}
BoundedTreeSet<Entry<T>> ordered = new BoundedTreeSet<Entry<T>>(comparatorType.comparator(), requiredSize);
for (TObjectIntIterator<Object> it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new Entry<T>((T) it.key(), it.value()));
}
return new InternalMultiCountFacet<T>(name, valueType, comparatorType, requiredSize, ordered);
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray(name());
for (Entry<T> entry : entries) {
builder.startObject();
builder.field("value", entry.value());
builder.field("count", entry.count());
builder.endObject();
}
builder.endArray();
}
public static InternalMultiCountFacet readMultiCountFacet(StreamInput in) throws IOException {
InternalMultiCountFacet facet = new InternalMultiCountFacet();
facet.readFrom(in);
return facet;
}
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
valueType = ValueType.fromId(in.readByte());
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
int size = in.readVInt();
entries = new ArrayList<Entry<T>>(size);
for (int i = 0; i < size; i++) {
Object value = null;
if (valueType == ValueType.STRING) {
value = in.readUTF();
} else if (valueType == ValueType.SHORT) {
value = in.readShort();
} else if (valueType == ValueType.INT) {
value = in.readInt();
} else if (valueType == ValueType.LONG) {
value = in.readLong();
} else if (valueType == ValueType.FLOAT) {
value = in.readFloat();
} else if (valueType == ValueType.DOUBLE) {
value = in.readDouble();
}
entries.add(new Entry<T>((T) value, in.readVInt()));
}
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeByte(valueType.id());
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVInt(entries.size());
for (Entry<T> entry : entries) {
if (valueType == ValueType.STRING) {
out.writeUTF((String) entry.value());
} else if (valueType == ValueType.SHORT) {
out.writeShort((Short) entry.value());
} else if (valueType == ValueType.INT) {
out.writeInt((Integer) entry.value());
} else if (valueType == ValueType.LONG) {
out.writeLong((Long) entry.value());
} else if (valueType == ValueType.FLOAT) {
out.writeFloat((Float) entry.value());
} else if (valueType == ValueType.DOUBLE) {
out.writeDouble((Double) entry.value());
}
out.writeVInt(entry.count());
}
}
}
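A hedged sketch of how per-shard multi count facets merge (illustrative values; assumes the commit's repackaged ImmutableList and java.util.Arrays imports): counts of equal values are summed across shards and the result is bounded to the requested size.

InternalMultiCountFacet<String> shard1 = new InternalMultiCountFacet<String>(
        "tags", MultiCountFacet.ValueType.STRING, MultiCountFacet.ComparatorType.COUNT, 2,
        ImmutableList.<MultiCountFacet.Entry<String>>of(
                new MultiCountFacet.Entry<String>("wow", 5),
                new MultiCountFacet.Entry<String>("search", 3)));
InternalMultiCountFacet<String> shard2 = new InternalMultiCountFacet<String>(
        "tags", MultiCountFacet.ValueType.STRING, MultiCountFacet.ComparatorType.COUNT, 2,
        ImmutableList.<MultiCountFacet.Entry<String>>of(
                new MultiCountFacet.Entry<String>("search", 4)));
// per-value counts are summed (search=7, wow=5) and kept in a BoundedTreeSet of size 2
Facet merged = shard1.aggregate(Arrays.<Facet>asList(shard1, shard2));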

View File

@@ -21,11 +21,12 @@ package org.elasticsearch.search.internal;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.*;
import org.apache.lucene.util.OpenBitSet;
import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.util.lucene.docidset.DocIdSetCollector;
import org.elasticsearch.util.collect.Lists;
import org.elasticsearch.util.lucene.MultiCollector;
import java.io.IOException;
import java.util.List;
/**
* @author kimchy (shay.banon)
@@ -36,9 +37,11 @@ public class ContextIndexSearcher extends IndexSearcher {
private CachedDfSource dfSource;
private boolean docIdSetEnabled;
private List<Collector> collectors;
private OpenBitSet docIdSet;
private List<Collector> globalCollectors;
private boolean useGlobalCollectors = false;
public ContextIndexSearcher(SearchContext searchContext, IndexReader r) {
super(r);
@@ -49,12 +52,30 @@ public class ContextIndexSearcher extends IndexSearcher {
this.dfSource = dfSource;
}
public void enabledDocIdSet() {
docIdSetEnabled = true;
public void addCollector(Collector collector) {
if (collectors == null) {
collectors = Lists.newArrayList();
}
collectors.add(collector);
}
public OpenBitSet docIdSet() {
return docIdSet;
public List<Collector> collectors() {
return collectors;
}
public void addGlobalCollector(Collector collector) {
if (globalCollectors == null) {
globalCollectors = Lists.newArrayList();
}
globalCollectors.add(collector);
}
public List<Collector> globalCollectors() {
return globalCollectors;
}
public void useGlobalCollectors(boolean useGlobalCollectors) {
this.useGlobalCollectors = useGlobalCollectors;
}
@Override public Query rewrite(Query original) throws IOException {
@@ -82,10 +103,16 @@ public class ContextIndexSearcher extends IndexSearcher {
if (searchContext.timeout() != null) {
collector = new TimeLimitingCollector(collector, searchContext.timeout().millis());
}
// we only compute the doc id set once since within a context, we execute the same query always...
if (docIdSetEnabled && docIdSet == null) {
collector = new DocIdSetCollector(collector, getIndexReader());
if (useGlobalCollectors) {
if (globalCollectors != null) {
collector = new MultiCollector(collector, globalCollectors.toArray(new Collector[globalCollectors.size()]));
}
} else {
if (collectors != null) {
collector = new MultiCollector(collector, collectors.toArray(new Collector[collectors.size()]));
}
}
// we only compute the doc id set once since within a context, we execute the same query always...
if (searchContext.timeout() != null) {
searchContext.queryResult().searchTimedOut(false);
try {
@@ -96,8 +123,5 @@ public class ContextIndexSearcher extends IndexSearcher {
} else {
super.search(weight, filter, collector);
}
if (docIdSetEnabled && docIdSet == null) {
this.docIdSet = ((DocIdSetCollector) collector).docIdSet();
}
}
}
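A hedged sketch of how the new collector hooks might be wired (the helper method and the facet choices are illustrative, not part of this commit; assumes the Lucene TermQuery/Term imports): query-bounded facet collectors ride along with the main search, while global ones are fed by a separate match-all pass that runs with useGlobalCollectors enabled.

static void registerFacetCollectors(ContextIndexSearcher searcher,
                                    FieldDataCache fieldDataCache, FilterCache filterCache) {
    // bounded facet: counted only for documents matching the main query
    searcher.addCollector(new FieldFacetCollector("tags", "tag", fieldDataCache, 10));
    // global facet: counted over the whole index during the global collector pass
    searcher.addGlobalCollector(new QueryFacetCollector(
            "wow_docs", new TermQuery(new Term("tag", "wow")), filterCache));
}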

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.index.cache.field.FieldDataCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService;
@@ -217,6 +218,10 @@ public class SearchContext implements Releasable {
return indexService.cache().filter();
}
public FieldDataCache fieldDataCache() {
return indexService.cache().fieldData();
}
public TimeValue timeout() {
return timeout;
}

View File

@@ -100,7 +100,7 @@ public class QueryPhase implements SearchPhase {
}
searchContext.queryResult().topDocs(topDocs);
} catch (Exception e) {
throw new QueryPhaseExecutionException(searchContext, "", e);
throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
}
facetsPhase.execute(searchContext);

View File

@@ -24,6 +24,10 @@ package org.elasticsearch.util;
*/
public class Tuple<V1, V2> {
public static <V1, V2> Tuple<V1, V2> tuple(V1 v1, V2 v2) {
return new Tuple<V1, V2>(v1, v2);
}
private final V1 v1;
private final V2 v2;

View File

@@ -0,0 +1,74 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.lucene;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class MultiCollector extends Collector {
private final Collector collector;
private final Collector[] collectors;
public MultiCollector(Collector collector, Collector[] collectors) {
this.collector = collector;
this.collectors = collectors;
}
@Override public void setScorer(Scorer scorer) throws IOException {
collector.setScorer(scorer);
for (Collector collector : collectors) {
collector.setScorer(scorer);
}
}
@Override public void collect(int doc) throws IOException {
collector.collect(doc);
for (Collector collector : collectors) {
collector.collect(doc);
}
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
collector.setNextReader(reader, docBase);
for (Collector collector : collectors) {
collector.setNextReader(reader, docBase);
}
}
@Override public boolean acceptsDocsOutOfOrder() {
if (!collector.acceptsDocsOutOfOrder()) {
return false;
}
for (Collector collector : collectors) {
if (!collector.acceptsDocsOutOfOrder()) {
return false;
}
}
return true;
}
}
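A minimal usage sketch (illustrative; assumes a plain Lucene IndexSearcher and the standard Lucene imports): the primary collector keeps the top hits while every wrapped collector sees the same collected documents.

static TopDocs searchWithFacet(IndexSearcher searcher, Query query, Collector facetCollector)
        throws IOException {
    TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, true);
    // each collected doc id is fanned out to the top-docs collector and the facet collector
    searcher.search(query, new MultiCollector(topDocs, new Collector[]{facetCollector}));
    return topDocs.topDocs();
}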

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.util.lucene.docidset;
package org.elasticsearch.util.lucene.docset;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;

View File

@@ -0,0 +1,47 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.lucene.docset;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public abstract class DocSet extends DocIdSet {
public static DocSet EMPTY_DOC_SET = new DocSet() {
@Override public boolean get(int doc) throws IOException {
return false;
}
@Override public DocIdSetIterator iterator() throws IOException {
return DocIdSet.EMPTY_DOCIDSET.iterator();
}
@Override public boolean isCacheable() {
return true;
}
};
public abstract boolean get(int doc) throws IOException;
}

View File

@@ -17,38 +17,40 @@
* under the License.
*/
package org.elasticsearch.util.lucene.docidset;
package org.elasticsearch.util.lucene.docset;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.OpenBitSetDISI;
import org.apache.lucene.util.OpenBitSet;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class DocIdSets {
public class DocSets {
/**
* Returns a cacheable version of the doc id set (might be the same instance provided as a parameter).
*/
public static DocIdSet cacheable(IndexReader reader, DocIdSet docIdSet) throws IOException {
public static DocSet cacheable(IndexReader reader, DocIdSet docIdSet) throws IOException {
if (docIdSet == null) {
return DocIdSet.EMPTY_DOCIDSET;
} else if (docIdSet.isCacheable()) {
return docIdSet;
return DocSet.EMPTY_DOC_SET;
} else if (docIdSet.isCacheable() && (docIdSet instanceof DocSet)) {
return (DocSet) docIdSet;
} else if (docIdSet instanceof OpenBitSet) {
return new OpenBitDocSet((OpenBitSet) docIdSet);
} else {
final DocIdSetIterator it = docIdSet.iterator();
// null is allowed to be returned by iterator(),
// in this case we wrap with the empty set,
// which is cacheable.
return (it == null) ? DocIdSet.EMPTY_DOCIDSET : new OpenBitSetDISI(it, reader.maxDoc());
return (it == null) ? DocSet.EMPTY_DOC_SET : new OpenBitDocSet(it, reader.maxDoc());
}
}
private DocIdSets() {
private DocSets() {
}
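A short sketch of the conversion this helper performs, mirroring what QueryFacetCollector does per segment (the helper method and variable names are illustrative, not from the commit): a filter's DocIdSet becomes a random-access DocSet so membership can be tested for each collected document.

static DocSet docSetFor(Filter filter, IndexReader reader) throws IOException {
    DocIdSet docIdSet = filter.getDocIdSet(reader);
    if (docIdSet instanceof DocSet) {
        return (DocSet) docIdSet;               // already random-access, use as is
    }
    return DocSets.cacheable(reader, docIdSet); // wrap in an OpenBitSet backed DocSet
}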

View File

@@ -0,0 +1,54 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.lucene.docset;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.OpenBitSetDISI;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class OpenBitDocSet extends DocSet {
private final OpenBitSet set;
public OpenBitDocSet(OpenBitSet set) {
this.set = set;
}
public OpenBitDocSet(int numBits) {
this.set = new OpenBitSetDISI(numBits);
}
public OpenBitDocSet(DocIdSetIterator disi, int numBits) throws IOException {
this.set = new OpenBitSetDISI(disi, numBits);
}
@Override public boolean get(int doc) throws IOException {
return set.get(doc);
}
@Override public DocIdSetIterator iterator() throws IOException {
return set.iterator();
}
}

View File

@@ -0,0 +1,47 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.lucene.search;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
import java.io.IOException;
/**
* @author kimchy (Shay Banon)
*/
public class NoopCollector extends Collector {
public static final NoopCollector NOOP_COLLECTOR = new NoopCollector();
@Override public void setScorer(Scorer scorer) throws IOException {
}
@Override public void collect(int doc) throws IOException {
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
}
@Override public boolean acceptsDocsOutOfOrder() {
return true;
}
}
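A small sketch of where the no-op primary collector is handy (illustrative helper, assumes standard Lucene imports): a facet-only pass over the whole index in which the hits themselves are discarded.

static void collectGlobalFacets(IndexSearcher searcher, Collector globalFacetCollector)
        throws IOException {
    // match-all pass purely to feed the facet collector; NOOP_COLLECTOR drops the hits
    searcher.search(new MatchAllDocsQuery(),
            new MultiCollector(NoopCollector.NOOP_COLLECTOR, new Collector[]{globalFacetCollector}));
}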

View File

@@ -29,6 +29,10 @@ import java.util.List;
*/
public class Queries {
public final static MatchAllDocsQuery MATCH_ALL_QUERY = new MatchAllDocsQuery();
public final static QueryWrapperFilter MATCH_ALL_FILTER = new QueryWrapperFilter(MATCH_ALL_QUERY);
private final static Field disjuncts;
static {

View File

@@ -19,6 +19,7 @@
package org.elasticsearch.util.xcontent;
import org.elasticsearch.util.Booleans;
import org.elasticsearch.util.xcontent.builder.XContentBuilder;
import java.io.IOException;
@@ -35,6 +36,10 @@ public interface ToXContent {
String param(String key);
String param(String key, String defaultValue);
boolean paramAsBoolean(String key, boolean defaultValue);
Boolean paramAsBoolean(String key, Boolean defaultValue);
}
public static final Params EMPTY_PARAMS = new Params() {
@@ -45,6 +50,14 @@ public interface ToXContent {
@Override public String param(String key, String defaultValue) {
return defaultValue;
}
@Override public boolean paramAsBoolean(String key, boolean defaultValue) {
return defaultValue;
}
@Override public Boolean paramAsBoolean(String key, Boolean defaultValue) {
return defaultValue;
}
};
public static class MapParams implements Params {
@@ -66,6 +79,18 @@ public interface ToXContent {
}
return value;
}
@Override public boolean paramAsBoolean(String key, boolean defaultValue) {
return Booleans.parseBoolean(param(key), defaultValue);
}
@Override public Boolean paramAsBoolean(String key, Boolean defaultValue) {
String sValue = param(key);
if (sValue == null) {
return defaultValue;
}
return !(sValue.equals("false") || sValue.equals("0") || sValue.equals("off"));
}
}
void toXContent(XContentBuilder builder, Params params) throws IOException;

View File

@@ -0,0 +1,171 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.doubles;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.Tuple;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import java.util.ArrayList;
import static org.elasticsearch.index.field.FieldDataOptions.*;
import static org.elasticsearch.util.Tuple.*;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class DoubleFieldDataTests {
@Test public void doubleFieldDataTests() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setDoubleValue(4))
.add(new NumericField("mvalue").setDoubleValue(104))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setDoubleValue(3))
.add(new NumericField("mvalue").setDoubleValue(104))
.add(new NumericField("mvalue").setDoubleValue(105))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setDoubleValue(7))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("mvalue").setDoubleValue(102))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setDoubleValue(4))
.build());
IndexReader reader = indexWriter.getReader();
DoubleFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(false));
DoubleFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(false));
DoubleFieldData sFieldData = DoubleFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(true));
DoubleFieldData mFieldData = DoubleFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(true));
assertThat(sFieldData.fieldName(), equalTo("svalue"));
assertThat(sFieldData.type(), equalTo(FieldData.Type.DOUBLE));
assertThat(sFieldData.multiValued(), equalTo(false));
assertThat(mFieldData.fieldName(), equalTo("mvalue"));
assertThat(mFieldData.type(), equalTo(FieldData.Type.DOUBLE));
assertThat(mFieldData.multiValued(), equalTo(true));
// svalue
assertThat(sFieldData.hasValue(0), equalTo(true));
assertThat(sFieldData.value(0), equalTo(4d));
assertThat(sFieldData.values(0).length, equalTo(1));
assertThat(sFieldData.values(0)[0], equalTo(4d));
assertThat(sFieldData.hasValue(1), equalTo(true));
assertThat(sFieldData.value(1), equalTo(3d));
assertThat(sFieldData.values(1).length, equalTo(1));
assertThat(sFieldData.values(1)[0], equalTo(3d));
assertThat(sFieldData.hasValue(2), equalTo(true));
assertThat(sFieldData.value(2), equalTo(7d));
assertThat(sFieldData.values(2).length, equalTo(1));
assertThat(sFieldData.values(2)[0], equalTo(7d));
assertThat(sFieldData.hasValue(3), equalTo(false));
assertThat(sFieldData.hasValue(4), equalTo(true));
assertThat(sFieldData.value(4), equalTo(4d));
assertThat(sFieldData.values(4).length, equalTo(1));
assertThat(sFieldData.values(4)[0], equalTo(4d));
// check order is correct
final ArrayList<Tuple<Double, Integer>> values = new ArrayList<Tuple<Double, Integer>>();
sFieldData.forEachValue(new DoubleFieldData.ValueProc() {
@Override public void onValue(double value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(3d));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(4d));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(7d));
assertThat(values.get(2).v2(), equalTo(1));
// mvalue
assertThat(mFieldData.hasValue(0), equalTo(true));
assertThat(mFieldData.value(0), equalTo(104d));
assertThat(mFieldData.values(0).length, equalTo(1));
assertThat(mFieldData.values(0)[0], equalTo(104d));
assertThat(mFieldData.hasValue(1), equalTo(true));
assertThat(mFieldData.value(1), equalTo(104d));
assertThat(mFieldData.values(1).length, equalTo(2));
assertThat(mFieldData.values(1)[0], equalTo(104d));
assertThat(mFieldData.values(1)[1], equalTo(105d));
assertThat(mFieldData.hasValue(2), equalTo(false));
assertThat(mFieldData.hasValue(3), equalTo(true));
assertThat(mFieldData.value(3), equalTo(102d));
assertThat(mFieldData.values(3).length, equalTo(1));
assertThat(mFieldData.values(3)[0], equalTo(102d));
assertThat(mFieldData.hasValue(4), equalTo(false));
indexWriter.close();
// check order is correct
values.clear();
mFieldData.forEachValue(new DoubleFieldData.ValueProc() {
@Override public void onValue(double value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(102d));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(104d));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(105d));
assertThat(values.get(2).v2(), equalTo(1));
}
}

View File

@@ -0,0 +1,171 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.floats;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.Tuple;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import java.util.ArrayList;
import static org.elasticsearch.index.field.FieldDataOptions.*;
import static org.elasticsearch.util.Tuple.*;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class FloatFieldDataTests {
@Test public void floatFieldDataTests() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setFloatValue(4))
.add(new NumericField("mvalue").setFloatValue(104))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setFloatValue(3))
.add(new NumericField("mvalue").setFloatValue(104))
.add(new NumericField("mvalue").setFloatValue(105))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setFloatValue(7))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("mvalue").setFloatValue(102))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setFloatValue(4))
.build());
IndexReader reader = indexWriter.getReader();
FloatFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(false));
FloatFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(false));
FloatFieldData sFieldData = FloatFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(true));
FloatFieldData mFieldData = FloatFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(true));
assertThat(sFieldData.fieldName(), equalTo("svalue"));
assertThat(sFieldData.type(), equalTo(FieldData.Type.FLOAT));
assertThat(sFieldData.multiValued(), equalTo(false));
assertThat(mFieldData.fieldName(), equalTo("mvalue"));
assertThat(mFieldData.type(), equalTo(FieldData.Type.FLOAT));
assertThat(mFieldData.multiValued(), equalTo(true));
// svalue
assertThat(sFieldData.hasValue(0), equalTo(true));
assertThat(sFieldData.value(0), equalTo(4f));
assertThat(sFieldData.values(0).length, equalTo(1));
assertThat(sFieldData.values(0)[0], equalTo(4f));
assertThat(sFieldData.hasValue(1), equalTo(true));
assertThat(sFieldData.value(1), equalTo(3f));
assertThat(sFieldData.values(1).length, equalTo(1));
assertThat(sFieldData.values(1)[0], equalTo(3f));
assertThat(sFieldData.hasValue(2), equalTo(true));
assertThat(sFieldData.value(2), equalTo(7f));
assertThat(sFieldData.values(2).length, equalTo(1));
assertThat(sFieldData.values(2)[0], equalTo(7f));
assertThat(sFieldData.hasValue(3), equalTo(false));
assertThat(sFieldData.hasValue(4), equalTo(true));
assertThat(sFieldData.value(4), equalTo(4f));
assertThat(sFieldData.values(4).length, equalTo(1));
assertThat(sFieldData.values(4)[0], equalTo(4f));
// check order is correct
final ArrayList<Tuple<Float, Integer>> values = new ArrayList<Tuple<Float, Integer>>();
sFieldData.forEachValue(new FloatFieldData.ValueProc() {
@Override public void onValue(float value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(3f));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(4f));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(7f));
assertThat(values.get(2).v2(), equalTo(1));
// mvalue
assertThat(mFieldData.hasValue(0), equalTo(true));
assertThat(mFieldData.value(0), equalTo(104f));
assertThat(mFieldData.values(0).length, equalTo(1));
assertThat(mFieldData.values(0)[0], equalTo(104f));
assertThat(mFieldData.hasValue(1), equalTo(true));
assertThat(mFieldData.value(1), equalTo(104f));
assertThat(mFieldData.values(1).length, equalTo(2));
assertThat(mFieldData.values(1)[0], equalTo(104f));
assertThat(mFieldData.values(1)[1], equalTo(105f));
assertThat(mFieldData.hasValue(2), equalTo(false));
assertThat(mFieldData.hasValue(3), equalTo(true));
assertThat(mFieldData.value(3), equalTo(102f));
assertThat(mFieldData.values(3).length, equalTo(1));
assertThat(mFieldData.values(3)[0], equalTo(102f));
assertThat(mFieldData.hasValue(4), equalTo(false));
indexWriter.close();
// check order is correct
values.clear();
mFieldData.forEachValue(new FloatFieldData.ValueProc() {
@Override public void onValue(float value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(102f));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(104f));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(105f));
assertThat(values.get(2).v2(), equalTo(1));
}
}

View File

@@ -0,0 +1,171 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.ints;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.Tuple;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import java.util.ArrayList;
import static org.elasticsearch.index.field.FieldDataOptions.*;
import static org.elasticsearch.util.Tuple.*;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class IntFieldDataTests {
@Test public void intFieldDataTests() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(4))
.add(new NumericField("mvalue").setIntValue(104))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(3))
.add(new NumericField("mvalue").setIntValue(104))
.add(new NumericField("mvalue").setIntValue(105))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(7))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("mvalue").setIntValue(102))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(4))
.build());
IndexReader reader = indexWriter.getReader();
IntFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(false));
IntFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(false));
IntFieldData sFieldData = IntFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(true));
IntFieldData mFieldData = IntFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(true));
assertThat(sFieldData.fieldName(), equalTo("svalue"));
assertThat(sFieldData.type(), equalTo(FieldData.Type.INT));
assertThat(sFieldData.multiValued(), equalTo(false));
assertThat(mFieldData.fieldName(), equalTo("mvalue"));
assertThat(mFieldData.type(), equalTo(FieldData.Type.INT));
assertThat(mFieldData.multiValued(), equalTo(true));
// svalue
assertThat(sFieldData.hasValue(0), equalTo(true));
assertThat(sFieldData.value(0), equalTo(4));
assertThat(sFieldData.values(0).length, equalTo(1));
assertThat(sFieldData.values(0)[0], equalTo(4));
assertThat(sFieldData.hasValue(1), equalTo(true));
assertThat(sFieldData.value(1), equalTo(3));
assertThat(sFieldData.values(1).length, equalTo(1));
assertThat(sFieldData.values(1)[0], equalTo(3));
assertThat(sFieldData.hasValue(2), equalTo(true));
assertThat(sFieldData.value(2), equalTo(7));
assertThat(sFieldData.values(2).length, equalTo(1));
assertThat(sFieldData.values(2)[0], equalTo(7));
assertThat(sFieldData.hasValue(3), equalTo(false));
assertThat(sFieldData.hasValue(4), equalTo(true));
assertThat(sFieldData.value(4), equalTo(4));
assertThat(sFieldData.values(4).length, equalTo(1));
assertThat(sFieldData.values(4)[0], equalTo(4));
// check order is correct
final ArrayList<Tuple<Integer, Integer>> values = new ArrayList<Tuple<Integer, Integer>>();
sFieldData.forEachValue(new IntFieldData.ValueProc() {
@Override public void onValue(int value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(3));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(4));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(7));
assertThat(values.get(2).v2(), equalTo(1));
// mvalue
assertThat(mFieldData.hasValue(0), equalTo(true));
assertThat(mFieldData.value(0), equalTo(104));
assertThat(mFieldData.values(0).length, equalTo(1));
assertThat(mFieldData.values(0)[0], equalTo(104));
assertThat(mFieldData.hasValue(1), equalTo(true));
assertThat(mFieldData.value(1), equalTo(104));
assertThat(mFieldData.values(1).length, equalTo(2));
assertThat(mFieldData.values(1)[0], equalTo(104));
assertThat(mFieldData.values(1)[1], equalTo(105));
assertThat(mFieldData.hasValue(2), equalTo(false));
assertThat(mFieldData.hasValue(3), equalTo(true));
assertThat(mFieldData.value(3), equalTo(102));
assertThat(mFieldData.values(3).length, equalTo(1));
assertThat(mFieldData.values(3)[0], equalTo(102));
assertThat(mFieldData.hasValue(4), equalTo(false));
indexWriter.close();
// check order is correct
values.clear();
mFieldData.forEachValue(new IntFieldData.ValueProc() {
@Override public void onValue(int value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(102));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(104));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(105));
assertThat(values.get(2).v2(), equalTo(1));
}
}

View File

@@ -0,0 +1,173 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.longs;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.Tuple;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import java.util.ArrayList;
import static org.elasticsearch.index.field.FieldDataOptions.*;
import static org.elasticsearch.util.Tuple.*;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class LongFieldDataTests {
@Test public void longFieldDataTests() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setLongValue(4))
.add(new NumericField("mvalue").setLongValue(104))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setLongValue(3))
.add(new NumericField("mvalue").setLongValue(104))
.add(new NumericField("mvalue").setLongValue(105))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setLongValue(7))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("mvalue").setLongValue(102))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setLongValue(4))
.build());
IndexReader reader = indexWriter.getReader();
// load it once with no freqs
LongFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(false));
LongFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(false));
LongFieldData sFieldData = LongFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(true));
LongFieldData mFieldData = LongFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(true));
assertThat(sFieldData.fieldName(), equalTo("svalue"));
assertThat(sFieldData.type(), equalTo(FieldData.Type.LONG));
assertThat(sFieldData.multiValued(), equalTo(false));
assertThat(mFieldData.fieldName(), equalTo("mvalue"));
assertThat(mFieldData.type(), equalTo(FieldData.Type.LONG));
assertThat(mFieldData.multiValued(), equalTo(true));
// svalue
assertThat(sFieldData.hasValue(0), equalTo(true));
assertThat(sFieldData.value(0), equalTo(4l));
assertThat(sFieldData.values(0).length, equalTo(1));
assertThat(sFieldData.values(0)[0], equalTo(4l));
assertThat(sFieldData.hasValue(1), equalTo(true));
assertThat(sFieldData.value(1), equalTo(3l));
assertThat(sFieldData.values(1).length, equalTo(1));
assertThat(sFieldData.values(1)[0], equalTo(3l));
assertThat(sFieldData.hasValue(2), equalTo(true));
assertThat(sFieldData.value(2), equalTo(7l));
assertThat(sFieldData.values(2).length, equalTo(1));
assertThat(sFieldData.values(2)[0], equalTo(7l));
assertThat(sFieldData.hasValue(3), equalTo(false));
assertThat(sFieldData.hasValue(4), equalTo(true));
assertThat(sFieldData.value(4), equalTo(4l));
assertThat(sFieldData.values(4).length, equalTo(1));
assertThat(sFieldData.values(4)[0], equalTo(4l));
// check order is correct
final ArrayList<Tuple<Long, Integer>> values = new ArrayList<Tuple<Long, Integer>>();
sFieldData.forEachValue(new LongFieldData.ValueProc() {
@Override public void onValue(long value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(3l));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(4l));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(7l));
assertThat(values.get(2).v2(), equalTo(1));
// mvalue
assertThat(mFieldData.hasValue(0), equalTo(true));
assertThat(mFieldData.value(0), equalTo(104l));
assertThat(mFieldData.values(0).length, equalTo(1));
assertThat(mFieldData.values(0)[0], equalTo(104l));
assertThat(mFieldData.hasValue(1), equalTo(true));
assertThat(mFieldData.value(1), equalTo(104l));
assertThat(mFieldData.values(1).length, equalTo(2));
assertThat(mFieldData.values(1)[0], equalTo(104l));
assertThat(mFieldData.values(1)[1], equalTo(105l));
assertThat(mFieldData.hasValue(2), equalTo(false));
assertThat(mFieldData.hasValue(3), equalTo(true));
assertThat(mFieldData.value(3), equalTo(102l));
assertThat(mFieldData.values(3).length, equalTo(1));
assertThat(mFieldData.values(3)[0], equalTo(102l));
assertThat(mFieldData.hasValue(4), equalTo(false));
indexWriter.close();
// check order is correct
values.clear();
mFieldData.forEachValue(new LongFieldData.ValueProc() {
@Override public void onValue(long value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo(102l));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo(104l));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo(105l));
assertThat(values.get(2).v2(), equalTo(1));
}
}

View File

@@ -0,0 +1,171 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.shorts;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.Tuple;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import java.util.ArrayList;
import static org.elasticsearch.index.field.FieldDataOptions.*;
import static org.elasticsearch.util.Tuple.*;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class ShortFieldDataTests {
@Test public void shortFieldDataTests() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(4))
.add(new NumericField("mvalue").setIntValue(104))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(3))
.add(new NumericField("mvalue").setIntValue(104))
.add(new NumericField("mvalue").setIntValue(105))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(7))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("mvalue").setIntValue(102))
.build());
indexWriter.addDocument(doc()
.add(new NumericField("svalue").setIntValue(4))
.build());
IndexReader reader = indexWriter.getReader();
ShortFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(false));
ShortFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(false));
ShortFieldData sFieldData = ShortFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(true));
ShortFieldData mFieldData = ShortFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(true));
assertThat(sFieldData.fieldName(), equalTo("svalue"));
assertThat(sFieldData.type(), equalTo(FieldData.Type.SHORT));
assertThat(sFieldData.multiValued(), equalTo(false));
assertThat(mFieldData.fieldName(), equalTo("mvalue"));
assertThat(mFieldData.type(), equalTo(FieldData.Type.SHORT));
assertThat(mFieldData.multiValued(), equalTo(true));
// svalue
assertThat(sFieldData.hasValue(0), equalTo(true));
assertThat(sFieldData.value(0), equalTo((short) 4));
assertThat(sFieldData.values(0).length, equalTo(1));
assertThat(sFieldData.values(0)[0], equalTo((short) 4));
assertThat(sFieldData.hasValue(1), equalTo(true));
assertThat(sFieldData.value(1), equalTo((short) 3));
assertThat(sFieldData.values(1).length, equalTo(1));
assertThat(sFieldData.values(1)[0], equalTo((short) 3));
assertThat(sFieldData.hasValue(2), equalTo(true));
assertThat(sFieldData.value(2), equalTo((short) 7));
assertThat(sFieldData.values(2).length, equalTo(1));
assertThat(sFieldData.values(2)[0], equalTo((short) 7));
assertThat(sFieldData.hasValue(3), equalTo(false));
assertThat(sFieldData.hasValue(4), equalTo(true));
assertThat(sFieldData.value(4), equalTo((short) 4));
assertThat(sFieldData.values(4).length, equalTo(1));
assertThat(sFieldData.values(4)[0], equalTo((short) 4));
// check order is correct
final ArrayList<Tuple<Short, Integer>> values = new ArrayList<Tuple<Short, Integer>>();
sFieldData.forEachValue(new ShortFieldData.ValueProc() {
@Override public void onValue(short value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo((short) 3));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo((short) 4));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo((short) 7));
assertThat(values.get(2).v2(), equalTo(1));
// mvalue
assertThat(mFieldData.hasValue(0), equalTo(true));
assertThat(mFieldData.value(0), equalTo((short) 104));
assertThat(mFieldData.values(0).length, equalTo(1));
assertThat(mFieldData.values(0)[0], equalTo((short) 104));
assertThat(mFieldData.hasValue(1), equalTo(true));
assertThat(mFieldData.value(1), equalTo((short) 104));
assertThat(mFieldData.values(1).length, equalTo(2));
assertThat(mFieldData.values(1)[0], equalTo((short) 104));
assertThat(mFieldData.values(1)[1], equalTo((short) 105));
assertThat(mFieldData.hasValue(2), equalTo(false));
assertThat(mFieldData.hasValue(3), equalTo(true));
assertThat(mFieldData.value(3), equalTo((short) 102));
assertThat(mFieldData.values(3).length, equalTo(1));
assertThat(mFieldData.values(3)[0], equalTo((short) 102));
assertThat(mFieldData.hasValue(4), equalTo(false));
// check order is correct
values.clear();
mFieldData.forEachValue(new ShortFieldData.ValueProc() {
@Override public void onValue(short value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo((short) 102));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo((short) 104));
assertThat(values.get(1).v2(), equalTo(2));
assertThat(values.get(2).v1(), equalTo((short) 105));
assertThat(values.get(2).v2(), equalTo(1));
indexWriter.close();
}
}

View File

@ -0,0 +1,166 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.strings;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.field.FieldData;
import org.elasticsearch.util.Tuple;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import java.util.ArrayList;
import static org.elasticsearch.index.field.FieldDataOptions.*;
import static org.elasticsearch.util.Tuple.*;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class StringFieldDataTests {
@Test public void stringFieldDataTests() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc()
.add(field("svalue", "zzz"))
.add(field("mvalue", "111")).build());
indexWriter.addDocument(doc()
.add(field("svalue", "xxx"))
.add(field("mvalue", "222 333")).build());
indexWriter.addDocument(doc()
.add(field("mvalue", "333 444")).build());
indexWriter.addDocument(doc()
.add(field("svalue", "aaa")).build());
indexWriter.addDocument(doc()
.add(field("svalue", "aaa")).build());
IndexReader reader = indexWriter.getReader();
StringFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(false));
StringFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(false));
StringFieldData sFieldData = StringFieldData.load(reader, "svalue", fieldDataOptions().withFreqs(true));
StringFieldData mFieldData = StringFieldData.load(reader, "mvalue", fieldDataOptions().withFreqs(true));
assertThat(sFieldData.fieldName(), equalTo("svalue"));
assertThat(sFieldData.type(), equalTo(FieldData.Type.STRING));
assertThat(sFieldData.multiValued(), equalTo(false));
assertThat(mFieldData.fieldName(), equalTo("mvalue"));
assertThat(mFieldData.type(), equalTo(FieldData.Type.STRING));
assertThat(mFieldData.multiValued(), equalTo(true));
// svalue
assertThat(sFieldData.hasValue(0), equalTo(true));
assertThat(sFieldData.value(0), equalTo("zzz"));
assertThat(sFieldData.values(0).length, equalTo(1));
assertThat(sFieldData.values(0)[0], equalTo("zzz"));
assertThat(sFieldData.hasValue(1), equalTo(true));
assertThat(sFieldData.value(1), equalTo("xxx"));
assertThat(sFieldData.values(1).length, equalTo(1));
assertThat(sFieldData.values(1)[0], equalTo("xxx"));
assertThat(sFieldData.hasValue(2), equalTo(false));
assertThat(sFieldData.hasValue(3), equalTo(true));
assertThat(sFieldData.value(3), equalTo("aaa"));
assertThat(sFieldData.values(3).length, equalTo(1));
assertThat(sFieldData.values(3)[0], equalTo("aaa"));
assertThat(sFieldData.hasValue(4), equalTo(true));
assertThat(sFieldData.value(4), equalTo("aaa"));
assertThat(sFieldData.values(4).length, equalTo(1));
assertThat(sFieldData.values(4)[0], equalTo("aaa"));
// check order is correct
final ArrayList<Tuple<String, Integer>> values = new ArrayList<Tuple<String, Integer>>();
sFieldData.forEachValue(new StringFieldData.ValueProc() {
@Override public void onValue(String value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(3));
assertThat(values.get(0).v1(), equalTo("aaa"));
assertThat(values.get(0).v2(), equalTo(2));
assertThat(values.get(1).v1(), equalTo("xxx"));
assertThat(values.get(1).v2(), equalTo(1));
assertThat(values.get(2).v1(), equalTo("zzz"));
assertThat(values.get(2).v2(), equalTo(1));
// mvalue
assertThat(mFieldData.hasValue(0), equalTo(true));
assertThat(mFieldData.value(0), equalTo("111"));
assertThat(mFieldData.values(0).length, equalTo(1));
assertThat(mFieldData.values(0)[0], equalTo("111"));
assertThat(mFieldData.hasValue(1), equalTo(true));
assertThat(mFieldData.value(1), equalTo("222"));
assertThat(mFieldData.values(1).length, equalTo(2));
assertThat(mFieldData.values(1)[0], equalTo("222"));
assertThat(mFieldData.values(1)[1], equalTo("333"));
assertThat(mFieldData.hasValue(2), equalTo(true));
assertThat(mFieldData.value(2), equalTo("333"));
assertThat(mFieldData.values(2).length, equalTo(2));
assertThat(mFieldData.values(2)[0], equalTo("333"));
assertThat(mFieldData.values(2)[1], equalTo("444"));
assertThat(mFieldData.hasValue(3), equalTo(false));
assertThat(mFieldData.hasValue(4), equalTo(false));
values.clear();
mFieldData.forEachValue(new StringFieldData.ValueProc() {
@Override public void onValue(String value, int freq) {
values.add(tuple(value, freq));
}
});
assertThat(values.size(), equalTo(4));
assertThat(values.get(0).v1(), equalTo("111"));
assertThat(values.get(0).v2(), equalTo(1));
assertThat(values.get(1).v1(), equalTo("222"));
assertThat(values.get(1).v2(), equalTo(1));
assertThat(values.get(2).v1(), equalTo("333"));
assertThat(values.get(2).v2(), equalTo(2));
assertThat(values.get(3).v1(), equalTo("444"));
assertThat(values.get(3).v2(), equalTo(1));
indexWriter.close();
}
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.query.xcontent.guice;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNameModule;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.cache.filter.FilterCacheModule;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.engine.IndexEngineModule;
import org.elasticsearch.index.query.IndexQueryParserModule;
import org.elasticsearch.index.query.IndexQueryParserService;
@ -56,7 +56,7 @@ public class IndexQueryParserModuleTests {
Index index = new Index("test");
Injector injector = Guice.createInjector(
new IndexSettingsModule(settings),
new FilterCacheModule(settings),
new IndexCacheModule(settings),
new AnalysisModule(settings),
new IndexEngineModule(settings),
new SimilarityModule(settings),

View File

@ -145,28 +145,10 @@ public class SingleInstanceEmbeddedSearchTests extends AbstractNodesTests {
assertThat(fetchResult.hits().hits()[0].type(), equalTo("type1"));
}
@Test public void testSimpleQueryFacetsNoExecutionType() throws Exception {
@Test public void testSimpleQueryFacets() throws Exception {
QuerySearchResult queryResult = searchService.executeQueryPhase(searchRequest(
searchSource().query(wildcardQuery("name", "te*"))
.facets(facets().facet("age2", termQuery("age", 2)).facet("age1", termQuery("age", 1)))
));
assertThat(queryResult.facets().countFacet("age2").count(), equalTo(4l));
assertThat(queryResult.facets().countFacet("age1").count(), equalTo(1l));
}
@Test public void testSimpleQueryFacetsQueryExecutionCollect() throws Exception {
QuerySearchResult queryResult = searchService.executeQueryPhase(searchRequest(
searchSource().query(wildcardQuery("name", "te*"))
.facets(facets().queryExecution("collect").facet("age2", termQuery("age", 2)).facet("age1", termQuery("age", 1)))
));
assertThat(queryResult.facets().countFacet("age2").count(), equalTo(4l));
assertThat(queryResult.facets().countFacet("age1").count(), equalTo(1l));
}
@Test public void testSimpleQueryFacetsQueryExecutionIdset() throws Exception {
QuerySearchResult queryResult = searchService.executeQueryPhase(searchRequest(
searchSource().query(wildcardQuery("name", "te*"))
.facets(facets().queryExecution("idset").facet("age2", termQuery("age", 2)).facet("age1", termQuery("age", 1)))
.facets(facets().queryFacet("age2", termQuery("age", 2)).queryFacet("age1", termQuery("age", 1)))
));
assertThat(queryResult.facets().countFacet("age2").count(), equalTo(4l));
assertThat(queryResult.facets().countFacet("age1").count(), equalTo(1l));

View File

@ -253,7 +253,7 @@ public class TransportTwoServersSearchTests extends AbstractNodesTests {
SearchSourceBuilder sourceBuilder = searchSource()
.query(termQuery("multi", "test"))
.from(0).size(20).explain(true)
.facets(facets().facet("all", termQuery("multi", "test"), true).facet("test1", termQuery("name", "test1")));
.facets(facets().queryFacet("all", termQuery("multi", "test"), true).queryFacet("test1", termQuery("name", "test1")));
SearchResponse searchResponse = client.search(searchRequest("test").source(sourceBuilder)).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));

View File

@ -319,7 +319,7 @@ public class TwoInstanceEmbeddedSearchTests extends AbstractNodesTests {
SearchSourceBuilder sourceBuilder = searchSource()
.query(termQuery("multi", "test"))
.from(0).size(20).explain(true).sort("age", false)
.facets(facets().facet("all", termQuery("multi", "test")).facet("test1", termQuery("name", "test1")));
.facets(facets().queryFacet("all", termQuery("multi", "test")).queryFacet("test1", termQuery("name", "test1")));
Map<SearchShardTarget, QuerySearchResultProvider> queryResults = newHashMap();
for (ShardsIterator shardsIt : indicesService.searchShards(clusterService.state(), new String[]{"test"}, null)) {

View File

@ -325,7 +325,7 @@ public class TwoInstanceUnbalancedShardsEmbeddedSearchTests extends AbstractNode
SearchSourceBuilder sourceBuilder = searchSource()
.query(termQuery("multi", "test"))
.from(0).size(20).explain(true).sort("age", false)
.facets(facets().facet("all", termQuery("multi", "test")).facet("test1", termQuery("name", "test1")));
.facets(facets().queryFacet("all", termQuery("multi", "test")).queryFacet("test1", termQuery("name", "test1")));
Map<SearchShardTarget, QuerySearchResultProvider> queryResults = newHashMap();
for (ShardsIterator shardsIt : indicesService.searchShards(clusterService.state(), new String[]{"test"}, null)) {

View File

@ -0,0 +1,95 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.integration.search.facets;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.facets.MultiCountFacet;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.hamcrest.MatcherAssert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.elasticsearch.index.query.xcontent.QueryBuilders.*;
import static org.elasticsearch.util.xcontent.XContentFactory.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (Shay Banon)
*/
public class SimpleFacetsTests extends AbstractNodesTests {
private Client client;
@BeforeClass public void createNodes() throws Exception {
startNode("server1");
startNode("server2");
client = getClient();
}
@AfterClass public void closeNodes() {
client.close();
closeAllNodes();
}
protected Client getClient() {
return client("server1");
}
@Test public void testFieldFacets() throws Exception {
try {
client.admin().indices().prepareDelete("test").execute().actionGet();
} catch (Exception e) {
// ignore
}
client.admin().indices().prepareCreate("test").execute().actionGet();
client.prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
.field("stag", "111")
.startArray("tag").value("xxx").value("yyy").endArray()
.endObject()).execute().actionGet();
client.admin().indices().prepareRefresh().execute().actionGet();
client.prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
.field("stag", "111")
.startArray("tag").value("zzz").value("yyy").endArray()
.endObject()).execute().actionGet();
client.admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client.prepareSearch()
.setQuery(termQuery("stag", "111"))
.addFieldFacet("facet1", "stag", 10)
.addFieldFacet("facet2", "tag", 10)
.execute().actionGet();
MultiCountFacet<String> facet = (MultiCountFacet<String>) searchResponse.facets().facet("facet1");
MatcherAssert.assertThat(facet.name(), equalTo("facet1"));
MatcherAssert.assertThat(facet.entries().size(), equalTo(1));
MatcherAssert.assertThat(facet.entries().get(0).value(), equalTo("111"));
MatcherAssert.assertThat(facet.entries().get(0).count(), equalTo(2));
facet = (MultiCountFacet<String>) searchResponse.facets().facet("facet2");
MatcherAssert.assertThat(facet.name(), equalTo("facet2"));
MatcherAssert.assertThat(facet.entries().size(), equalTo(3));
MatcherAssert.assertThat(facet.entries().get(0).value(), equalTo("yyy"));
MatcherAssert.assertThat(facet.entries().get(0).count(), equalTo(2));
}
}

View File

@ -0,0 +1,9 @@
cluster:
    routing:
        schedule: 100ms
index:
    number_of_shards: 3
    number_of_replicas: 0
    routing:
        # Use simple hashing since we want an even distribution and our ids are simply incremented numbers
        hash.type: simple

View File

@ -21,6 +21,7 @@ package org.elasticsearch.memcached;
import org.elasticsearch.rest.support.AbstractRestRequest;
import org.elasticsearch.rest.support.RestUtils;
import org.elasticsearch.util.Booleans;
import org.elasticsearch.util.Unicode;
import org.elasticsearch.util.collect.ImmutableList;
import org.elasticsearch.util.collect.ImmutableSet;
@ -164,4 +165,16 @@ public class MemcachedRestRequest extends AbstractRestRequest {
}
return defaultValue;
}
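// Boolean parameter support: the primitive overload delegates to Booleans.parseBoolean, while the
// boxed overload treats any value other than "false", "0" or "off" as true, so a parameter that is
// present but has no value is treated as enabled.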
@Override public boolean paramAsBoolean(String key, boolean defaultValue) {
return Booleans.parseBoolean(param(key), defaultValue);
}
@Override public Boolean paramAsBoolean(String key, Boolean defaultValue) {
String sValue = param(key);
if (sValue == null) {
return defaultValue;
}
return !(sValue.equals("false") || sValue.equals("0") || sValue.equals("off"));
}
}