initial facet refactoring

The main goal of the facet refactoring is to allow for two modes of facet execution: collector based, which gets callbacks as hits match, and post based, which iterates over all the relevant hits (see the sketch below).
It also includes some simplification of the facet implementation.
Shay Banon 2013-02-16 02:25:04 +01:00
parent 06b82a45d4
commit 73a447da86
126 changed files with 4319 additions and 5021 deletions
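To make the two execution modes concrete, here is a minimal, hypothetical sketch written against the new FacetExecutor contract this commit introduces (FacetExecutor, ContextDocIdSet and InternalFacet are the real classes added in the diff below; CountingFacetExecutor itself is illustrative only):

import java.io.IOException;
import java.util.List;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;

// A hypothetical executor that just counts matching docs, showing both modes.
public class CountingFacetExecutor extends FacetExecutor {

    long count;

    // collector mode: gets a callback for every hit as it matches
    @Override
    public Collector collector() {
        return new Collector() {
            @Override
            public void setNextReader(AtomicReaderContext context) throws IOException {
            }

            @Override
            public void collect(int doc) throws IOException {
                count++;
            }

            @Override
            public void postCollection() {
            }
        };
    }

    // post mode: iterates over the doc id sets of all relevant hits after the query ran
    @Override
    public Post post() {
        return new Post() {
            @Override
            public void executePost(List<ContextDocIdSet> docSets) throws IOException {
                for (ContextDocIdSet docSet : docSets) {
                    DocIdSetIterator it = docSet.docSet.iterator();
                    if (it == null) { // a null iterator means an empty set
                        continue;
                    }
                    while (it.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
                        count++;
                    }
                }
            }
        };
    }

    @Override
    public InternalFacet buildFacet(String facetName) {
        return null; // a real executor would return its InternalFacet subclass here
    }
}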

View File

@@ -33,7 +33,7 @@ import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
@@ -485,7 +485,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
/**
* Adds a facet to the search operation.
*/
public SearchRequestBuilder addFacet(AbstractFacetBuilder facet) {
public SearchRequestBuilder addFacet(FacetBuilder facet) {
sourceBuilder().facet(facet);
return this;
}
@@ -663,17 +663,17 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
suggestBuilder().addSuggestion(suggestion);
return this;
}
public SearchRequestBuilder setRescorer(RescoreBuilder.Rescorer rescorer) {
rescoreBuilder().setRescorer(rescorer);
return this;
}
public SearchRequestBuilder setRescoreWindow(int window) {
rescoreBuilder().setWindowSize(window);
return this;
}
/**
* Sets the source of the request as a json string. Note, setting anything other
* than the search type will cause this source to be overridden, consider using
@@ -850,7 +850,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
private SuggestBuilder suggestBuilder() {
return sourceBuilder().suggest();
}
private RescoreBuilder rescoreBuilder() {
return sourceBuilder().rescore();
}

View File

@@ -23,13 +23,14 @@ import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ScoreCachingWrappingScorer;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.lucene.search.XCollector;
import java.io.IOException;
/**
*
*/
public class MultiCollector extends Collector {
public class MultiCollector extends XCollector {
private final Collector collector;
@@ -80,4 +81,16 @@ public class MultiCollector extends Collector {
}
return true;
}
@Override
public void postCollection() {
if (collector instanceof XCollector) {
((XCollector) collector).postCollection();
}
for (Collector collector : collectors) {
if (collector instanceof XCollector) {
((XCollector) collector).postCollection();
}
}
}
}

View File

@@ -17,17 +17,20 @@
* under the License.
*/
package org.elasticsearch.search.facet;
package org.elasticsearch.common.lucene.docset;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Filter;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
/**
*
*/
public abstract class FacetCollector extends Collector {
public class ContextDocIdSet {
public abstract Facet facet();
public final AtomicReaderContext context;
public final DocIdSet docSet;
public abstract void setFilter(Filter filter);
public ContextDocIdSet(AtomicReaderContext context, DocIdSet docSet) {
this.context = context;
this.docSet = docSet;
}
}

View File

@@ -31,7 +31,7 @@ import java.io.IOException;
/**
*
*/
public class FilteredCollector extends Collector {
public class FilteredCollector extends XCollector {
private final Collector collector;
@@ -44,6 +44,13 @@ public class FilteredCollector extends Collector {
this.filter = filter;
}
@Override
public void postCollection() {
if (collector instanceof XCollector) {
((XCollector) collector).postCollection();
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
collector.setScorer(scorer);

View File

@@ -17,19 +17,17 @@
* under the License.
*/
package org.elasticsearch.search.facet;
package org.elasticsearch.common.lucene.search;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import org.apache.lucene.search.Collector;
/**
*
* An extension to {@link Collector} that allows for a callback when
* collection is done.
*/
public interface FacetProcessor {
public abstract class XCollector extends Collector {
String[] types();
public void postCollection() {
FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException;
}
}

View File

@@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.cache.query.parser.QueryParserCache;
@@ -42,24 +43,24 @@ import org.elasticsearch.index.settings.IndexSettings;
public class IndexCache extends AbstractIndexComponent implements CloseableComponent, ClusterStateListener {
private final FilterCache filterCache;
private final QueryParserCache queryParserCache;
private final IdCache idCache;
private final DocSetCache docSetCache;
private final TimeValue refreshInterval;
private ClusterService clusterService;
private long latestCacheStatsTimestamp = -1;
private CacheStats latestCacheStats;
@Inject
public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, QueryParserCache queryParserCache, IdCache idCache) {
public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, QueryParserCache queryParserCache, IdCache idCache,
DocSetCache docSetCache) {
super(index, indexSettings);
this.filterCache = filterCache;
this.queryParserCache = queryParserCache;
this.idCache = idCache;
this.docSetCache = docSetCache;
this.refreshInterval = componentSettings.getAsTime("stats.refresh_interval", TimeValue.timeValueSeconds(1));
@@ -94,6 +95,10 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
return filterCache;
}
public DocSetCache docSet() {
return this.docSetCache;
}
public IdCache idCache() {
return this.idCache;
}
@@ -107,6 +112,7 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
filterCache.close();
idCache.close();
queryParserCache.close();
docSetCache.clear("close");
if (clusterService != null) {
clusterService.remove(this);
}
@@ -115,12 +121,14 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
public void clear(IndexReader reader) {
filterCache.clear(reader);
idCache.clear(reader);
docSetCache.clear(reader);
}
public void clear(String reason) {
filterCache.clear(reason);
idCache.clear();
queryParserCache.clear();
docSetCache.clear(reason);
}
@Override

View File

@@ -21,6 +21,7 @@ package org.elasticsearch.index.cache;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.docset.DocSetCacheModule;
import org.elasticsearch.index.cache.filter.FilterCacheModule;
import org.elasticsearch.index.cache.id.IdCacheModule;
import org.elasticsearch.index.cache.query.parser.QueryParserCacheModule;
@@ -41,6 +42,7 @@ public class IndexCacheModule extends AbstractModule {
new FilterCacheModule(settings).configure(binder());
new IdCacheModule(settings).configure(binder());
new QueryParserCacheModule(settings).configure(binder());
new DocSetCacheModule(settings).configure(binder());
bind(IndexCache.class).asEagerSingleton();
}

View File

@@ -17,13 +17,22 @@
* under the License.
*/
package org.elasticsearch.search.facet;
package org.elasticsearch.index.cache.docset;
import org.elasticsearch.search.internal.SearchContext;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.index.IndexComponent;
import java.io.IOException;
/**
*/
public interface DocSetCache extends IndexComponent {
public interface OptimizeGlobalFacetCollector {
void clear(String reason);
void optimizedGlobalExecution(SearchContext searchContext) throws IOException;
}
void clear(IndexReader reader);
ContextDocIdSet obtain(AtomicReaderContext context);
void release(ContextDocIdSet docSet);
}
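The obtain/release pair above is a recycling contract: a caller borrows a segment-scoped, FixedBitSet-backed doc set, fills it, and hands it back for reuse. A hypothetical usage sketch (indexCache and readerContext are assumed to be in scope):

// Borrow a recycled bitset for this segment, use it, and return it to the pool.
ContextDocIdSet docSet = indexCache.docSet().obtain(readerContext);
try {
    // ... set bits on the FixedBitSet backing docSet.docSet as docs match ...
} finally {
    indexCache.docSet().release(docSet);
}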

View File

@@ -0,0 +1,44 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.docset;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Scopes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.docset.simple.SimpleDocSetCache;
/**
*
*/
public class DocSetCacheModule extends AbstractModule {
private final Settings settings;
public DocSetCacheModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
bind(DocSetCache.class)
.to(settings.getAsClass("index.cache.docset.type", SimpleDocSetCache.class, "org.elasticsearch.index.cache.docset.", "DocSetCache"))
.in(Scopes.SINGLETON);
}
}
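To sketch how the binding above resolves: under the prefix/suffix convention passed to getAsClass, a type value of "none" should expand to org.elasticsearch.index.cache.docset.none.NoneDocSetCache (an assumption based on that naming convention; ImmutableSettings is the settings builder of this era):

// Hypothetical: overriding the default SimpleDocSetCache via an index setting.
Settings settings = ImmutableSettings.settingsBuilder()
        .put("index.cache.docset.type", "none") // expands to ...docset.none.NoneDocSetCache
        .build();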

View File

@@ -0,0 +1,58 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.docset.none;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.settings.IndexSettings;
/**
*/
public class NoneDocSetCache extends AbstractIndexComponent implements DocSetCache {
@Inject
public NoneDocSetCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
}
@Override
public void clear(String reason) {
}
@Override
public void clear(IndexReader reader) {
}
@Override
public ContextDocIdSet obtain(AtomicReaderContext context) {
return new ContextDocIdSet(context, new FixedBitSet(context.reader().maxDoc()));
}
@Override
public void release(ContextDocIdSet docSet) {
}
}

View File

@@ -0,0 +1,91 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.docset.simple;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.settings.IndexSettings;
import java.util.Queue;
import java.util.concurrent.ConcurrentMap;
/**
*/
public class SimpleDocSetCache extends AbstractIndexComponent implements DocSetCache, SegmentReader.CoreClosedListener {
private final ConcurrentMap<Object, Queue<FixedBitSet>> cache;
@Inject
public SimpleDocSetCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
this.cache = ConcurrentCollections.newConcurrentMap();
}
@Override
public void onClose(SegmentReader owner) {
cache.remove(owner.getCoreCacheKey());
}
@Override
public void clear(String reason) {
cache.clear();
}
@Override
public void clear(IndexReader reader) {
cache.remove(reader.getCoreCacheKey());
}
@Override
public ContextDocIdSet obtain(AtomicReaderContext context) {
Queue<FixedBitSet> docIdSets = cache.get(context.reader().getCoreCacheKey());
if (docIdSets == null) {
if (context.reader() instanceof SegmentReader) {
((SegmentReader) context.reader()).addCoreClosedListener(this);
}
cache.put(context.reader().getCoreCacheKey(), ConcurrentCollections.<FixedBitSet>newQueue());
return new ContextDocIdSet(context, new FixedBitSet(context.reader().maxDoc()));
}
FixedBitSet docIdSet = docIdSets.poll();
if (docIdSet == null) {
docIdSet = new FixedBitSet(context.reader().maxDoc());
} else {
docIdSet.clear(0, docIdSet.length());
}
return new ContextDocIdSet(context, docIdSet);
}
@Override
public void release(ContextDocIdSet docSet) {
Queue<FixedBitSet> docIdSets = cache.get(docSet.context.reader().getCoreCacheKey());
if (docIdSets != null) {
docIdSets.add((FixedBitSet) docSet.docSet);
}
}
}

View File

@@ -20,47 +20,39 @@
package org.elasticsearch.index.search.nested;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.common.lucene.search.XCollector;
import java.io.IOException;
/**
* A collector that accepts parent docs, and calls back collect on the child docs of that parent.
*/
public class NestedChildrenCollector extends FacetCollector {
private final FacetCollector collector;
public class NestedChildrenCollector extends XCollector {
private final Collector collector;
private final Filter parentFilter;
private final Filter childFilter;
private Bits childDocs;
private FixedBitSet parentDocs;
public NestedChildrenCollector(FacetCollector collector, Filter parentFilter, Filter childFilter) {
public NestedChildrenCollector(Collector collector, Filter parentFilter, Filter childFilter) {
this.collector = collector;
this.parentFilter = parentFilter;
this.childFilter = childFilter;
}
@Override
public Facet facet() {
return collector.facet();
}
@Override
public void setFilter(Filter filter) {
// delegate the facet_filter to the children
collector.setFilter(filter);
public void postCollection() {
if (collector instanceof XCollector) {
((XCollector) collector).postCollection();
}
}
@Override

View File

@@ -36,7 +36,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
@@ -99,14 +99,14 @@ public class SearchSourceBuilder implements ToXContent {
private List<ScriptField> scriptFields;
private List<PartialField> partialFields;
private List<AbstractFacetBuilder> facets;
private List<FacetBuilder> facets;
private BytesReference facetsBinary;
private HighlightBuilder highlightBuilder;
private SuggestBuilder suggestBuilder;
private RescoreBuilder rescoreBuilder;
private TObjectFloatHashMap<String> indexBoost = null;
@@ -341,7 +341,7 @@ public class SearchSourceBuilder implements ToXContent {
/**
* Add a facet to perform as part of the search.
*/
public SearchSourceBuilder facet(AbstractFacetBuilder facet) {
public SearchSourceBuilder facet(FacetBuilder facet) {
if (facets == null) {
facets = Lists.newArrayList();
}
@@ -412,7 +412,7 @@ public class SearchSourceBuilder implements ToXContent {
}
return suggestBuilder;
}
public RescoreBuilder rescore() {
if (rescoreBuilder == null) {
rescoreBuilder = new RescoreBuilder();
@@ -711,7 +711,7 @@ public class SearchSourceBuilder implements ToXContent {
if (facets != null) {
builder.field("facets");
builder.startObject();
for (AbstractFacetBuilder facet : facets) {
for (FacetBuilder facet : facets) {
facet.toXContent(builder, params);
}
builder.endObject();
@@ -732,9 +732,9 @@ public class SearchSourceBuilder implements ToXContent {
if (suggestBuilder != null) {
suggestBuilder.toXContent(builder, params);
}
if (rescoreBuilder != null) {
rescoreBuilder.toXContent(builder, params);
rescoreBuilder.toXContent(builder, params);
}
if (stats != null) {

View File

@@ -309,7 +309,7 @@ public class SearchPhaseController extends AbstractComponent {
namedFacets.clear();
for (QuerySearchResultProvider queryResultProvider : queryResults.values()) {
for (Facet facet1 : queryResultProvider.queryResult().facets()) {
if (facet.name().equals(facet1.name())) {
if (facet.getName().equals(facet1.getName())) {
namedFacets.add(facet1);
}
}

View File

@@ -1,96 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.AndFilter;
import java.io.IOException;
/**
*
*/
public abstract class AbstractFacetCollector extends FacetCollector {
protected final String facetName;
protected Filter filter;
private Bits bits = null;
public AbstractFacetCollector(String facetName) {
this.facetName = facetName;
}
public Filter getFilter() {
return this.filter;
}
public Filter getAndClearFilter() {
Filter filter = this.filter;
this.filter = null;
return filter;
}
@Override
public void setFilter(Filter filter) {
if (this.filter == null) {
this.filter = filter;
} else {
this.filter = new AndFilter(ImmutableList.of(filter, this.filter));
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
// usually, there is nothing to do here
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true; // when working on FieldData, docs can be out of order
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
if (filter != null) {
bits = DocIdSets.toSafeBits(context.reader(), filter.getDocIdSet(context, context.reader().getLiveDocs()));
}
doSetNextReader(context);
}
protected abstract void doSetNextReader(AtomicReaderContext context) throws IOException;
@Override
public void collect(int doc) throws IOException {
if (bits == null) {
doCollect(doc);
} else if (bits.get(doc)) {
doCollect(doc);
}
}
protected abstract void doCollect(int doc) throws IOException;
}

View File

@@ -24,21 +24,11 @@ package org.elasticsearch.search.facet;
*/
public interface Facet {
/**
* The "logical" name of the search facet.
*/
String name();
/**
* The "logical" name of the search facet.
*/
String getName();
/**
* The type of the facet.
*/
String type();
/**
* The type of the facet.
*/

View File

@@ -30,8 +30,8 @@ import org.elasticsearch.search.internal.SearchContext;
public class FacetBinaryParseElement extends FacetParseElement {
@Inject
public FacetBinaryParseElement(FacetProcessors facetProcessors) {
super(facetProcessors);
public FacetBinaryParseElement(FacetParsers facetParsers) {
super(facetParsers);
}
@Override

View File

@@ -28,21 +28,36 @@ import java.io.IOException;
/**
*
*/
public abstract class AbstractFacetBuilder implements ToXContent {
public abstract class FacetBuilder implements ToXContent {
public static enum Mode {
COLLECTOR() {
@Override
public String toString() {
return "collector";
}
},
POST() {
@Override
public String toString() {
return "post";
}
};
public abstract String toString();
}
protected final String name;
protected FilterBuilder facetFilter;
protected Boolean global;
protected String nested;
protected Mode mode;
protected AbstractFacetBuilder(String name) {
protected FacetBuilder(String name) {
this.name = name;
}
public AbstractFacetBuilder facetFilter(FilterBuilder filter) {
public FacetBuilder facetFilter(FilterBuilder filter) {
this.facetFilter = filter;
return this;
}
@@ -51,7 +66,7 @@ public abstract class AbstractFacetBuilder implements ToXContent {
* Sets the nested path the facet will execute on. A match (root object) will then cause all the
* nested objects matching the path to be computed into the facet.
*/
public AbstractFacetBuilder nested(String nested) {
public FacetBuilder nested(String nested) {
this.nested = nested;
return this;
}
@@ -59,23 +74,29 @@ public abstract class AbstractFacetBuilder implements ToXContent {
/**
* Marks the facet to run in a global scope, not bounded by any query.
*/
public AbstractFacetBuilder global(boolean global) {
public FacetBuilder global(boolean global) {
this.global = global;
return this;
}
public FacetBuilder mode(Mode mode) {
this.mode = mode;
return this;
}
protected void addFilterFacetAndGlobal(XContentBuilder builder, Params params) throws IOException {
if (facetFilter != null) {
builder.field("facet_filter");
facetFilter.toXContent(builder, params);
}
if (nested != null) {
builder.field("nested", nested);
}
if (global != null) {
builder.field("global", global);
}
if (mode != null) {
builder.field("mode", mode.toString());
}
}
}
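On the request side, the new mode can be set per facet. A hypothetical example using the existing FacetBuilders helper (the client and the index name are assumed):

// Force post mode for a single terms facet on a search request.
client.prepareSearch("index")
        .setQuery(QueryBuilders.matchAllQuery())
        .addFacet(FacetBuilders.termsFacet("tags")
                .field("tag")
                .mode(FacetBuilder.Mode.POST))
        .execute().actionGet();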

View File

@@ -0,0 +1,123 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.lucene.search.XCollector;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* A facet executor ends up actually executing the relevant facet for a specific
* search request.
* <p/>
* The facet executor requires at least one of the {@link #collector()} or {@link #post()} methods to be
* implemented.
*/
public abstract class FacetExecutor {
/**
* A post class extends this class to implement post hits processing.
*/
public static abstract class Post {
public abstract void executePost(List<ContextDocIdSet> docSets) throws IOException;
public static class Filtered extends Post {
private final Post post;
private final Filter filter;
public Filtered(Post post, Filter filter) {
this.post = post;
this.filter = filter;
}
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
List<ContextDocIdSet> filteredEntries = new ArrayList<ContextDocIdSet>(docSets.size());
for (int i = 0; i < docSets.size(); i++) {
ContextDocIdSet entry = docSets.get(i);
DocIdSet filteredSet = filter.getDocIdSet(entry.context, null);
filteredEntries.add(new ContextDocIdSet(
entry.context,
// TODO: can we be smart here, maybe AndDocIdSet is not always fastest?
new AndDocIdSet(new DocIdSet[]{entry.docSet, filteredSet})
));
}
post.executePost(filteredEntries);
}
}
}
/**
* Simple extension to {@link XCollector} that implements methods that are typically
* not needed when doing collector based faceting.
*/
public static abstract class Collector extends XCollector {
@Override
public void setScorer(Scorer scorer) throws IOException {
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true;
}
@Override
public abstract void postCollection();
}
/**
* The mode of the execution.
*/
public static enum Mode {
/**
* Collector mode, maps to {@link #collector()}.
*/
COLLECTOR,
/**
* Post mode, maps to {@link #post()}.
*/
POST
}
/**
* Builds the facet.
*/
public abstract InternalFacet buildFacet(String facetName);
/**
* A collector based facet implementation, collecting the facet as hits match.
*/
public abstract Collector collector();
/**
* A post based facet that executes the facet over the aggregated relevant docs.
*/
public abstract Post post();
}

View File

@@ -22,15 +22,15 @@ package org.elasticsearch.search.facet;
import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.search.facet.datehistogram.DateHistogramFacetProcessor;
import org.elasticsearch.search.facet.filter.FilterFacetProcessor;
import org.elasticsearch.search.facet.geodistance.GeoDistanceFacetProcessor;
import org.elasticsearch.search.facet.histogram.HistogramFacetProcessor;
import org.elasticsearch.search.facet.query.QueryFacetProcessor;
import org.elasticsearch.search.facet.range.RangeFacetProcessor;
import org.elasticsearch.search.facet.statistical.StatisticalFacetProcessor;
import org.elasticsearch.search.facet.terms.TermsFacetProcessor;
import org.elasticsearch.search.facet.termsstats.TermsStatsFacetProcessor;
import org.elasticsearch.search.facet.datehistogram.DateHistogramFacetParser;
import org.elasticsearch.search.facet.filter.FilterFacetParser;
import org.elasticsearch.search.facet.geodistance.GeoDistanceFacetParser;
import org.elasticsearch.search.facet.histogram.HistogramFacetParser;
import org.elasticsearch.search.facet.query.QueryFacetParser;
import org.elasticsearch.search.facet.range.RangeFacetParser;
import org.elasticsearch.search.facet.statistical.StatisticalFacetParser;
import org.elasticsearch.search.facet.terms.TermsFacetParser;
import org.elasticsearch.search.facet.termsstats.TermsStatsFacetParser;
import java.util.List;
@@ -39,31 +39,31 @@ import java.util.List;
*/
public class FacetModule extends AbstractModule {
private List<Class<? extends FacetProcessor>> processors = Lists.newArrayList();
private List<Class<? extends FacetParser>> processors = Lists.newArrayList();
public FacetModule() {
processors.add(FilterFacetProcessor.class);
processors.add(QueryFacetProcessor.class);
processors.add(GeoDistanceFacetProcessor.class);
processors.add(HistogramFacetProcessor.class);
processors.add(DateHistogramFacetProcessor.class);
processors.add(RangeFacetProcessor.class);
processors.add(StatisticalFacetProcessor.class);
processors.add(TermsFacetProcessor.class);
processors.add(TermsStatsFacetProcessor.class);
processors.add(FilterFacetParser.class);
processors.add(QueryFacetParser.class);
processors.add(GeoDistanceFacetParser.class);
processors.add(HistogramFacetParser.class);
processors.add(DateHistogramFacetParser.class);
processors.add(RangeFacetParser.class);
processors.add(StatisticalFacetParser.class);
processors.add(TermsFacetParser.class);
processors.add(TermsStatsFacetParser.class);
}
public void addFacetProcessor(Class<? extends FacetProcessor> facetProcessor) {
public void addFacetProcessor(Class<? extends FacetParser> facetProcessor) {
processors.add(facetProcessor);
}
@Override
protected void configure() {
Multibinder<FacetProcessor> multibinder = Multibinder.newSetBinder(binder(), FacetProcessor.class);
for (Class<? extends FacetProcessor> processor : processors) {
Multibinder<FacetParser> multibinder = Multibinder.newSetBinder(binder(), FacetParser.class);
for (Class<? extends FacetParser> processor : processors) {
multibinder.addBinding().to(processor);
}
bind(FacetProcessors.class).asEagerSingleton();
bind(FacetParsers.class).asEagerSingleton();
bind(FacetParseElement.class).asEagerSingleton();
bind(FacetPhase.class).asEagerSingleton();
}

View File

@@ -20,14 +20,12 @@
package org.elasticsearch.search.facet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.NestedChildrenCollector;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.facet.nested.NestedFacetExecutor;
import org.elasticsearch.search.internal.SearchContext;
import java.util.ArrayList;
@@ -52,52 +50,66 @@
*/
public class FacetParseElement implements SearchParseElement {
private final FacetProcessors facetProcessors;
private final FacetParsers facetParsers;
@Inject
public FacetParseElement(FacetProcessors facetProcessors) {
this.facetProcessors = facetProcessors;
public FacetParseElement(FacetParsers facetParsers) {
this.facetParsers = facetParsers;
}
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
XContentParser.Token token;
List<FacetCollector> queryCollectors = null;
List<FacetCollector> globalCollectors = null;
List<SearchContextFacets.Entry> entries = new ArrayList<SearchContextFacets.Entry>();
String topLevelFieldName = null;
String facetName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
topLevelFieldName = parser.currentName();
facetName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
FacetCollector facet = null;
FacetExecutor facetExecutor = null;
boolean global = false;
String facetFieldName = null;
FacetExecutor.Mode defaultMainMode = null;
FacetExecutor.Mode defaultGlobalMode = null;
FacetExecutor.Mode mode = null;
Filter filter = null;
boolean cacheFilter = true;
boolean cacheFilter = false;
String nestedPath = null;
String fieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
facetFieldName = parser.currentName();
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("facet_filter".equals(facetFieldName) || "facetFilter".equals(facetFieldName)) {
if ("facet_filter".equals(fieldName) || "facetFilter".equals(fieldName)) {
filter = context.queryParserService().parseInnerFilter(parser);
} else {
FacetProcessor facetProcessor = facetProcessors.processor(facetFieldName);
if (facetProcessor == null) {
throw new SearchParseException(context, "No facet type found for [" + facetFieldName + "]");
FacetParser facetParser = facetParsers.processor(fieldName);
if (facetParser == null) {
throw new SearchParseException(context, "No facet type found for [" + fieldName + "]");
}
facet = facetProcessor.parse(topLevelFieldName, parser, context);
facetExecutor = facetParser.parse(facetName, parser, context);
defaultMainMode = facetParser.defaultMainMode();
defaultGlobalMode = facetParser.defaultGlobalMode();
}
} else if (token.isValue()) {
if ("global".equals(facetFieldName)) {
if ("global".equals(fieldName)) {
global = parser.booleanValue();
} else if ("scope".equals(facetFieldName) || "_scope".equals(facetFieldName)) {
} else if ("mode".equals(fieldName)) {
String modeAsText = parser.text();
if ("collector".equals(modeAsText)) {
mode = FacetExecutor.Mode.COLLECTOR;
} else if ("post".equals(modeAsText)) {
mode = FacetExecutor.Mode.POST;
} else {
throw new ElasticSearchIllegalArgumentException("failed to parse facet mode [" + modeAsText + "]");
}
} else if ("scope".equals(fieldName) || "_scope".equals(fieldName)) {
throw new SearchParseException(context, "the [scope] support in facets have been removed");
} else if ("cache_filter".equals(facetFieldName) || "cacheFilter".equals(facetFieldName)) {
} else if ("cache_filter".equals(fieldName) || "cacheFilter".equals(fieldName)) {
cacheFilter = parser.booleanValue();
} else if ("nested".equals(facetFieldName)) {
} else if ("nested".equals(fieldName)) {
nestedPath = parser.text();
}
}
@@ -106,44 +118,23 @@ public class FacetParseElement implements SearchParseElement {
if (cacheFilter) {
filter = context.filterCache().cache(filter);
}
facet.setFilter(filter);
}
if (facetExecutor == null) {
throw new SearchParseException(context, "no facet type found for facet named [" + facetName + "]");
}
if (nestedPath != null) {
// its a nested facet, wrap the collector with a facet one...
MapperService.SmartNameObjectMapper mapper = context.smartNameObjectMapper(nestedPath);
if (mapper == null) {
throw new SearchParseException(context, "facet nested path [" + nestedPath + "] not found");
}
ObjectMapper objectMapper = mapper.mapper();
if (objectMapper == null) {
throw new SearchParseException(context, "facet nested path [" + nestedPath + "] not found");
}
if (!objectMapper.nested().isNested()) {
throw new SearchParseException(context, "facet nested path [" + nestedPath + "] is not nested");
}
facet = new NestedChildrenCollector(facet, context.filterCache().cache(NonNestedDocsFilter.INSTANCE), context.filterCache().cache(objectMapper.nestedTypeFilter()));
facetExecutor = new NestedFacetExecutor(facetExecutor, context, nestedPath);
}
if (facet == null) {
throw new SearchParseException(context, "no facet type found for facet named [" + topLevelFieldName + "]");
if (mode == null) {
mode = global ? defaultGlobalMode : defaultMainMode;
}
if (global) {
if (globalCollectors == null) {
globalCollectors = new ArrayList<FacetCollector>();
}
globalCollectors.add(facet);
} else {
if (queryCollectors == null) {
queryCollectors = new ArrayList<FacetCollector>();
}
queryCollectors.add(facet);
}
entries.add(new SearchContextFacets.Entry(facetName, mode, facetExecutor, global, filter));
}
}
context.facets(new SearchContextFacets(queryCollectors, globalCollectors));
context.facets(new SearchContextFacets(entries));
}
}

View File

@@ -0,0 +1,53 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* A facet parser parses the relevant matching "type" of facet into a {@link FacetExecutor}.
* <p/>
* The parser also suggests the default {@link FacetExecutor.Mode} for both global and main executions.
*/
public interface FacetParser {
/**
* The type of the facet, for example, terms.
*/
String[] types();
/**
* The default mode to use when executed as a "main" (query level) facet.
*/
FacetExecutor.Mode defaultMainMode();
/**
* The default mode to use when executed as a "global" (all docs) facet.
*/
FacetExecutor.Mode defaultGlobalMode();
/**
* Parses the facet into a {@link FacetExecutor}.
*/
FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException;
}
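A skeletal, hypothetical implementation of the interface, wiring a custom type name to an executor (MyFacetExecutor is assumed to exist); such a parser would be registered through FacetModule#addFacetProcessor, shown earlier in this diff:

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.SearchContext;

// Hypothetical parser mapping the "my_facet" type to its executor.
public class MyFacetParser implements FacetParser {

    @Override
    public String[] types() {
        return new String[]{"my_facet"};
    }

    @Override
    public FacetExecutor.Mode defaultMainMode() {
        return FacetExecutor.Mode.COLLECTOR;
    }

    @Override
    public FacetExecutor.Mode defaultGlobalMode() {
        return FacetExecutor.Mode.POST;
    }

    @Override
    public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
        // read the facet body from the parser here, then build the executor
        return new MyFacetExecutor();
    }
}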

View File

@@ -28,14 +28,14 @@ import java.util.Set;
/**
*
*/
public class FacetProcessors {
public class FacetParsers {
private final ImmutableMap<String, FacetProcessor> processors;
private final ImmutableMap<String, FacetParser> processors;
@Inject
public FacetProcessors(Set<FacetProcessor> processors) {
MapBuilder<String, FacetProcessor> builder = MapBuilder.newMapBuilder();
for (FacetProcessor processor : processors) {
public FacetParsers(Set<FacetParser> processors) {
MapBuilder<String, FacetParser> builder = MapBuilder.newMapBuilder();
for (FacetParser processor : processors) {
for (String type : processor.types()) {
builder.put(type, processor);
}
@@ -43,7 +43,7 @@ public class FacetProcessors {
this.processors = builder.immutableMap();
}
public FacetProcessor processor(String type) {
public FacetParser processor(String type) {
return processors.get(type);
}
}

View File

@@ -20,23 +20,20 @@
package org.elasticsearch.search.facet;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.*;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.lucene.docset.AllDocIdSet;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.lucene.search.*;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.query.QueryPhaseExecutionException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -63,9 +60,22 @@ public class FacetPhase implements SearchPhase {
@Override
public void preProcess(SearchContext context) {
if (context.facets() != null && context.facets().queryCollectors() != null) {
for (FacetCollector collector : context.facets().queryCollectors()) {
context.searcher().addMainQueryCollector(collector);
if (context.facets() != null && context.facets().hasQuery()) {
for (SearchContextFacets.Entry entry : context.facets().entries()) {
if (entry.isGlobal()) {
continue;
}
if (entry.getMode() == FacetExecutor.Mode.COLLECTOR) {
Collector collector = entry.getFacetExecutor().collector();
if (entry.getFilter() != null) {
collector = new FilteredCollector(collector, entry.getFilter());
}
context.searcher().addMainQueryCollector(collector);
} else if (entry.getMode() == FacetExecutor.Mode.POST) {
context.searcher().enableMainDocIdSetCollector();
} else {
throw new ElasticSearchIllegalStateException("what mode?");
}
}
}
}
@@ -75,42 +85,69 @@ public class FacetPhase implements SearchPhase {
if (context.facets() == null) {
return;
}
if (context.queryResult().facets() != null) {
// no need to compute the facets twice, they should be computed on a per context basis
return;
}
// optimize global facet execution, based on filters (don't iterate over all docs), and check
// if we have special facets that can be optimized for all execution, do it
List<FacetCollector> collectors = context.facets().globalCollectors();
if (collectors != null && !collectors.isEmpty()) {
Map<Filter, List<Collector>> filtersByCollector = Maps.newHashMap();
for (Collector collector : collectors) {
if (collector instanceof OptimizeGlobalFacetCollector) {
try {
((OptimizeGlobalFacetCollector) collector).optimizedGlobalExecution(context);
} catch (IOException e) {
throw new QueryPhaseExecutionException(context, "Failed to execute global facets", e);
Map<Filter, List<Collector>> filtersByCollector = null;
List<ContextDocIdSet> globalDocSets = null;
for (SearchContextFacets.Entry entry : context.facets().entries()) {
if (!entry.isGlobal()) {
if (entry.getMode() == FacetExecutor.Mode.POST) {
FacetExecutor.Post post = entry.getFacetExecutor().post();
if (entry.getFilter() != null) {
post = new FacetExecutor.Post.Filtered(post, entry.getFilter());
}
} else {
Filter filter = Queries.MATCH_ALL_FILTER;
if (collector instanceof AbstractFacetCollector) {
AbstractFacetCollector facetCollector = (AbstractFacetCollector) collector;
if (facetCollector.getFilter() != null) {
// we can clear the filter, since we are anyhow going to iterate over it
// so no need to double check it...
filter = facetCollector.getAndClearFilter();
try {
post.executePost(context.searcher().mainDocIdSetCollector().docSets());
} catch (Exception e) {
throw new QueryPhaseExecutionException(context, "failed to execute facet [" + entry.getFacetName() + "]", e);
}
}
} else {
if (entry.getMode() == FacetExecutor.Mode.POST) {
if (globalDocSets == null) {
// build global post entries, map a reader context to a live docs docIdSet
List<AtomicReaderContext> leaves = context.searcher().getIndexReader().leaves();
globalDocSets = new ArrayList<ContextDocIdSet>(leaves.size());
for (AtomicReaderContext leaf : leaves) {
globalDocSets.add(new ContextDocIdSet(
leaf,
BitsFilteredDocIdSet.wrap(new AllDocIdSet(leaf.reader().maxDoc()), leaf.reader().getLiveDocs())) // need to only include live docs
);
}
}
try {
FacetExecutor.Post post = entry.getFacetExecutor().post();
if (entry.getFilter() != null) {
post = new FacetExecutor.Post.Filtered(post, entry.getFilter());
}
post.executePost(globalDocSets);
} catch (Exception e) {
throw new QueryPhaseExecutionException(context, "Failed to execute facet [" + entry.getFacetName() + "]", e);
}
} else if (entry.getMode() == FacetExecutor.Mode.COLLECTOR) {
Filter filter = Queries.MATCH_ALL_FILTER;
if (entry.getFilter() != null) {
filter = entry.getFilter();
}
if (filtersByCollector == null) {
filtersByCollector = Maps.newHashMap();
}
List<Collector> list = filtersByCollector.get(filter);
if (list == null) {
list = new ArrayList<Collector>();
filtersByCollector.put(filter, list);
}
list.add(collector);
list.add(entry.getFacetExecutor().collector());
}
}
}
// optimize the global collector based execution
if (filtersByCollector != null) {
// now, go and execute the filters->collector ones
for (Map.Entry<Filter, List<Collector>> entry : filtersByCollector.entrySet()) {
Filter filter = entry.getKey();
@@ -121,24 +158,20 @@ public class FacetPhase implements SearchPhase {
}
try {
context.searcher().search(query, MultiCollector.wrap(entry.getValue().toArray(new Collector[entry.getValue().size()])));
} catch (IOException e) {
} catch (Exception e) {
throw new QueryPhaseExecutionException(context, "Failed to execute global facets", e);
}
for (Collector collector : entry.getValue()) {
if (collector instanceof XCollector) {
((XCollector) collector).postCollection();
}
}
}
}
SearchContextFacets contextFacets = context.facets();
List<Facet> facets = Lists.newArrayListWithCapacity(2);
if (contextFacets.queryCollectors() != null) {
for (FacetCollector facetCollector : contextFacets.queryCollectors()) {
facets.add(facetCollector.facet());
}
}
if (contextFacets.globalCollectors() != null) {
for (FacetCollector facetCollector : contextFacets.globalCollectors()) {
facets.add(facetCollector.facet());
}
List<Facet> facets = new ArrayList<Facet>(context.facets().entries().size());
for (SearchContextFacets.Entry entry : context.facets().entries()) {
facets.add(entry.getFacetExecutor().buildFacet(entry.getFacetName()));
}
context.queryResult().facets(new InternalFacets(facets));
}

View File

@@ -22,6 +22,7 @@ package org.elasticsearch.search.facet;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -31,11 +31,24 @@ import java.util.List;
/**
*
*/
public interface InternalFacet extends Facet, Streamable, ToXContent {
public abstract class InternalFacet implements Facet, Streamable, ToXContent {
String streamType();
private String facetName;
Facet reduce(List<Facet> facets);
/**
* Here just for streams...
*/
protected InternalFacet() {
}
protected InternalFacet(String facetName) {
this.facetName = facetName;
}
public abstract String streamType();
public abstract Facet reduce(List<Facet> facets);
public static interface Stream {
Facet readFacet(String type, StreamInput in) throws IOException;
@@ -57,4 +71,19 @@ public interface InternalFacet extends Facet, Streamable, ToXContent {
return streams.get(type);
}
}
@Override
public final String getName() {
return facetName;
}
@Override
public void readFrom(StreamInput in) throws IOException {
facetName = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(facetName);
}
}

View File

@@ -87,7 +87,7 @@ public class InternalFacets implements Facets, Streamable, ToXContent, Iterable<
}
Map<String, Facet> facetsAsMap = newHashMap();
for (Facet facet : facets) {
facetsAsMap.put(facet.name(), facet);
facetsAsMap.put(facet.getName(), facet);
}
this.facetsAsMap = facetsAsMap;
return facetsAsMap;

View File

@@ -19,6 +19,9 @@
package org.elasticsearch.search.facet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.Nullable;
import java.util.List;
/**
@@ -26,19 +29,74 @@ import java.util.List;
*/
public class SearchContextFacets {
private final List<FacetCollector> queryCollectors;
private final List<FacetCollector> globalCollectors;
public static class Entry {
private final String facetName;
private final FacetExecutor.Mode mode;
private final FacetExecutor facetExecutor;
private final boolean global;
@Nullable
private final Filter filter;
public SearchContextFacets(List<FacetCollector> queryCollectors, List<FacetCollector> globalCollectors) {
this.queryCollectors = queryCollectors;
this.globalCollectors = globalCollectors;
public Entry(String facetName, FacetExecutor.Mode mode, FacetExecutor facetExecutor, boolean global, @Nullable Filter filter) {
this.facetName = facetName;
this.mode = mode;
this.facetExecutor = facetExecutor;
this.global = global;
this.filter = filter;
}
public String getFacetName() {
return facetName;
}
public FacetExecutor.Mode getMode() {
return mode;
}
public FacetExecutor getFacetExecutor() {
return facetExecutor;
}
public boolean isGlobal() {
return global;
}
public Filter getFilter() {
return filter;
}
}
public List<FacetCollector> queryCollectors() {
return queryCollectors;
private final List<Entry> entries;
private boolean hasQuery;
private boolean hasGlobal;
public SearchContextFacets(List<Entry> entries) {
this.entries = entries;
for (Entry entry : entries) {
if (entry.global) {
hasGlobal = true;
} else {
hasQuery = true;
}
}
}
public List<FacetCollector> globalCollectors() {
return globalCollectors;
public List<Entry> entries() {
return this.entries;
}
/**
* Are there facets that need to be computed on the query hits?
*/
public boolean hasQuery() {
return hasQuery;
}
/**
* Are there global facets that need to be computed on all the docs?
*/
public boolean hasGlobal() {
return hasGlobal;
}
}

View File

@@ -1,92 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.datehistogram;
import gnu.trove.map.hash.TLongLongHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* A date histogram facet collector that uses the same field as the key as well as the
* value.
*/
public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
private final IndexNumericFieldData indexFieldData;
private final DateHistogramFacet.ComparatorType comparatorType;
private LongValues values;
private final DateHistogramProc histoProc;
public CountDateHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.histoProc = new DateHistogramProc(tzRounding);
}
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getLongValues();
}
@Override
public Facet facet() {
return new InternalCountDateHistogramFacet(facetName, comparatorType, histoProc.counts(), true);
}
public static class DateHistogramProc implements LongValues.ValueInDocProc {
private final TLongLongHashMap counts = CacheRecycler.popLongLongMap();
private final TimeZoneRounding tzRounding;
public DateHistogramProc(TimeZoneRounding tzRounding) {
this.tzRounding = tzRounding;
}
@Override
public void onMissing(int docId) {
}
@Override
public void onValue(int docId, long value) {
counts.adjustOrPutValue(tzRounding.calc(value), 1, 1);
}
public TLongLongHashMap counts() {
return counts;
}
}
}

View File

@@ -0,0 +1,134 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.datehistogram;
import gnu.trove.map.hash.TLongLongHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import java.io.IOException;
import java.util.List;
/**
* A date histogram facet executor that uses the same field for the key as well as the
* value.
*/
public class CountDateHistogramFacetExecutor extends FacetExecutor {
private final TimeZoneRounding tzRounding;
private final IndexNumericFieldData indexFieldData;
final DateHistogramFacet.ComparatorType comparatorType;
final TLongLongHashMap counts;
public CountDateHistogramFacetExecutor(IndexNumericFieldData indexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType) {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.tzRounding = tzRounding;
this.counts = CacheRecycler.popLongLongMap();
}
@Override
public Collector collector() {
return new Collector();
}
@Override
public Post post() {
return new Post();
}
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalCountDateHistogramFacet(facetName, comparatorType, counts, true);
}
class Post extends FacetExecutor.Post {
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
DateHistogramProc histoProc = new DateHistogramProc(counts, tzRounding);
for (ContextDocIdSet docSet : docSets) {
LongValues values = indexFieldData.load(docSet.context).getLongValues();
DocIdSetIterator it = docSet.docSet.iterator();
int doc;
while ((doc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
values.forEachValueInDoc(doc, histoProc);
}
}
}
}
class Collector extends FacetExecutor.Collector {
private LongValues values;
private final DateHistogramProc histoProc;
public Collector() {
this.histoProc = new DateHistogramProc(counts, tzRounding);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getLongValues();
}
@Override
public void collect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class DateHistogramProc implements LongValues.ValueInDocProc {
private final TLongLongHashMap counts;
private final TimeZoneRounding tzRounding;
public DateHistogramProc(TLongLongHashMap counts, TimeZoneRounding tzRounding) {
this.counts = counts;
this.tzRounding = tzRounding;
}
@Override
public void onMissing(int docId) {
}
@Override
public void onValue(int docId, long value) {
counts.adjustOrPutValue(tzRounding.calc(value), 1, 1);
}
public TLongLongHashMap counts() {
return counts;
}
}
}
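This new executor is a compact illustration of the commit's two execution modes: collector(), which is pushed documents as they match, and post(), which walks the matching doc sets after the fact; both feed the shared counts map that buildFacet() reads. Below is a minimal driver sketch (not from this commit) using only the FacetExecutor surface shown above; the leaf context and matching doc ids are assumed to come from the search phase.
import java.io.IOException;
import java.util.List;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
public class FacetModeSketch {
    // COLLECTOR mode: documents are pushed into the executor's Collector as they match.
    static InternalFacet runCollectorMode(FacetExecutor executor, AtomicReaderContext leaf,
                                          int[] matchingDocs) throws IOException {
        FacetExecutor.Collector collector = executor.collector();
        collector.setNextReader(leaf);
        for (int doc : matchingDocs) {
            collector.collect(doc);
        }
        collector.postCollection(); // some executors publish their state here
        return executor.buildFacet("my_facet");
    }
    // POST mode: the executor pulls the per-segment doc sets itself, after search.
    static InternalFacet runPostMode(FacetExecutor executor,
                                     List<ContextDocIdSet> docSets) throws IOException {
        executor.post().executePost(docSets);
        return executor.buildFacet("my_facet");
    }
}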

View File

@ -35,11 +35,6 @@ public interface DateHistogramFacet extends Facet, Iterable<DateHistogramFacet.E
*/
public static final String TYPE = "date_histogram";
/**
* An ordered list of histogram facet entries.
*/
List<? extends Entry> entries();
/**
* An ordered list of histogram facet entries.
*/
@ -60,7 +55,7 @@ public interface DateHistogramFacet extends Facet, Iterable<DateHistogramFacet.E
if (o2 == null) {
return -1;
}
return (o1.time() < o2.time() ? -1 : (o1.time() == o2.time() ? 0 : 1));
return (o1.getTime() < o2.getTime() ? -1 : (o1.getTime() == o2.getTime() ? 0 : 1));
}
}),
COUNT((byte) 1, "count", new Comparator<Entry>() {
@ -77,7 +72,7 @@ public interface DateHistogramFacet extends Facet, Iterable<DateHistogramFacet.E
if (o2 == null) {
return -1;
}
return (o1.count() < o2.count() ? -1 : (o1.count() == o2.count() ? 0 : 1));
return (o1.getCount() < o2.getCount() ? -1 : (o1.getCount() == o2.getCount() ? 0 : 1));
}
}),
TOTAL((byte) 2, "total", new Comparator<Entry>() {
@ -94,7 +89,7 @@ public interface DateHistogramFacet extends Facet, Iterable<DateHistogramFacet.E
if (o2 == null) {
return -1;
}
return (o1.total() < o2.total() ? -1 : (o1.total() == o2.total() ? 0 : 1));
return (o1.getTotal() < o2.getTotal() ? -1 : (o1.getTotal() == o2.getTotal() ? 0 : 1));
}
});
@ -147,71 +142,36 @@ public interface DateHistogramFacet extends Facet, Iterable<DateHistogramFacet.E
public interface Entry {
/**
* The time bucket start (in milliseconds).
*/
long time();
/**
* The time bucket start (in milliseconds).
*/
long getTime();
/**
* The number of hits that fall within that key "range" or "interval".
*/
long count();
/**
* The number of hits that fall within that key "range" or "interval".
*/
long getCount();
/**
* The total count of values aggregated to compute the total.
*/
long totalCount();
/**
* The total count of values aggregated to compute the total.
*/
long getTotalCount();
/**
* The sum / total of the value field that fall within this key "interval".
*/
double total();
/**
* The sum / total of the value field that fall within this key "interval".
*/
double getTotal();
/**
* The mean of this facet interval.
*/
double mean();
/**
* The mean of this facet interval.
*/
double getMean();
/**
* The minimum value.
*/
double min();
/**
* The minimum value.
*/
double getMin();
/**
* The maximum value.
*/
double max();
/**
* The maximum value.
*/
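With the short-form accessors gone, consumers read entries through the get-prefixed methods only. A hypothetical consumer written against just the interface above (DateHistogramFacet is Iterable over its entries):
import org.elasticsearch.search.facet.datehistogram.DateHistogramFacet;
public class DateHistogramPrinter {
    // One line per bucket, using the surviving get-prefixed accessors.
    static void print(DateHistogramFacet facet) {
        for (DateHistogramFacet.Entry entry : facet) {
            System.out.println(entry.getTime() + " -> " + entry.getCount());
        }
    }
}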

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.Map;
@ -32,7 +32,7 @@ import java.util.Map;
/**
* A facet builder of date histogram facets.
*/
public class DateHistogramFacetBuilder extends AbstractFacetBuilder {
public class DateHistogramFacetBuilder extends FacetBuilder {
private String keyFieldName;
private String valueFieldName;
private String interval = null;
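On the client side the rename is invisible apart from the type name. A usage sketch; the FacetBuilders factory and the field()/interval() setters are assumed from the surrounding codebase, since this hunk only shows the builder's fields:
import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.facet.FacetBuilders;
public class DateHistogramBuilderSketch {
    static FacetBuilder dailyHistogram() {
        // field(...) and interval(...) populate keyFieldName/interval seen above
        return FacetBuilders.dateHistogramFacet("by_day")
                .field("timestamp")
                .interval("day");
    }
}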

View File

@ -31,9 +31,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.Chronology;
import org.joda.time.DateTimeField;
@ -46,12 +46,12 @@ import java.util.Map;
/**
*
*/
public class DateHistogramFacetProcessor extends AbstractComponent implements FacetProcessor {
public class DateHistogramFacetParser extends AbstractComponent implements FacetParser {
private final ImmutableMap<String, DateFieldParser> dateFieldParsers;
@Inject
public DateHistogramFacetProcessor(Settings settings) {
public DateHistogramFacetParser(Settings settings) {
super(settings);
InternalDateHistogramFacet.registerStreams();
@ -80,7 +80,17 @@ public class DateHistogramFacetProcessor extends AbstractComponent implements Fa
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
String keyField = null;
String valueField = null;
String valueScript = null;
@ -169,16 +179,16 @@ public class DateHistogramFacetProcessor extends AbstractComponent implements Fa
if (valueScript != null) {
SearchScript script = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
return new ValueScriptDateHistogramFacetCollector(facetName, keyIndexFieldData, script, tzRounding, comparatorType, context);
return new ValueScriptDateHistogramFacetExecutor(keyIndexFieldData, script, tzRounding, comparatorType);
} else if (valueField != null) {
FieldMapper valueMapper = context.smartNameFieldMapper(valueField);
if (valueMapper == null) {
throw new FacetPhaseExecutionException(facetName, "(value) field [" + valueField + "] not found");
}
IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueMapper);
return new ValueDateHistogramFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, tzRounding, comparatorType, context);
return new ValueDateHistogramFacetExecutor(keyIndexFieldData, valueIndexFieldData, tzRounding, comparatorType);
} else {
return new CountDateHistogramFacetCollector(facetName, keyIndexFieldData, tzRounding, comparatorType, context);
return new CountDateHistogramFacetExecutor(keyIndexFieldData, tzRounding, comparatorType);
}
}
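Parsers now return an executor instead of a collector, and advertise a default execution mode for the main query phase and for global facets. A sketch of how a caller might combine the two, using only the FacetParser methods declared above (the driver logic itself is assumed):
import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.internal.SearchContext;
public class ParserDrivingSketch {
    // Builds the executor for one facet definition and picks the default
    // execution mode for its scope.
    static FacetExecutor parse(FacetParser parser, String facetName, XContentParser source,
                               SearchContext context, boolean global) throws IOException {
        FacetExecutor.Mode mode = global ? parser.defaultGlobalMode() : parser.defaultMainMode();
        System.out.println(facetName + " defaults to mode " + mode);
        return parser.parse(facetName, source, context);
    }
}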

View File

@ -70,28 +70,13 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
}
@Override
public long time() {
public long getTime() {
return time;
}
@Override
public long getTime() {
return time();
}
@Override
public long count() {
return count;
}
@Override
public long getCount() {
return count();
}
@Override
public long totalCount() {
return 0;
return count;
}
@Override
@ -99,28 +84,13 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
return 0;
}
@Override
public double total() {
return Double.NaN;
}
@Override
public double getTotal() {
return total();
}
@Override
public double mean() {
return Double.NaN;
}
@Override
public double getMean() {
return mean();
}
@Override
public double min() {
return Double.NaN;
}
@ -129,69 +99,35 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
return Double.NaN;
}
@Override
public double max() {
return Double.NaN;
}
@Override
public double getMax() {
return Double.NaN;
}
}
private String name;
private ComparatorType comparatorType;
TLongLongHashMap counts;
boolean cachedCounts;
CountEntry[] entries = null;
private InternalCountDateHistogramFacet() {
}
public InternalCountDateHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, boolean cachedCounts) {
this.name = name;
super(name);
this.comparatorType = comparatorType;
this.counts = counts;
this.cachedCounts = cachedCounts;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<CountEntry> entries() {
public List<CountEntry> getEntries() {
return Arrays.asList(computeEntries());
}
@Override
public List<CountEntry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return (Iterator) entries().iterator();
return (Iterator) getEntries().iterator();
}
void releaseCache() {
@ -234,7 +170,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
}
return new InternalCountDateHistogramFacet(name, comparatorType, counts, true);
return new InternalCountDateHistogramFacet(getName(), comparatorType, counts, true);
}
static final class Fields {
@ -246,13 +182,13 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, TYPE);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
builder.field(Fields.TIME, entry.time());
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.TIME, entry.getTime());
builder.field(Fields.COUNT, entry.getCount());
builder.endObject();
}
builder.endArray();
@ -268,7 +204,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
@ -282,7 +218,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeByte(comparatorType.id());
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {

View File

@ -24,10 +24,22 @@ import org.elasticsearch.search.facet.InternalFacet;
/**
*
*/
public abstract class InternalDateHistogramFacet implements DateHistogramFacet, InternalFacet {
public abstract class InternalDateHistogramFacet extends InternalFacet implements DateHistogramFacet {
public static void registerStreams() {
InternalCountDateHistogramFacet.registerStreams();
InternalFullDateHistogramFacet.registerStreams();
}
protected InternalDateHistogramFacet() {
}
protected InternalDateHistogramFacet(String facetName) {
super(facetName);
}
@Override
public final String getType() {
return TYPE;
}
}
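Pinning getType() in the abstract base guarantees the count and full variants are indistinguishable to consumers that dispatch on type. A hypothetical dispatch, using only the TYPE constants and accessors visible in this diff:
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.datehistogram.DateHistogramFacet;
import org.elasticsearch.search.facet.filter.FilterFacet;
public class TypeDispatchSketch {
    static void describe(Facet facet) {
        if (DateHistogramFacet.TYPE.equals(facet.getType())) {
            System.out.println("date histogram facet: " + facet.getName());
        } else if (FilterFacet.TYPE.equals(facet.getType())) {
            System.out.println("filter facet count: " + ((FilterFacet) facet).getCount());
        }
    }
}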

View File

@ -75,92 +75,58 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
}
@Override
public long time() {
public long getTime() {
return time;
}
@Override
public long getTime() {
return time();
}
@Override
public long count() {
public long getCount() {
return count;
}
@Override
public long getCount() {
return count();
}
@Override
public double total() {
public double getTotal() {
return total;
}
@Override
public double getTotal() {
return total();
}
@Override
public long totalCount() {
public long getTotalCount() {
return totalCount;
}
@Override
public long getTotalCount() {
return this.totalCount;
}
@Override
public double mean() {
public double getMean() {
if (totalCount == 0) {
return totalCount;
}
return total / totalCount;
}
@Override
public double getMean() {
return total / totalCount;
}
@Override
public double min() {
return this.min;
}
@Override
public double getMin() {
return this.min;
}
@Override
public double max() {
return this.max;
}
@Override
public double getMax() {
return this.max;
}
}
private String name;
private ComparatorType comparatorType;
ExtTLongObjectHashMap<FullEntry> tEntries;
boolean cachedEntries;
Collection<FullEntry> entries;
private InternalFullDateHistogramFacet() {
InternalFullDateHistogramFacet() {
}
InternalFullDateHistogramFacet(String name) {
super(name);
}
public InternalFullDateHistogramFacet(String name, ComparatorType comparatorType, ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries, boolean cachedEntries) {
this.name = name;
super(name);
this.comparatorType = comparatorType;
this.tEntries = entries;
this.cachedEntries = cachedEntries;
@ -168,41 +134,16 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<FullEntry> entries() {
public List<FullEntry> getEntries() {
if (!(entries instanceof List)) {
entries = new ArrayList<FullEntry>(entries);
}
return (List<FullEntry>) entries;
}
@Override
public List<FullEntry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return (Iterator) entries().iterator();
return (Iterator) getEntries().iterator();
}
void releaseCache() {
@ -218,7 +159,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
if (facets.size() == 1) {
// we need to sort it
InternalFullDateHistogramFacet internalFacet = (InternalFullDateHistogramFacet) facets.get(0);
List<FullEntry> entries = internalFacet.entries();
List<FullEntry> entries = internalFacet.getEntries();
Collections.sort(entries, comparatorType.comparator());
internalFacet.releaseCache();
return internalFacet;
@ -262,8 +203,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
CacheRecycler.pushLongObjectMap(map);
// just initialize it as an already-ordered facet
InternalFullDateHistogramFacet ret = new InternalFullDateHistogramFacet();
ret.name = name;
InternalFullDateHistogramFacet ret = new InternalFullDateHistogramFacet(getName());
ret.comparatorType = comparatorType;
ret.entries = ordered;
return ret;
@ -283,18 +223,18 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, TYPE);
builder.startArray(Fields.ENTRIES);
for (Entry entry : entries()) {
for (Entry entry : getEntries()) {
builder.startObject();
builder.field(Fields.TIME, entry.time());
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.MIN, entry.min());
builder.field(Fields.MAX, entry.max());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.TOTAL_COUNT, entry.totalCount());
builder.field(Fields.MEAN, entry.mean());
builder.field(Fields.TIME, entry.getTime());
builder.field(Fields.COUNT, entry.getCount());
builder.field(Fields.MIN, entry.getMin());
builder.field(Fields.MAX, entry.getMax());
builder.field(Fields.TOTAL, entry.getTotal());
builder.field(Fields.TOTAL_COUNT, entry.getTotalCount());
builder.field(Fields.MEAN, entry.getMean());
builder.endObject();
}
builder.endArray();
@ -310,7 +250,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
cachedEntries = false;
@ -323,7 +263,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeByte(comparatorType.id());
out.writeVInt(entries.size());
for (FullEntry entry : entries) {

View File

@ -20,67 +20,111 @@
package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import java.io.IOException;
import java.util.List;
/**
* A date histogram facet executor that uses different fields for the key and the value.
*/
public class ValueDateHistogramFacetCollector extends AbstractFacetCollector {
public class ValueDateHistogramFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final DateHistogramFacet.ComparatorType comparatorType;
final TimeZoneRounding tzRounding;
private LongValues keyValues;
private final DateHistogramProc histoProc;
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
public ValueDateHistogramFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
public ValueDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType) {
this.comparatorType = comparatorType;
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.histoProc = new DateHistogramProc(tzRounding);
this.tzRounding = tzRounding;
this.entries = CacheRecycler.popLongObjectMap();
}
@Override
protected void doCollect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getLongValues();
histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
public Post post() {
return new Post();
}
@Override
public Facet facet() {
return new InternalFullDateHistogramFacet(facetName, comparatorType, histoProc.entries, true);
public InternalFacet buildFacet(String facetName) {
return new InternalFullDateHistogramFacet(facetName, comparatorType, entries, true);
}
class Post extends FacetExecutor.Post {
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
DateHistogramProc histoProc = new DateHistogramProc(tzRounding, ValueDateHistogramFacetExecutor.this.entries);
for (ContextDocIdSet docSet : docSets) {
LongValues keyValues = keyIndexFieldData.load(docSet.context).getLongValues();
histoProc.valueValues = valueIndexFieldData.load(docSet.context).getDoubleValues();
DocIdSetIterator it = docSet.docSet.iterator();
int doc;
while ((doc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
keyValues.forEachValueInDoc(doc, histoProc);
}
}
}
}
class Collector extends FacetExecutor.Collector {
private final DateHistogramProc histoProc;
private LongValues keyValues;
public Collector() {
this.histoProc = new DateHistogramProc(tzRounding, entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getLongValues();
histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class DateHistogramProc implements LongValues.ValueInDocProc {
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
private final TimeZoneRounding tzRounding;
DoubleValues valueValues;
final ValueAggregator valueAggregator = new ValueAggregator();
public DateHistogramProc(TimeZoneRounding tzRounding) {
public DateHistogramProc(TimeZoneRounding tzRounding, ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries) {
this.tzRounding = tzRounding;
this.entries = entries;
}
@Override

View File

@ -20,73 +20,116 @@
package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import java.io.IOException;
import java.util.List;
/**
* A date histogram facet executor that uses one field for the key and a script to compute the value.
*/
public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollector {
public class ValueScriptDateHistogramFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData keyIndexFieldData;
private final DateHistogramFacet.ComparatorType comparatorType;
final SearchScript valueScript;
final TimeZoneRounding tzRounding;
private LongValues keyValues;
private final SearchScript valueScript;
private final DateHistogramProc histoProc;
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
public ValueScriptDateHistogramFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, SearchScript valueScript, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
public ValueScriptDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, SearchScript valueScript, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType) {
this.comparatorType = comparatorType;
this.keyIndexFieldData = keyIndexFieldData;
this.valueScript = valueScript;
histoProc = new DateHistogramProc(tzRounding, this.valueScript);
this.tzRounding = tzRounding;
this.entries = CacheRecycler.popLongObjectMap();
}
@Override
protected void doCollect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
public Collector collector() {
return new Collector();
}
@Override
public void setScorer(Scorer scorer) throws IOException {
valueScript.setScorer(scorer);
public Post post() {
return new Post();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getLongValues();
valueScript.setNextReader(context);
public InternalFacet buildFacet(String facetName) {
return new InternalFullDateHistogramFacet(facetName, comparatorType, entries, true);
}
@Override
public Facet facet() {
return new InternalFullDateHistogramFacet(facetName, comparatorType, histoProc.entries, true);
class Post extends FacetExecutor.Post {
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
DateHistogramProc histoProc = new DateHistogramProc(tzRounding, valueScript, ValueScriptDateHistogramFacetExecutor.this.entries);
for (ContextDocIdSet entry : docSets) {
LongValues keyValues = keyIndexFieldData.load(entry.context).getLongValues();
valueScript.setNextReader(entry.context);
DocIdSetIterator it = entry.docSet.iterator();
int doc;
while ((doc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
keyValues.forEachValueInDoc(doc, histoProc);
}
}
}
}
class Collector extends FacetExecutor.Collector {
private final DateHistogramProc histoProc;
private LongValues keyValues;
public Collector() {
histoProc = new DateHistogramProc(tzRounding, valueScript, entries);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
valueScript.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getLongValues();
valueScript.setNextReader(context);
}
@Override
public void collect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class DateHistogramProc implements LongValues.ValueInDocProc {
private final TimeZoneRounding tzRounding;
protected final SearchScript valueScript;
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript) {
public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript, final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries) {
this.tzRounding = tzRounding;
this.valueScript = valueScript;
this.entries = entries;
}
@Override

View File

@ -23,8 +23,6 @@ import org.elasticsearch.search.facet.Facet;
/**
* A filter facet returns the count (number of hits) of documents matching a filter.
*
*
*/
public interface FilterFacet extends Facet {
@ -33,11 +31,6 @@ public interface FilterFacet extends Facet {
*/
public static final String TYPE = "filter";
/**
* The count of the facet.
*/
long count();
/**
* The count of the facet.
*/

View File

@ -22,14 +22,14 @@ package org.elasticsearch.search.facet.filter;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
/**
*
*/
public class FilterFacetBuilder extends AbstractFacetBuilder {
public class FilterFacetBuilder extends FacetBuilder {
private FilterBuilder filter;

View File

@ -1,85 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.filter;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.OptimizeGlobalFacetCollector;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
*
*/
public class FilterFacetCollector extends AbstractFacetCollector implements OptimizeGlobalFacetCollector {
private final Filter filter;
private Bits bits;
private int count = 0;
public FilterFacetCollector(String facetName, Filter filter, FilterCache filterCache) {
super(facetName);
this.filter = filter;
}
@Override
public void optimizedGlobalExecution(SearchContext searchContext) throws IOException {
Query query = new XConstantScoreQuery(filter);
if (super.filter != null) {
query = new XFilteredQuery(query, super.filter);
}
Filter searchFilter = searchContext.mapperService().searchFilter(searchContext.types());
if (searchFilter != null) {
query = new XFilteredQuery(query, searchContext.filterCache().cache(searchFilter));
}
TotalHitCountCollector collector = new TotalHitCountCollector();
searchContext.searcher().search(query, collector);
count = collector.getTotalHits();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
bits = DocIdSets.toSafeBits(context.reader(), filter.getDocIdSet(context, context.reader().getLiveDocs()));
}
@Override
protected void doCollect(int doc) throws IOException {
if (bits.get(doc)) {
count++;
}
}
@Override
public Facet facet() {
return new InternalFilterFacet(facetName, count);
}
}
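The deleted optimizedGlobalExecution counted filter hits by pushing a constant-score query through Lucene's TotalHitCountCollector; the executor that replaces it (next file) reaches the same number by intersecting doc sets in POST mode. The core of the deleted trick as a standalone helper, with the IndexSearcher supplied by the caller:
import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHitCountCollector;
public class CountQuerySketch {
    // Counts matches without scoring or materializing hits.
    static int countHits(IndexSearcher searcher, Query query) throws IOException {
        TotalHitCountCollector collector = new TotalHitCountCollector();
        searcher.search(query, collector);
        return collector.getTotalHits();
    }
}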

View File

@ -0,0 +1,106 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.filter;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import java.io.IOException;
import java.util.List;
/**
*
*/
public class FilterFacetExecutor extends FacetExecutor {
private final Filter filter;
int count = -1;
public FilterFacetExecutor(Filter filter) {
this.filter = filter;
}
@Override
public Collector collector() {
return new Collector();
}
@Override
public Post post() {
return new Post();
}
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFilterFacet(facetName, count);
}
class Post extends FacetExecutor.Post {
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
int count = 0;
for (ContextDocIdSet docSet : docSets) {
DocIdSet filteredDocIdSet = filter.getDocIdSet(docSet.context, docSet.context.reader().getLiveDocs());
if (filteredDocIdSet == null || docSet.docSet == null) {
continue;
}
DocIdSetIterator iter = new AndDocIdSet(new DocIdSet[]{docSet.docSet, filteredDocIdSet}).iterator();
while (iter.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
count++;
}
}
FilterFacetExecutor.this.count = count;
}
}
class Collector extends FacetExecutor.Collector {
private int count = 0;
private Bits bits;
@Override
public void collect(int doc) throws IOException {
if (bits.get(doc)) {
count++;
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
bits = DocIdSets.toSafeBits(context.reader(), filter.getDocIdSet(context, context.reader().getLiveDocs()));
}
@Override
public void postCollection() {
bits = null;
FilterFacetExecutor.this.count = count;
}
}
}
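In POST mode the filter facet intersects each segment's hit set with the filter's own DocIdSet via AndDocIdSet and counts the survivors. The same pattern in isolation, using only types imported by the file above:
import java.io.IOException;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
public class IntersectCountSketch {
    // Counts doc ids present in both sets, mirroring Post.executePost above.
    static int count(DocIdSet hits, DocIdSet filtered) throws IOException {
        DocIdSetIterator it = new AndDocIdSet(new DocIdSet[]{hits, filtered}).iterator();
        int count = 0;
        while (it.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
            count++;
        }
        return count;
    }
}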

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -33,10 +33,10 @@ import java.io.IOException;
/**
*
*/
public class FilterFacetProcessor extends AbstractComponent implements FacetProcessor {
public class FilterFacetParser extends AbstractComponent implements FacetParser {
@Inject
public FilterFacetProcessor(Settings settings) {
public FilterFacetParser(Settings settings) {
super(settings);
InternalFilterFacet.registerStreams();
}
@ -47,8 +47,18 @@ public class FilterFacetProcessor extends AbstractComponent implements FacetProc
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.POST;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
Filter facetFilter = context.queryParserService().parseInnerFilter(parser);
return new FilterFacetCollector(facetName, facetFilter, context.filterCache());
return new FilterFacetExecutor(facetFilter);
}
}

View File

@ -32,7 +32,7 @@ import java.util.List;
/**
*
*/
public class InternalFilterFacet implements FilterFacet, InternalFacet {
public class InternalFilterFacet extends InternalFacet implements FilterFacet {
private static final String STREAM_TYPE = "filter";
@ -52,48 +52,21 @@ public class InternalFilterFacet implements FilterFacet, InternalFacet {
return STREAM_TYPE;
}
private String name;
private long count;
private InternalFilterFacet() {
InternalFilterFacet() {
}
public InternalFilterFacet(String name, long count) {
this.name = name;
super(name);
this.count = count;
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return TYPE;
}
/**
* The "logical" name of the facet.
*/
public String name() {
return name;
}
@Override
public String getName() {
return name();
}
/**
* The count of the facet.
*/
public long count() {
return count;
}
/**
* The count of the facet.
*/
@ -108,9 +81,9 @@ public class InternalFilterFacet implements FilterFacet, InternalFacet {
}
int count = 0;
for (Facet facet : facets) {
count += ((FilterFacet) facet).count();
count += ((FilterFacet) facet).getCount();
}
return new InternalFilterFacet(name, count);
return new InternalFilterFacet(getName(), count);
}
static final class Fields {
@ -120,7 +93,7 @@ public class InternalFilterFacet implements FilterFacet, InternalFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, FilterFacet.TYPE);
builder.field(Fields.COUNT, count);
builder.endObject();
@ -135,13 +108,13 @@ public class InternalFilterFacet implements FilterFacet, InternalFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
count = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeVLong(count);
}
}
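The reduce hunk above merges per-shard filter facets by summing their counts. The same merge as a standalone sketch against the FilterFacet surface from this diff:
import java.util.List;
import org.elasticsearch.search.facet.filter.FilterFacet;
public class FilterFacetReduceSketch {
    // Total hits for the filter across all shards.
    static long totalCount(List<FilterFacet> shardFacets) {
        long count = 0;
        for (FilterFacet facet : shardFacets) {
            count += facet.getCount();
        }
        return count;
    }
}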

View File

@ -33,11 +33,6 @@ public interface GeoDistanceFacet extends Facet, Iterable<GeoDistanceFacet.Entry
*/
public static final String TYPE = "geo_distance";
/**
* An ordered list of geo distance facet entries.
*/
List<Entry> entries();
/**
* An ordered list of geo distance facet entries.
*/
@ -46,11 +41,8 @@ public interface GeoDistanceFacet extends Facet, Iterable<GeoDistanceFacet.Entry
public class Entry {
double from = Double.NEGATIVE_INFINITY;
double to = Double.POSITIVE_INFINITY;
long count;
long totalCount;
double total;
double min = Double.POSITIVE_INFINITY;
@ -74,75 +66,40 @@ public interface GeoDistanceFacet extends Facet, Iterable<GeoDistanceFacet.Entry
this.max = max;
}
public double from() {
public double getFrom() {
return this.from;
}
public double getFrom() {
return from();
}
public double to() {
public double getTo() {
return this.to;
}
public double getTo() {
return to();
}
public long count() {
return this.count;
}
public long getCount() {
return count();
}
public long totalCount() {
return this.totalCount;
return this.count;
}
public long getTotalCount() {
return this.totalCount;
}
public double total() {
return this.total;
}
public double getTotal() {
return total();
}
/**
* The mean of this facet interval.
*/
public double mean() {
if (totalCount == 0) {
return 0;
}
return total / totalCount;
return this.total;
}
/**
* The mean of this facet interval.
*/
public double getMean() {
return mean();
}
public double min() {
return this.min;
if (totalCount == 0) {
return 0;
}
return total / totalCount;
}
public double getMin() {
return this.min;
}
public double max() {
return this.max;
}
public double getMax() {
return this.max;
}

View File

@ -26,7 +26,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.List;
@ -37,7 +37,7 @@ import java.util.Map;
* number of hits within each distance range, and aggregated data (like totals of either the distance or
* custom value fields).
*/
public class GeoDistanceFacetBuilder extends AbstractFacetBuilder {
public class GeoDistanceFacetBuilder extends FacetBuilder {
private String fieldName;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -33,26 +33,19 @@ import java.io.IOException;
/**
*
*/
public class GeoDistanceFacetCollector extends AbstractFacetCollector {
public class GeoDistanceFacetExecutor extends FacetExecutor {
protected final IndexGeoPointFieldData indexFieldData;
final IndexGeoPointFieldData indexFieldData;
final double lat;
final double lon;
final DistanceUnit unit;
final GeoDistance geoDistance;
final GeoDistance.FixedSourceDistance fixedSourceDistance;
protected final double lat;
protected final double lon;
final GeoDistanceFacet.Entry[] entries;
protected final DistanceUnit unit;
protected final GeoDistance geoDistance;
protected final GeoDistance.FixedSourceDistance fixedSourceDistance;
protected GeoPointValues values;
protected final GeoDistanceFacet.Entry[] entries;
protected GeoPointValues.LatLonValueInDocProc aggregator;
public GeoDistanceFacetCollector(String facetName, IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context) {
super(facetName);
public GeoDistanceFacetExecutor(IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context) {
this.lat = lat;
this.lon = lon;
this.unit = unit;
@ -60,27 +53,50 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
this.geoDistance = geoDistance;
this.indexFieldData = indexFieldData;
this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, unit);
this.aggregator = new Aggregator(fixedSourceDistance, entries);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getGeoPointValues();
public Collector collector() {
return new Collector();
}
@Override
protected void doCollect(int doc) throws IOException {
for (GeoDistanceFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
values.forEachLatLonValueInDoc(doc, aggregator);
public Post post() {
return null;
}
@Override
public Facet facet() {
public InternalFacet buildFacet(String facetName) {
return new InternalGeoDistanceFacet(facetName, entries);
}
class Collector extends FacetExecutor.Collector {
protected GeoPointValues values;
protected GeoPointValues.LatLonValueInDocProc aggregator;
Collector() {
this.aggregator = new Aggregator(fixedSourceDistance, entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getGeoPointValues();
}
@Override
public void collect(int doc) throws IOException {
for (GeoDistanceFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
values.forEachLatLonValueInDoc(doc, aggregator);
}
@Override
public void postCollection() {
}
}
public static class Aggregator implements GeoPointValues.LatLonValueInDocProc {
private final GeoDistance.FixedSourceDistance fixedSourceDistance;
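The executor resolves the facet's origin once, via geoDistance.fixedSourceDistance(lat, lon, unit) in its constructor, so per-document work reduces to one distance computation per point. An illustrative standalone use; the calculate(...) call on FixedSourceDistance is an assumption, as this diff only shows the factory method:
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.unit.DistanceUnit;
public class FixedSourceDistanceSketch {
    public static void main(String[] args) {
        // Origin fixed once (coordinates are arbitrary examples).
        GeoDistance.FixedSourceDistance distance =
                GeoDistance.ARC.fixedSourceDistance(40.7, -74.0, DistanceUnit.KILOMETERS);
        // Then one call per target point (assumed API).
        System.out.println(distance.calculate(48.86, 2.35));
    }
}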

View File

@ -33,9 +33,9 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -45,10 +45,10 @@ import java.util.Map;
/**
*
*/
public class GeoDistanceFacetProcessor extends AbstractComponent implements FacetProcessor {
public class GeoDistanceFacetParser extends AbstractComponent implements FacetParser {
@Inject
public GeoDistanceFacetProcessor(Settings settings) {
public GeoDistanceFacetParser(Settings settings) {
super(settings);
InternalGeoDistanceFacet.registerStreams();
}
@ -59,7 +59,17 @@ public class GeoDistanceFacetProcessor extends AbstractComponent implements Face
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
String fieldName = null;
String valueFieldName = null;
String valueScript = null;
@ -174,16 +184,16 @@ public class GeoDistanceFacetProcessor extends AbstractComponent implements Face
throw new FacetPhaseExecutionException(facetName, "failed to find mapping for [" + valueFieldName + "]");
}
IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper);
return new ValueGeoDistanceFacetCollector(facetName, keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
return new ValueGeoDistanceFacetExecutor(keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
context, valueIndexFieldData);
}
if (valueScript != null) {
return new ScriptGeoDistanceFacetCollector(facetName, keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
return new ScriptGeoDistanceFacetExecutor(keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
context, scriptLang, valueScript, params);
}
return new GeoDistanceFacetCollector(facetName, keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
return new GeoDistanceFacetExecutor(keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
context);
}
}

View File

@ -34,7 +34,7 @@ import java.util.List;
/**
*
*/
public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet {
public class InternalGeoDistanceFacet extends InternalFacet implements GeoDistanceFacet {
private static final String STREAM_TYPE = "geoDistance";
@ -54,51 +54,29 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
return STREAM_TYPE;
}
private String name;
Entry[] entries;
InternalGeoDistanceFacet() {
}
public InternalGeoDistanceFacet(String name, Entry[] entries) {
this.name = name;
super(name);
this.entries = entries;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
public String getType() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<Entry> entries() {
public List<Entry> getEntries() {
return ImmutableList.copyOf(entries);
}
@Override
public List<Entry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return entries().iterator();
return getEntries().iterator();
}
@Override
@ -134,7 +112,7 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
entries = new Entry[in.readVInt()];
for (int i = 0; i < entries.length; i++) {
entries[i] = new Entry(in.readDouble(), in.readDouble(), in.readVLong(), in.readVLong(), in.readDouble(), in.readDouble(), in.readDouble());
@ -143,7 +121,7 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeVInt(entries.length);
for (Entry entry : entries) {
out.writeDouble(entry.from);
@ -172,7 +150,7 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, GeoDistanceFacet.TYPE);
builder.startArray(Fields.RANGES);
for (Entry entry : entries) {
@ -183,11 +161,12 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
if (!Double.isInfinite(entry.to)) {
builder.field(Fields.TO, entry.to);
}
builder.field(Fields.MIN, entry.min());
builder.field(Fields.MAX, entry.max());
builder.field(Fields.TOTAL_COUNT, entry.totalCount());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.MEAN, entry.mean());
builder.field(Fields.COUNT, entry.getCount());
builder.field(Fields.MIN, entry.getMin());
builder.field(Fields.MAX, entry.getMax());
builder.field(Fields.TOTAL_COUNT, entry.getTotalCount());
builder.field(Fields.TOTAL, entry.getTotal());
builder.field(Fields.MEAN, entry.getMean());
builder.endObject();
}
builder.endArray();

View File

@ -34,38 +34,53 @@ import java.util.Map;
/**
*
*/
public class ScriptGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
public class ScriptGeoDistanceFacetExecutor extends GeoDistanceFacetExecutor {
private final SearchScript script;
private Aggregator scriptAggregator;
public ScriptGeoDistanceFacetCollector(String facetName, IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context,
String scriptLang, String script, Map<String, Object> params) {
super(facetName, indexFieldData, lat, lon, unit, geoDistance, entries, context);
public ScriptGeoDistanceFacetExecutor(IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context,
String scriptLang, String script, Map<String, Object> params) {
super(indexFieldData, lat, lon, unit, geoDistance, entries, context);
this.script = context.scriptService().search(context.lookup(), scriptLang, script, params);
this.aggregator = new Aggregator(fixedSourceDistance, entries);
this.scriptAggregator = (Aggregator) this.aggregator;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
script.setScorer(scorer);
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
super.doSetNextReader(context);
script.setNextReader(context);
public Post post() {
return null;
}
@Override
protected void doCollect(int doc) throws IOException {
script.setNextDocId(doc);
this.scriptAggregator.scriptValue = script.runAsDouble();
super.doCollect(doc);
class Collector extends GeoDistanceFacetExecutor.Collector {
private Aggregator scriptAggregator;
Collector() {
this.aggregator = new Aggregator(fixedSourceDistance, entries);
this.scriptAggregator = (Aggregator) this.aggregator;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
script.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
super.setNextReader(context);
script.setNextReader(context);
}
@Override
public void collect(int doc) throws IOException {
script.setNextDocId(doc);
this.scriptAggregator.scriptValue = script.runAsDouble();
super.collect(doc);
}
}
public static class Aggregator implements GeoPointValues.LatLonValueInDocProc {

View File

@ -33,21 +33,37 @@ import java.io.IOException;
/**
*
*/
public class ValueGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
public class ValueGeoDistanceFacetExecutor extends GeoDistanceFacetExecutor {
private final IndexNumericFieldData valueIndexFieldData;
public ValueGeoDistanceFacetCollector(String facetName, IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context, IndexNumericFieldData valueIndexFieldData) {
super(facetName, indexFieldData, lat, lon, unit, geoDistance, entries, context);
public ValueGeoDistanceFacetExecutor(IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context, IndexNumericFieldData valueIndexFieldData) {
super(indexFieldData, lat, lon, unit, geoDistance, entries, context);
this.valueIndexFieldData = valueIndexFieldData;
this.aggregator = new Aggregator(fixedSourceDistance, entries);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
super.doSetNextReader(context);
((Aggregator) this.aggregator).valueValues = valueIndexFieldData.load(context).getDoubleValues();
public Collector collector() {
return new Collector();
}
@Override
public Post post() {
return null;
}
class Collector extends GeoDistanceFacetExecutor.Collector {
Collector() {
this.aggregator = new Aggregator(fixedSourceDistance, entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
super.setNextReader(context);
((Aggregator) this.aggregator).valueValues = valueIndexFieldData.load(context).getDoubleValues();
}
}
public static class Aggregator implements GeoPointValues.LatLonValueInDocProc {

View File

@ -17,16 +17,15 @@
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
package org.elasticsearch.search.facet.histogram;
import gnu.trove.map.hash.TLongLongHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -35,49 +34,73 @@ import java.io.IOException;
* A histogram facet executor that uses the same field for both the key and the value.
*/
public class CountHistogramFacetCollector extends AbstractFacetCollector {
public class CountHistogramFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
final long interval;
private DoubleValues values;
private final HistogramProc histoProc;
final TLongLongHashMap counts;
public CountHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
public CountHistogramFacetExecutor(IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
histoProc = new HistogramProc(interval);
this.interval = interval;
this.counts = CacheRecycler.popLongLongMap();
}
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
public Post post() {
return null;
}
@Override
public Facet facet() {
return new InternalCountHistogramFacet(facetName, comparatorType, histoProc.counts(), true);
public InternalFacet buildFacet(String facetName) {
return new InternalCountHistogramFacet(facetName, comparatorType, counts, true);
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
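    // Illustrative check of bucket(...) above: the cast truncates toward zero
    // rather than flooring, so negative values do not land where a floor-based
    // bucketing would put them.
    //   bucket(17.0, 5) == 15   // (long) (17.0 / 5) = 3, and 3 * 5 = 15
    //   bucket( 4.9, 5) ==  0
    //   bucket(-3.0, 5) ==  0   // (long) (-0.6) = 0; Math.floor(-0.6) would give -5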
class Collector extends FacetExecutor.Collector {
private final HistogramProc histoProc;
private DoubleValues values;
public Collector() {
histoProc = new HistogramProc(interval, counts);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class HistogramProc implements DoubleValues.ValueInDocProc {
private final long interval;
private final TLongLongHashMap counts;
private final TLongLongHashMap counts = CacheRecycler.popLongLongMap();
public HistogramProc(long interval) {
public HistogramProc(long interval, TLongLongHashMap counts) {
this.interval = interval;
this.counts = counts;
}
@Override

View File

@ -17,16 +17,15 @@
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -35,49 +34,73 @@ import java.io.IOException;
* A histogram facet executor that uses the same field for both the key and the value.
*/
public class FullHistogramFacetCollector extends AbstractFacetCollector {
public class FullHistogramFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
final long interval;
private DoubleValues values;
private final HistogramProc histoProc;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
public FullHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
public FullHistogramFacetExecutor(IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
histoProc = new HistogramProc(interval);
this.interval = interval;
this.entries = CacheRecycler.popLongObjectMap();
}
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
public Post post() {
return null;
}
@Override
public Facet facet() {
return new InternalFullHistogramFacet(facetName, comparatorType, histoProc.entries, true);
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
class Collector extends FacetExecutor.Collector {
private final HistogramProc histoProc;
private DoubleValues values;
Collector() {
this.histoProc = new HistogramProc(interval, entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class HistogramProc implements DoubleValues.ValueInDocProc {
final long interval;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
public HistogramProc(long interval) {
public HistogramProc(long interval, ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries) {
this.interval = interval;
this.entries = entries;
}
@Override

View File

@@ -35,11 +35,6 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
*/
public static final String TYPE = "histogram";
/**
* An ordered list of histogram facet entries.
*/
List<? extends Entry> entries();
/**
* An ordered list of histogram facet entries.
*/
@@ -60,7 +55,7 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
if (o2 == null) {
return -1;
}
return (o1.key() < o2.key() ? -1 : (o1.key() == o2.key() ? 0 : 1));
return (o1.getKey() < o2.getKey() ? -1 : (o1.getKey() == o2.getKey() ? 0 : 1));
}
}),
COUNT((byte) 1, "count", new Comparator<Entry>() {
@@ -77,7 +72,7 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
if (o2 == null) {
return -1;
}
return (o1.count() < o2.count() ? -1 : (o1.count() == o2.count() ? 0 : 1));
return (o1.getCount() < o2.getCount() ? -1 : (o1.getCount() == o2.getCount() ? 0 : 1));
}
}),
TOTAL((byte) 2, "total", new Comparator<Entry>() {
@@ -94,7 +89,7 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
if (o2 == null) {
return -1;
}
return (o1.total() < o2.total() ? -1 : (o1.total() == o2.total() ? 0 : 1));
return (o1.getTotal() < o2.getTotal() ? -1 : (o1.getTotal() == o2.getTotal() ? 0 : 1));
}
});
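For the long-valued KEY and COUNT comparators, the hand-rolled three-way test above is exactly Long.compare; a sketch, assuming the codebase can rely on Java 7 (Double.compare would also work for TOTAL, except that it orders NaN differently):

    return Long.compare(o1.getKey(), o2.getKey());      // KEY
    return Long.compare(o1.getCount(), o2.getCount());  // COUNT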
@@ -147,71 +142,36 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
public interface Entry {
/**
* The key value of the histogram.
*/
long key();
/**
* The key value of the histogram.
*/
long getKey();
/**
* The number of hits that fall within that key "range" or "interval".
*/
long count();
/**
* The number of hits that fall within that key "range" or "interval".
*/
long getCount();
/**
* The total count of values aggregated to compute the total.
*/
long totalCount();
/**
* The total count of values aggregated to compute the total.
*/
long getTotalCount();
/**
* The sum / total of the value field values that fall within this key "interval".
*/
double total();
/**
* The sum / total of the value field values that fall within this key "interval".
*/
double getTotal();
/**
* The mean of this facet interval.
*/
double mean();
/**
* The mean of this facet interval.
*/
double getMean();
/**
* The minimum value.
*/
double min();
/**
* The minimum value.
*/
double getMin();
/**
* The maximum value.
*/
double max();
/**
* The maximum value.
*/

View File

@@ -22,7 +22,7 @@ package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
@@ -30,13 +30,11 @@ import java.util.concurrent.TimeUnit;
/**
* A facet builder of histogram facets.
*/
public class HistogramFacetBuilder extends AbstractFacetBuilder {
public class HistogramFacetBuilder extends FacetBuilder {
private String keyFieldName;
private String valueFieldName;
private long interval = -1;
private HistogramFacet.ComparatorType comparatorType;
private Object from;
private Object to;
/**
* Constructs a new histogram facet with the provided facet logical name.
@@ -90,16 +88,6 @@ public class HistogramFacetBuilder extends AbstractFacetBuilder {
return interval(unit.toMillis(interval));
}
/**
* Sets the from and to bounds for the facet. This both performs a bounds check, including only
* values within the bounds, and improves performance.
*/
public HistogramFacetBuilder bounds(Object from, Object to) {
this.from = from;
this.to = to;
return this;
}
public HistogramFacetBuilder comparator(HistogramFacet.ComparatorType comparatorType) {
this.comparatorType = comparatorType;
return this;
@@ -150,11 +138,6 @@ public class HistogramFacetBuilder extends AbstractFacetBuilder {
}
builder.field("interval", interval);
if (from != null && to != null) {
builder.field("from", from);
builder.field("to", to);
}
if (comparatorType != null) {
builder.field("comparator", comparatorType.description());
}
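A hypothetical usage sketch of this builder; interval(...) and comparator(...) appear above, while the field(...) setter name is an assumption based on the keyFieldName member:

    FacetBuilder facet = new HistogramFacetBuilder("price_histo")
            .field("price")                                    // assumed setter
            .interval(100)
            .comparator(HistogramFacet.ComparatorType.COUNT);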

View File

@@ -26,13 +26,9 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.facet.histogram.bounded.BoundedCountHistogramFacetCollector;
import org.elasticsearch.search.facet.histogram.bounded.BoundedValueHistogramFacetCollector;
import org.elasticsearch.search.facet.histogram.bounded.BoundedValueScriptHistogramFacetCollector;
import org.elasticsearch.search.facet.histogram.unbounded.*;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -41,10 +37,10 @@ import java.util.Map;
/**
*
*/
public class HistogramFacetProcessor extends AbstractComponent implements FacetProcessor {
public class HistogramFacetParser extends AbstractComponent implements FacetParser {
@Inject
public HistogramFacetProcessor(Settings settings) {
public HistogramFacetParser(Settings settings) {
super(settings);
InternalHistogramFacet.registerStreams();
}
@@ -55,7 +51,17 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
String keyField = null;
String valueField = null;
String keyScript = null;
@@ -66,8 +72,6 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
HistogramFacet.ComparatorType comparatorType = HistogramFacet.ComparatorType.KEY;
XContentParser.Token token;
String fieldName = null;
String sFrom = null;
String sTo = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
@@ -84,10 +88,6 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
valueField = parser.text();
} else if ("interval".equals(fieldName)) {
interval = parser.longValue();
} else if ("from".equals(fieldName)) {
sFrom = parser.text();
} else if ("to".equals(fieldName)) {
sTo = parser.text();
} else if ("time_interval".equals(fieldName)) {
interval = TimeValue.parseTimeValue(parser.text(), null).millis();
} else if ("key_script".equals(fieldName) || "keyScript".equals(fieldName)) {
@@ -103,7 +103,7 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
}
if (keyScript != null && valueScript != null) {
return new ScriptHistogramFacetCollector(facetName, scriptLang, keyScript, valueScript, params, interval, comparatorType, context);
return new ScriptHistogramFacetExecutor(scriptLang, keyScript, valueScript, params, interval, comparatorType, context);
}
if (keyField == null) {
@@ -129,28 +129,15 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
valueIndexFieldData = context.fieldData().getForField(valueMapper);
}
if (sFrom != null && sTo != null && keyField != null) {
long from = ((Number) keyMapper.value(sFrom)).longValue();
long to = ((Number) keyMapper.value(sTo)).longValue();
if (valueField != null) {
return new BoundedValueHistogramFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, interval, from, to, comparatorType, context);
} else if (valueScript != null) {
return new BoundedValueScriptHistogramFacetCollector(facetName, keyIndexFieldData, scriptLang, valueScript, params, interval, from, to, comparatorType, context);
} else {
return new BoundedCountHistogramFacetCollector(facetName, keyIndexFieldData, interval, from, to, comparatorType, context);
}
}
if (valueScript != null) {
return new ValueScriptHistogramFacetCollector(facetName, keyIndexFieldData, scriptLang, valueScript, params, interval, comparatorType, context);
return new ValueScriptHistogramFacetExecutor(keyIndexFieldData, scriptLang, valueScript, params, interval, comparatorType, context);
} else if (valueField == null) {
return new CountHistogramFacetCollector(facetName, keyIndexFieldData, interval, comparatorType, context);
return new CountHistogramFacetExecutor(keyIndexFieldData, interval, comparatorType, context);
} else if (keyField.equals(valueField)) {
return new FullHistogramFacetCollector(facetName, keyIndexFieldData, interval, comparatorType, context);
return new FullHistogramFacetExecutor(keyIndexFieldData, interval, comparatorType, context);
} else {
// we have a value field, and it's different from the key
return new ValueHistogramFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, interval, comparatorType, context);
return new ValueHistogramFacetExecutor(keyIndexFieldData, valueIndexFieldData, interval, comparatorType, context);
}
}
}
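For reference, a sketch of the request body this parser handles; "interval", "time_interval" and the script fields are visible in the hunks above, while the "key_field" / "value_field" names are assumed from the keyField and valueField variables:

    {
        "histo1" : {
            "histogram" : {
                "key_field" : "timestamp",
                "value_field" : "bytes",
                "interval" : 100
            }
        }
    }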

View File

@@ -23,7 +23,7 @@ import com.google.common.collect.Maps;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.Map;
@@ -31,7 +31,7 @@ import java.util.Map;
/**
*
*/
public class HistogramScriptFacetBuilder extends AbstractFacetBuilder {
public class HistogramScriptFacetBuilder extends FacetBuilder {
private String lang;
private String keyFieldName;
private String keyScript;
@@ -39,8 +39,6 @@ public class HistogramScriptFacetBuilder extends AbstractFacetBuilder {
private Map<String, Object> params;
private long interval = -1;
private HistogramFacet.ComparatorType comparatorType;
private Object from;
private Object to;
public HistogramScriptFacetBuilder(String name) {
super(name);
@@ -96,16 +94,6 @@ public class HistogramScriptFacetBuilder extends AbstractFacetBuilder {
return this;
}
/**
* Sets the from and to bounds for the facet. This both performs a bounds check, including only
* values within the bounds, and improves performance.
*/
public HistogramScriptFacetBuilder bounds(Object from, Object to) {
this.from = from;
this.to = to;
return this;
}
public HistogramScriptFacetBuilder facetFilter(FilterBuilder filter) {
this.facetFilter = filter;
return this;
@@ -138,11 +126,6 @@ public class HistogramScriptFacetBuilder extends AbstractFacetBuilder {
}
builder.field("value_script", valueScript);
if (from != null && to != null) {
builder.field("from", from);
builder.field("to", to);
}
if (lang != null) {
builder.field("lang", lang);
}

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
package org.elasticsearch.search.facet.histogram;
import gnu.trove.iterator.TLongLongIterator;
import gnu.trove.map.hash.TLongLongHashMap;
@@ -27,8 +27,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.histogram.InternalHistogramFacet;
import java.io.IOException;
import java.util.Arrays;
@@ -72,38 +70,18 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
}
@Override
public long key() {
public long getKey() {
return key;
}
@Override
public long getKey() {
return key();
}
@Override
public long count() {
public long getCount() {
return count;
}
@Override
public long getCount() {
return count();
}
@Override
public double total() {
return Double.NaN;
}
@Override
public double getTotal() {
return total();
}
@Override
public long totalCount() {
return 0;
return Double.NaN;
}
@Override
@@ -111,18 +89,8 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
return 0;
}
@Override
public double mean() {
return Double.NaN;
}
@Override
public double getMean() {
return mean();
}
@Override
public double min() {
return Double.NaN;
}
@@ -131,69 +99,35 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
return Double.NaN;
}
@Override
public double max() {
return Double.NaN;
}
@Override
public double getMax() {
return Double.NaN;
}
}
private String name;
ComparatorType comparatorType;
TLongLongHashMap counts;
boolean cachedCounts;
CountEntry[] entries = null;
private InternalCountHistogramFacet() {
}
public InternalCountHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, boolean cachedCounts) {
this.name = name;
super(name);
this.comparatorType = comparatorType;
this.counts = counts;
this.cachedCounts = cachedCounts;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<CountEntry> entries() {
public List<CountEntry> getEntries() {
return Arrays.asList(computeEntries());
}
@Override
public List<CountEntry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return (Iterator) entries().iterator();
return (Iterator) getEntries().iterator();
}
private CountEntry[] computeEntries() {
@@ -235,7 +169,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
histoFacet.releaseCache();
}
return new InternalCountHistogramFacet(name, comparatorType, counts, true);
return new InternalCountHistogramFacet(getName(), comparatorType, counts, true);
}
static final class Fields {
@@ -247,13 +181,13 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
builder.field(Fields.KEY, entry.key());
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.KEY, entry.getKey());
builder.field(Fields.COUNT, entry.getCount());
builder.endObject();
}
builder.endArray();
@@ -269,7 +203,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
@@ -283,7 +217,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeByte(comparatorType.id());
// optimize the write, since we know we have the same buckets as keys
out.writeVInt(counts.size());

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -26,8 +26,6 @@ import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.histogram.InternalHistogramFacet;
import java.io.IOException;
import java.util.*;
@@ -77,38 +75,18 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
}
@Override
public long key() {
public long getKey() {
return key;
}
@Override
public long getKey() {
return key();
}
@Override
public long count() {
public long getCount() {
return count;
}
@Override
public long getCount() {
return count();
}
@Override
public double total() {
return total;
}
@Override
public double getTotal() {
return total();
}
@Override
public long totalCount() {
return totalCount;
return total;
}
@Override
@@ -116,50 +94,36 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
return this.totalCount;
}
@Override
public double mean() {
return total / totalCount;
}
@Override
public double getMean() {
return totalCount == 0 ? 0 : total / totalCount; // guard against 0/0 -> NaN for empty entries
}
@Override
public double min() {
return this.min;
}
@Override
public double getMin() {
return this.min;
}
@Override
public double max() {
return this.max;
}
@Override
public double getMax() {
return this.max;
}
}
private String name;
private ComparatorType comparatorType;
ExtTLongObjectHashMap<FullEntry> tEntries;
boolean cachedEntries;
Collection<FullEntry> entries;
private InternalFullHistogramFacet() {
InternalFullHistogramFacet() {
}
InternalFullHistogramFacet(String name) {
super(name);
}
public InternalFullHistogramFacet(String name, ComparatorType comparatorType, ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries, boolean cachedEntries) {
this.name = name;
super(name);
this.comparatorType = comparatorType;
this.tEntries = entries;
this.cachedEntries = cachedEntries;
@@ -167,41 +131,16 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<FullEntry> entries() {
public List<FullEntry> getEntries() {
if (!(entries instanceof List)) {
entries = new ArrayList<FullEntry>(entries);
}
return (List<FullEntry>) entries;
}
@Override
public List<FullEntry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return (Iterator) entries().iterator();
return (Iterator) getEntries().iterator();
}
void releaseCache() {
@@ -217,7 +156,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
if (facets.size() == 1) {
// we need to sort it
InternalFullHistogramFacet internalFacet = (InternalFullHistogramFacet) facets.get(0);
List<FullEntry> entries = internalFacet.entries();
List<FullEntry> entries = internalFacet.getEntries();
Collections.sort(entries, comparatorType.comparator());
internalFacet.releaseCache();
return internalFacet;
@@ -261,8 +200,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
CacheRecycler.pushLongObjectMap(map);
// just initialize it as an already ordered facet
InternalFullHistogramFacet ret = new InternalFullHistogramFacet();
ret.name = name;
InternalFullHistogramFacet ret = new InternalFullHistogramFacet(getName());
ret.comparatorType = comparatorType;
ret.entries = ordered;
return ret;
@@ -282,18 +220,18 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.startArray(Fields.ENTRIES);
for (Entry entry : entries) {
builder.startObject();
builder.field(Fields.KEY, entry.key());
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.MIN, entry.min());
builder.field(Fields.MAX, entry.max());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.TOTAL_COUNT, entry.totalCount());
builder.field(Fields.MEAN, entry.mean());
builder.field(Fields.KEY, entry.getKey());
builder.field(Fields.COUNT, entry.getCount());
builder.field(Fields.MIN, entry.getMin());
builder.field(Fields.MAX, entry.getMax());
builder.field(Fields.TOTAL, entry.getTotal());
builder.field(Fields.TOTAL_COUNT, entry.getTotalCount());
builder.field(Fields.MEAN, entry.getMean());
builder.endObject();
}
builder.endArray();
@@ -309,7 +247,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
cachedEntries = false;
@@ -322,7 +260,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeByte(comparatorType.id());
out.writeVInt(entries.size());
for (FullEntry entry : entries) {

View File

@@ -20,20 +20,26 @@
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.facet.histogram.bounded.InternalBoundedCountHistogramFacet;
import org.elasticsearch.search.facet.histogram.bounded.InternalBoundedFullHistogramFacet;
import org.elasticsearch.search.facet.histogram.unbounded.InternalCountHistogramFacet;
import org.elasticsearch.search.facet.histogram.unbounded.InternalFullHistogramFacet;
/**
*
*/
public abstract class InternalHistogramFacet implements HistogramFacet, InternalFacet {
public abstract class InternalHistogramFacet extends InternalFacet implements HistogramFacet {
public static void registerStreams() {
InternalFullHistogramFacet.registerStreams();
InternalCountHistogramFacet.registerStreams();
InternalBoundedCountHistogramFacet.registerStreams();
InternalBoundedFullHistogramFacet.registerStreams();
}
protected InternalHistogramFacet() {
}
protected InternalHistogramFacet(String facetName) {
super(facetName);
}
@Override
public final String getType() {
return TYPE;
}
}

View File

@@ -0,0 +1,127 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ScriptHistogramFacetExecutor extends FacetExecutor {
final SearchScript keyScript;
final SearchScript valueScript;
final long interval;
private final HistogramFacet.ComparatorType comparatorType;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
public ScriptHistogramFacetExecutor(String scriptLang, String keyScript, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
this.keyScript = context.scriptService().search(context.lookup(), scriptLang, keyScript, params);
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
this.interval = interval > 0 ? interval : 0;
this.comparatorType = comparatorType;
this.entries = CacheRecycler.popLongObjectMap();
}
@Override
public Collector collector() {
return new Collector(entries);
}
@Override
public Post post() {
return null;
}
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
class Collector extends FacetExecutor.Collector {
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
Collector(ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries) {
this.entries = entries;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
keyScript.setScorer(scorer);
valueScript.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
keyScript.setNextReader(context);
valueScript.setNextReader(context);
}
@Override
public void collect(int doc) throws IOException {
keyScript.setNextDocId(doc);
valueScript.setNextDocId(doc);
long bucket;
if (interval == 0) {
bucket = keyScript.runAsLong();
} else {
bucket = bucket(keyScript.runAsDouble(), interval);
}
double value = valueScript.runAsDouble();
InternalFullHistogramFacet.FullEntry entry = entries.get(bucket);
if (entry == null) {
entry = new InternalFullHistogramFacet.FullEntry(bucket, 1, value, value, 1, value);
entries.put(bucket, entry);
} else {
entry.count++;
entry.totalCount++;
entry.total += value;
if (value < entry.min) {
entry.min = value;
}
if (value > entry.max) {
entry.max = value;
}
}
}
@Override
public void postCollection() {
}
}
}
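A hypothetical builder-side counterpart, with keyScript(...)/valueScript(...) setter names assumed from the HistogramScriptFacetBuilder fields above; note that when the interval is left <= 0 the constructor stores 0 and collect() then buckets on keyScript.runAsLong() directly instead of calling bucket(...):

    FacetBuilder facet = new HistogramScriptFacetBuilder("histo1")
            .keyScript("doc['timestamp'].value")   // assumed setter name
            .valueScript("doc['bytes'].value");    // assumed setter name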

View File

@@ -17,16 +17,15 @@
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -34,53 +33,75 @@ import java.io.IOException;
/**
* A histogram facet collector that uses different fields for the key and the value.
*/
public class ValueHistogramFacetCollector extends AbstractFacetCollector {
public class ValueHistogramFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
private final long interval;
private DoubleValues keyValues;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
private final HistogramProc histoProc;
public ValueHistogramFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
public ValueHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
this.comparatorType = comparatorType;
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
histoProc = new HistogramProc(interval);
this.interval = interval;
this.entries = CacheRecycler.popLongObjectMap();
}
@Override
protected void doCollect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getDoubleValues();
histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
public Post post() {
return null;
}
@Override
public Facet facet() {
return new InternalFullHistogramFacet(facetName, comparatorType, histoProc.entries, true);
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
}
class Collector extends FacetExecutor.Collector {
private final HistogramProc histoProc;
private DoubleValues keyValues;
public Collector() {
this.histoProc = new HistogramProc(interval, entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getDoubleValues();
histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class HistogramProc implements DoubleValues.ValueInDocProc {
final long interval;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
DoubleValues valueValues;
final ValueAggregator valueAggregator = new ValueAggregator();
public HistogramProc(long interval) {
public HistogramProc(long interval, ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries) {
this.interval = interval;
this.entries = entries;
}
@Override
@@ -89,7 +110,7 @@ public class ValueHistogramFacetCollector extends AbstractFacetCollector {
@Override
public void onValue(int docId, double value) {
long bucket = FullHistogramFacetCollector.bucket(value, interval);
long bucket = FullHistogramFacetExecutor.bucket(value, interval);
InternalFullHistogramFacet.FullEntry entry = entries.get(bucket);
if (entry == null) {
entry = new InternalFullHistogramFacet.FullEntry(bucket, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0);
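New entries start with min = +Infinity and max = -Infinity so that, as in the ValueAggregator of the bounded collectors below, the first aggregated value initializes both bounds without a special case:

    // for any finite v:  v < Double.POSITIVE_INFINITY  ->  entry.min = v
    //                    v > Double.NEGATIVE_INFINITY  ->  entry.max = v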

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
@@ -26,9 +26,8 @@ import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -38,60 +37,85 @@ import java.util.Map;
* A histogram facet collector that uses the same field for both the key and the
* value.
*/
public class ValueScriptHistogramFacetCollector extends AbstractFacetCollector {
public class ValueScriptHistogramFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
final SearchScript valueScript;
final long interval;
private DoubleValues values;
private final SearchScript valueScript;
private final HistogramProc histoProc;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
public ValueScriptHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, String scriptLang, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
public ValueScriptHistogramFacetExecutor(IndexNumericFieldData indexFieldData, String scriptLang, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.interval = interval;
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
histoProc = new HistogramProc(interval, this.valueScript);
this.entries = CacheRecycler.popLongObjectMap();
}
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
public Collector collector() {
return new Collector();
}
@Override
public void setScorer(Scorer scorer) throws IOException {
valueScript.setScorer(scorer);
public Post post() {
return null;
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
valueScript.setNextReader(context);
}
@Override
public Facet facet() {
return new InternalFullHistogramFacet(facetName, comparatorType, histoProc.entries, true);
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
class Collector extends FacetExecutor.Collector {
private DoubleValues values;
private final HistogramProc histoProc;
public Collector() {
histoProc = new HistogramProc(interval, valueScript, entries);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
valueScript.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
valueScript.setNextReader(context);
}
@Override
public void collect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
}
@Override
public void postCollection() {
}
}
public static class HistogramProc implements DoubleValues.ValueInDocProc {
private final long interval;
private final SearchScript valueScript;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries;
public HistogramProc(long interval, SearchScript valueScript) {
public HistogramProc(long interval, SearchScript valueScript, ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries) {
this.interval = interval;
this.valueScript = valueScript;
this.entries = entries;
}
@Override

View File

@@ -1,107 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram.bounded;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
public class BoundedCountHistogramFacetCollector extends AbstractFacetCollector {
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
private LongValues values;
private final HistogramProc histoProc;
public BoundedCountHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
long normalizedFrom = (((long) ((double) from / interval)) * interval);
long normalizedTo = (((long) ((double) to / interval)) * interval);
if ((to % interval) != 0) {
normalizedTo += interval;
}
long offset = -normalizedFrom;
int size = (int) ((normalizedTo - normalizedFrom) / interval);
histoProc = new HistogramProc(from, to, interval, offset, size);
}
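A worked example of the normalization above, with hypothetical numbers:

    // from = 7, to = 23, interval = 5
    //   normalizedFrom = ((long) (7.0 / 5)) * 5   =  5
    //   normalizedTo   = ((long) (23.0 / 5)) * 5  = 20, +5 since 23 % 5 != 0 -> 25
    //   offset = -5, size = (25 - 5) / 5 = 4, buckets keyed 5, 10, 15, 20
    // HistogramProc then counts a value v only if from < v <= to,
    // into counts[(int) ((v + offset) / interval)]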
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, histoProc);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getLongValues();
}
@Override
public Facet facet() {
return new InternalBoundedCountHistogramFacet(facetName, comparatorType, histoProc.interval, -histoProc.offset, histoProc.size, histoProc.counts, true);
}
public static class HistogramProc implements LongValues.ValueInDocProc {
final long from;
final long to;
final long interval;
final long offset;
final int size;
final int[] counts;
public HistogramProc(long from, long to, long interval, long offset, int size) {
this.from = from;
this.to = to;
this.interval = interval;
this.offset = offset;
this.size = size;
this.counts = CacheRecycler.popIntArray(size);
}
@Override
public void onMissing(int docId) {
}
@Override
public void onValue(int docId, long value) {
if (value <= from || value > to) { // bounds check
return;
}
counts[((int) ((value + offset) / interval))]++;
}
}
}

View File

@@ -1,152 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram.bounded;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
*
*/
public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector {
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final long interval;
private final HistogramFacet.ComparatorType comparatorType;
private LongValues keyValues;
private final HistogramProc histoProc;
public BoundedValueHistogramFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.interval = interval;
this.comparatorType = comparatorType;
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
long normalizedFrom = (((long) ((double) from / interval)) * interval);
long normalizedTo = (((long) ((double) to / interval)) * interval);
if ((to % interval) != 0) {
normalizedTo += interval;
}
long offset = -normalizedFrom;
int size = (int) ((normalizedTo - normalizedFrom) / interval);
histoProc = new HistogramProc(from, to, interval, offset, size);
}
@Override
protected void doCollect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getLongValues();
histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
}
@Override
public Facet facet() {
return new InternalBoundedFullHistogramFacet(facetName, comparatorType, interval, -histoProc.offset, histoProc.size, histoProc.entries, true);
}
public static class HistogramProc implements LongValues.ValueInDocProc {
final long from;
final long to;
final long interval;
final long offset;
final int size;
final Object[] entries;
DoubleValues valueValues;
final ValueAggregator valueAggregator = new ValueAggregator();
public HistogramProc(long from, long to, long interval, long offset, int size) {
this.from = from;
this.to = to;
this.interval = interval;
this.offset = offset;
this.size = size;
this.entries = CacheRecycler.popObjectArray(size);
}
@Override
public void onMissing(int docId) {
}
@Override
public void onValue(int docId, long value) {
if (value <= from || value > to) { // bounds check
return;
}
int index = ((int) ((value + offset) / interval));
InternalBoundedFullHistogramFacet.FullEntry entry = (InternalBoundedFullHistogramFacet.FullEntry) entries[index];
if (entry == null) {
entry = new InternalBoundedFullHistogramFacet.FullEntry(index, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0);
entries[index] = entry;
}
entry.count++;
valueAggregator.entry = entry;
valueValues.forEachValueInDoc(docId, valueAggregator);
}
public static class ValueAggregator implements DoubleValues.ValueInDocProc {
InternalBoundedFullHistogramFacet.FullEntry entry;
@Override
public void onMissing(int docId) {
}
@Override
public void onValue(int docId, double value) {
entry.totalCount++;
entry.total += value;
if (value < entry.min) {
entry.min = value;
}
if (value > entry.max) {
entry.max = value;
}
}
}
}
}

View File

@@ -1,149 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram.bounded;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetCollector {
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
private LongValues keyValues;
private final SearchScript valueScript;
private final HistogramProc histoProc;
public BoundedValueScriptHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, String scriptLang, String valueScript, Map<String, Object> params, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
long normalizedFrom = (((long) ((double) from / interval)) * interval);
long normalizedTo = (((long) ((double) to / interval)) * interval);
if ((to % interval) != 0) {
normalizedTo += interval;
}
long offset = -normalizedFrom;
int size = (int) ((normalizedTo - normalizedFrom) / interval);
histoProc = new HistogramProc(from, to, interval, offset, size, this.valueScript);
}
@Override
protected void doCollect(int doc) throws IOException {
keyValues.forEachValueInDoc(doc, histoProc);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
valueScript.setScorer(scorer);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyValues = indexFieldData.load(context).getLongValues();
valueScript.setNextReader(context);
}
@Override
public Facet facet() {
return new InternalBoundedFullHistogramFacet(facetName, comparatorType, histoProc.interval, -histoProc.offset, histoProc.size, histoProc.entries, true);
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
public static class HistogramProc implements LongValues.ValueInDocProc {
final long from;
final long to;
final long interval;
final long offset;
final int size;
final Object[] entries;
private final SearchScript valueScript;
public HistogramProc(long from, long to, long interval, long offset, int size, SearchScript valueScript) {
this.from = from;
this.to = to;
this.interval = interval;
this.offset = offset;
this.size = size;
this.entries = CacheRecycler.popObjectArray(size);
this.valueScript = valueScript;
}
@Override
public void onMissing(int docId) {
}
@Override
public void onValue(int docId, long value) {
if (value <= from || value > to) { // bounds check
return;
}
int index = ((int) ((value + offset) / interval));
valueScript.setNextDocId(docId);
double scriptValue = valueScript.runAsDouble();
InternalBoundedFullHistogramFacet.FullEntry entry = (InternalBoundedFullHistogramFacet.FullEntry) entries[index];
if (entry == null) {
entries[index] = new InternalBoundedFullHistogramFacet.FullEntry(index, 1, scriptValue, scriptValue, 1, scriptValue);
} else {
entry.count++;
entry.totalCount++;
entry.total += scriptValue;
if (scriptValue < entry.min) {
entry.min = scriptValue;
}
if (scriptValue > entry.max) {
entry.max = scriptValue;
}
}
}
}
}

View File

@@ -1,303 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram.bounded;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.histogram.InternalHistogramFacet;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
*
*/
public class InternalBoundedCountHistogramFacet extends InternalHistogramFacet {
private static final String STREAM_TYPE = "cBdHistogram";
public static void registerStreams() {
Streams.registerStream(STREAM, STREAM_TYPE);
}
static Stream STREAM = new Stream() {
@Override
public Facet readFacet(String type, StreamInput in) throws IOException {
return readHistogramFacet(in);
}
};
@Override
public String streamType() {
return STREAM_TYPE;
}
/**
* A histogram entry representing a single entry within the result of a histogram facet.
*/
public static class CountEntry implements Entry {
private final long key;
private final long count;
public CountEntry(long key, long count) {
this.key = key;
this.count = count;
}
@Override
public long key() {
return key;
}
@Override
public long getKey() {
return key();
}
@Override
public long count() {
return count;
}
@Override
public long getCount() {
return count();
}
@Override
public double total() {
return Double.NaN;
}
@Override
public double getTotal() {
return total();
}
@Override
public long totalCount() {
return 0;
}
@Override
public long getTotalCount() {
return 0;
}
@Override
public double mean() {
return Double.NaN;
}
@Override
public double getMean() {
return mean();
}
@Override
public double min() {
return Double.NaN;
}
@Override
public double getMin() {
return Double.NaN;
}
@Override
public double max() {
return Double.NaN;
}
@Override
public double getMax() {
return Double.NaN;
}
}
private String name;
ComparatorType comparatorType;
boolean cachedCounts;
int[] counts;
int size;
long interval;
long offset;
CountEntry[] entries = null;
private InternalBoundedCountHistogramFacet() {
}
public InternalBoundedCountHistogramFacet(String name, ComparatorType comparatorType, long interval, long offset, int size, int[] counts, boolean cachedCounts) {
this.name = name;
this.comparatorType = comparatorType;
this.interval = interval;
this.offset = offset;
this.counts = counts;
this.size = size;
this.cachedCounts = cachedCounts;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<CountEntry> entries() {
return Arrays.asList(computeEntries());
}
@Override
public List<CountEntry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return (Iterator) entries().iterator();
}
private CountEntry[] computeEntries() {
if (entries != null) {
return entries;
}
entries = new CountEntry[size];
for (int i = 0; i < size; i++) {
entries[i] = new CountEntry((i * interval) + offset, counts[i]);
}
releaseCache();
return entries;
}
void releaseCache() {
if (cachedCounts) {
cachedCounts = false;
CacheRecycler.pushIntArray(counts);
counts = null;
}
}
@Override
public Facet reduce(List<Facet> facets) {
if (facets.size() == 1) {
InternalBoundedCountHistogramFacet firstHistoFacet = (InternalBoundedCountHistogramFacet) facets.get(0);
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(firstHistoFacet.entries, comparatorType.comparator());
}
return facets.get(0);
}
InternalBoundedCountHistogramFacet firstHistoFacet = (InternalBoundedCountHistogramFacet) facets.get(0);
for (int i = 1; i < facets.size(); i++) {
InternalBoundedCountHistogramFacet histoFacet = (InternalBoundedCountHistogramFacet) facets.get(i);
for (int j = 0; j < firstHistoFacet.size; j++) {
firstHistoFacet.counts[j] += histoFacet.counts[j];
}
histoFacet.releaseCache();
}
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(firstHistoFacet.entries, comparatorType.comparator());
}
return firstHistoFacet;
}
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.startArray(Fields.ENTRIES);
for (int i = 0; i < size; i++) {
builder.startObject();
builder.field(Fields.KEY, (i * interval) + offset);
builder.field(Fields.COUNT, counts[i]);
builder.endObject();
}
builder.endArray();
builder.endObject();
releaseCache();
return builder;
}
public static InternalBoundedCountHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
InternalBoundedCountHistogramFacet facet = new InternalBoundedCountHistogramFacet();
facet.readFrom(in);
return facet;
}
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
comparatorType = ComparatorType.fromId(in.readByte());
offset = in.readLong();
interval = in.readVLong();
size = in.readVInt();
counts = CacheRecycler.popIntArray(size);
cachedCounts = true;
for (int i = 0; i < size; i++) {
counts[i] = in.readVInt();
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeByte(comparatorType.id());
out.writeLong(offset);
out.writeVLong(interval);
out.writeVInt(size);
for (int i = 0; i < size; i++) {
out.writeVInt(counts[i]);
}
releaseCache();
}
}

View File

@@ -1,377 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram.bounded;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.histogram.InternalHistogramFacet;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
*
*/
public class InternalBoundedFullHistogramFacet extends InternalHistogramFacet {
private static final String STREAM_TYPE = "fBdHistogram";
public static void registerStreams() {
Streams.registerStream(STREAM, STREAM_TYPE);
}
static Stream STREAM = new Stream() {
@Override
public Facet readFacet(String type, StreamInput in) throws IOException {
return readHistogramFacet(in);
}
};
@Override
public String streamType() {
return STREAM_TYPE;
}
/**
* A histogram entry, representing a single bucket within the result of a histogram facet.
*/
public static class FullEntry implements Entry {
long key;
long count;
long totalCount;
double total;
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
public FullEntry(long key, long count, double min, double max, long totalCount, double total) {
this.key = key;
this.count = count;
this.min = min;
this.max = max;
this.totalCount = totalCount;
this.total = total;
}
@Override
public long key() {
return key;
}
@Override
public long getKey() {
return key();
}
@Override
public long count() {
return count;
}
@Override
public long getCount() {
return count();
}
@Override
public double total() {
return total;
}
@Override
public double getTotal() {
return total();
}
@Override
public long totalCount() {
return totalCount;
}
@Override
public long getTotalCount() {
return this.totalCount;
}
@Override
public double mean() {
if (totalCount == 0) {
return 0;
}
return total / totalCount;
}
@Override
public double getMean() {
return mean();
}
@Override
public double min() {
return this.min;
}
@Override
public double getMin() {
return this.min;
}
@Override
public double max() {
return this.max;
}
@Override
public double getMax() {
return this.max;
}
}
private String name;
private ComparatorType comparatorType;
Object[] entries;
List<Object> entriesList;
boolean cachedEntries;
int size;
long interval;
long offset;
boolean normalized;
private InternalBoundedFullHistogramFacet() {
}
public InternalBoundedFullHistogramFacet(String name, ComparatorType comparatorType, long interval, long offset, int size, Object[] entries, boolean cachedEntries) {
this.name = name;
this.comparatorType = comparatorType;
this.interval = interval;
this.offset = offset;
this.size = size;
this.entries = entries;
this.cachedEntries = cachedEntries;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<FullEntry> entries() {
normalize();
if (entriesList == null) {
Object[] newEntries = new Object[size];
System.arraycopy(entries, 0, newEntries, 0, size);
entriesList = Arrays.asList(newEntries);
}
releaseCache();
return (List) entriesList;
}
@Override
public List<FullEntry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return (Iterator) entries().iterator();
}
private void releaseCache() {
if (cachedEntries) {
cachedEntries = false;
CacheRecycler.pushObjectArray(entries);
}
}
@Override
public Facet reduce(List<Facet> facets) {
if (facets.size() == 1) {
// we need to sort it
InternalBoundedFullHistogramFacet internalFacet = (InternalBoundedFullHistogramFacet) facets.get(0);
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(internalFacet.entries, (Comparator) comparatorType.comparator());
}
return internalFacet;
}
InternalBoundedFullHistogramFacet first = (InternalBoundedFullHistogramFacet) facets.get(0);
for (int f = 1; f < facets.size(); f++) {
InternalBoundedFullHistogramFacet internalFacet = (InternalBoundedFullHistogramFacet) facets.get(f);
for (int i = 0; i < size; i++) {
FullEntry aggEntry = (FullEntry) first.entries[i];
FullEntry entry = (FullEntry) internalFacet.entries[i];
if (aggEntry == null) {
first.entries[i] = entry;
} else if (entry != null) {
aggEntry.count += entry.count;
aggEntry.totalCount += entry.totalCount;
aggEntry.total += entry.total;
if (entry.min < aggEntry.min) {
aggEntry.min = entry.min;
}
if (entry.max > aggEntry.max) {
aggEntry.max = entry.max;
}
}
}
internalFacet.releaseCache();
}
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(first.entries, (Comparator) comparatorType.comparator());
}
return first;
}
private void normalize() {
if (normalized) {
return;
}
normalized = true;
for (int i = 0; i < size; i++) {
FullEntry entry = (FullEntry) entries[i];
if (entry == null) {
entries[i] = new FullEntry((i * interval) + offset, 0, Double.NaN, Double.NaN, 0, 0);
} else {
entry.key = (i * interval) + offset;
}
}
}
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString TOTAL_COUNT = new XContentBuilderString("total_count");
static final XContentBuilderString MEAN = new XContentBuilderString("mean");
static final XContentBuilderString MIN = new XContentBuilderString("min");
static final XContentBuilderString MAX = new XContentBuilderString("max");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.startArray(Fields.ENTRIES);
for (int i = 0; i < size; i++) {
FullEntry entry = (FullEntry) entries[i];
builder.startObject();
if (normalized) {
builder.field(Fields.KEY, entry.key());
} else {
builder.field(Fields.KEY, (i * interval) + offset);
}
if (entry == null) {
builder.field(Fields.COUNT, 0);
builder.field(Fields.TOTAL, 0);
builder.field(Fields.TOTAL_COUNT, 0);
} else {
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.MIN, entry.min());
builder.field(Fields.MAX, entry.max());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.TOTAL_COUNT, entry.totalCount());
builder.field(Fields.MEAN, entry.mean());
}
builder.endObject();
}
builder.endArray();
builder.endObject();
releaseCache();
return builder;
}
public static InternalBoundedFullHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
InternalBoundedFullHistogramFacet facet = new InternalBoundedFullHistogramFacet();
facet.readFrom(in);
return facet;
}
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
comparatorType = ComparatorType.fromId(in.readByte());
offset = in.readLong();
interval = in.readVLong();
size = in.readVInt();
entries = CacheRecycler.popObjectArray(size);
cachedEntries = true;
for (int i = 0; i < size; i++) {
if (in.readBoolean()) {
entries[i] = new FullEntry(i, in.readVLong(), in.readDouble(), in.readDouble(), in.readVLong(), in.readDouble());
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeByte(comparatorType.id());
out.writeLong(offset);
out.writeVLong(interval);
out.writeVInt(size);
for (int i = 0; i < size; i++) {
FullEntry entry = (FullEntry) entries[i];
if (entry == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
// out.writeLong(entry.key);
out.writeVLong(entry.count);
out.writeDouble(entry.min);
out.writeDouble(entry.max);
out.writeVLong(entry.totalCount);
out.writeDouble(entry.total);
}
}
releaseCache();
}
}
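Note how normalize() above materializes empty buckets lazily: a slot stays null until rendering, and its key is recovered from the slot index. A hedged sketch of that index-to-key mapping (illustrative helper, not part of the commit):
// Sketch only: the slot index encodes the bucket key, so null slots can be
// reconstructed on demand as (i * interval) + offset.
static long keyForSlot(int i, long interval, long offset) {
    return (i * interval) + offset;
}
// interval = 100, offset = 50: slot 0 -> 50, slot 1 -> 150, slot 2 -> 250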

View File

@ -1,107 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram.unbounded;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ScriptHistogramFacetCollector extends AbstractFacetCollector {
private final SearchScript keyScript;
private final SearchScript valueScript;
private final long interval;
private final HistogramFacet.ComparatorType comparatorType;
final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
public ScriptHistogramFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.keyScript = context.scriptService().search(context.lookup(), scriptLang, keyScript, params);
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
this.interval = interval > 0 ? interval : 0;
this.comparatorType = comparatorType;
}
@Override
protected void doCollect(int doc) throws IOException {
keyScript.setNextDocId(doc);
valueScript.setNextDocId(doc);
long bucket;
if (interval == 0) {
bucket = keyScript.runAsLong();
} else {
bucket = bucket(keyScript.runAsDouble(), interval);
}
double value = valueScript.runAsDouble();
InternalFullHistogramFacet.FullEntry entry = entries.get(bucket);
if (entry == null) {
entry = new InternalFullHistogramFacet.FullEntry(bucket, 1, value, value, 1, value);
entries.put(bucket, entry);
} else {
entry.count++;
entry.totalCount++;
entry.total += value;
if (value < entry.min) {
entry.min = value;
}
if (value > entry.max) {
entry.max = value;
}
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
keyScript.setScorer(scorer);
valueScript.setScorer(scorer);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyScript.setNextReader(context);
valueScript.setNextReader(context);
}
@Override
public Facet facet() {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
}
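One subtlety in the bucket helper above: the (long) cast truncates toward zero, so negative keys snap to the bucket boundary nearer zero instead of flooring. A hedged sketch of the arithmetic with worked values:
// Sketch only: same arithmetic as bucket() above.
static long bucket(double value, long interval) {
    return (((long) (value / interval)) * interval); // cast truncates toward zero
}
// bucket(7.6, 5)  -> 5    (7.6 / 5 = 1.52, truncated to 1, times 5)
// bucket(14.9, 5) -> 10
// bucket(-7.6, 5) -> -5   (not -10, because the cast truncates toward zero)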

View File

@ -0,0 +1,204 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.nested;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.XCollector;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
*/
public class NestedFacetExecutor extends FacetExecutor {
private final FacetExecutor facetExecutor;
private final Filter parentFilter;
private final Filter childFilter;
public NestedFacetExecutor(FacetExecutor facetExecutor, SearchContext context, String nestedPath) {
this.facetExecutor = facetExecutor;
MapperService.SmartNameObjectMapper mapper = context.smartNameObjectMapper(nestedPath);
if (mapper == null) {
throw new SearchParseException(context, "facet nested path [" + nestedPath + "] not found");
}
ObjectMapper objectMapper = mapper.mapper();
if (objectMapper == null) {
throw new SearchParseException(context, "facet nested path [" + nestedPath + "] not found");
}
if (!objectMapper.nested().isNested()) {
throw new SearchParseException(context, "facet nested path [" + nestedPath + "] is not nested");
}
parentFilter = context.filterCache().cache(NonNestedDocsFilter.INSTANCE);
childFilter = context.filterCache().cache(objectMapper.nestedTypeFilter());
}
@Override
public InternalFacet buildFacet(String facetName) {
return facetExecutor.buildFacet(facetName);
}
@Override
public Collector collector() {
XCollector collector = facetExecutor.collector();
if (collector == null) {
return null;
}
return new Collector(collector, parentFilter, childFilter);
}
@Override
public Post post() {
FacetExecutor.Post post = facetExecutor.post();
if (post == null) {
return null;
}
return new Post(post, parentFilter, childFilter);
}
public static class Post extends FacetExecutor.Post {
private final FacetExecutor.Post post;
private final Filter parentFilter;
private final Filter childFilter;
public Post(FacetExecutor.Post post, Filter parentFilter, Filter childFilter) {
this.post = post;
this.parentFilter = parentFilter;
this.childFilter = childFilter;
}
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
List<ContextDocIdSet> nestedEntries = new ArrayList<ContextDocIdSet>(docSets.size());
for (int i = 0; i < docSets.size(); i++) {
ContextDocIdSet entry = docSets.get(i);
AtomicReaderContext context = entry.context;
// Can use null as acceptedDocs here, since only live doc ids are pushed to the collect method.
DocIdSet docIdSet = parentFilter.getDocIdSet(context, null);
if (DocIdSets.isEmpty(docIdSet)) {
continue;
}
// In ES, if a parent is deleted, its children are deleted as well, so acceptedDocs can also be null here.
Bits childDocs = DocIdSets.toSafeBits(context.reader(), childFilter.getDocIdSet(context, null));
FixedBitSet parentDocs = (FixedBitSet) docIdSet;
DocIdSetIterator iter = entry.docSet.iterator();
int parentDoc = iter.nextDoc();
if (parentDoc == DocIdSetIterator.NO_MORE_DOCS) {
continue;
}
if (parentDoc == 0) {
parentDoc = iter.nextDoc();
}
if (parentDoc == DocIdSetIterator.NO_MORE_DOCS) {
continue;
}
FixedBitSet childSet = new FixedBitSet(context.reader().maxDoc());
do {
int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
for (int childDocId = (parentDoc - 1); childDocId > prevParentDoc; childDocId--) {
if (childDocs.get(childDocId)) {
childSet.set(childDocId);
}
}
} while ((parentDoc = iter.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS);
nestedEntries.add(new ContextDocIdSet(entry.context, childSet));
}
post.executePost(nestedEntries);
}
}
public static class Collector extends FacetExecutor.Collector {
private final org.apache.lucene.search.Collector collector;
private final Filter parentFilter;
private final Filter childFilter;
private Bits childDocs;
private FixedBitSet parentDocs;
public Collector(org.apache.lucene.search.Collector collector, Filter parentFilter, Filter childFilter) {
this.collector = collector;
this.parentFilter = parentFilter;
this.childFilter = childFilter;
}
@Override
public void postCollection() {
if (collector instanceof XCollector) {
((XCollector) collector).postCollection();
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
collector.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
collector.setNextReader(context);
// Can use null as acceptedDocs here, since only live doc ids are pushed to the collect method.
DocIdSet docIdSet = parentFilter.getDocIdSet(context, null);
// In ES, if a parent is deleted, its children are deleted as well, so acceptedDocs can also be null here.
childDocs = DocIdSets.toSafeBits(context.reader(), childFilter.getDocIdSet(context, null));
if (DocIdSets.isEmpty(docIdSet)) {
parentDocs = null;
} else {
parentDocs = (FixedBitSet) docIdSet;
}
}
@Override
public boolean acceptsDocsOutOfOrder() {
return collector.acceptsDocsOutOfOrder();
}
@Override
public void collect(int parentDoc) throws IOException {
if (parentDoc == 0 || parentDocs == null) {
return;
}
int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
for (int i = (parentDoc - 1); i > prevParentDoc; i--) {
if (childDocs.get(i)) {
collector.collect(i);
}
}
}
}
}
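Both the Post and Collector paths above lean on the Lucene block-join layout: nested children are indexed contiguously immediately before their parent document, so the children of parentDoc occupy the doc id range (prevParentDoc, parentDoc), and a parent at doc 0 cannot be preceded by children. A hedged sketch of that traversal (illustrative class, not part of the commit):
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

// Sketch only: mark the nested children of a single parent document.
class NestedChildWalkSketch {
    static void markChildren(FixedBitSet parentDocs, Bits childDocs,
                             int parentDoc, FixedBitSet out) {
        if (parentDoc == 0) {
            return; // no room for children before the first document
        }
        // children live strictly between the previous parent and this parent
        int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
        for (int child = parentDoc - 1; child > prevParentDoc; child--) {
            if (childDocs.get(child)) {
                out.set(child);
            }
        }
    }
}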

View File

@ -32,7 +32,7 @@ import java.util.List;
/**
*
*/
public class InternalQueryFacet implements QueryFacet, InternalFacet {
public class InternalQueryFacet extends InternalFacet implements QueryFacet {
private static final String STREAM_TYPE = "query";
@ -52,8 +52,6 @@ public class InternalQueryFacet implements QueryFacet, InternalFacet {
return STREAM_TYPE;
}
private String name;
private long count;
private InternalQueryFacet() {
@ -61,42 +59,15 @@ public class InternalQueryFacet implements QueryFacet, InternalFacet {
}
public InternalQueryFacet(String name, long count) {
this.name = name;
super(name);
this.count = count;
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return TYPE;
}
/**
* The "logical" name of the facet.
*/
public String name() {
return name;
}
@Override
public String getName() {
return name();
}
/**
* The count of the facet.
*/
public long count() {
return count;
}
/**
* The count of the facet.
*/
public long getCount() {
return count;
}
@ -108,11 +79,9 @@ public class InternalQueryFacet implements QueryFacet, InternalFacet {
}
int count = 0;
for (Facet facet : facets) {
if (facet.name().equals(name)) {
count += ((QueryFacet) facet).count();
}
count += ((QueryFacet) facet).getCount();
}
return new InternalQueryFacet(name, count);
return new InternalQueryFacet(getName(), count);
}
static final class Fields {
@ -122,7 +91,7 @@ public class InternalQueryFacet implements QueryFacet, InternalFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, QueryFacet.TYPE);
builder.field(Fields.COUNT, count);
builder.endObject();
@ -137,13 +106,13 @@ public class InternalQueryFacet implements QueryFacet, InternalFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
count = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeVLong(count);
}
}

View File

@ -23,8 +23,6 @@ import org.elasticsearch.search.facet.Facet;
/**
* A query facet returns the count (number of hits) for a facet based on a query.
*
*
*/
public interface QueryFacet extends Facet {
@ -33,11 +31,6 @@ public interface QueryFacet extends Facet {
*/
public static final String TYPE = "query";
/**
* The count of the facet.
*/
long count();
/**
* The count of the facet.
*/

View File

@ -23,14 +23,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
/**
*
*/
public class QueryFacetBuilder extends AbstractFacetBuilder {
public class QueryFacetBuilder extends FacetBuilder {
private QueryBuilder query;

View File

@ -22,33 +22,30 @@ package org.elasticsearch.search.facet.query;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.OptimizeGlobalFacetCollector;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import java.io.IOException;
import java.util.List;
/**
*
*/
public class QueryFacetCollector extends AbstractFacetCollector implements OptimizeGlobalFacetCollector {
public class QueryFacetExecutor extends FacetExecutor {
private final Query query;
private final Filter filter;
private Bits bits;
// default to not initialized
int count = -1;
private int count = 0;
public QueryFacetCollector(String facetName, Query query, FilterCache filterCache) {
super(facetName);
public QueryFacetExecutor(Query query) {
this.query = query;
Filter possibleFilter = extractFilterIfApplicable(query);
if (possibleFilter != null) {
@ -59,37 +56,63 @@ public class QueryFacetCollector extends AbstractFacetCollector implements Optim
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
bits = DocIdSets.toSafeBits(context.reader(), filter.getDocIdSet(context, context.reader().getLiveDocs()));
public Collector collector() {
return new Collector();
}
@Override
protected void doCollect(int doc) throws IOException {
if (bits.get(doc)) {
count++;
}
public Post post() {
return new Post();
}
@Override
public void optimizedGlobalExecution(SearchContext searchContext) throws IOException {
Query query = this.query;
if (super.filter != null) {
query = new XFilteredQuery(query, super.filter);
}
Filter searchFilter = searchContext.mapperService().searchFilter(searchContext.types());
if (searchFilter != null) {
query = new XFilteredQuery(query, searchContext.filterCache().cache(searchFilter));
}
TotalHitCountCollector collector = new TotalHitCountCollector();
searchContext.searcher().search(query, collector);
count = collector.getTotalHits();
}
@Override
public Facet facet() {
public InternalFacet buildFacet(String facetName) {
return new InternalQueryFacet(facetName, count);
}
class Post extends FacetExecutor.Post {
@Override
public void executePost(List<ContextDocIdSet> docSets) throws IOException {
int count = 0;
for (ContextDocIdSet entry : docSets) {
DocIdSet filteredDocIdSet = filter.getDocIdSet(entry.context, entry.context.reader().getLiveDocs());
if (filteredDocIdSet == null || entry.docSet == null) {
continue;
}
DocIdSetIterator iter = new AndDocIdSet(new DocIdSet[]{entry.docSet, filteredDocIdSet}).iterator();
while (iter.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
count++;
}
}
QueryFacetExecutor.this.count = count;
}
}
class Collector extends FacetExecutor.Collector {
private int count = 0;
private Bits bits;
@Override
public void collect(int doc) throws IOException {
if (bits.get(doc)) {
count++;
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
bits = DocIdSets.toSafeBits(context.reader(), filter.getDocIdSet(context, context.reader().getLiveDocs()));
}
@Override
public void postCollection() {
bits = null;
QueryFacetExecutor.this.count = count;
}
}
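QueryFacetExecutor shows the two execution modes side by side: the Collector counts hits as they stream in, while the Post path intersects the filter's doc id set with each segment's collected doc set after the fact. A hedged sketch of that intersection count, using a manual leap-frog in place of AndDocIdSet (illustrative class, not part of the commit):
import java.io.IOException;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;

// Sketch only: count docs present in both sets by leap-frogging the iterators.
class IntersectionCountSketch {
    static int count(DocIdSet a, DocIdSet b) throws IOException {
        DocIdSetIterator ia = a.iterator();
        DocIdSetIterator ib = b.iterator();
        if (ia == null || ib == null) {
            return 0; // an empty DocIdSet may expose a null iterator
        }
        int count = 0;
        int doc = ia.nextDoc();
        while (doc != DocIdSetIterator.NO_MORE_DOCS) {
            int other = ib.advance(doc);
            if (other == doc) {
                count++;           // both sets contain this doc
                doc = ia.nextDoc();
            } else if (other == DocIdSetIterator.NO_MORE_DOCS) {
                break;
            } else {
                doc = ia.advance(other); // jump to the next candidate
            }
        }
        return count;
    }
}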
/**
* If it's a filtered query with a match-all, then we just need the inner filter.
*/

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -33,10 +33,10 @@ import java.io.IOException;
/**
*
*/
public class QueryFacetProcessor extends AbstractComponent implements FacetProcessor {
public class QueryFacetParser extends AbstractComponent implements FacetParser {
@Inject
public QueryFacetProcessor(Settings settings) {
public QueryFacetParser(Settings settings) {
super(settings);
InternalQueryFacet.registerStreams();
}
@ -47,8 +47,18 @@ public class QueryFacetProcessor extends AbstractComponent implements FacetProce
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.POST;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
Query facetQuery = context.queryParserService().parse(parser).query();
return new QueryFacetCollector(facetName, facetQuery, context.filterCache());
return new QueryFacetExecutor(facetQuery);
}
}
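For orientation, this is roughly how a query facet is requested and read back through the Java client of this era; a sketch assuming an existing Client instance and the contemporary FacetBuilders helper, using the get-prefixed accessors this commit standardizes on:
// Sketch only; assumes a 0.90-era client and an index named "index".
SearchResponse response = client.prepareSearch("index")
        .setQuery(QueryBuilders.matchAllQuery())
        .addFacet(FacetBuilders.queryFacet("wired")
                .query(QueryBuilders.termQuery("tag", "wired")))
        .execute().actionGet();

QueryFacet facet = response.getFacets().facet(QueryFacet.class, "wired");
long count = facet.getCount(); // hits matching the facet query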

View File

@ -34,7 +34,7 @@ import java.util.List;
/**
*
*/
public class InternalRangeFacet implements RangeFacet, InternalFacet {
public class InternalRangeFacet extends InternalFacet implements RangeFacet {
private static final String STREAM_TYPE = "range";
@ -54,51 +54,29 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
return STREAM_TYPE;
}
private String name;
Entry[] entries;
InternalRangeFacet() {
}
public InternalRangeFacet(String name, Entry[] entries) {
this.name = name;
super(name);
this.entries = entries;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return RangeFacet.TYPE;
}
@Override
public String getType() {
return RangeFacet.TYPE;
}
@Override
public List<Entry> entries() {
public List<Entry> getEntries() {
return ImmutableList.copyOf(entries);
}
@Override
public List<Entry> getEntries() {
return entries();
}
@Override
public Iterator<Entry> iterator() {
return entries().iterator();
return getEntries().iterator();
}
@Override
@ -138,7 +116,7 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
entries = new Entry[in.readVInt()];
for (int i = 0; i < entries.length; i++) {
Entry entry = new Entry();
@ -161,7 +139,7 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeVInt(entries.length);
for (Entry entry : entries) {
out.writeDouble(entry.from);
@ -203,7 +181,7 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, "range");
builder.startArray(Fields.RANGES);
for (Entry entry : entries) {
@ -220,15 +198,15 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
if (entry.toAsString != null) {
builder.field(Fields.TO_STR, entry.toAsString);
}
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.COUNT, entry.getCount());
// only output min and max if there are actually documents matching this range...
if (entry.totalCount() > 0) {
builder.field(Fields.MIN, entry.min());
builder.field(Fields.MAX, entry.max());
if (entry.getTotalCount() > 0) {
builder.field(Fields.MIN, entry.getMin());
builder.field(Fields.MAX, entry.getMax());
}
builder.field(Fields.TOTAL_COUNT, entry.totalCount());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.MEAN, entry.mean());
builder.field(Fields.TOTAL_COUNT, entry.getTotalCount());
builder.field(Fields.TOTAL, entry.getTotal());
builder.field(Fields.MEAN, entry.getMean());
builder.endObject();
}
builder.endArray();

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.facet.range;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -31,43 +31,62 @@ import java.io.IOException;
/**
*
*/
public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
public class KeyValueRangeFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final RangeFacet.Entry[] entries;
private final RangeProc rangeProc;
private DoubleValues keyValues;
public KeyValueRangeFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
public KeyValueRangeFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
this.entries = entries;
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.rangeProc = new RangeProc(entries);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getDoubleValues();
rangeProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
public Collector collector() {
return new Collector();
}
@Override
protected void doCollect(int doc) throws IOException {
for (RangeFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
keyValues.forEachValueInDoc(doc, rangeProc);
public Post post() {
return null;
}
@Override
public Facet facet() {
public InternalFacet buildFacet(String facetName) {
return new InternalRangeFacet(facetName, entries);
}
class Collector extends FacetExecutor.Collector {
private final RangeProc rangeProc;
private DoubleValues keyValues;
public Collector() {
this.rangeProc = new RangeProc(entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
keyValues = keyIndexFieldData.load(context).getDoubleValues();
rangeProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
for (RangeFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
keyValues.forEachValueInDoc(doc, rangeProc);
}
@Override
public void postCollection() {
}
}
public static class RangeProc implements DoubleValues.ValueInDocProc {
private final RangeFacet.Entry[] entries;

View File

@ -33,11 +33,6 @@ public interface RangeFacet extends Facet, Iterable<RangeFacet.Entry> {
*/
public static final String TYPE = "range";
/**
* An ordered list of range facet entries.
*/
List<Entry> entries();
/**
* An ordered list of range facet entries.
*/
@ -46,16 +41,11 @@ public interface RangeFacet extends Facet, Iterable<RangeFacet.Entry> {
public class Entry {
double from = Double.NEGATIVE_INFINITY;
double to = Double.POSITIVE_INFINITY;
String fromAsString;
String toAsString;
long count;
long totalCount;
double total;
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
@ -68,97 +58,54 @@ public interface RangeFacet extends Facet, Iterable<RangeFacet.Entry> {
Entry() {
}
public double from() {
public double getFrom() {
return this.from;
}
public double getFrom() {
return from();
}
public String fromAsString() {
public String getFromAsString() {
if (fromAsString != null) {
return fromAsString;
}
return Double.toString(from);
}
public String getFromAsString() {
return fromAsString();
}
public double to() {
public double getTo() {
return this.to;
}
public double getTo() {
return to();
}
public String toAsString() {
public String getToAsString() {
if (toAsString != null) {
return toAsString;
}
return Double.toString(to);
}
public String getToAsString() {
return toAsString();
}
public long count() {
return this.count;
}
public long getCount() {
return count();
}
public long totalCount() {
return this.totalCount;
return this.count;
}
public long getTotalCount() {
return this.totalCount;
}
public double total() {
return this.total;
}
public double getTotal() {
return total();
}
/**
* The mean of this facet interval.
*/
public double mean() {
if (totalCount == 0) {
return 0;
}
return total / totalCount;
return this.total;
}
/**
* The mean of this facet interval.
*/
public double getMean() {
return mean();
}
public double min() {
return this.min;
if (totalCount == 0) {
return 0;
}
return total / totalCount;
}
public double getMin() {
return this.min;
}
public double max() {
return this.max;
}
public double getMax() {
return this.max;
}

View File

@ -23,7 +23,7 @@ import com.google.common.collect.Lists;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.List;
@ -31,7 +31,7 @@ import java.util.List;
/**
* A facet builder of range facets.
*/
public class RangeFacetBuilder extends AbstractFacetBuilder {
public class RangeFacetBuilder extends FacetBuilder {
private String keyFieldName;
private String valueFieldName;

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.facet.range;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -31,39 +31,59 @@ import java.io.IOException;
/**
*
*/
public class RangeFacetCollector extends AbstractFacetCollector {
public class RangeFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private DoubleValues values;
private final RangeFacet.Entry[] entries;
private final RangeProc rangeProc;
public RangeFacetCollector(String facetName, IndexNumericFieldData indexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
public RangeFacetExecutor(IndexNumericFieldData indexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
this.indexFieldData = indexFieldData;
this.entries = entries;
rangeProc = new RangeProc(entries);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
public Collector collector() {
return new Collector();
}
@Override
protected void doCollect(int doc) throws IOException {
for (RangeFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
values.forEachValueInDoc(doc, rangeProc);
public Post post() {
return null;
}
@Override
public Facet facet() {
public InternalFacet buildFacet(String facetName) {
return new InternalRangeFacet(facetName, entries);
}
class Collector extends FacetExecutor.Collector {
private final RangeProc rangeProc;
private DoubleValues values;
public Collector() {
rangeProc = new RangeProc(entries);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
for (RangeFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
values.forEachValueInDoc(doc, rangeProc);
}
@Override
public void postCollection() {
}
}
public static class RangeProc implements DoubleValues.ValueInDocProc {
private final RangeFacet.Entry[] entries;

View File

@ -26,9 +26,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -38,10 +38,10 @@ import java.util.Map;
/**
*
*/
public class RangeFacetProcessor extends AbstractComponent implements FacetProcessor {
public class RangeFacetParser extends AbstractComponent implements FacetParser {
@Inject
public RangeFacetProcessor(Settings settings) {
public RangeFacetParser(Settings settings) {
super(settings);
InternalRangeFacet.registerStreams();
}
@ -52,7 +52,17 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProce
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
String keyField = null;
String valueField = null;
String scriptLang = null;
@ -120,7 +130,7 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProce
RangeFacet.Entry[] rangeEntries = entries.toArray(new RangeFacet.Entry[entries.size()]);
if (keyScript != null && valueScript != null) {
return new ScriptRangeFacetCollector(facetName, scriptLang, keyScript, valueScript, params, rangeEntries, context);
return new ScriptRangeFacetExecutor(scriptLang, keyScript, valueScript, params, rangeEntries, context);
}
if (keyField == null) {
@ -144,7 +154,7 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProce
IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyFieldMapper);
if (valueField == null || keyField.equals(valueField)) {
return new RangeFacetCollector(facetName, keyIndexFieldData, rangeEntries, context);
return new RangeFacetExecutor(keyIndexFieldData, rangeEntries, context);
} else {
FieldMapper valueFieldMapper = context.smartNameFieldMapper(valueField);
if (valueFieldMapper == null) {
@ -152,7 +162,7 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProce
}
IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper);
// we have a value field, and it's different from the key
return new KeyValueRangeFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, rangeEntries, context);
return new KeyValueRangeFacetExecutor(keyIndexFieldData, valueIndexFieldData, rangeEntries, context);
}
}
}

View File

@ -24,7 +24,7 @@ import com.google.common.collect.Maps;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.List;
@ -33,7 +33,7 @@ import java.util.Map;
/**
*
*/
public class RangeScriptFacetBuilder extends AbstractFacetBuilder {
public class RangeScriptFacetBuilder extends FacetBuilder {
private String lang;
private String keyScript;

View File

@ -1,88 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.range;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ScriptRangeFacetCollector extends AbstractFacetCollector {
private final SearchScript keyScript;
private final SearchScript valueScript;
private final RangeFacet.Entry[] entries;
public ScriptRangeFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.keyScript = context.scriptService().search(context.lookup(), scriptLang, keyScript, params);
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
this.entries = entries;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
keyScript.setScorer(scorer);
valueScript.setScorer(scorer);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyScript.setNextReader(context);
valueScript.setNextReader(context);
}
@Override
protected void doCollect(int doc) throws IOException {
keyScript.setNextDocId(doc);
valueScript.setNextDocId(doc);
double key = keyScript.runAsDouble();
double value = valueScript.runAsDouble();
for (RangeFacet.Entry entry : entries) {
if (key >= entry.getFrom() && key < entry.getTo()) {
entry.count++;
entry.totalCount++;
entry.total += value;
if (value < entry.min) {
entry.min = value;
}
if (value > entry.max) {
entry.max = value;
}
}
}
}
@Override
public Facet facet() {
return new InternalRangeFacet(facetName, entries);
}
}

View File

@ -0,0 +1,103 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.range;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ScriptRangeFacetExecutor extends FacetExecutor {
final SearchScript keyScript;
final SearchScript valueScript;
private final RangeFacet.Entry[] entries;
public ScriptRangeFacetExecutor(String scriptLang, String keyScript, String valueScript, Map<String, Object> params, RangeFacet.Entry[] entries, SearchContext context) {
this.keyScript = context.scriptService().search(context.lookup(), scriptLang, keyScript, params);
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
this.entries = entries;
}
@Override
public Collector collector() {
return new Collector();
}
@Override
public Post post() {
return null;
}
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalRangeFacet(facetName, entries);
}
class Collector extends FacetExecutor.Collector {
@Override
public void setScorer(Scorer scorer) throws IOException {
keyScript.setScorer(scorer);
valueScript.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
keyScript.setNextReader(context);
valueScript.setNextReader(context);
}
@Override
public void collect(int doc) throws IOException {
keyScript.setNextDocId(doc);
valueScript.setNextDocId(doc);
double key = keyScript.runAsDouble();
double value = valueScript.runAsDouble();
for (RangeFacet.Entry entry : entries) {
if (key >= entry.getFrom() && key < entry.getTo()) {
entry.count++;
entry.totalCount++;
entry.total += value;
if (value < entry.min) {
entry.min = value;
}
if (value > entry.max) {
entry.max = value;
}
}
}
}
@Override
public void postCollection() {
}
}
}
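The range test above is half-open: a key falls into an entry when from <= key < to, with from defaulting to negative infinity and to to positive infinity, so adjacent ranges sharing a boundary never double-count a value. A minimal sketch:
// Sketch only: the half-open membership test used by the range facets.
static boolean inRange(double key, double from, double to) {
    return key >= from && key < to;
}
// inRange(10.0, 0, 10)  -> false  (10 is excluded from [0, 10))
// inRange(10.0, 10, 20) -> true   (10 opens [10, 20))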

View File

@ -32,7 +32,7 @@ import java.util.List;
/**
*
*/
public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet {
public class InternalStatisticalFacet extends InternalFacet implements StatisticalFacet {
private static final String STREAM_TYPE = "statistical";
@ -52,23 +52,17 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
return STREAM_TYPE;
}
private String name;
private double min;
private double max;
private double total;
private double sumOfSquares;
private long count;
private InternalStatisticalFacet() {
}
public InternalStatisticalFacet(String name, double min, double max, double total, double sumOfSquares, long count) {
this.name = name;
super(name);
this.min = min;
this.max = max;
this.total = total;
@ -76,58 +70,28 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
this.count = count;
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return name();
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return TYPE;
}
@Override
public long count() {
public long getCount() {
return this.count;
}
@Override
public long getCount() {
return count();
}
@Override
public double total() {
public double getTotal() {
return this.total;
}
@Override
public double getTotal() {
return total();
}
@Override
public double sumOfSquares() {
public double getSumOfSquares() {
return this.sumOfSquares;
}
@Override
public double getSumOfSquares() {
return sumOfSquares();
}
@Override
public double mean() {
public double getMean() {
if (count == 0) {
return 0;
}
@ -135,44 +99,21 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
}
@Override
public double getMean() {
return mean();
}
@Override
public double min() {
public double getMin() {
return this.min;
}
@Override
public double getMin() {
return min();
}
@Override
public double max() {
public double getMax() {
return this.max;
}
@Override
public double getMax() {
return max();
}
public double variance() {
public double getVariance() {
return (sumOfSquares - ((total * total) / count)) / count;
}
public double getVariance() {
return variance();
}
public double stdDeviation() {
return Math.sqrt(variance());
}
public double getStdDeviation() {
return stdDeviation();
return Math.sqrt(getVariance());
}
@Override
@ -187,22 +128,19 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
long count = 0;
for (Facet facet : facets) {
if (!facet.name().equals(name)) {
continue;
}
InternalStatisticalFacet statsFacet = (InternalStatisticalFacet) facet;
if (statsFacet.min() < min || Double.isNaN(min)) {
min = statsFacet.min();
if (statsFacet.getMin() < min || Double.isNaN(min)) {
min = statsFacet.getMin();
}
if (statsFacet.max() > max || Double.isNaN(max)) {
max = statsFacet.max();
if (statsFacet.getMax() > max || Double.isNaN(max)) {
max = statsFacet.getMax();
}
total += statsFacet.total();
sumOfSquares += statsFacet.sumOfSquares();
count += statsFacet.count();
total += statsFacet.getTotal();
sumOfSquares += statsFacet.getSumOfSquares();
count += statsFacet.getCount();
}
return new InternalStatisticalFacet(name, min, max, total, sumOfSquares, count);
return new InternalStatisticalFacet(getName(), min, max, total, sumOfSquares, count);
}
static final class Fields {
@ -219,16 +157,16 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, StatisticalFacet.TYPE);
builder.field(Fields.COUNT, count());
builder.field(Fields.TOTAL, total());
builder.field(Fields.MIN, min());
builder.field(Fields.MAX, max());
builder.field(Fields.MEAN, mean());
builder.field(Fields.SUM_OF_SQUARES, sumOfSquares());
builder.field(Fields.VARIANCE, variance());
builder.field(Fields.STD_DEVIATION, stdDeviation());
builder.field(Fields.COUNT, getCount());
builder.field(Fields.TOTAL, getTotal());
builder.field(Fields.MIN, getMin());
builder.field(Fields.MAX, getMax());
builder.field(Fields.MEAN, getMean());
builder.field(Fields.SUM_OF_SQUARES, getSumOfSquares());
builder.field(Fields.VARIANCE, getVariance());
builder.field(Fields.STD_DEVIATION, getStdDeviation());
builder.endObject();
return builder;
}
@ -241,7 +179,7 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
count = in.readVLong();
total = in.readDouble();
min = in.readDouble();
@ -251,7 +189,7 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeVLong(count);
out.writeDouble(total);
out.writeDouble(min);

View File

@ -1,78 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.statistical;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ScriptStatisticalFacetCollector extends AbstractFacetCollector {
private final SearchScript script;
private double min = Double.POSITIVE_INFINITY;
private double max = Double.NEGATIVE_INFINITY;
private double total = 0;
private double sumOfSquares = 0.0;
private long count;
public ScriptStatisticalFacetCollector(String facetName, String scriptLang, String script, Map<String, Object> params, SearchContext context) {
super(facetName);
this.script = context.scriptService().search(context.lookup(), scriptLang, script, params);
}
@Override
protected void doCollect(int doc) throws IOException {
script.setNextDocId(doc);
double value = script.runAsDouble();
if (value < min) {
min = value;
}
if (value > max) {
max = value;
}
sumOfSquares += value * value;
total += value;
count++;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
script.setScorer(scorer);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
script.setNextReader(context);
}
@Override
public Facet facet() {
return new InternalStatisticalFacet(facetName, min, max, total, sumOfSquares, count);
}
}

View File

@ -0,0 +1,106 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.statistical;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ScriptStatisticalFacetExecutor extends FacetExecutor {
private final SearchScript script;
private double min = Double.POSITIVE_INFINITY;
private double max = Double.NEGATIVE_INFINITY;
private double total = 0;
private double sumOfSquares = 0.0;
private long count;
public ScriptStatisticalFacetExecutor(String scriptLang, String script, Map<String, Object> params, SearchContext context) {
this.script = context.scriptService().search(context.lookup(), scriptLang, script, params);
}
@Override
public Collector collector() {
return new Collector();
}
@Override
public Post post() {
return null;
}
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalStatisticalFacet(facetName, min, max, total, sumOfSquares, count);
}
class Collector extends FacetExecutor.Collector {
private double min = Double.POSITIVE_INFINITY;
private double max = Double.NEGATIVE_INFINITY;
private double total = 0;
private double sumOfSquares = 0.0;
private long count;
@Override
public void setScorer(Scorer scorer) throws IOException {
script.setScorer(scorer);
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
script.setNextReader(context);
}
@Override
public void collect(int doc) throws IOException {
script.setNextDocId(doc);
double value = script.runAsDouble();
if (value < min) {
min = value;
}
if (value > max) {
max = value;
}
sumOfSquares += value * value;
total += value;
count++;
}
@Override
public void postCollection() {
ScriptStatisticalFacetExecutor.this.min = min;
ScriptStatisticalFacetExecutor.this.max = max;
ScriptStatisticalFacetExecutor.this.total = total;
ScriptStatisticalFacetExecutor.this.sumOfSquares = sumOfSquares;
ScriptStatisticalFacetExecutor.this.count = count;
}
}
}
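
All of the executors in this commit follow the same shape: collector() supplies the callback-mode collector, post() supplies the post-mode iterator (null when only collection is supported, as in every executor above), and buildFacet(facetName) materializes the shard result from state the collector flushed back in postCollection(). A minimal sketch of that contract, reconstructed from these call sites rather than copied from the actual FacetExecutor source (which is not part of this excerpt):

import org.elasticsearch.search.facet.InternalFacet;

// Sketch only: reconstructed from the call sites in this diff.
public abstract class FacetExecutor {

    // How the facet phase drives this executor.
    public static enum Mode {
        COLLECTOR, // callbacks as hits match
        POST       // iterate the relevant hits after the query runs
    }

    // Callback-based execution; never null in the executors above.
    public abstract Collector collector();

    // Post-based execution; returning null signals that only the
    // collector mode is supported.
    public abstract Post post();

    // Build the shard-level facet from state published in postCollection().
    public abstract InternalFacet buildFacet(String facetName);

    // Extends Lucene's Collector (setScorer/setNextReader/collect) and adds
    // a hook invoked once collection finishes.
    public abstract static class Collector extends org.apache.lucene.search.Collector {
        @Override
        public boolean acceptsDocsOutOfOrder() {
            return true; // assumption; not visible in this excerpt
        }
        public abstract void postCollection();
    }

    // Signature not visible in this excerpt; kept abstract here.
    public abstract static class Post {
    }
}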

View File

@ -23,8 +23,6 @@ import org.elasticsearch.search.facet.Facet;
/**
* Numeric statistical information.
*
*
*/
public interface StatisticalFacet extends Facet {
@ -33,81 +31,41 @@ public interface StatisticalFacet extends Facet {
*/
public static final String TYPE = "statistical";
/**
* The number of values counted.
*/
long count();
/**
* The number of values counted.
*/
long getCount();
/**
* The total (sum) of values.
*/
double total();
/**
* The total (sum) of values.
*/
double getTotal();
/**
* The sum of squares of the values.
*/
double sumOfSquares();
/**
* The sum of squares of the values.
*/
double getSumOfSquares();
/**
* The mean (average) of the values.
*/
double mean();
/**
* The mean (average) of the values.
*/
double getMean();
/**
* The minimum value.
*/
double min();
/**
* The minimum value.
*/
double getMin();
/**
* The maximum value.
*/
double max();
/**
* The maximum value.
*/
double getMax();
/**
* Variance of the values.
*/
double variance();
/**
* Variance of the values.
*/
double getVariance();
/**
* Standard deviation of the values.
*/
double stdDeviation();
/**
* Standard deviation of the values.
*/
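
The derived values in this interface (mean, variance, standard deviation) follow from the three accumulated quantities (count, total, sumOfSquares); the standard identities, shown here for reference rather than copied from InternalStatisticalFacet:

// Standard running-statistics identities; assumes count > 0.
static double mean(double total, long count) {
    return total / count;
}

static double variance(double total, double sumOfSquares, long count) {
    double mean = total / count;
    return (sumOfSquares / count) - mean * mean;
}

static double stdDeviation(double total, double sumOfSquares, long count) {
    return Math.sqrt(variance(total, sumOfSquares, count));
}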

View File

@ -22,14 +22,14 @@ package org.elasticsearch.search.facet.statistical;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
/**
*
*/
public class StatisticalFacetBuilder extends AbstractFacetBuilder {
public class StatisticalFacetBuilder extends FacetBuilder {
private String[] fieldsNames;
private String fieldName;

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.facet.statistical;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -31,46 +31,69 @@ import java.io.IOException;
/**
*
*/
public class StatisticalFacetCollector extends AbstractFacetCollector {
public class StatisticalFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private DoubleValues values;
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
double total = 0;
double sumOfSquares = 0.0;
long count;
int missing;
private final StatsProc statsProc = new StatsProc();
public StatisticalFacetCollector(String facetName, IndexNumericFieldData indexFieldData, SearchContext context) {
super(facetName);
public StatisticalFacetExecutor(IndexNumericFieldData indexFieldData, SearchContext context) {
this.indexFieldData = indexFieldData;
}
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, statsProc);
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
public Post post() {
return null;
}
@Override
public Facet facet() {
return new InternalStatisticalFacet(facetName, statsProc.min(), statsProc.max(), statsProc.total(), statsProc.sumOfSquares(), statsProc.count());
public InternalFacet buildFacet(String facetName) {
return new InternalStatisticalFacet(facetName, min, max, total, sumOfSquares, count);
}
class Collector extends FacetExecutor.Collector {
private final StatsProc statsProc = new StatsProc();
private DoubleValues values;
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
}
@Override
public void collect(int doc) throws IOException {
values.forEachValueInDoc(doc, statsProc);
}
@Override
public void postCollection() {
StatisticalFacetExecutor.this.min = statsProc.min;
StatisticalFacetExecutor.this.max = statsProc.max;
StatisticalFacetExecutor.this.total = statsProc.total;
StatisticalFacetExecutor.this.sumOfSquares = statsProc.sumOfSquares;
StatisticalFacetExecutor.this.count = statsProc.count;
StatisticalFacetExecutor.this.missing = statsProc.missing;
}
}
public static class StatsProc implements DoubleValues.ValueInDocProc {
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
double total = 0;
double sumOfSquares = 0.0;
long count;
int missing;
@Override

View File

@ -26,9 +26,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -38,10 +38,10 @@ import java.util.Map;
/**
*
*/
public class StatisticalFacetProcessor extends AbstractComponent implements FacetProcessor {
public class StatisticalFacetParser extends AbstractComponent implements FacetParser {
@Inject
public StatisticalFacetProcessor(Settings settings) {
public StatisticalFacetParser(Settings settings) {
super(settings);
InternalStatisticalFacet.registerStreams();
}
@ -52,7 +52,17 @@ public class StatisticalFacetProcessor extends AbstractComponent implements Face
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
String field = null;
String[] fieldsNames = null;
@ -96,7 +106,7 @@ public class StatisticalFacetProcessor extends AbstractComponent implements Face
}
indexFieldDatas[i] = context.fieldData().getForField(fieldMapper);
}
return new StatisticalFieldsFacetCollector(facetName, indexFieldDatas, context);
return new StatisticalFieldsFacetExecutor(indexFieldDatas, context);
}
if (script == null && field == null) {
throw new FacetPhaseExecutionException(facetName, "statistical facet requires either [script] or [field] to be set");
@ -107,9 +117,9 @@ public class StatisticalFacetProcessor extends AbstractComponent implements Face
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + field + "]");
}
IndexNumericFieldData indexFieldData = context.fieldData().getForField(fieldMapper);
return new StatisticalFacetCollector(facetName, indexFieldData, context);
return new StatisticalFacetExecutor(indexFieldData, context);
} else {
return new ScriptStatisticalFacetCollector(facetName, scriptLang, script, params, context);
return new ScriptStatisticalFacetExecutor(scriptLang, script, params, context);
}
}
}
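
The three parse() branches above map directly onto the (renamed) builder API. An illustrative sketch: the field names and script body are made up, and the fields(...) setter is assumed from the fieldsNames member shown earlier:

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.search.facet.FacetBuilders;

// Illustrative only: field names and the script body are examples.
void addStatisticalFacets(SearchRequestBuilder request) {
    request.addFacet(FacetBuilders.statisticalFacet("stats_single").field("price"));        // -> StatisticalFacetExecutor
    request.addFacet(FacetBuilders.statisticalFacet("stats_multi").fields("price", "qty")); // -> StatisticalFieldsFacetExecutor
    request.addFacet(FacetBuilders.statisticalScriptFacet("stats_script")
            .script("doc['price'].value"));                                                 // -> ScriptStatisticalFacetExecutor
}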

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.facet.statistical;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -31,37 +31,68 @@ import java.io.IOException;
/**
*
*/
public class StatisticalFieldsFacetCollector extends AbstractFacetCollector {
public class StatisticalFieldsFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData[] indexFieldDatas;
private DoubleValues[] values;
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
double total = 0;
double sumOfSquares = 0.0;
long count;
int missing;
private final StatsProc statsProc = new StatsProc();
public StatisticalFieldsFacetCollector(String facetName, IndexNumericFieldData[] indexFieldDatas, SearchContext context) {
super(facetName);
public StatisticalFieldsFacetExecutor(IndexNumericFieldData[] indexFieldDatas, SearchContext context) {
this.indexFieldDatas = indexFieldDatas;
this.values = new DoubleValues[indexFieldDatas.length];
}
@Override
protected void doCollect(int doc) throws IOException {
for (DoubleValues value : values) {
value.forEachValueInDoc(doc, statsProc);
public Collector collector() {
return new Collector();
}
@Override
public Post post() {
return null;
}
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalStatisticalFacet(facetName, min, max, total, sumOfSquares, count);
}
class Collector extends FacetExecutor.Collector {
private final StatsProc statsProc = new StatsProc();
private DoubleValues[] values;
public Collector() {
this.values = new DoubleValues[indexFieldDatas.length];
}
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
for (int i = 0; i < indexFieldDatas.length; i++) {
values[i] = indexFieldDatas[i].load(context).getDoubleValues();
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
for (int i = 0; i < indexFieldDatas.length; i++) {
values[i] = indexFieldDatas[i].load(context).getDoubleValues();
}
}
}
@Override
public Facet facet() {
return new InternalStatisticalFacet(facetName, statsProc.min(), statsProc.max(), statsProc.total(), statsProc.sumOfSquares(), statsProc.count());
@Override
public void collect(int doc) throws IOException {
for (DoubleValues value : values) {
value.forEachValueInDoc(doc, statsProc);
}
}
@Override
public void postCollection() {
StatisticalFieldsFacetExecutor.this.min = statsProc.min;
StatisticalFieldsFacetExecutor.this.max = statsProc.max;
StatisticalFieldsFacetExecutor.this.total = statsProc.total;
StatisticalFieldsFacetExecutor.this.sumOfSquares = statsProc.sumOfSquares;
StatisticalFieldsFacetExecutor.this.count = statsProc.count;
StatisticalFieldsFacetExecutor.this.missing = statsProc.missing;
}
}
public static class StatsProc implements DoubleValues.ValueInDocProc {

View File

@ -23,7 +23,7 @@ import com.google.common.collect.Maps;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.Map;
@ -31,7 +31,7 @@ import java.util.Map;
/**
*
*/
public class StatisticalScriptFacetBuilder extends AbstractFacetBuilder {
public class StatisticalScriptFacetBuilder extends FacetBuilder {
private String lang;
private String script;
private Map<String, Object> params;

View File

@ -27,11 +27,23 @@ import org.elasticsearch.search.facet.terms.strings.InternalStringTermsFacet;
/**
*
*/
public abstract class InternalTermsFacet implements TermsFacet, InternalFacet {
public abstract class InternalTermsFacet extends InternalFacet implements TermsFacet {
public static void registerStreams() {
InternalStringTermsFacet.registerStream();
InternalLongTermsFacet.registerStream();
InternalDoubleTermsFacet.registerStream();
}
protected InternalTermsFacet() {
}
protected InternalTermsFacet(String facetName) {
super(facetName);
}
@Override
public final String getType() {
return TYPE;
}
}
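
The super(name), super.readFrom(in), and super.writeTo(out) calls that replace the per-class name handling below imply that the new InternalFacet base class now owns the facet name. A plausible minimal sketch (the actual InternalFacet source is not shown in this diff):

import java.io.IOException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;

// Sketch only: reconstructed from the super(...) calls in this diff.
public abstract class InternalFacet implements Facet, Streamable {

    private String facetName;

    protected InternalFacet() {
        // no-arg constructor for stream deserialization
    }

    protected InternalFacet(String facetName) {
        this.facetName = facetName;
    }

    public String getName() {
        return facetName;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        facetName = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(facetName);
    }
}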

View File

@ -19,9 +19,7 @@
package org.elasticsearch.search.facet.terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.facet.Facet;
@ -30,8 +28,6 @@ import java.util.List;
/**
* A terms facet returns the most popular terms matching the search query.
*
*
*/
public interface TermsFacet extends Facet, Iterable<TermsFacet.Entry> {
@ -42,16 +38,10 @@ public interface TermsFacet extends Facet, Iterable<TermsFacet.Entry> {
public interface Entry extends Comparable<Entry> {
Text term();
Text getTerm();
Number termAsNumber();
Number getTermAsNumber();
int count();
int getCount();
}
@ -66,7 +56,7 @@ public interface TermsFacet extends Facet, Iterable<TermsFacet.Entry> {
@Override
public int compare(Entry o1, Entry o2) {
int i = o2.count() - o1.count();
int i = o2.getCount() - o1.getCount();
if (i == 0) {
i = o2.compareTo(o1);
if (i == 0) {
@ -151,41 +141,21 @@ public interface TermsFacet extends Facet, Iterable<TermsFacet.Entry> {
}
}
/**
* The number of docs missing a value.
*/
long missingCount();
/**
* The number of docs missing a value.
*/
long getMissingCount();
/**
* The total count of terms.
*/
long totalCount();
/**
* The total count of terms.
*/
long getTotalCount();
/**
* The count of terms other than the ones provided by the entries.
*/
long otherCount();
/**
* The count of terms other than the ones provided by the entries.
*/
long getOtherCount();
/**
* The terms and counts.
*/
List<? extends TermsFacet.Entry> entries();
/**
* The terms and counts.
*/

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.facet.AbstractFacetBuilder;
import org.elasticsearch.search.facet.FacetBuilder;
import java.io.IOException;
import java.util.Map;
@ -32,7 +32,7 @@ import java.util.Map;
/**
* Term facets collect the frequency of terms within one or more fields.
*/
public class TermsFacetBuilder extends AbstractFacetBuilder {
public class TermsFacetBuilder extends FacetBuilder {
private String fieldName;
private String[] fieldsNames;
private int size = 10;
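
A hedged usage sketch of the renamed builder; the index and field names are illustrative, and client is assumed to be an already-constructed org.elasticsearch.client.Client:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.facet.FacetBuilders;

// Illustrative only: index/field names are examples.
SearchResponse topTags(Client client) {
    return client.prepareSearch("products")
            .setQuery(QueryBuilders.matchAllQuery())
            .addFacet(FacetBuilders.termsFacet("top_tags").field("tag").size(10))
            .execute().actionGet();
}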

View File

@ -31,16 +31,16 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.FacetParser;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.FacetProcessor;
import org.elasticsearch.search.facet.terms.doubles.TermsDoubleFacetCollector;
import org.elasticsearch.search.facet.terms.index.IndexNameFacetCollector;
import org.elasticsearch.search.facet.terms.longs.TermsLongFacetCollector;
import org.elasticsearch.search.facet.terms.strings.FieldsTermsStringFacetCollector;
import org.elasticsearch.search.facet.terms.strings.ScriptTermsStringFieldFacetCollector;
import org.elasticsearch.search.facet.terms.strings.TermsStringFacetCollector;
import org.elasticsearch.search.facet.terms.strings.TermsStringOrdinalsFacetCollector;
import org.elasticsearch.search.facet.terms.doubles.TermsDoubleFacetExecutor;
import org.elasticsearch.search.facet.terms.index.IndexNameFacetExecutor;
import org.elasticsearch.search.facet.terms.longs.TermsLongFacetExecutor;
import org.elasticsearch.search.facet.terms.strings.FieldsTermsStringFacetExecutor;
import org.elasticsearch.search.facet.terms.strings.ScriptTermsStringFieldFacetExecutor;
import org.elasticsearch.search.facet.terms.strings.TermsStringFacetExecutor;
import org.elasticsearch.search.facet.terms.strings.TermsStringOrdinalsFacetExecutor;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -51,10 +51,10 @@ import java.util.regex.Pattern;
/**
*
*/
public class TermsFacetProcessor extends AbstractComponent implements FacetProcessor {
public class TermsFacetParser extends AbstractComponent implements FacetParser {
@Inject
public TermsFacetProcessor(Settings settings) {
public TermsFacetParser(Settings settings) {
super(settings);
InternalTermsFacet.registerStreams();
}
@ -65,7 +65,17 @@ public class TermsFacetProcessor extends AbstractComponent implements FacetProce
}
@Override
public FacetCollector parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
public FacetExecutor.Mode defaultMainMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor.Mode defaultGlobalMode() {
return FacetExecutor.Mode.COLLECTOR;
}
@Override
public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
String field = null;
int size = 10;
@ -129,7 +139,7 @@ public class TermsFacetProcessor extends AbstractComponent implements FacetProce
}
if ("_index".equals(field)) {
return new IndexNameFacetCollector(facetName, context.shardTarget().index(), comparatorType, size);
return new IndexNameFacetExecutor(context.shardTarget().index(), comparatorType, size);
}
Pattern pattern = null;
@ -143,10 +153,10 @@ public class TermsFacetProcessor extends AbstractComponent implements FacetProce
}
if (fieldsNames != null) {
return new FieldsTermsStringFacetCollector(facetName, fieldsNames, size, comparatorType, allTerms, context, excluded, pattern, searchScript);
return new FieldsTermsStringFacetExecutor(facetName, fieldsNames, size, comparatorType, allTerms, context, excluded, pattern, searchScript);
}
if (field == null && fieldsNames == null && script != null) {
return new ScriptTermsStringFieldFacetCollector(facetName, size, comparatorType, context, excluded, pattern, scriptLang, script, params);
return new ScriptTermsStringFieldFacetExecutor(size, comparatorType, context, excluded, pattern, scriptLang, script, params);
}
FieldMapper fieldMapper = context.smartNameFieldMapper(field);
@ -158,17 +168,17 @@ public class TermsFacetProcessor extends AbstractComponent implements FacetProce
if (indexFieldData instanceof IndexNumericFieldData) {
IndexNumericFieldData indexNumericFieldData = (IndexNumericFieldData) indexFieldData;
if (indexNumericFieldData.getNumericType().isFloatingPoint()) {
return new TermsDoubleFacetCollector(facetName, indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript);
return new TermsDoubleFacetExecutor(indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript);
} else {
return new TermsLongFacetCollector(facetName, indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript);
return new TermsLongFacetExecutor(indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript);
}
} else {
if (script != null || "map".equals(executionHint)) {
return new TermsStringFacetCollector(facetName, indexFieldData, size, comparatorType, allTerms, context, excluded, pattern, searchScript);
return new TermsStringFacetExecutor(indexFieldData, size, comparatorType, allTerms, context, excluded, pattern, searchScript);
} else if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
return new TermsStringOrdinalsFacetCollector(facetName, (IndexFieldData.WithOrdinals) indexFieldData, size, comparatorType, allTerms, context, excluded, pattern);
return new TermsStringOrdinalsFacetExecutor((IndexFieldData.WithOrdinals) indexFieldData, size, comparatorType, allTerms, context, excluded, pattern);
} else {
return new TermsStringFacetCollector(facetName, indexFieldData, size, comparatorType, allTerms, context, excluded, pattern, searchScript);
return new TermsStringFacetExecutor(indexFieldData, size, comparatorType, allTerms, context, excluded, pattern, searchScript);
}
}
}
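
The "map".equals(executionHint) branch above can be driven from the builder. An illustrative sketch, assuming TermsFacetBuilder exposes an executionHint(String) setter matching the parser's hint handling:

import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.facet.FacetBuilders;

// Illustrative: assumes an executionHint(String) setter on the builder.
static FacetBuilder mapHintedTermsFacet() {
    return FacetBuilders.termsFacet("top_tags")
            .field("tag")
            .executionHint("map"); // forces TermsStringFacetExecutor over the ordinals-based executor
}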

View File

@ -73,30 +73,18 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
this.count = count;
}
public Text term() {
return new StringText(Double.toString(term));
}
public Text getTerm() {
return term();
}
@Override
public Number termAsNumber() {
return term;
return new StringText(Double.toString(term));
}
@Override
public Number getTermAsNumber() {
return termAsNumber();
}
public int count() {
return count;
return term;
}
@Override
public int getCount() {
return count();
return count;
}
@Override
@ -106,7 +94,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
return -1;
}
if (term == anotherVal) {
int i = count - o.count();
int i = count - o.getCount();
if (i == 0) {
i = System.identityHashCode(this) - System.identityHashCode(o);
}
@ -116,22 +104,17 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
}
}
private String name;
int requiredSize;
long missing;
long total;
Collection<DoubleEntry> entries = ImmutableList.of();
ComparatorType comparatorType;
InternalDoubleTermsFacet() {
}
public InternalDoubleTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<DoubleEntry> entries, long missing, long total) {
this.name = name;
super(name);
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -140,38 +123,13 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return this.name;
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<DoubleEntry> entries() {
public List<DoubleEntry> getEntries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<DoubleEntry>) entries;
}
@Override
public List<DoubleEntry> getEntries() {
return entries();
}
@SuppressWarnings({"unchecked"})
@Override
public Iterator<Entry> iterator() {
@ -179,39 +137,24 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
}
@Override
public long missingCount() {
public long getMissingCount() {
return this.missing;
}
@Override
public long getMissingCount() {
return missingCount();
}
@Override
public long totalCount() {
public long getTotalCount() {
return this.total;
}
@Override
public long getTotalCount() {
return totalCount();
}
@Override
public long otherCount() {
public long getOtherCount() {
long other = total;
for (Entry entry : entries) {
other -= entry.count();
other -= entry.getCount();
}
return other;
}
@Override
public long getOtherCount() {
return otherCount();
}
@Override
public Facet reduce(List<Facet> facets) {
if (facets.size() == 1) {
@ -223,10 +166,10 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
long total = 0;
for (Facet facet : facets) {
InternalDoubleTermsFacet mFacet = (InternalDoubleTermsFacet) facet;
missing += mFacet.missingCount();
total += mFacet.totalCount();
missing += mFacet.getMissingCount();
total += mFacet.getTotalCount();
for (DoubleEntry entry : mFacet.entries) {
aggregated.adjustOrPutValue(entry.term, entry.count(), entry.count());
aggregated.adjustOrPutValue(entry.term, entry.getCount(), entry.getCount());
}
}
@ -256,16 +199,16 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields.MISSING, missing);
builder.field(Fields.TOTAL, total);
builder.field(Fields.OTHER, otherCount());
builder.field(Fields.OTHER, getOtherCount());
builder.startArray(Fields.TERMS);
for (DoubleEntry entry : entries) {
builder.startObject();
builder.field(Fields.TERM, entry.term);
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.COUNT, entry.getCount());
builder.endObject();
}
builder.endArray();
@ -281,7 +224,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -296,7 +239,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);
@ -305,7 +248,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
out.writeVInt(entries.size());
for (DoubleEntry entry : entries) {
out.writeDouble(entry.term);
out.writeVInt(entry.count());
out.writeVInt(entry.getCount());
}
}
}

View File

@ -32,8 +32,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
import org.elasticsearch.search.internal.SearchContext;
@ -45,36 +45,29 @@ import java.util.Set;
/**
*
*/
public class TermsDoubleFacetCollector extends AbstractFacetCollector {
public class TermsDoubleFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private final TermsFacet.ComparatorType comparatorType;
private final int size;
private final int numberOfShards;
private DoubleValues values;
private final StaticAggregatorValueProc aggregator;
private final SearchScript script;
private final ImmutableSet<BytesRef> excluded;
public TermsDoubleFacetCollector(String facetName, IndexNumericFieldData indexFieldData, int size, TermsFacet.ComparatorType comparatorType, boolean allTerms, SearchContext context,
ImmutableSet<BytesRef> excluded, SearchScript script) {
super(facetName);
final TDoubleIntHashMap facets;
long missing;
long total;
public TermsDoubleFacetExecutor(IndexNumericFieldData indexFieldData, int size, TermsFacet.ComparatorType comparatorType, boolean allTerms, SearchContext context,
ImmutableSet<BytesRef> excluded, SearchScript script) {
this.indexFieldData = indexFieldData;
this.size = size;
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.script = script;
this.excluded = excluded;
if (this.script == null && excluded.isEmpty()) {
aggregator = new StaticAggregatorValueProc(CacheRecycler.popDoubleIntMap());
} else {
aggregator = new AggregatorValueProc(CacheRecycler.popDoubleIntMap(), excluded, this.script);
}
this.facets = CacheRecycler.popDoubleIntMap();
// TODO: we need to support this with the new field data....
// if (allTerms) {
@ -90,31 +83,20 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
}
@Override
public void setScorer(Scorer scorer) throws IOException {
if (script != null) {
script.setScorer(scorer);
}
public Collector collector() {
return new Collector();
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
if (script != null) {
script.setNextReader(context);
}
public Post post() {
return null;
}
@Override
protected void doCollect(int doc) throws IOException {
values.forEachValueInDoc(doc, aggregator);
}
@Override
public Facet facet() {
TDoubleIntHashMap facets = aggregator.facets();
public InternalFacet buildFacet(String facetName) {
if (facets.isEmpty()) {
CacheRecycler.pushDoubleIntMap(facets);
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalDoubleTermsFacet.DoubleEntry>of(), aggregator.missing(), aggregator.total());
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalDoubleTermsFacet.DoubleEntry>of(), missing, total);
} else {
if (size < EntryPriorityQueue.LIMIT) {
EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());
@ -127,7 +109,7 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
list[i] = (InternalDoubleTermsFacet.DoubleEntry) ordered.pop();
}
CacheRecycler.pushDoubleIntMap(facets);
return new InternalDoubleTermsFacet(facetName, comparatorType, size, Arrays.asList(list), aggregator.missing(), aggregator.total());
return new InternalDoubleTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total);
} else {
BoundedTreeSet<InternalDoubleTermsFacet.DoubleEntry> ordered = new BoundedTreeSet<InternalDoubleTermsFacet.DoubleEntry>(comparatorType.comparator(), size);
for (TDoubleIntIterator it = facets.iterator(); it.hasNext(); ) {
@ -135,11 +117,51 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalDoubleTermsFacet.DoubleEntry(it.key(), it.value()));
}
CacheRecycler.pushDoubleIntMap(facets);
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing(), aggregator.total());
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ordered, missing, total);
}
}
}
class Collector extends FacetExecutor.Collector {
private final StaticAggregatorValueProc aggregator;
private DoubleValues values;
public Collector() {
if (script == null && excluded.isEmpty()) {
aggregator = new StaticAggregatorValueProc(facets);
} else {
aggregator = new AggregatorValueProc(facets, excluded, script);
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
if (script != null) {
script.setScorer(scorer);
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getDoubleValues();
if (script != null) {
script.setNextReader(context);
}
}
@Override
public void collect(int doc) throws IOException {
values.forEachValueInDoc(doc, aggregator);
}
@Override
public void postCollection() {
TermsDoubleFacetExecutor.this.missing = aggregator.missing();
TermsDoubleFacetExecutor.this.total = aggregator.total();
}
}
public static class AggregatorValueProc extends StaticAggregatorValueProc {
private final SearchScript script;

View File

@ -21,8 +21,8 @@ package org.elasticsearch.search.facet.terms.index;
import com.google.common.collect.Sets;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.facet.terms.strings.InternalStringTermsFacet;
@ -31,34 +31,51 @@ import java.io.IOException;
/**
*
*/
public class IndexNameFacetCollector extends AbstractFacetCollector {
public class IndexNameFacetExecutor extends FacetExecutor {
private final String indexName;
private final InternalStringTermsFacet.ComparatorType comparatorType;
private final int size;
private int count = 0;
public IndexNameFacetCollector(String facetName, String indexName, TermsFacet.ComparatorType comparatorType, int size) {
super(facetName);
public IndexNameFacetExecutor(String indexName, TermsFacet.ComparatorType comparatorType, int size) {
this.indexName = indexName;
this.comparatorType = comparatorType;
this.size = size;
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
public Collector collector() {
return new Collector();
}
@Override
protected void doCollect(int doc) throws IOException {
count++;
public Post post() {
return null;
}
@Override
public Facet facet() {
public InternalFacet buildFacet(String facetName) {
return new InternalStringTermsFacet(facetName, comparatorType, size, Sets.newHashSet(new InternalStringTermsFacet.TermEntry(indexName, count)), 0, count);
}
class Collector extends FacetExecutor.Collector {
private int count;
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
}
@Override
public void collect(int doc) throws IOException {
count++;
}
@Override
public void postCollection() {
IndexNameFacetExecutor.this.count = count;
}
}
}
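
For completeness, this executor is reached through the _index branch of TermsFacetParser shown earlier; an illustrative request:

import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.facet.FacetBuilders;

// Illustrative: faceting on the synthetic _index field yields one entry
// per index with the matching doc count, via IndexNameFacetExecutor.
static FacetBuilder perIndexCounts() {
    return FacetBuilders.termsFacet("per_index").field("_index");
}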

View File

@ -73,30 +73,19 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
this.count = count;
}
public Text term() {
return new StringText(Long.toString(term));
}
public Text getTerm() {
return term();
}
@Override
public Number termAsNumber() {
return term;
public Text getTerm() {
return new StringText(Long.toString(term));
}
@Override
public Number getTermAsNumber() {
return termAsNumber();
}
public int count() {
return count;
return term;
}
@Override
public int getCount() {
return count();
return count;
}
@Override
@ -106,7 +95,7 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
return -1;
}
if (term == anotherVal) {
int i = count - o.count();
int i = count - o.getCount();
if (i == 0) {
i = System.identityHashCode(this) - System.identityHashCode(o);
}
@ -116,22 +105,17 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
}
}
private String name;
int requiredSize;
long missing;
long total;
Collection<LongEntry> entries = ImmutableList.of();
ComparatorType comparatorType;
InternalLongTermsFacet() {
}
public InternalLongTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<LongEntry> entries, long missing, long total) {
this.name = name;
super(name);
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -140,38 +124,13 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
}
@Override
public String name() {
return this.name;
}
@Override
public String getName() {
return this.name;
}
@Override
public String type() {
return TYPE;
}
@Override
public String getType() {
return type();
}
@Override
public List<LongEntry> entries() {
public List<LongEntry> getEntries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<LongEntry>) entries;
}
@Override
public List<LongEntry> getEntries() {
return entries();
}
@SuppressWarnings({"unchecked"})
@Override
public Iterator<Entry> iterator() {
@ -179,39 +138,24 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
}
@Override
public long missingCount() {
public long getMissingCount() {
return this.missing;
}
@Override
public long getMissingCount() {
return missingCount();
}
@Override
public long totalCount() {
public long getTotalCount() {
return this.total;
}
@Override
public long getTotalCount() {
return totalCount();
}
@Override
public long otherCount() {
public long getOtherCount() {
long other = total;
for (Entry entry : entries) {
other -= entry.count();
other -= entry.getCount();
}
return other;
}
@Override
public long getOtherCount() {
return otherCount();
}
@Override
public Facet reduce(List<Facet> facets) {
if (facets.size() == 1) {
@ -223,10 +167,10 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
long total = 0;
for (Facet facet : facets) {
InternalLongTermsFacet mFacet = (InternalLongTermsFacet) facet;
missing += mFacet.missingCount();
total += mFacet.totalCount();
missing += mFacet.getMissingCount();
total += mFacet.getTotalCount();
for (LongEntry entry : mFacet.entries) {
aggregated.adjustOrPutValue(entry.term, entry.count(), entry.count());
aggregated.adjustOrPutValue(entry.term, entry.getCount(), entry.getCount());
}
}
@ -256,16 +200,16 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startObject(getName());
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields.MISSING, missing);
builder.field(Fields.TOTAL, total);
builder.field(Fields.OTHER, otherCount());
builder.field(Fields.OTHER, getOtherCount());
builder.startArray(Fields.TERMS);
for (LongEntry entry : entries) {
builder.startObject();
builder.field(Fields.TERM, entry.term);
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.COUNT, entry.getCount());
builder.endObject();
}
builder.endArray();
@ -281,7 +225,7 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -296,7 +240,7 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
super.writeTo(out);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);
@ -305,7 +249,7 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
out.writeVInt(entries.size());
for (LongEntry entry : entries) {
out.writeLong(entry.term);
out.writeVInt(entry.count());
out.writeVInt(entry.getCount());
}
}
}

Some files were not shown because too many files have changed in this diff.