SOLR-5302 Analytics component. Checking in to trunk; we'll let it bake, then port to 4x

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1543651 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Erick Erickson 2013-11-19 23:45:31 +00:00
parent 6b67d485dc
commit 122171155f
93 changed files with 13610 additions and 39 deletions

View File

@ -0,0 +1,163 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.solr.analytics.expression.Expression;
import org.apache.solr.analytics.expression.ExpressionFactory;
import org.apache.solr.analytics.request.AnalyticsRequest;
import org.apache.solr.analytics.request.ExpressionRequest;
import org.apache.solr.analytics.statistics.StatsCollector;
import org.apache.solr.analytics.statistics.StatsCollectorSupplierFactory;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
import com.google.common.base.Supplier;
/**
* A <code>BasicAccumulator</code> manages the ValueCounters and Expressions without regard to Facets.
*/
public class BasicAccumulator extends ValueAccumulator {
  protected final SolrIndexSearcher searcher;
  protected final AnalyticsRequest request;
  protected final DocSet docs;
  /** Supplies fresh StatsCollector arrays; subclasses reuse it to build per-facet collectors. */
  protected final Supplier<StatsCollector[]> statsCollectorArraySupplier;
  /** Collectors for the overall (un-faceted) statistics. */
  protected final StatsCollector[] statsCollectors;
  /** Expressions over {@link #statsCollectors}; parallel to {@link #expressionNames}. */
  protected final Expression[] expressions;
  protected final String[] expressionNames;
  protected final String[] expressionStrings;
  /** Names of expressions that are computed but excluded from {@link #export()}. */
  protected final Set<String> hiddenExpressions;
  /** The reader context most recently passed to {@link #setNextReader(AtomicReaderContext)}. */
  protected AtomicReaderContext context = null;

  public BasicAccumulator(SolrIndexSearcher searcher, DocSet docs, AnalyticsRequest request) throws IOException {
    this.searcher = searcher;
    this.docs = docs;
    this.request = request;
    statsCollectorArraySupplier = StatsCollectorSupplierFactory.create(searcher.getSchema(), request);
    statsCollectors = statsCollectorArraySupplier.get();
    int size = request.getExpressions().size();
    expressionNames = new String[size];
    expressionStrings = new String[size];
    int count = 0;
    // Sort so the expression order (and hence array indices) is deterministic; subclasses
    // rely on this ordering when binary-searching expressionNames for facet sorting.
    Collections.sort(request.getExpressions());
    for (ExpressionRequest expRequest : request.getExpressions()) {
      expressionNames[count] = expRequest.getName();
      expressionStrings[count++] = expRequest.getExpressionString();
    }
    expressions = makeExpressions(statsCollectors);
    hiddenExpressions = request.getHiddenExpressions();
  }

  /**
   * Advances all overall {@link StatsCollector}s to the given index segment.
   * @param context the segment to read documents from
   * @throws IOException if there is an error setting the next reader
   */
  @Override
  public void setNextReader(AtomicReaderContext context) throws IOException {
    this.context = context;
    for (StatsCollector counter : statsCollectors) {
      counter.setNextReader(context);
    }
  }

  /**
   * Factory method mirroring the constructor.
   * @return a new BasicAccumulator for the given searcher, doc set and request
   */
  public static BasicAccumulator create(SolrIndexSearcher searcher, DocSet docs, AnalyticsRequest request) throws IOException {
    return new BasicAccumulator(searcher, docs, request);
  }

  /**
   * Passes the documents on to the {@link StatsCollector}s to be collected.
   * @param doc Document to collect from
   */
  @Override
  public void collect(int doc) throws IOException {
    for (StatsCollector statsCollector : statsCollectors) {
      statsCollector.collect(doc);
    }
  }

  /** Finalizes every overall {@link StatsCollector} after collection is complete. */
  @Override
  public void compute() {
    for (StatsCollector statsCollector : statsCollectors) {
      statsCollector.compute();
    }
  }

  /**
   * Exports all non-hidden expression values as a {@link NamedList}.
   * @return expression name to computed value, in sorted expression order
   */
  public NamedList<?> export() {
    NamedList<Object> base = new NamedList<Object>();
    for (int count = 0; count < expressions.length; count++) {
      if (!hiddenExpressions.contains(expressionNames[count])) {
        base.add(expressionNames[count], expressions[count].getValue());
      }
    }
    return base;
  }

  /**
   * Builds an array of Expressions with the given list of counters
   * @param statsCollectors the stats collectors
   * @return The array of Expressions
   */
  public Expression[] makeExpressions(StatsCollector[] statsCollectors) {
    Expression[] expressions = new Expression[expressionStrings.length];
    for (int count = 0; count < expressionStrings.length; count++) {
      expressions[count] = ExpressionFactory.create(expressionStrings[count], statsCollectors);
    }
    return expressions;
  }

  /**
   * Returns the value of an expression to use in a field or query facet.
   * @param expressionName the name of the expression
   * @return String String representation of pivot value
   * @throws SolrException (BAD_REQUEST) if no expression with the given name exists
   */
  @SuppressWarnings({ "deprecation", "rawtypes" })
  public String getResult(String expressionName) {
    for (int count = 0; count < expressionNames.length; count++) {
      if (expressionName.equals(expressionNames[count])) {
        Comparable value = expressions[count].getValue();
        // instanceof (rather than an exact getClass() check) so Date subclasses
        // such as java.sql.Timestamp are also rendered in external date format.
        if (value instanceof Date) {
          return TrieDateField.formatExternal((Date) value);
        } else {
          return value.toString();
        }
      }
    }
    throw new SolrException(ErrorCode.BAD_REQUEST, "Pivot expression "+expressionName+" not found.");
  }

  /**
   * Used for JMX stats collecting. Counts the number of stats requests
   * @return number of unique stats collectors
   */
  public long getNumStatsCollectors() {
    return statsCollectors.length;
  }

  /**
   * Used for JMX stats collecting. Counts the number of queries in all query facets
   * @return number of queries requested in all query facets.
   */
  public long getNumQueries() {
    return 0L;
  }
}

View File

@ -0,0 +1,722 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.solr.analytics.accumulator.facet.FacetValueAccumulator;
import org.apache.solr.analytics.accumulator.facet.FieldFacetAccumulator;
import org.apache.solr.analytics.accumulator.facet.QueryFacetAccumulator;
import org.apache.solr.analytics.accumulator.facet.RangeFacetAccumulator;
import org.apache.solr.analytics.expression.Expression;
import org.apache.solr.analytics.expression.ExpressionFactory;
import org.apache.solr.analytics.request.AnalyticsContentHandler;
import org.apache.solr.analytics.request.AnalyticsRequest;
import org.apache.solr.analytics.request.FieldFacetRequest;
import org.apache.solr.analytics.request.FieldFacetRequest.FacetSortSpecification;
import org.apache.solr.analytics.request.QueryFacetRequest;
import org.apache.solr.analytics.request.RangeFacetRequest;
import org.apache.solr.analytics.statistics.StatsCollector;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.analytics.util.RangeEndpointCalculator;
import org.apache.solr.analytics.util.RangeEndpointCalculator.FacetRange;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QParser;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
import com.google.common.collect.Iterables;
/**
* A <code>FacetingAccumulator</code> manages the StatsCollectors and Expressions for facets.
*/
public class FacetingAccumulator extends BasicAccumulator implements FacetValueAccumulator {
  /** Bucket key used for documents that have no value in a field facet's field. */
  public static final String MISSING_VALUE = "(MISSING)";
  // True once the overall stats and all field-facet stats have been finalized,
  // so compute() is idempotent when called after postProcess().
  protected boolean basicsAndFieldFacetsComputed;
  protected int leafNum;
  protected AtomicReaderContext leaf;
  protected final AnalyticsRequest analyticsRequest;
  // Per facet-name maps of bucket-value -> expressions/collectors. Buckets are
  // created lazily as values are first seen during collection.
  protected final Map<String,Map<String,Expression[]>> fieldFacetExpressions;
  protected final Map<String,Map<String,Expression[]>> rangeFacetExpressions;
  protected final Map<String,Map<String,Expression[]>> queryFacetExpressions;
  protected final Map<String,Map<String,StatsCollector[]>> fieldFacetCollectors;
  protected final Map<String,Map<String,StatsCollector[]>> rangeFacetCollectors;
  protected final Map<String,Map<String,StatsCollector[]>> queryFacetCollectors;
  protected final List<FieldFacetAccumulator> facetAccumulators;
  /** Field facets that are computed but excluded from export(). */
  protected final Set<String> hiddenFieldFacets;
  /** the current value of this stat field */
  protected final SolrQueryRequest queryRequest;
  // Non-null only if the request contains range/query facets; both are
  // processed in postProcess() rather than during initial collection.
  protected List<RangeFacetRequest> rangeFacets = null;
  protected List<QueryFacetRequest> queryFacets = null;
  /** Number of facet queries executed; reported via getNumQueries() for JMX. */
  protected long queryCount;

  public FacetingAccumulator(SolrIndexSearcher searcher, DocSet docs, AnalyticsRequest request, SolrQueryRequest queryRequest) throws IOException {
    // The parent Basic Accumulator keeps track of overall stats while
    // the Faceting Accumulator only manages the facet stats
    super(searcher, docs, request);
    this.analyticsRequest = request;
    this.queryRequest = queryRequest;
    basicsAndFieldFacetsComputed = false;
    List<FieldFacetRequest> fieldFreqs = request.getFieldFacets();
    List<RangeFacetRequest> rangeFreqs = request.getRangeFacets();
    List<QueryFacetRequest> queryFreqs = request.getQueryFacets();
    this.fieldFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(fieldFreqs.size());
    this.rangeFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(rangeFreqs.size());
    this.queryFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(queryFreqs.size());
    this.fieldFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(fieldFreqs.size());
    this.rangeFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(rangeFreqs.size());
    this.queryFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(queryFreqs.size());
    this.facetAccumulators = new ArrayList<FieldFacetAccumulator>();
    this.hiddenFieldFacets = new HashSet<String>();
    /**
     * For each field facet request add a bucket to the {@link Expression} map and {@link StatsCollector} map.
     * Field facets are computed during the initial collection of documents, therefore
     * the FieldFacetAccumulators are created initially.
     */
    for( FieldFacetRequest freq : fieldFreqs ){
      final FieldFacetRequest fr = (FieldFacetRequest) freq;
      if (fr.isHidden()) {
        hiddenFieldFacets.add(fr.getName());
      }
      final SchemaField ff = fr.getField();
      final FieldFacetAccumulator facc = FieldFacetAccumulator.create(searcher, this, ff);
      facetAccumulators.add(facc);
      fieldFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
      fieldFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
    }
    /**
     * For each range and query facet request add a bucket to the corresponding
     * {@link Expression} map and {@link StatsCollector} map.
     * Range and Query Facets are computed in the post processing, so the accumulators
     * are not created initially.
     */
    for( RangeFacetRequest freq : rangeFreqs ){
      if( rangeFacets == null ) rangeFacets = new ArrayList<RangeFacetRequest>();
      rangeFacets.add(freq);
      rangeFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
      rangeFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
    }
    for( QueryFacetRequest freq : queryFreqs ){
      if( queryFacets == null ) queryFacets = new ArrayList<QueryFacetRequest>();
      queryFacets.add(freq);
      queryFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
      queryFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
    }
    this.queryCount = 0l;
  }

  /** Factory method mirroring the constructor. */
  public static FacetingAccumulator create(SolrIndexSearcher searcher, DocSet docs, AnalyticsRequest request, SolrQueryRequest queryRequest) throws IOException {
    return new FacetingAccumulator(searcher,docs,request,queryRequest);
  }

  /**
   * Update the readers for the {@link BasicAccumulator}, field facets and field facet {@link StatsCollector}s.
   * @param context The context to read documents from.
   * @throws IOException if there is an error setting the next reader
   */
  @Override
  public void setNextReader(AtomicReaderContext context) throws IOException {
    super.setNextReader(context);
    for( Map<String,StatsCollector[]> valueList : fieldFacetCollectors.values() ){
      for (StatsCollector[] statsCollectorList : valueList.values()) {
        for (StatsCollector statsCollector : statsCollectorList) {
          statsCollector.setNextReader(context);
        }
      }
    }
    for (FieldFacetAccumulator fa : facetAccumulators) {
      fa.setNextReader(context);
    }
  }

  /**
   * Updates the reader for all of the range facet {@link StatsCollector}s.
   * NOTE(review): this also calls super.setNextReader(), which re-advances the
   * overall collectors during post processing — confirm that is intended.
   * @param context The context to read documents from.
   * @throws IOException if there is an error setting the next reader
   */
  public void setRangeStatsCollectorReaders(AtomicReaderContext context) throws IOException {
    super.setNextReader(context);
    for( Map<String,StatsCollector[]> rangeList : rangeFacetCollectors.values() ){
      for (StatsCollector[] statsCollectorList : rangeList.values()) {
        for (StatsCollector statsCollector : statsCollectorList) {
          statsCollector.setNextReader(context);
        }
      }
    }
  }

  /**
   * Updates the reader for all of the query facet {@link StatsCollector}s.
   * NOTE(review): as above, this also re-advances the overall collectors via
   * super.setNextReader() — confirm that is intended.
   * @param context The context to read documents from.
   * @throws IOException if there is an error setting the next reader
   */
  public void setQueryStatsCollectorReaders(AtomicReaderContext context) throws IOException {
    super.setNextReader(context);
    for( Map<String,StatsCollector[]> queryList : queryFacetCollectors.values() ){
      for (StatsCollector[] statsCollectorList : queryList.values()) {
        for (StatsCollector statsCollector : statsCollectorList) {
          statsCollector.setNextReader(context);
        }
      }
    }
  }

  /**
   * Called from Analytics stats, adds documents to the field
   * facets and the super {@link BasicAccumulator}.
   */
  @Override
  public void collect(int doc) throws IOException {
    for( FieldFacetAccumulator fa : facetAccumulators ){
      fa.collect(doc);
    }
    super.collect(doc);
  }

  /**
   * Given a document, fieldFacet field and facetValue, adds the document to the
   * {@link StatsCollector}s held in the bucket corresponding to the fieldFacet field and facetValue.
   * Called during initial document collection.
   */
  @Override
  public void collectField(int doc, String facetField, String facetValue) throws IOException {
    Map<String,StatsCollector[]> map = fieldFacetCollectors.get(facetField);
    StatsCollector[] statsCollectors = map.get(facetValue);
    // If the facetValue has not been seen yet, a StatsCollector array is
    // created and associated with that bucket.
    if( statsCollectors == null ){
      statsCollectors = statsCollectorArraySupplier.get();
      map.put(facetValue,statsCollectors);
      fieldFacetExpressions.get(facetField).put(facetValue,makeExpressions(statsCollectors));
      // New collectors must be pointed at the current segment before collecting.
      for (StatsCollector statsCollector : statsCollectors) {
        statsCollector.setNextReader(context);
      }
    }
    for (StatsCollector statsCollector : statsCollectors) {
      statsCollector.collect(doc);
    }
  }

  /**
   * Given a document, rangeFacet field and range, adds the document to the
   * {@link StatsCollector}s held in the bucket corresponding to the rangeFacet field and range.
   * Called during post processing.
   */
  @Override
  public void collectRange(int doc, String facetField, String range) throws IOException {
    Map<String,StatsCollector[]> map = rangeFacetCollectors.get(facetField);
    StatsCollector[] statsCollectors = map.get(range);
    // If the range has not been seen yet, a StatsCollector array is
    // created and associated with that bucket.
    if( statsCollectors == null ){
      statsCollectors = statsCollectorArraySupplier.get();
      map.put(range,statsCollectors);
      rangeFacetExpressions.get(facetField).put(range,makeExpressions(statsCollectors));
      // New collectors must be pointed at the current segment before collecting.
      for (StatsCollector statsCollector : statsCollectors) {
        statsCollector.setNextReader(context);
      }
    }
    for (StatsCollector statsCollector : statsCollectors) {
      statsCollector.collect(doc);
    }
  }

  /**
   * Given a document, queryFacet name and query, adds the document to the
   * {@link StatsCollector}s held in the bucket corresponding to the queryFacet name and query.
   * Called during post processing.
   */
  @Override
  public void collectQuery(int doc, String facetName, String query) throws IOException {
    Map<String,StatsCollector[]> map = queryFacetCollectors.get(facetName);
    StatsCollector[] statsCollectors = map.get(query);
    // If the query has not been seen yet, a StatsCollector array is
    // created and associated with that bucket.
    if( statsCollectors == null ){
      statsCollectors = statsCollectorArraySupplier.get();
      map.put(query,statsCollectors);
      queryFacetExpressions.get(facetName).put(query,makeExpressions(statsCollectors));
      // New collectors must be pointed at the current segment before collecting.
      for (StatsCollector statsCollector : statsCollectors) {
        statsCollector.setNextReader(context);
      }
    }
    for (StatsCollector statsCollector : statsCollectors) {
      statsCollector.collect(doc);
    }
  }

  /**
   * A comparator to compare expression values for field facet sorting.
   * Delegates to an Expression comparator at a fixed position in each bucket's
   * expression array.
   */
  public static class EntryComparator implements Comparator<Entry<String,Expression[]>> {
    private final Comparator<Expression> comp;
    private final int comparatorExpressionPlace;

    public EntryComparator(Comparator<Expression> comp, int comparatorExpressionPlace) {
      this.comp = comp;
      this.comparatorExpressionPlace = comparatorExpressionPlace;
    }

    @Override
    public int compare(Entry<String,Expression[]> o1, Entry<String,Expression[]> o2) {
      return comp.compare(o1.getValue()[comparatorExpressionPlace], o2.getValue()[comparatorExpressionPlace]);
    }
  }

  /**
   * Finalizes the statistics within the each facet bucket before exporting;
   * idempotent — does nothing if postProcess() or a prior compute() already ran.
   */
  @Override
  public void compute() {
    if (!basicsAndFieldFacetsComputed) {
      super.compute();
      for( Map<String, StatsCollector[]> f : fieldFacetCollectors.values() ){
        for( StatsCollector[] arr : f.values() ){
          for( StatsCollector b : arr ){
            b.compute();
          }
        }
      }
      basicsAndFieldFacetsComputed = true;
    }
  }

  /**
   * Finalizes the statistics within the a specific query facet before exporting;
   */
  public void computeQueryFacet(String facet) {
    Map<String, StatsCollector[]> f = queryFacetCollectors.get(facet);
    for( StatsCollector[] arr : f.values() ){
      for( StatsCollector b : arr ){
        b.compute();
      }
    }
  }

  /**
   * Finalizes the statistics within the a specific range facet before exporting;
   */
  public void computeRangeFacet(String facet) {
    Map<String, StatsCollector[]> f = rangeFacetCollectors.get(facet);
    for( StatsCollector[] arr : f.values() ){
      for( StatsCollector b : arr ){
        b.compute();
      }
    }
  }

  /**
   * Returns the value of an expression to use in a range or query facet.
   * If facetValue is itself a result request (e.g. "result(...)"), it is first
   * resolved recursively before the bucket lookup.
   * @param expressionName the name of the expression
   * @param fieldFacet the facet field
   * @param facetValue the facet value
   * @return String String representation of pivot value
   * @throws SolrException (BAD_REQUEST) if the expression cannot be resolved
   */
  @SuppressWarnings({ "deprecation", "rawtypes" })
  public String getResult(String expressionName, String fieldFacet, String facetValue) {
    if (facetValue.contains(AnalyticsParams.RESULT) && !facetValue.contains(AnalyticsParams.QUERY_RESULT)) {
      try {
        // Extract the comma-separated arguments between the outermost parentheses.
        String[] pivotStr = ExpressionFactory.getArguments(facetValue.substring(facetValue.indexOf('(')+1,facetValue.lastIndexOf(')')).trim());
        if (pivotStr.length==1) {
          facetValue = getResult(pivotStr[0]);
        } else if (pivotStr.length==3) {
          facetValue = getResult(pivotStr[0],pivotStr[1],pivotStr[2]);
        } else {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+facetValue+" has an invalid amount of arguments.");
        }
      } catch (IndexOutOfBoundsException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+facetValue+" is invalid. Lacks parentheses.",e);
      }
    }
    if (fieldFacetExpressions.get(fieldFacet)!=null) {
      Expression[] facetExpressions = fieldFacetExpressions.get(fieldFacet).get(facetValue);
      for (int count = 0; count < expressionNames.length; count++) {
        if (expressionName.equals(expressionNames[count])) {
          Comparable value = facetExpressions[count].getValue();
          // Dates are rendered in Solr's external date format; everything else via toString().
          if (value.getClass().equals(Date.class)) {
            return TrieDateField.formatExternal((Date)value);
          } else {
            return value.toString();
          }
        }
      }
    }
    throw new SolrException(ErrorCode.BAD_REQUEST,"Field Facet Pivot expression "+expressionName+" not found.");
  }

  /**
   * Returns the value of an expression to use in a range or query facet.
   * Resolves nested result(...) and qresult(...) requests in facetValue before
   * looking up the query facet bucket.
   * @param currentFacet the name of the current facet
   * @param expressionName the name of the expression
   * @param queryFacet the facet query
   * @param facetValue the field value
   * @return String String representation of pivot value
   * @throws SolrException (BAD_REQUEST) if the expression cannot be resolved
   */
  @SuppressWarnings({ "deprecation", "rawtypes" })
  public String getQueryResult(String currentFacet, String expressionName, String queryFacet, String facetValue) {
    if (facetValue.contains(AnalyticsParams.RESULT) && !facetValue.contains(AnalyticsParams.QUERY_RESULT)) {
      try {
        String[] pivotStr = ExpressionFactory.getArguments(facetValue.substring(facetValue.indexOf('(')+1,facetValue.lastIndexOf(')')).trim());
        if (pivotStr.length==1) {
          facetValue = getResult(pivotStr[0]);
        } else if (pivotStr.length==3) {
          facetValue = getResult(pivotStr[0],pivotStr[1],pivotStr[2]);
        } else {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+facetValue+" has an invalid amount of arguments.");
        }
      } catch (IndexOutOfBoundsException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+facetValue+" is invalid. Lacks parentheses.",e);
      }
    }
    if (facetValue.contains(AnalyticsParams.QUERY_RESULT)) {
      try {
        String[] pivotStr = ExpressionFactory.getArguments(facetValue.substring(facetValue.indexOf('(')+1,facetValue.lastIndexOf(')')).trim());
        if (pivotStr.length==1) {
          facetValue = getResult(pivotStr[0]);
        } else if (pivotStr.length==3) {
          facetValue = getQueryResult(currentFacet,pivotStr[0],pivotStr[1],pivotStr[2]);
        } else {
          throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+facetValue+" has an invalid amount of arguments.");
        }
      } catch (IndexOutOfBoundsException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+facetValue+" is invalid. Lacks parentheses.",e);
      }
    }
    if (queryFacetExpressions.get(queryFacet)!=null) {
      Expression[] facetExpressions = queryFacetExpressions.get(queryFacet).get(facetValue);
      for (int count = 0; count < expressionNames.length; count++) {
        if (expressionName.equals(expressionNames[count])) {
          Comparable value = facetExpressions[count].getValue();
          if (value.getClass().equals(Date.class)) {
            return TrieDateField.formatExternal((Date)value);
          } else {
            return value.toString();
          }
        }
      }
    }
    // NOTE(review): message says "Field Facet" though this is the query facet path — confirm intended.
    throw new SolrException(ErrorCode.BAD_REQUEST,"Field Facet Pivot expression "+expressionName+" not found.");
  }

  /**
   * Exports the overall stats plus "fieldFacets", "rangeFacets" and "queryFacets"
   * sections. Field facet buckets honor the request's sort, limit, offset and
   * missing-value settings; range/query buckets are exported in insertion order.
   */
  @Override
  @SuppressWarnings("unchecked")
  public NamedList<?> export() {
    final NamedList<Object> base = (NamedList<Object>)super.export();
    NamedList<NamedList<?>> facetList = new NamedList<NamedList<?>>();
    // Add the field facet buckets to the output
    base.add("fieldFacets",facetList);
    for( FieldFacetRequest freq : request.getFieldFacets() ){
      final String name = freq.getName();
      if (hiddenFieldFacets.contains(name)) {
        continue;
      }
      final Map<String,Expression[]> buckets = fieldFacetExpressions.get(name);
      final NamedList<Object> bucketBase = new NamedList<Object>();
      Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
      final FieldFacetRequest fr = (FieldFacetRequest) freq;
      final FacetSortSpecification sort = fr.getSort();
      final int limit = fr.getLimit();
      final int offset = fr.getOffset();
      final boolean showMissing = fr.showsMissing();
      if (!showMissing) {
        buckets.remove(MISSING_VALUE);
      }
      // Sorting the buckets if a sort specification is provided
      if( sort != null && buckets.values().iterator().hasNext()){
        // expressionNames is sorted (see BasicAccumulator), so binarySearch is valid here.
        int sortPlace = Arrays.binarySearch(expressionNames, sort.getStatistic());
        final Expression first = buckets.values().iterator().next()[sortPlace];
        final Comparator<Expression> comp = (Comparator<Expression>) first.comparator(sort.getDirection());
        final List<Entry<String,Expression[]>> sorted = new ArrayList<Entry<String,Expression[]>>(buckets.size());
        Iterables.addAll(sorted, iter);
        Collections.sort(sorted, new EntryComparator(comp,sortPlace));
        iter = sorted;
      }
      // apply the limit
      if( limit > AnalyticsContentHandler.DEFAULT_FACET_LIMIT ){
        if( offset > 0 ){
          iter = Iterables.skip(iter, offset);
        }
        iter = Iterables.limit(iter, limit);
      }
      // Export each expression in the bucket.
      for( Entry<String,Expression[]> bucket : iter ){
        bucketBase.add(bucket.getKey(),export(bucket.getValue()));
      }
      facetList.add(name, bucketBase);
    }
    // Add the range facet buckets to the output
    facetList = new NamedList<NamedList<?>>();
    base.add("rangeFacets",facetList);
    for( RangeFacetRequest freq : request.getRangeFacets() ){
      final String name = freq.getName();
      final Map<String,Expression[]> buckets = rangeFacetExpressions.get(name);
      final NamedList<Object> bucketBase = new NamedList<Object>();
      Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
      for( Entry<String,Expression[]> bucket : iter ){
        bucketBase.add(bucket.getKey(),export(bucket.getValue()));
      }
      facetList.add(name, bucketBase);
    }
    // Add the query facet buckets to the output
    facetList = new NamedList<NamedList<?>>();
    base.add("queryFacets",facetList);
    for( QueryFacetRequest freq : request.getQueryFacets() ){
      final String name = freq.getName();
      final Map<String,Expression[]> buckets = queryFacetExpressions.get(name);
      final NamedList<Object> bucketBase = new NamedList<Object>();
      Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
      for( Entry<String,Expression[]> bucket : iter ){
        bucketBase.add(bucket.getKey(),export(bucket.getValue()));
      }
      facetList.add(name, bucketBase);
    }
    return base;
  }

  /**
   * Exports a list of expressions as a NamedList
   * @param expressionArr an array of expressions
   * @return named list of expressions
   */
  public NamedList<?> export(Expression[] expressionArr) {
    NamedList<Object> base = new NamedList<Object>();
    for (int count = 0; count < expressionArr.length; count++) {
      if (!hiddenExpressions.contains(expressionNames[count])) {
        base.add(expressionNames[count], expressionArr[count].getValue());
      }
    }
    return base;
  }

  /**
   * Processes the query and range facets.
   * Must be called if range and/or query facets are supported.
   * Finalizes the overall and field-facet stats first, since range/query facet
   * requests may reference them via result(...) pivots.
   */
  @Override
  public void postProcess() throws IOException {
    super.compute();
    for( Map<String, StatsCollector[]> f : fieldFacetCollectors.values() ){
      for( StatsCollector[] arr : f.values() ){
        for( StatsCollector b : arr ){
          b.compute();
        }
      }
    }
    basicsAndFieldFacetsComputed = true;
    final Filter filter = docs.getTopFilter();
    if( rangeFacets != null ){
      processRangeFacets(filter);
    }
    if( queryFacets != null ){
      processQueryFacets(filter);
    }
  }

  /**
   * Initiates the collecting of query facets
   * @param filter the base filter to work against
   * @throws IOException if searching failed
   */
  public void processQueryFacets(final Filter filter) throws IOException {
    for( QueryFacetRequest qfr : queryFacets ){
      for( String query : qfr.getQueries() ){
        // Resolve result(...) / qresult(...) pivot references into concrete query strings.
        if (query.contains(AnalyticsParams.RESULT) && !query.contains(AnalyticsParams.QUERY_RESULT)) {
          try {
            String[] pivotStr = ExpressionFactory.getArguments(query.substring(query.indexOf('(')+1,query.lastIndexOf(')')).trim());
            if (pivotStr.length==1) {
              query = getResult(pivotStr[0]);
            } else if (pivotStr.length==3) {
              query = getResult(pivotStr[0],pivotStr[1],pivotStr[2]);
            } else {
              throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+query+" has an invalid amount of arguments.");
            }
          } catch (IndexOutOfBoundsException e) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+query+" is invalid. Lacks parentheses.",e);
          }
        } else if (query.contains(AnalyticsParams.QUERY_RESULT)) {
          try {
            String[] pivotStr = ExpressionFactory.getArguments(query.substring(query.indexOf('(')+1,query.lastIndexOf(')')).trim());
            if (pivotStr.length==3) {
              query = getQueryResult(qfr.getName(),pivotStr[0],pivotStr[1],pivotStr[2]);
            } else {
              throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+query+" has an invalid amount of arguments.");
            }
          } catch (IndexOutOfBoundsException e) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"Result request "+query+" is invalid. Lacks parentheses.",e);
          }
        }
        QueryFacetAccumulator qAcc = new QueryFacetAccumulator(this,qfr.getName(),query);
        final Query q;
        try {
          q = QParser.getParser(query, null, queryRequest).getQuery();
        } catch( SyntaxError e ){
          throw new SolrException(ErrorCode.BAD_REQUEST,"Invalid query '"+query+"'",e);
        }
        // The searcher sends docIds to the QueryFacetAccumulator which forwards
        // them to <code>collectQuery()</code> in this class for collection.
        searcher.search(q, filter, qAcc);
        computeQueryFacet(qfr.getName());
        queryCount++;
      }
    }
  }

  /** @return the number of facet queries executed so far (JMX stat). */
  @Override
  public long getNumQueries() {
    return queryCount;
  }

  /**
   * Initiates the collecting of range facets
   * @param filter the base filter to use
   * @throws IOException if searching fails
   */
  public void processRangeFacets(final Filter filter) throws IOException {
    for( RangeFacetRequest rfr : rangeFacets ){
      String[] pivotStr;
      // Resolve result(...) pivots in the start, end and gap parameters;
      // qresult(...) is not allowed in range facets.
      String start = rfr.getStart();
      if (start.contains(AnalyticsParams.QUERY_RESULT)) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"Query result requests can not be used in Range Facets");
      } else if (start.contains(AnalyticsParams.RESULT)) {
        try {
          pivotStr = ExpressionFactory.getArguments(start.substring(start.indexOf('(')+1,start.indexOf(')')).trim());
          if (pivotStr.length==1) {
            rfr.setStart(getResult(pivotStr[0]));
          } else if (pivotStr.length==3) {
            rfr.setStart(getResult(pivotStr[0],pivotStr[1],pivotStr[2]));
          } else {
            throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+start+" has an invalid amount of arguments.");
          }
        } catch (IndexOutOfBoundsException e) {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+start+" is invalid. Lacks parentheses.",e);
        }
      }
      String end = rfr.getEnd();
      if (end.contains(AnalyticsParams.QUERY_RESULT)) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "Query result requests can not be used in Range Facets");
      } else if (end.contains(AnalyticsParams.RESULT)) {
        try {
          pivotStr = ExpressionFactory.getArguments(end.substring(end.indexOf('(')+1,end.indexOf(')')).trim());
          if (pivotStr.length==1) {
            rfr.setEnd(getResult(pivotStr[0]));
          } else if (pivotStr.length==3) {
            rfr.setEnd(getResult(pivotStr[0],pivotStr[1],pivotStr[2]));
          } else {
            throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+end+" has an invalid amount of arguments.");
          }
        } catch (IndexOutOfBoundsException e) {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+end+" is invalid. Lacks parentheses.",e);
        }
      }
      String[] gaps = rfr.getGaps();
      for (int count = 0; count<gaps.length; count++){
        String gap = gaps[count];
        if (gap.contains(AnalyticsParams.QUERY_RESULT)) {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Query result requests can not be used in Range Facets");
        } else if (gap.contains(AnalyticsParams.RESULT)) {
          try {
            pivotStr = ExpressionFactory.getArguments(gap.substring(gap.indexOf('(')+1,gap.indexOf(')')).trim());
            if (pivotStr.length==1) {
              gaps[count]=getResult(pivotStr[0]);
            } else if (pivotStr.length==3) {
              gaps[count]=getResult(pivotStr[0],pivotStr[1],pivotStr[2]);
            } else {
              throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+gap+" has an invalid amount of arguments.");
            }
          } catch (IndexOutOfBoundsException e) {
            throw new SolrException(ErrorCode.BAD_REQUEST, "Result request "+gap+" is invalid. Lacks parentheses.",e);
          }
        }
      }
      // Computes the end points of the ranges in the rangeFacet
      final RangeEndpointCalculator<? extends Comparable<?>> rec = RangeEndpointCalculator.create(rfr);
      final SchemaField sf = rfr.getField();
      // Create a rangeFacetAccumulator for each range and
      // collect the documents for that range.
      for( FacetRange range : rec.getRanges() ){
        final String upper;
        final String lower;
        // Build the human-readable bucket label, e.g. "[low TO high)" or "(* TO high]".
        String facetValue = "";
        if( range.lower == null ){
          facetValue = "(*";
          lower = null;
        } else {
          lower = range.lower;
          facetValue = ((range.includeLower)?"[":"(") + range.lower;
        }
        facetValue+=" TO ";
        if( range.upper == null ){
          upper = null;
          facetValue += "*)";
        } else {
          upper = range.upper;
          facetValue += range.upper + ((range.includeUpper)?"]":")");
        }
        Query q = sf.getType().getRangeQuery(null, sf, lower, upper, range.includeLower,range.includeUpper);
        RangeFacetAccumulator rAcc = new RangeFacetAccumulator(this,rfr.getName(),facetValue);
        // The searcher sends docIds to the RangeFacetAccumulator which forwards
        // them to <code>collectRange()</code> in this class for collection.
        searcher.search(q, filter, rAcc);
        computeRangeFacet(sf.getName());
      }
    }
  }
}

View File

@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator;
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
import org.apache.solr.common.util.NamedList;
/**
 * Abstract Collector that manages all StatsCollectors, Expressions and Facets.
 */
public abstract class ValueAccumulator extends Collector {

  /**
   * Switches the accumulator over to a new index segment.
   * @param context The context to read documents from.
   * @throws IOException if setting next reader fails
   */
  public abstract void setNextReader(AtomicReaderContext context) throws IOException;

  /**
   * Finalizes the statistics within each StatsCollector.
   * Must be called before <code>export()</code>.
   */
  public abstract void compute();

  /**
   * Exports the accumulated results.
   * <code>compute()</code> must have been called first.
   */
  public abstract NamedList<?> export();

  /**
   * Hook invoked after collection has finished; no-op by default,
   * subclasses may override to do segment-independent cleanup work.
   * @throws IOException if post processing fails
   */
  public void postProcess() throws IOException {
    // NOP
  }

  // Documents may be delivered in any order; no scoring-order dependency.
  @Override
  public boolean acceptsDocsOutOfOrder() {
    return true;
  }

  // Scores are never read by analytics accumulators, so the scorer is ignored.
  @Override
  public void setScorer(Scorer scorer) throws IOException {
    // NOP
  }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator.facet;
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
/**
 * Interface that describes the methods needed for an Accumulator to be able to handle
 * fieldFacets, rangeFacets and queryFacets.
 */
public interface FacetValueAccumulator {
  /** Collects the given document under the named field facet and facet value. */
  void collectField(int doc, String facetName, String facetValue) throws IOException;
  /** Collects the given document under the named query facet and query string. */
  void collectQuery(int doc, String facetName, String facetValue) throws IOException;
  /** Collects the given document under the named range facet and range string. */
  void collectRange(int doc, String facetName, String facetValue) throws IOException;
  /** Points the query-facet StatsCollectors at a new index segment. */
  void setQueryStatsCollectorReaders(AtomicReaderContext context) throws IOException;
  /** Points the range-facet StatsCollectors at a new index segment. */
  void setRangeStatsCollectorReaders(AtomicReaderContext context) throws IOException;
}

View File

@ -0,0 +1,143 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator.facet;
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.analytics.accumulator.FacetingAccumulator;
import org.apache.solr.analytics.accumulator.ValueAccumulator;
import org.apache.solr.analytics.util.AnalyticsParsers;
import org.apache.solr.analytics.util.AnalyticsParsers.NumericParser;
import org.apache.solr.analytics.util.AnalyticsParsers.Parser;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.search.SolrIndexSearcher;
/**
 * An Accumulator that manages the faceting for fieldFacets.
 * Collects the field facet values.
 */
public class FieldFacetAccumulator extends ValueAccumulator {
  // Converts raw docValues bytes/numbers into the string form sent to the parent.
  protected final Parser parser;
  // Receives every (doc, facetName, facetValue) triple collected here.
  protected final FacetValueAccumulator parent;
  protected final String name;
  protected final SolrIndexSearcher searcher;
  protected final SchemaField schemaField;
  protected final boolean multiValued;
  protected final boolean numField;
  protected final boolean dateField;
  // Scratch buffer reused for every ordinal lookup.
  protected final BytesRef value;
  // Per-segment docValues; which of these is populated depends on the field type.
  protected SortedSetDocValues setValues;
  protected SortedDocValues sortValues;
  protected NumericDocValues numValues;
  protected Bits numValuesBits;

  /**
   * @param searcher searcher the documents come from
   * @param parent accumulator notified of every collected facet value
   * @param schemaField the field being faceted on; must have docValues
   * @throws IOException declared for caller compatibility; validation failures
   *         are reported as a SolrException
   */
  public FieldFacetAccumulator(SolrIndexSearcher searcher, FacetValueAccumulator parent, SchemaField schemaField) throws IOException {
    if( !schemaField.hasDocValues() ){
      throw new SolrException(ErrorCode.BAD_REQUEST, "Field '"+schemaField.getName()+"' does not have docValues");
    }
    this.searcher = searcher;
    this.schemaField = schemaField;
    this.name = schemaField.getName();
    // NOTE: a second hasDocValues() check (throwing IOException) used to follow
    // here; it was unreachable because the SolrException above already covers it,
    // so it has been removed.
    this.multiValued = schemaField.multiValued();
    this.numField = schemaField.getType().getNumericType()!=null;
    this.dateField = schemaField.getType().getClass().equals(TrieDateField.class);
    this.parent = parent;
    this.value = new BytesRef();
    this.parser = AnalyticsParsers.getParser(schemaField.getType().getClass());
  }

  /** Factory method mirroring the constructor. */
  public static FieldFacetAccumulator create(SolrIndexSearcher searcher, FacetValueAccumulator parent, SchemaField facetField) throws IOException{
    return new FieldFacetAccumulator(searcher,parent,facetField);
  }

  /**
   * Move to the next set of documents to add to the field facet.
   */
  @Override
  public void setNextReader(AtomicReaderContext context) throws IOException {
    if (multiValued) {
      setValues = context.reader().getSortedSetDocValues(name);
    } else {
      if (numField) {
        numValues = context.reader().getNumericDocValues(name);
        // Needed to distinguish a stored zero from a missing value.
        numValuesBits = context.reader().getDocsWithField(name);
      } else {
        sortValues = context.reader().getSortedDocValues(name);
      }
    }
  }

  /**
   * Tell the FacetingAccumulator to collect the doc with the
   * given fieldFacet and value(s).
   */
  @Override
  public void collect(int doc) throws IOException {
    if (multiValued) {
      boolean exists = false;
      if (setValues!=null) {
        setValues.setDocument(doc);
        int term;
        while ((term = (int)setValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
          exists = true;
          setValues.lookupOrd(term, value);
          parent.collectField(doc, name, parser.parse(value) );
        }
      }
      if (!exists) {
        parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE );
      }
    } else {
      if(numField){
        long v = numValues.get(doc);
        // A raw zero may mean "missing"; the docsWithField bits disambiguate.
        if( v != 0 || numValuesBits.get(doc) ){
          // Reuse v instead of reading the docValues a second time.
          parent.collectField(doc, name, ((NumericParser)parser).parseNum(v));
        } else {
          parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE );
        }
      } else {
        sortValues.get(doc,value);
        // The shared EMPTY_BYTES sentinel marks a document with no value.
        if( BytesRef.EMPTY_BYTES == value.bytes ){
          parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE );
        } else {
          parent.collectField(doc, name, parser.parse(value) );
        }
      }
    }
  }

  // Field-facet statistics are finalized by the parent; nothing to compute here.
  @Override
  public void compute() {}

  // This accumulator exports nothing itself.
  @Override
  public NamedList<?> export() { return null; }
}

View File

@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator.facet;
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.solr.analytics.accumulator.ValueAccumulator;
import org.apache.solr.analytics.statistics.StatsCollector;
import org.apache.solr.common.util.NamedList;
/**
 * An Accumulator that manages a certain query of a given query facet.
 */
public class QueryFacetAccumulator extends ValueAccumulator {
  protected final FacetValueAccumulator parent;
  protected final String facetName;
  protected final String facetValue;

  /**
   * @param parent accumulator that receives the collected documents
   * @param facetName name of the query facet this query belongs to
   * @param facetValue the query string within that facet
   */
  public QueryFacetAccumulator(FacetValueAccumulator parent, String facetName, String facetValue) {
    this.parent = parent;
    this.facetName = facetName;
    this.facetValue = facetValue;
  }

  /**
   * Update the readers of the queryFacet {@link StatsCollector}s in FacetingAccumulator
   */
  @Override
  public void setNextReader(AtomicReaderContext context) throws IOException {
    parent.setQueryStatsCollectorReaders(context);
  }

  /**
   * Tell the FacetingAccumulator to collect the doc with the
   * given queryFacet and query.
   */
  @Override
  public void collect(int doc) throws IOException {
    parent.collectQuery(doc, facetName, facetValue);
  }

  // Statistics are finalized by the parent accumulator.
  @Override
  public void compute() {
    // NOP
  }

  // This accumulator exports nothing itself.
  @Override
  public NamedList<?> export() {
    // NOP
    return null;
  }
}

View File

@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.accumulator.facet;
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.solr.analytics.statistics.StatsCollector;
/**
 * An Accumulator that manages a certain range of a given range facet.
 */
public class RangeFacetAccumulator extends QueryFacetAccumulator {
  public RangeFacetAccumulator(FacetValueAccumulator parent, String facetName, String facetValue) {
    super(parent, facetName, facetValue);
  }

  /**
   * Update the readers of the rangeFacet {@link StatsCollector}s in FacetingAccumulator
   */
  @Override
  public void setNextReader(AtomicReaderContext context) throws IOException {
    parent.setRangeStatsCollectorReaders(context);
  }

  /**
   * Tell the FacetingAccumulator to collect the doc with the
   * given rangeFacet and range.
   */
  @Override
  public void collect(int doc) throws IOException {
    parent.collectRange(doc, facetName, facetValue);
  }
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Accumulators for accumulating over different types of facets
</p>
</body>
</html>

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Accumulators accumulate values over different types of structure (e.g. result, facet, etc.)
</p>
</body>
</html>

View File

@ -0,0 +1,86 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
import java.util.Date;
import org.apache.solr.analytics.statistics.StatsCollector;
/**
* <code>BaseExpression</code> returns the value returned by the {@link StatsCollector} for the specified stat.
*/
public class BaseExpression extends Expression {
protected final StatsCollector statsCollector;
protected final String stat;
public BaseExpression(StatsCollector statsCollector, String stat) {
this.statsCollector = statsCollector;
this.stat = stat;
}
public Comparable getValue() {
return statsCollector.getStat(stat);
}
}
/**
* <code>ConstantStringExpression</code> returns the specified constant double.
*/
class ConstantNumberExpression extends Expression {
protected final Double constant;
public ConstantNumberExpression(double d) {
constant = new Double(d);
}
public Comparable getValue() {
return constant;
}
}
/**
* <code>ConstantStringExpression</code> returns the specified constant date.
*/
class ConstantDateExpression extends Expression {
protected final Date constant;
public ConstantDateExpression(Date date) {
constant = date;
}
public ConstantDateExpression(Long date) {
constant = new Date(date);
}
public Comparable getValue() {
return constant;
}
}
/**
* <code>ConstantStringExpression</code> returns the specified constant string.
*/
class ConstantStringExpression extends Expression {
protected final String constant;
public ConstantStringExpression(String str) {
constant = str;
}
public Comparable getValue() {
return constant;
}
}

View File

@ -0,0 +1,100 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
/**
 * Abstraction of an expression that applies a function to two delegate expressions.
 */
public abstract class DualDelegateExpression extends Expression {
  // Left-hand operand of the binary function.
  protected Expression a;
  // Right-hand operand of the binary function.
  protected Expression b;

  /**
   * @param a first (left) delegate expression
   * @param b second (right) delegate expression
   */
  public DualDelegateExpression(Expression a, Expression b) {
    this.a = a;
    this.b = b;
  }
}
/**
 * <code>DivideExpression</code> returns the quotient of 'a' and 'b'.
 */
class DivideExpression extends DualDelegateExpression {
  /**
   * @param a numerator
   * @param b divisor
   */
  public DivideExpression(Expression a, Expression b) {
    super(a,b);
  }

  /**
   * @return a/b as a Double, or null when either operand is null.
   * Division by zero follows IEEE-754 double semantics (Infinity/NaN).
   */
  @Override
  public Comparable getValue() {
    Comparable aComp = a.getValue();
    Comparable bComp = b.getValue();
    if (aComp==null || bComp==null) {
      return null;
    }
    double div = ((Number)aComp).doubleValue();
    div = div / ((Number)bComp).doubleValue();
    // Double.valueOf avoids the deprecated Double(double) constructor.
    return Double.valueOf(div);
  }
}
/**
 * <code>PowerExpression</code> returns 'a' to the power of 'b'.
 */
class PowerExpression extends DualDelegateExpression {
  /**
   * @param a base
   * @param b exponent
   */
  public PowerExpression(Expression a, Expression b) {
    super(a,b);
  }

  /**
   * @return a^b as a Double, or null when either operand is null.
   */
  @Override
  public Comparable getValue() {
    Comparable aComp = a.getValue();
    Comparable bComp = b.getValue();
    if (aComp==null || bComp==null) {
      return null;
    }
    // Double.valueOf avoids the deprecated Double(double) constructor.
    return Double.valueOf(Math.pow(((Number)aComp).doubleValue(),((Number)bComp).doubleValue()));
  }
}
/**
 * <code>LogExpression</code> returns the log of the delegate's value given a base number.
 */
class LogExpression extends DualDelegateExpression {
  /**
   * @param a number
   * @param b base
   */
  public LogExpression(Expression a, Expression b) {
    super(a,b);
  }

  /**
   * Computes log base-b of a via the change-of-base identity.
   * @return the logarithm, or null when either operand is null.
   */
  @Override
  public Comparable getValue() {
    final Comparable numberValue = a.getValue();
    final Comparable baseValue = b.getValue();
    if (numberValue == null || baseValue == null) {
      return null;
    }
    final double numerator = Math.log(((Number)numberValue).doubleValue());
    final double denominator = Math.log(((Number)baseValue).doubleValue());
    return numerator / denominator;
  }
}

View File

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
import java.util.Comparator;
import org.apache.solr.analytics.request.FieldFacetRequest.FacetSortDirection;
/**
 * Expressions map either zero, one, two or many inputs to a single value.
 * They can be defined recursively to compute complex math.
 */
public abstract class Expression {
  /** @return the value this expression currently evaluates to */
  public abstract Comparable getValue();

  /**
   * Builds a comparator that orders expressions by their values.
   * @param direction ascending or descending ordering of the values
   * @return a comparator over expressions
   */
  public Comparator<Expression> comparator(final FacetSortDirection direction) {
    return new Comparator<Expression>(){
      @SuppressWarnings("unchecked")
      @Override
      public int compare(Expression lhs, Expression rhs) {
        return (direction == FacetSortDirection.ASCENDING)
            ? lhs.getValue().compareTo(rhs.getValue())
            : rhs.getValue().compareTo(lhs.getValue());
      }
    };
  }
}

View File

@ -0,0 +1,185 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.analytics.statistics.StatsCollector;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.TrieDateField;
public class ExpressionFactory {

  /**
   * Creates a single expression that contains delegate expressions and/or
   * a StatsCollector.
   * StatsCollectors are given as input and not created within the method so that
   * expressions can share the same StatsCollectors, minimizing computation.
   *
   * @param expression String representation of the desired expression
   * @param statsCollectors List of StatsCollectors to build the expression with.
   * @return the expression
   */
  @SuppressWarnings("deprecation")
  public static Expression create(String expression, StatsCollector[] statsCollectors) {
    int paren = expression.indexOf('(');
    if (paren<=0) {
      throw new SolrException(ErrorCode.BAD_REQUEST, "The expression ["+expression+"] has no arguments and is not supported.");
    }
    String topOperation = expression.substring(0,paren).trim();
    String operands;
    try {
      operands = expression.substring(paren+1, expression.lastIndexOf(')')).trim();
    } catch (Exception e) {
      throw new SolrException(ErrorCode.BAD_REQUEST,"Missing closing parenthesis in ["+expression+"]",e);
    }

    // Builds a statistic, constant or recursively builds an expression tree

    // Statistic
    if (AnalyticsParams.ALL_STAT_SET.contains(topOperation)) {
      if (topOperation.equals(AnalyticsParams.STAT_PERCENTILE)) {
        // percentile(p,source): fold the percentile value into the stat name
        // and keep only the value source as the operand.
        operands = expression.substring(expression.indexOf(',')+1, expression.lastIndexOf(')')).trim();
        topOperation = topOperation+"_"+expression.substring(expression.indexOf('(')+1, expression.indexOf(',')).trim();
      }
      StatsCollector collector = null;
      // Finds the desired counter and builds an expression around it and the desired statistic.
      for (StatsCollector c : statsCollectors) {
        if (c.valueSourceString().equals(operands)) {
          collector = c;
          break;
        }
      }
      if (collector == null) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "ValueSource ["+operands+"] in Expression ["+expression+"] not found.");
      }
      return new BaseExpression(collector, topOperation);
    }

    // Constant
    if (topOperation.equals(AnalyticsParams.CONSTANT_NUMBER)) {
      try {
        return new ConstantNumberExpression(Double.parseDouble(operands));
      } catch (NumberFormatException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "The constant "+operands+" cannot be converted into a number.",e);
      }
    } else if (topOperation.equals(AnalyticsParams.CONSTANT_DATE)) {
      try {
        return new ConstantDateExpression(TrieDateField.parseDate(operands));
      } catch (ParseException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "The constant "+operands+" cannot be converted into a date.",e);
      }
    } else if (topOperation.equals(AnalyticsParams.CONSTANT_STRING)) {
      // Strings keep their surrounding whitespace, so re-extract without trim().
      operands = expression.substring(paren+1, expression.lastIndexOf(')'));
      return new ConstantStringExpression(operands);
    }

    // Complex Delegating Expressions
    String[] arguments = getArguments(operands);
    Expression[] expArgs = new Expression[arguments.length];
    for (int count = 0; count < arguments.length; count++) {
      // Recursively builds delegate expressions
      expArgs[count] = create(arguments[count], statsCollectors);
    }

    // Single Delegate Expressions
    if (expArgs.length==1) {
      // Numeric Expression
      if (topOperation.equals(AnalyticsParams.NEGATE)) {
        return new NegateExpression(expArgs[0]);
      }
      if (topOperation.equals(AnalyticsParams.ABSOLUTE_VALUE)) {
        return new AbsoluteValueExpression(expArgs[0]);
      }
      // String Expression
      else if (topOperation.equals(AnalyticsParams.REVERSE)) {
        return new ReverseExpression(expArgs[0]);
      }
      throw new SolrException(ErrorCode.BAD_REQUEST, topOperation+" does not have the correct number of arguments.");
    } else {
      // Multi Delegate Expressions
      // Numeric Expression
      if (topOperation.equals(AnalyticsParams.ADD)) {
        return new AddExpression(expArgs);
      } else if (topOperation.equals(AnalyticsParams.MULTIPLY)) {
        return new MultiplyExpression(expArgs);
      }
      // Date Expression
      else if (topOperation.equals(AnalyticsParams.DATE_MATH)) {
        return new DateMathExpression(expArgs);
      }
      // String Expression
      else if (topOperation.equals(AnalyticsParams.CONCATENATE)) {
        return new ConcatenateExpression(expArgs);
      }
      // Dual Delegate Expressions
      else if (expArgs.length==2 && (topOperation.equals(AnalyticsParams.DIVIDE) || topOperation.equals(AnalyticsParams.POWER)
          || topOperation.equals(AnalyticsParams.LOG))) {
        // Numeric Expression
        if (topOperation.equals(AnalyticsParams.DIVIDE)) {
          return new DivideExpression(expArgs[0], expArgs[1]);
        } else if (topOperation.equals(AnalyticsParams.POWER)) {
          return new PowerExpression(expArgs[0], expArgs[1]);
        } else if (topOperation.equals(AnalyticsParams.LOG)) {
          return new LogExpression(expArgs[0], expArgs[1]);
        }
        return null; // unreachable: the guard above covers all three operations
      }
      throw new SolrException(ErrorCode.BAD_REQUEST, topOperation+" does not have the correct number of arguments or is unsupported.");
    }
  }

  /**
   * Splits up an Expression's arguments at top-level (unparenthesized,
   * unescaped) commas. Backslash escapes the following character, so
   * <code>\,</code> <code>\(</code> and <code>\)</code> have no structural
   * meaning and are unescaped in the returned arguments.
   *
   * FIX: previously the escape flag was set and immediately cleared within the
   * same loop iteration, so escaping never worked; the last argument was also
   * never unescaped, and an unbalanced expression produced a {null} array.
   *
   * @param expression Current expression string
   * @return the list of trimmed, unescaped arguments
   */
  public static String[] getArguments(String expression) {
    List<String> arguments = new ArrayList<String>();
    char[] chars = expression.toCharArray();
    int stack = 0;
    int start = 0;
    boolean escapedCharacter = false;
    for (int count = 0; count < chars.length; count++) {
      char c = chars[count];
      if (escapedCharacter) {
        // The previous character was a backslash: this one is literal.
        escapedCharacter = false;
      } else if (c == '\\') {
        escapedCharacter = true;
      } else if (c == ',' && stack == 0) {
        arguments.add(unescape(expression.substring(start, count)));
        start = count + 1;
      } else if (c == '(') {
        stack++;
      } else if (c == ')') {
        stack--;
      }
    }
    if (stack == 0) {
      arguments.add(unescape(expression.substring(start)));
    }
    return arguments.toArray(new String[arguments.size()]);
  }

  /** Removes the escape backslashes from structural characters and trims. */
  private static String unescape(String argument) {
    return argument.replace("\\(","(").replace("\\)",")").replace("\\,",",").trim();
  }
}

View File

@ -0,0 +1,132 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
import java.text.ParseException;
import java.util.Date;
import org.apache.solr.util.DateMathParser;
/**
 * Abstraction of an expression that applies a function to an array of delegate expressions.
 */
public abstract class MultiDelegateExpression extends Expression {
  // Operands of the n-ary function, applied in array order.
  protected final Expression[] delegates;

  /**
   * @param delegates the delegate expressions the function is applied to
   */
  public MultiDelegateExpression(Expression[] delegates) {
    this.delegates = delegates;
  }
}
/**
 * <code>AddExpression</code> returns the sum of its components' values.
 */
class AddExpression extends MultiDelegateExpression {
  public AddExpression(Expression[] delegates) {
    super(delegates);
  }

  /**
   * @return the sum of the delegates' values as a Double; dates contribute
   * their epoch-millisecond value. Returns null if any delegate's value is null.
   */
  @Override
  public Comparable getValue() {
    double sum = 0;
    for (Expression delegate : delegates) {
      Comparable dComp = delegate.getValue();
      if (dComp==null) {
        return null;
      } else if (dComp.getClass().equals(Date.class)) {
        // Long.valueOf avoids the deprecated Long(long) constructor.
        dComp = Long.valueOf(((Date)dComp).getTime());
      }
      sum += ((Number)dComp).doubleValue();
    }
    // Double.valueOf avoids the deprecated Double(double) constructor.
    return Double.valueOf(sum);
  }
}
/**
 * <code>MultiplyExpression</code> returns the product of its delegates' values.
 */
class MultiplyExpression extends MultiDelegateExpression {
  public MultiplyExpression(Expression[] delegates) {
    super(delegates);
  }

  /**
   * @return the product of the delegates' values as a Double, or null if any
   * delegate's value is null.
   */
  @Override
  public Comparable getValue() {
    double prod = 1;
    for (Expression delegate : delegates) {
      Comparable dComp = delegate.getValue();
      if (dComp==null) {
        return null;
      }
      prod *= ((Number)dComp).doubleValue();
    }
    // Double.valueOf avoids the deprecated Double(double) constructor.
    return Double.valueOf(prod);
  }
}
/**
 * <code>DateMathExpression</code> returns the start date modified by the DateMath operations
 */
class DateMathExpression extends MultiDelegateExpression {
  /**
   * @param delegates A list of Expressions. The first element in the list
   * should be a date Expression which represents the starting date.
   * The rest of the list should be string Expression objects which contain
   * the DateMath operations (e.g. "+1DAY") to perform on the start date, in order.
   */
  public DateMathExpression(Expression[] delegates) {
    super(delegates);
  }

  /**
   * @return the start date advanced by every date-math operation, or null
   * when any operation string is null.
   */
  @Override
  public Comparable getValue() {
    DateMathParser parser = new DateMathParser();
    // The first delegate supplies the starting date ("now" for the parser).
    parser.setNow((Date)delegates[0].getValue());
    try {
      for (int count = 1; count<delegates.length; count++) {
        Comparable dComp = delegates[count].getValue();
        if (dComp==null) {
          // A missing operation makes the whole expression undefined.
          return null;
        }
        parser.setNow(parser.parseMath((String)dComp));
      }
      return parser.getNow();
    } catch (ParseException e) {
      // NOTE(review): a malformed date-math string is only printed and the
      // partially-advanced date is silently returned — consider surfacing
      // this to the caller as an error instead of printStackTrace().
      e.printStackTrace();
      return parser.getNow();
    }
  }
}
/**
 * <code>ConcatenateExpression</code> returns the concatenation of its delegates' values in the order given.
 */
class ConcatenateExpression extends MultiDelegateExpression {
  public ConcatenateExpression(Expression[] delegates) {
    super(delegates);
  }

  /**
   * @return the delegates' string forms joined in order, or null if any
   * delegate's value is null.
   */
  @Override
  public Comparable getValue() {
    final StringBuilder result = new StringBuilder();
    for (int i = 0; i < delegates.length; i++) {
      final Comparable part = delegates[i].getValue();
      if (part == null) {
        return null;
      }
      result.append(part.toString());
    }
    return result.toString();
  }
}

View File

@ -0,0 +1,89 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
import java.util.Date;
/**
* Abstraction of an expression that applies a function to one delegate expression.
*/
public abstract class SingleDelegateExpression extends Expression {
  // The single expression whose value this expression transforms.
  protected Expression delegate;

  /**
   * @param delegate the Expression whose value this expression operates on
   */
  public SingleDelegateExpression(Expression delegate) {
    this.delegate = delegate;
  }
}
/**
* <code>NegateExpression</code> returns the negation of the delegate's value.
*/
class NegateExpression extends SingleDelegateExpression {
  public NegateExpression(Expression delegate) {
    super(delegate);
  }

  /**
   * Returns the arithmetic negation of the delegate's value as a Double,
   * or null if the delegate evaluates to null. Date values are negated via
   * their epoch-millisecond representation.
   */
  @Override
  public Comparable getValue() {
    Comparable nComp = delegate.getValue();
    if (nComp == null) {
      return null;
    } else if (nComp instanceof Date) {
      // instanceof (rather than getClass().equals(Date.class)) also
      // accepts Date subclasses such as java.sql.Date.
      nComp = Long.valueOf(((Date) nComp).getTime());
    }
    return Double.valueOf(-((Number) nComp).doubleValue());
  }
}
/**
 * <code>AbsoluteValueExpression</code> returns the absolute value of the delegate's value.
*/
class AbsoluteValueExpression extends SingleDelegateExpression {
  public AbsoluteValueExpression(Expression delegate) {
    super(delegate);
  }

  /**
   * Returns the absolute value of the delegate's numeric value as a Double,
   * or null if the delegate evaluates to null.
   */
  @Override
  public Comparable getValue() {
    Comparable nComp = delegate.getValue();
    if (nComp == null) {
      return null;
    }
    // Math.abs replaces the manual sign branch; behavior is identical
    // except that -0.0 normalizes to 0.0.
    return Double.valueOf(Math.abs(((Number) nComp).doubleValue()));
  }
}
/**
 * <code>ReverseExpression</code> returns the reverse of the delegate's string value.
*/
class ReverseExpression extends SingleDelegateExpression {
  public ReverseExpression(Expression delegate) {
    super(delegate);
  }

  /**
   * Returns the delegate's string value with its characters reversed,
   * or null if the delegate evaluates to null.
   */
  @Override
  public Comparable getValue() {
    Comparable value = delegate.getValue();
    if (value == null) {
      return null;
    }
    StringBuilder reversed = new StringBuilder(value.toString());
    return reversed.reverse().toString();
  }
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Expressions map either zero, one, two or many inputs to a single value. They can be defined recursively to compute complex math.
</p>
</body>
</html>

View File

@ -0,0 +1,114 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.plugin;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.util.stats.Snapshot;
import org.apache.solr.util.stats.Timer;
import org.apache.solr.util.stats.TimerContext;
public class AnalyticsStatisticsCollector {
  // Counters for the various kinds of work the analytics component performs.
  private final AtomicLong numRequests;
  private final AtomicLong numAnalyticsRequests;
  private final AtomicLong numStatsRequests;
  private final AtomicLong numCollectedStats;
  private final AtomicLong numFieldFacets;
  private final AtomicLong numRangeFacets;
  private final AtomicLong numQueryFacets;
  private final AtomicLong numQueries;
  // Timing for whole requests; snapshots feed the percentile statistics.
  private final Timer requestTimes;

  // Timer for the request currently in flight; set by startRequest().
  public TimerContext currentTimer;

  public AnalyticsStatisticsCollector() {
    numRequests = new AtomicLong();
    numAnalyticsRequests = new AtomicLong();
    numStatsRequests = new AtomicLong();
    numCollectedStats = new AtomicLong();
    numFieldFacets = new AtomicLong();
    numRangeFacets = new AtomicLong();
    numQueryFacets = new AtomicLong();
    numQueries = new AtomicLong();
    requestTimes = new Timer();
  }

  /** Counts a new request and begins timing it. Pair with {@link #endRequest()}. */
  public void startRequest() {
    numRequests.incrementAndGet();
    currentTimer = requestTimes.time();
  }

  public void addRequests(long num) {
    numAnalyticsRequests.addAndGet(num);
  }

  public void addStatsRequests(long num) {
    numStatsRequests.addAndGet(num);
  }

  public void addStatsCollected(long num) {
    numCollectedStats.addAndGet(num);
  }

  public void addFieldFacets(long num) {
    numFieldFacets.addAndGet(num);
  }

  public void addRangeFacets(long num) {
    numRangeFacets.addAndGet(num);
  }

  public void addQueryFacets(long num) {
    numQueryFacets.addAndGet(num);
  }

  public void addQueries(long num) {
    numQueries.addAndGet(num);
  }

  /** Stops the current request timer. Safe to call even if no request was started. */
  public void endRequest() {
    // Guard against an NPE when endRequest() is called without a matching
    // startRequest() (previously this dereferenced a null currentTimer).
    if (currentTimer != null) {
      currentTimer.stop();
    }
  }

  /**
   * Builds the MBean statistics list: raw counters plus request-rate and
   * latency-percentile figures derived from the request timer.
   */
  public NamedList<Object> getStatistics() {
    NamedList<Object> lst = new SimpleOrderedMap<Object>();
    Snapshot snapshot = requestTimes.getSnapshot();
    lst.add("requests", numRequests.longValue());
    lst.add("analyticsRequests", numAnalyticsRequests.longValue());
    lst.add("statsRequests", numStatsRequests.longValue());
    lst.add("statsCollected", numCollectedStats.longValue());
    lst.add("fieldFacets", numFieldFacets.longValue());
    lst.add("rangeFacets", numRangeFacets.longValue());
    lst.add("queryFacets", numQueryFacets.longValue());
    lst.add("queriesInQueryFacets", numQueries.longValue());
    lst.add("totalTime", requestTimes.getSum());
    lst.add("avgRequestsPerSecond", requestTimes.getMeanRate());
    lst.add("5minRateReqsPerSecond", requestTimes.getFiveMinuteRate());
    lst.add("15minRateReqsPerSecond", requestTimes.getFifteenMinuteRate());
    lst.add("avgTimePerRequest", requestTimes.getMean());
    lst.add("medianRequestTime", snapshot.getMedian());
    lst.add("75thPcRequestTime", snapshot.get75thPercentile());
    lst.add("95thPcRequestTime", snapshot.get95thPercentile());
    lst.add("99thPcRequestTime", snapshot.get99thPercentile());
    lst.add("999thPcRequestTime", snapshot.get999thPercentile());
    return lst;
  }
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
MBean plugins for stats collection
</p>
</body>
</html>

View File

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import org.apache.solr.schema.SchemaField;
/**
* An abstract request for a facet over a single field, such as a field or range facet.
*/
public abstract class AbstractFieldFacetRequest implements FacetRequest {
  // The schema field being faceted over.
  protected SchemaField field = null;

  public AbstractFieldFacetRequest(SchemaField field) {
    this.field = field;
  }

  public SchemaField getField() {
    return field;
  }

  public void setField(SchemaField field) {
    this.field = field;
  }

  /** The facet's name is the name of the field it is computed over. */
  public String getName() {
    return field.getName();
  }
}

View File

@ -0,0 +1,315 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import org.apache.solr.analytics.request.FieldFacetRequest.FacetSortDirection;
import org.apache.solr.analytics.request.FieldFacetRequest.FacetSortSpecification;
import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
import org.apache.solr.common.params.FacetParams.FacetRangeOther;
import org.apache.solr.schema.IndexSchema;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
/**
 * Handles the parsing of the analyticsRequestEnvelope elements if passed in through XML.
*/
public class AnalyticsContentHandler implements ContentHandler {
  // XML Element/Attribute Name Constants
  public final String ANALYTICS_REQUEST_ENVELOPE="analyticsRequestEnvelope";
  public final String ANALYTICS_REQUEST="analyticsRequest";
  public final String NAME="name";
  public final String STATISTIC="statistic";
  public final String EXPRESSION="expression";
  public final String FIELD_FACET="fieldFacet";
  public final String FIELD="field";
  public final String SHOW_MISSING="showMissing";
  public final String LIMIT="limit";
  public final String MIN_COUNT="minCount";
  public final String SORT_SPECIFICATION="sortSpecification";
  public final String STAT_NAME="statName";
  public final String DIRECTION="direction";
  public final String RANGE_FACET="rangeFacet";
  public final String START="start";
  public final String END="end";
  public final String GAP="gap";
  public final String INCLUDE_BOUNDARY="includeBoundary";
  public final String OTHER_RANGE="otherRange";
  public final String HARD_END="hardend";
  public final String QUERY_FACET="queryFacet";
  public final String QUERY="query";

  // Default Values
  public static final int DEFAULT_FACET_LIMIT = -1;
  public static final boolean DEFAULT_FACET_HARDEND = false;
  public static final int DEFAULT_FACET_MINCOUNT = 0;
  public static final boolean DEFAULT_FACET_FIELD_SHOW_MISSING = false;

  // Parser state flags: track which element type is currently open, so that
  // startElement/endElement know which nested structure they are building.
  boolean inEnvelope = false;
  boolean inRequest = false;
  boolean inStatistic = false;
  boolean inFieldFacet = false;
  boolean inSortSpecification = false;
  boolean inQueryFacet = false;
  boolean inRangeFacet = false;

  private final IndexSchema schema;

  // Objects to use while building the Analytics Requests
  // Text accumulated via characters() since the last startElement().
  String currentElementText;
  List<AnalyticsRequest> requests;
  AnalyticsRequest analyticsRequest;
  List<ExpressionRequest> expressionList;
  List<FieldFacetRequest> fieldFacetList;
  List<RangeFacetRequest> rangeFacetList;
  List<QueryFacetRequest> queryFacetList;
  ExpressionRequest expression;
  FieldFacetRequest fieldFacet;
  int limit;
  int minCount;
  boolean showMissing;
  FacetSortSpecification sortSpecification;
  RangeFacetRequest rangeFacet;
  boolean hardend;
  List<String> gaps;
  EnumSet<FacetRangeInclude> includeBoundaries;
  EnumSet<FacetRangeOther> otherRanges;
  String queryName;
  List<String> queries;

  /**
   * @param schema used to resolve field names in fieldFacet/rangeFacet
   *        elements to SchemaField objects
   */
  public AnalyticsContentHandler(IndexSchema schema) {
    this.schema = schema;
  }

  // No-op ContentHandler callbacks not needed for this grammar.
  @Override
  public void setDocumentLocator(Locator locator) { }

  @Override
  public void startDocument() throws SAXException { }

  @Override
  public void endDocument() throws SAXException { }

  @Override
  public void startPrefixMapping(String prefix, String uri) throws SAXException { }

  @Override
  public void endPrefixMapping(String prefix) throws SAXException { }

  /**
   * Opens the structure corresponding to the element, reading any attributes
   * (field-facet limit/minCount/showMissing, range-facet hardend) with their
   * defaults when absent.
   */
  @Override
  public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException {
    // Reset the text buffer for the new element.
    currentElementText = "";
    if (inEnvelope) {
      if (inRequest) {
        if (localName.equals(STATISTIC)) {
          // Start a Statistic Request
          inStatistic = true;
        } else if (inFieldFacet) {
          if (localName.equals(SORT_SPECIFICATION)) {
            // Start a Sort Specification
            inSortSpecification = true;
            sortSpecification = new FacetSortSpecification();
          }
        } else if (localName.equals(FIELD_FACET)) {
          // Start a Field Facet Request
          // Get attributes (limit, minCount, showMissing)
          String att = atts.getValue(uri,LIMIT);
          if (att!=null) {
            limit = Integer.parseInt(att);
          } else {
            limit = DEFAULT_FACET_LIMIT;
          }
          att = atts.getValue(uri,MIN_COUNT);
          if (att!=null) {
            minCount = Integer.parseInt(att);
          } else {
            minCount = DEFAULT_FACET_MINCOUNT;
          }
          att = atts.getValue(uri,SHOW_MISSING);
          if (att!=null) {
            showMissing = Boolean.parseBoolean(att);
          } else {
            showMissing = DEFAULT_FACET_FIELD_SHOW_MISSING;
          }
          inFieldFacet = true;
        } else if (localName.equals(RANGE_FACET)) {
          // Start a Range Facet Request
          // Get attributes (hardEnd)
          String att = atts.getValue(uri,HARD_END);
          if (att!=null) {
            hardend = Boolean.parseBoolean(att);
          } else {
            hardend = false;
          }

          // Initiate Range Facet classes
          gaps = new ArrayList<String>();
          includeBoundaries = EnumSet.noneOf(FacetRangeInclude.class);
          otherRanges = EnumSet.noneOf(FacetRangeOther.class);
          inRangeFacet = true;
        } else if (localName.equals(QUERY_FACET)) {
          // Start a Query Facet Request
          queries = new ArrayList<String>();
          inQueryFacet = true;
        }
      } else if (localName.equals(ANALYTICS_REQUEST)){
        // Start an Analytics Request

        // Renew each list.
        fieldFacetList = new ArrayList<FieldFacetRequest>();
        rangeFacetList = new ArrayList<RangeFacetRequest>();
        queryFacetList = new ArrayList<QueryFacetRequest>();
        expressionList = new ArrayList<ExpressionRequest>();
        inRequest = true;
      }
    } else if (localName.equals(ANALYTICS_REQUEST_ENVELOPE)){
      //Begin the parsing of the Analytics Requests
      requests = new ArrayList<AnalyticsRequest>();
      inEnvelope = true;
    }
  }

  /**
   * Closes the structure corresponding to the element, consuming the text
   * accumulated in currentElementText and attaching the finished object to
   * its parent list.
   *
   * NOTE(review): minCount is parsed in startElement but never applied to
   * the FieldFacetRequest here (only limit and showMissing are) — confirm
   * whether that is intentional.
   */
  @Override
  public void endElement(String uri, String localName, String qName) throws SAXException {
    if (inEnvelope) {
      if (inRequest) {
        if (inStatistic) {
          if (localName.equals(EXPRESSION)) {
            // The expression text doubles as its name until <name> is seen.
            expression = new ExpressionRequest(currentElementText,currentElementText);
          } else if (localName.equals(NAME)) {
            expression.setName(currentElementText);
          } else if (localName.equals(STATISTIC)) {
            // Finished Parsing the Statistic Request
            expressionList.add(expression);
            inStatistic = false;
          }
        } else if (inFieldFacet) {
          if (inSortSpecification) {
            if (localName.equals(STAT_NAME)) {
              sortSpecification.setStatistic(currentElementText);
            } else if (localName.equals(DIRECTION)) {
              sortSpecification.setDirection(FacetSortDirection.fromExternal(currentElementText));
            } else if (localName.equals(SORT_SPECIFICATION)) {
              // Finished Parsing the Sort Specification
              fieldFacet.setSort(sortSpecification);
              inSortSpecification = false;
            }
          } else if (localName.equals(FIELD)) {
            fieldFacet = new FieldFacetRequest(schema.getField(currentElementText));
          } else if (localName.equals(FIELD_FACET)) {
            // Finished Parsing the Field Facet Request
            fieldFacet.setLimit(limit);
            fieldFacet.showMissing(showMissing);
            fieldFacetList.add(fieldFacet);
            inFieldFacet = false;
          }
        } else if (inRangeFacet) {
          if (localName.equals(FIELD)) {
            rangeFacet = new RangeFacetRequest(schema.getField(currentElementText), "", "", new String[1]);
          } else if (localName.equals(START)) {
            rangeFacet.setStart(currentElementText);
          } else if (localName.equals(END)) {
            rangeFacet.setEnd(currentElementText);
          } else if (localName.equals(GAP)) {
            gaps.add(currentElementText);
          } else if (localName.equals(INCLUDE_BOUNDARY)) {
            includeBoundaries.add(FacetRangeInclude.get(currentElementText));
          } else if (localName.equals(OTHER_RANGE)) {
            otherRanges.add(FacetRangeOther.get(currentElementText));
          } else if (localName.equals(RANGE_FACET)) {
            // Finished Parsing the Range Facet Request
            rangeFacet.setHardEnd(hardend);
            // NOTE(review): toArray(new String[1]) yields a single null
            // element when gaps is empty — verify downstream handling.
            rangeFacet.setGaps(gaps.toArray(new String[1]));
            rangeFacet.setInclude(includeBoundaries);
            rangeFacet.setOthers(otherRanges);
            inRangeFacet = false;
            rangeFacetList.add(rangeFacet);
          }
        } else if (inQueryFacet) {
          if (localName.equals(NAME)) {
            queryName = currentElementText;
          } else if (localName.equals(QUERY)) {
            queries.add(currentElementText);
          } else if (localName.equals(QUERY_FACET)) {
            // Finished Parsing the Query Facet Request
            QueryFacetRequest temp = new QueryFacetRequest(queryName);
            temp.setQueries(queries);
            queryFacetList.add(temp);
            inQueryFacet = false;
          }
        } else if (localName.equals(NAME)) {
          analyticsRequest = new AnalyticsRequest(currentElementText);
        } else if (localName.equals(ANALYTICS_REQUEST)){
          // Finished Parsing the Analytics Request
          analyticsRequest.setExpressions(expressionList);
          analyticsRequest.setFieldFacets(fieldFacetList);
          analyticsRequest.setRangeFacets(rangeFacetList);
          analyticsRequest.setQueryFacets(queryFacetList);
          requests.add(analyticsRequest);
          inRequest = false;
        }
      } else if (localName.equals(ANALYTICS_REQUEST_ENVELOPE)){
        // Finished Parsing
        inEnvelope = false;
      }
    }
  }

  // SAX may deliver an element's text in multiple chunks; accumulate them.
  @Override
  public void characters(char[] ch, int start, int length) throws SAXException {
    currentElementText += new String(ch,start,length);
  }

  @Override
  public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException { }

  @Override
  public void processingInstruction(String target, String data) throws SAXException { }

  @Override
  public void skippedEntity(String name) throws SAXException { }

  /**
   * Returns the list of Analytics Requests built during parsing.
   *
   * @return List of {@link AnalyticsRequest} objects specified by the given XML file
   */
  public List<AnalyticsRequest> getAnalyticsRequests() {
    return requests;
  }
}

View File

@ -0,0 +1,115 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Contains the specifications of an Analytics Request, specifically a name,
* a list of Expressions, a list of field facets, a list of range facets, a list of query facets
* and the list of expressions and their results calculated in previous AnalyticsRequests.
*/
public class AnalyticsRequest {
  private String name;
  // Expressions (and the subset hidden from the response) plus the three
  // facet kinds that make up one analytics request.
  private List<ExpressionRequest> expressions = new ArrayList<ExpressionRequest>();
  private Set<String> hiddenExpressions = new HashSet<String>();
  private List<FieldFacetRequest> fieldFacets = new ArrayList<FieldFacetRequest>();
  private List<RangeFacetRequest> rangeFacets = new ArrayList<RangeFacetRequest>();
  private List<QueryFacetRequest> queryFacets = new ArrayList<QueryFacetRequest>();

  /** Creates an empty request with the given name. */
  public AnalyticsRequest(String name) {
    this.name = name;
  }

  public String getName() {
    return name;
  }

  public void setExpressions(List<ExpressionRequest> expressions) {
    this.expressions = expressions;
  }

  public void addExpression(ExpressionRequest expressionRequest) {
    expressions.add(expressionRequest);
  }

  public List<ExpressionRequest> getExpressions() {
    return expressions;
  }

  /** Adds an expression that is computed but excluded from the response by name. */
  public void addHiddenExpression(ExpressionRequest expressionRequest) {
    expressions.add(expressionRequest);
    hiddenExpressions.add(expressionRequest.getName());
  }

  public Set<String> getHiddenExpressions() {
    return hiddenExpressions;
  }

  public void setFieldFacets(List<FieldFacetRequest> fieldFacets) {
    this.fieldFacets = fieldFacets;
  }

  public List<FieldFacetRequest> getFieldFacets() {
    return fieldFacets;
  }

  public void setRangeFacets(List<RangeFacetRequest> rangeFacets) {
    this.rangeFacets = rangeFacets;
  }

  public List<RangeFacetRequest> getRangeFacets() {
    return rangeFacets;
  }

  public void setQueryFacets(List<QueryFacetRequest> queryFacets) {
    this.queryFacets = queryFacets;
  }

  public List<QueryFacetRequest> getQueryFacets() {
    return queryFacets;
  }

  /** XML-ish dump of the request and everything it contains, for debugging. */
  @Override
  public String toString() {
    StringBuilder out = new StringBuilder();
    out.append("<AnalyticsRequest name=").append(name).append(">");
    for (ExpressionRequest exp : expressions) {
      out.append(exp.toString());
    }
    for (FieldFacetRequest facet : fieldFacets) {
      out.append(facet.toString());
    }
    for (RangeFacetRequest facet : rangeFacets) {
      out.append(facet.toString());
    }
    for (QueryFacetRequest facet : queryFacets) {
      out.append(facet.toString());
    }
    out.append("</AnalyticsRequest>");
    return out.toString();
  }
}

View File

@ -0,0 +1,309 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.solr.analytics.request.FieldFacetRequest.FacetSortSpecification;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
import org.apache.solr.common.params.FacetParams.FacetRangeOther;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.schema.IndexSchema;
/**
* Parses the SolrParams to create a list of analytics requests.
*/
public class AnalyticsRequestFactory implements AnalyticsParams {
public static final Pattern statPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+EXPRESSION+")\\.([^\\.]+)$", Pattern.CASE_INSENSITIVE);
public static final Pattern hiddenStatPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+HIDDEN_EXPRESSION+")\\.([^\\.]+)$", Pattern.CASE_INSENSITIVE);
public static final Pattern fieldFacetPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+FIELD_FACET+")$", Pattern.CASE_INSENSITIVE);
public static final Pattern fieldFacetParamPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+FIELD_FACET+")\\.([^\\.]+)\\.("+LIMIT+"|"+OFFSET+"|"+HIDDEN+"|"+SHOW_MISSING+"|"+SORT_STATISTIC+"|"+SORT_DIRECTION+")$", Pattern.CASE_INSENSITIVE);
public static final Pattern rangeFacetPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+RANGE_FACET+")$", Pattern.CASE_INSENSITIVE);
public static final Pattern rangeFacetParamPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+RANGE_FACET+")\\.([^\\.]+)\\.("+START+"|"+END+"|"+GAP+"|"+HARDEND+"|"+INCLUDE_BOUNDARY+"|"+OTHER_RANGE+")$", Pattern.CASE_INSENSITIVE);
public static final Pattern queryFacetPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+QUERY_FACET+")$", Pattern.CASE_INSENSITIVE);
public static final Pattern queryFacetParamPattern = Pattern.compile("^o(?:lap)?\\.([^\\.]+)\\.(?:"+QUERY_FACET+")\\.([^\\.]+)\\.("+QUERY+"|"+DEPENDENCY+")$", Pattern.CASE_INSENSITIVE);
public static List<AnalyticsRequest> parse(IndexSchema schema, SolrParams params) {
Map<String, AnalyticsRequest> requestMap = new HashMap<String, AnalyticsRequest>();
Map<String, Map<String,FieldFacetRequest>> fieldFacetMap = new HashMap<String, Map<String,FieldFacetRequest>>();
Map<String, Set<String>> fieldFacetSet = new HashMap<String,Set<String>>();
Map<String, Map<String,RangeFacetRequest>> rangeFacetMap = new HashMap<String, Map<String,RangeFacetRequest>>();
Map<String, Set<String>> rangeFacetSet = new HashMap<String,Set<String>>();
Map<String, Map<String,QueryFacetRequest>> queryFacetMap = new HashMap<String, Map<String,QueryFacetRequest>>();
Map<String, Set<String>> queryFacetSet = new HashMap<String,Set<String>>();
List<AnalyticsRequest> requestList = new ArrayList<AnalyticsRequest>();
Iterator<String> paramsIterator = params.getParameterNamesIterator();
while (paramsIterator.hasNext()) {
String param = paramsIterator.next();
CharSequence paramSequence = param.subSequence(0, param.length());
// Check if stat
Matcher m = statPattern.matcher(paramSequence);
if (m.matches()) {
makeExpression(requestMap,m.group(1),m.group(2),params.get(param));
} else {
// Check if hidden stat
m = hiddenStatPattern.matcher(paramSequence);
if (m.matches()) {
makeHiddenExpression(requestMap,m.group(1),m.group(2),params.get(param));
} else {
// Check if field facet
m = fieldFacetPattern.matcher(paramSequence);
if (m.matches()) {
makeFieldFacet(schema,fieldFacetMap,fieldFacetSet,m.group(1),params.getParams(param));
} else {
// Check if field facet parameter
m = fieldFacetParamPattern.matcher(paramSequence);
if (m.matches()) {
setFieldFacetParam(schema,fieldFacetMap,m.group(1),m.group(2),m.group(3),params.getParams(param));
} else {
// Check if range facet
m = rangeFacetPattern.matcher(paramSequence);
if (m.matches()) {
makeRangeFacet(schema,rangeFacetSet,m.group(1),params.getParams(param));
} else {
// Check if range facet parameter
m = rangeFacetParamPattern.matcher(paramSequence);
if (m.matches()) {
setRangeFacetParam(schema,rangeFacetMap,m.group(1),m.group(2),m.group(3),params.getParams(param));
} else {
// Check if query facet
m = queryFacetPattern.matcher(paramSequence);
if (m.matches()) {
makeQueryFacet(schema,queryFacetSet,m.group(1),params.getParams(param));
} else {
// Check if query
m = queryFacetParamPattern.matcher(paramSequence);
if (m.matches()) {
setQueryFacetParam(schema,queryFacetMap,m.group(1),m.group(2),m.group(3),params.getParams(param));
}
}
}
}
}
}
}
}
}
for (String reqName : requestMap.keySet()) {
AnalyticsRequest ar = requestMap.get(reqName);
List<FieldFacetRequest> ffrs = new ArrayList<FieldFacetRequest>();
if (fieldFacetSet.get(reqName)!=null) {
for (String field : fieldFacetSet.get(reqName)) {
ffrs.add(fieldFacetMap.get(reqName).get(field));
}
}
ar.setFieldFacets(ffrs);
List<RangeFacetRequest> rfrs = new ArrayList<RangeFacetRequest>();
if (rangeFacetSet.get(reqName)!=null) {
for (String field : rangeFacetSet.get(reqName)) {
RangeFacetRequest rfr = rangeFacetMap.get(reqName).get(field);
if (rfr != null) {
rfrs.add(rfr);
}
}
}
ar.setRangeFacets(rfrs);
List<QueryFacetRequest> qfrs = new ArrayList<QueryFacetRequest>();
if (queryFacetSet.get(reqName)!=null) {
for (String name : queryFacetSet.get(reqName)) {
QueryFacetRequest qfr = queryFacetMap.get(reqName).get(name);
if (qfr != null) {
addQueryFacet(qfrs,qfr);
}
}
}
for (QueryFacetRequest qfr : qfrs) {
if (qfr.getDependencies().size()>0) {
throw new SolrException(ErrorCode.BAD_REQUEST,"The query facet dependencies "+qfr.getDependencies().toString()+" either do not exist or are defined in a dependency looop.");
}
}
ar.setQueryFacets(qfrs);
requestList.add(ar);
}
return requestList;
}
private static void makeFieldFacet(IndexSchema schema, Map<String, Map<String, FieldFacetRequest>> fieldFacetMap, Map<String, Set<String>> fieldFacetSet, String requestName, String[] fields) {
Map<String, FieldFacetRequest> facetMap = fieldFacetMap.get(requestName);
if (facetMap == null) {
facetMap = new HashMap<String, FieldFacetRequest>();
fieldFacetMap.put(requestName, facetMap);
}
Set<String> set = fieldFacetSet.get(requestName);
if (set == null) {
set = new HashSet<String>();
fieldFacetSet.put(requestName, set);
}
for (String field : fields) {
if (facetMap.get(field) == null) {
facetMap.put(field,new FieldFacetRequest(schema.getField(field)));
}
set.add(field);
}
}
private static void setFieldFacetParam(IndexSchema schema, Map<String, Map<String, FieldFacetRequest>> fieldFacetMap, String requestName, String field, String paramType, String[] params) {
Map<String, FieldFacetRequest> facetMap = fieldFacetMap.get(requestName);
if (facetMap == null) {
facetMap = new HashMap<String, FieldFacetRequest>();
fieldFacetMap.put(requestName, facetMap);
}
FieldFacetRequest fr = facetMap.get(field);
if (fr == null) {
fr = new FieldFacetRequest(schema.getField(field));
facetMap.put(field,fr);
}
if (paramType.equals("limit")||paramType.equals("l")) {
fr.setLimit(Integer.parseInt(params[0]));
} else if (paramType.equals("offset")||paramType.equals("off")) {
fr.setOffset(Integer.parseInt(params[0]));
} else if (paramType.equals("hidden")||paramType.equals("h")) {
fr.setHidden(Boolean.parseBoolean(params[0]));
} else if (paramType.equals("showmissing")||paramType.equals("sm")) {
fr.showMissing(Boolean.parseBoolean(params[0]));
} else if (paramType.equals("sortstatistic")||paramType.equals("sortstat")||paramType.equals("ss")) {
fr.setSort(new FacetSortSpecification(params[0],fr.getDirection()));
} else if (paramType.equals("sortdirection")||paramType.equals("sd")) {
fr.setDirection(params[0]);
}
}
/**
 * Records the given fields as range facets of the named analytics request,
 * creating the per-request field set on first use.
 */
private static void makeRangeFacet(IndexSchema schema, Map<String, Set<String>> rangeFacetSet, String requestName, String[] fields) {
  Set<String> requestFields = rangeFacetSet.get(requestName);
  if (requestFields == null) {
    requestFields = new HashSet<String>();
    rangeFacetSet.put(requestName, requestFields);
  }
  for (String fieldName : fields) {
    requestFields.add(fieldName);
  }
}
/**
 * Applies a single range-facet parameter (start/end/gap/hardend/includebound/
 * otherrange, or their abbreviations) to the named request's facet over the
 * given field, lazily creating the facet if needed.
 */
private static void setRangeFacetParam(IndexSchema schema, Map<String, Map<String, RangeFacetRequest>> rangeFacetMap, String requestName, String field, String paramType, String[] params) {
  Map<String, RangeFacetRequest> requestFacets = rangeFacetMap.get(requestName);
  if (requestFacets == null) {
    requestFacets = new HashMap<String, RangeFacetRequest>();
    rangeFacetMap.put(requestName, requestFacets);
  }
  RangeFacetRequest facet = requestFacets.get(field);
  if (facet == null) {
    facet = new RangeFacetRequest(schema.getField(field));
    requestFacets.put(field, facet);
  }
  if (paramType.equals("start") || paramType.equals("st")) {
    facet.setStart(params[0]);
  } else if (paramType.equals("end") || paramType.equals("e")) {
    facet.setEnd(params[0]);
  } else if (paramType.equals("gap") || paramType.equals("g")) {
    // Multiple gaps may be given as a single comma-separated value.
    facet.setGaps(params[0].split(","));
  } else if (paramType.equals("hardend") || paramType.equals("he")) {
    facet.setHardEnd(Boolean.parseBoolean(params[0]));
  } else if (paramType.equals("includebound") || paramType.equals("ib")) {
    for (String bound : params) {
      facet.addInclude(FacetRangeInclude.get(bound));
    }
  } else if (paramType.equals("otherrange") || paramType.equals("or")) {
    for (String other : params) {
      facet.addOther(FacetRangeOther.get(other));
    }
  }
}
/**
 * Records the given names as query facets of the named analytics request,
 * creating the per-request name set on first use.
 */
private static void makeQueryFacet(IndexSchema schema, Map<String, Set<String>> queryFacetSet, String requestName, String[] names) {
  Set<String> facetNames = queryFacetSet.get(requestName);
  if (facetNames == null) {
    facetNames = new HashSet<String>();
    queryFacetSet.put(requestName, facetNames);
  }
  for (String facetName : names) {
    facetNames.add(facetName);
  }
}
/**
 * Applies a single query-facet parameter (query/dependency, or their
 * abbreviations) to the named request's facet, lazily creating the facet.
 * Unlike the scalar facet parameters, every value in params is applied.
 */
private static void setQueryFacetParam(IndexSchema schema, Map<String, Map<String, QueryFacetRequest>> queryFacetMap, String requestName, String name, String paramType, String[] params) {
  Map<String, QueryFacetRequest> requestFacets = queryFacetMap.get(requestName);
  if (requestFacets == null) {
    requestFacets = new HashMap<String, QueryFacetRequest>();
    queryFacetMap.put(requestName, requestFacets);
  }
  QueryFacetRequest facet = requestFacets.get(name);
  if (facet == null) {
    facet = new QueryFacetRequest(name);
    requestFacets.put(name, facet);
  }
  if (paramType.equals("query") || paramType.equals("q")) {
    for (String query : params) {
      facet.addQuery(query);
    }
  } else if (paramType.equals("dependency") || paramType.equals("d")) {
    for (String dependency : params) {
      facet.addDependency(dependency);
    }
  }
}
/**
 * Adds a hidden expression (computed but not returned) to the named analytics
 * request, creating the request on first use.
 */
private static void makeHiddenExpression(Map<String, AnalyticsRequest> requestMap, String requestName, String expressionName, String expression) {
  AnalyticsRequest request = requestMap.get(requestName);
  if (request == null) {
    request = new AnalyticsRequest(requestName);
    requestMap.put(requestName, request);
  }
  request.addHiddenExpression(new ExpressionRequest(expressionName, expression));
}
/**
 * Adds a visible expression to the named analytics request, creating the
 * request on first use.
 */
private static void makeExpression(Map<String, AnalyticsRequest> requestMap, String requestName, String expressionName, String expression) {
  AnalyticsRequest request = requestMap.get(requestName);
  if (request == null) {
    request = new AnalyticsRequest(requestName);
    requestMap.put(requestName, request);
  }
  request.addExpression(new ExpressionRequest(expressionName, expression));
}
/**
 * Inserts a query facet into the dependency-ordered list. The facet is placed
 * immediately before the first existing facet that depends on it; dependencies
 * satisfied by facets already ahead of the insertion point are removed from the
 * new facet, and the new facet's name is removed from the dependency sets of
 * everything that follows it.
 */
private static void addQueryFacet(List<QueryFacetRequest> currentList, QueryFacetRequest queryFacet) {
  Set<String> depends = queryFacet.getDependencies();
  int place = 0;
  for (QueryFacetRequest qfr : currentList) {
    // BUGFIX: the original called remove(queryFacet), handing a
    // QueryFacetRequest to a Set<String> — that can never match, so the
    // early-break insertion point was never found. Compare by name, the same
    // way the fix-up loop below does.
    if (qfr.getDependencies().remove(queryFacet.getName())) {
      break;
    }
    place++;
    // This earlier facet already satisfies one of the new facet's dependencies.
    depends.remove(qfr.getName());
  }
  currentList.add(place, queryFacet);
  // Facets after the insertion point no longer need to wait on this one.
  for (int count = place + 1; count < currentList.size(); count++) {
    currentList.get(count).getDependencies().remove(queryFacet.getName());
  }
}
}

View File

@ -0,0 +1,132 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import java.io.IOException;
import java.util.List;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.solr.analytics.accumulator.BasicAccumulator;
import org.apache.solr.analytics.accumulator.FacetingAccumulator;
import org.apache.solr.analytics.accumulator.ValueAccumulator;
import org.apache.solr.analytics.plugin.AnalyticsStatisticsCollector;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
/**
* Class which computes the set of {@link AnalyticsRequest}s.
*/
public class AnalyticsStats {
// Documents (typically the main query's result set) that statistics are computed over.
protected DocSet docs;
// Raw request parameters; parsed into AnalyticsRequests inside execute().
protected SolrParams params;
protected SolrIndexSearcher searcher;
protected SolrQueryRequest req;
// Collector for component-level performance counters (timings, request counts).
protected AnalyticsStatisticsCollector statsCollector;
/**
 * @param req the Solr request being served
 * @param docs the documents to accumulate statistics over
 * @param params parameters describing the requested analytics
 * @param statsCollector sink for component-level performance counters
 */
public AnalyticsStats(SolrQueryRequest req, DocSet docs, SolrParams params, AnalyticsStatisticsCollector statsCollector) {
this.req = req;
this.searcher = req.getSearcher();
this.docs = docs;
this.params = params;
this.statsCollector = statsCollector;
}
/**
 * Calculates the analytics requested in the Parameters.
 *
 * @return List of results formatted to mirror the input XML.
 * @throws IOException if execution fails
 */
public NamedList<?> execute() throws IOException {
statsCollector.startRequest();
NamedList<Object> res = new NamedList<Object>();
List<AnalyticsRequest> requests;
requests = AnalyticsRequestFactory.parse(searcher.getSchema(), params);
// Nothing to do if no analytics were requested.
if(requests == null || requests.size()==0){
return res;
}
statsCollector.addRequests(requests.size());
// Computing each Analytics Request Separately
for( AnalyticsRequest areq : requests ){
// The Accumulator which will control the statistics generation
// for the entire analytics request
ValueAccumulator accumulator;
// The number of total facet requests
int facets = areq.getFieldFacets().size()+areq.getRangeFacets().size()+areq.getQueryFacets().size();
try {
// A plain accumulator suffices when no facets were requested; a faceting
// accumulator additionally partitions statistics per facet bucket.
if( facets== 0 ){
accumulator = BasicAccumulator.create(searcher, docs, areq);
} else {
accumulator = FacetingAccumulator.create(searcher, docs, areq, req);
}
} catch (IOException e) {
// NOTE(review): a failed request is silently skipped and only the message
// (which may be null) reaches stderr — the stack trace is lost. Consider
// proper logging or propagating the error.
System.err.println(e.getMessage());
continue;
}
// Record sizing metrics about this request for the component's own stats.
statsCollector.addStatsCollected(((BasicAccumulator)accumulator).getNumStatsCollectors());
statsCollector.addStatsRequests(areq.getExpressions().size());
statsCollector.addFieldFacets(areq.getFieldFacets().size());
statsCollector.addRangeFacets(areq.getRangeFacets().size());
statsCollector.addQueryFacets(areq.getQueryFacets().size());
statsCollector.addQueries(((BasicAccumulator)accumulator).getNumQueries());
// Loop through the documents returned by the query and add to accumulator
Filter filter = docs.getTopFilter();
List<AtomicReaderContext> contexts = searcher.getTopReaderContext().leaves();
for (int leafNum = 0; leafNum < contexts.size(); leafNum++) {
AtomicReaderContext context = contexts.get(leafNum);
DocIdSet dis = filter.getDocIdSet(context, null); // solr docsets already exclude any deleted docs
DocIdSetIterator disi = null;
if (dis != null) {
disi = dis.iterator();
}
if (disi != null) {
// Point the accumulator at this segment before feeding it doc ids.
accumulator.setNextReader(context);
int doc = disi.nextDoc();
while( doc != DocIdSetIterator.NO_MORE_DOCS){
// Add a document to the statistics being generated
accumulator.collect(doc);
doc = disi.nextDoc();
}
}
}
// do some post-processing
accumulator.postProcess();
// compute the stats
accumulator.compute();
res.add(areq.getName(),accumulator.export());
}
statsCollector.endRequest();
return res;
}
}

View File

@ -0,0 +1,73 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import org.apache.solr.analytics.expression.Expression;
/**
* Contains name and string representation of an expression.
*/
public class ExpressionRequest implements Comparable<ExpressionRequest> {
  private String name;
  private String expressionString;
  // Parsed form; set later via setExpression(), may be null until then.
  private Expression expression;

  /**
   * @param name The name of the Expression.
   * @param expressionString The string representation of the desired Expression.
   */
  public ExpressionRequest(String name, String expressionString) {
    this.name = name;
    this.expressionString = expressionString;
  }

  public void setExpressionString(String expressionString) {
    this.expressionString = expressionString;
  }

  public String getExpressionString() {
    return expressionString;
  }

  public void setExpression(Expression expression) {
    this.expression = expression;
  }

  public Expression getExpression() {
    return expression;
  }

  public void setName(String name) {
    this.name = name;
  }

  public String getName() {
    return name;
  }

  /** Orders requests by name. */
  @Override
  public int compareTo(ExpressionRequest o) {
    return name.compareTo(o.getName());
  }

  /**
   * Equality is by name, keeping equals consistent with compareTo as the
   * Comparable contract recommends. (Previously the class defined a name-based
   * ordering while inheriting identity equality.)
   */
  @Override
  public boolean equals(Object other) {
    if (!(other instanceof ExpressionRequest)) {
      return false;
    }
    ExpressionRequest that = (ExpressionRequest) other;
    return name == null ? that.name == null : name.equals(that.name);
  }

  @Override
  public int hashCode() {
    return name == null ? 0 : name.hashCode();
  }

  @Override
  public String toString() {
    return "<ExpressionRequest name=" + name + " expression=" + expressionString + "/>";
  }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
/**
 * Common interface for the facet types an analytics request can contain;
 * implemented by e.g. {@link QueryFacetRequest}.
 */
public interface FacetRequest {
/**
 * Get the name of this facet (commonly the field name)
 * @return the name
 */
String getName();
}

View File

@ -0,0 +1,173 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.schema.SchemaField;
import java.util.Locale;
/**
* Contains all of the specifications for a field facet.
*/
public class FieldFacetRequest extends AbstractFieldFacetRequest {
// Statistic-based sort of the facet buckets; null when no sort was requested.
private FacetSortSpecification sort = null;
private FacetSortDirection dir = null;
private int limit;
private int offset;
// Whether a bucket for documents missing a value in the field is included.
private boolean missing;
// Hidden facets are computed but not returned in the response.
private boolean hidden;
/** Direction in which facet buckets are sorted. */
public static enum FacetSortDirection {
ASCENDING ,
DESCENDING;
/**
 * Parses an external direction string. "asc"/"ascending"/"desc"/"descending"
 * are accepted case-insensitively; any other value must match an enum
 * constant name exactly or Enum.valueOf throws IllegalArgumentException.
 */
public static FacetSortDirection fromExternal(String value){
final String sort = value.toLowerCase(Locale.ROOT);
if( "asc".equals(sort) ) return ASCENDING;
if( "ascending".equals(sort) ) return ASCENDING;
if( "desc".equals(sort) ) return DESCENDING;
if( "descending".equals(sort) ) return DESCENDING;
// Note: falls back to the original (non-lowercased) value, so only exact
// constant names like "ASCENDING" match here.
return Enum.valueOf(FacetSortDirection.class, value);
}
}
/**
 * Specifies how to sort the buckets of a field facet.
 *
 */
public static class FacetSortSpecification {
// Name of the statistic to sort buckets by.
private String statistic;
// Defaults to descending when no direction is supplied.
private FacetSortDirection direction = FacetSortDirection.DESCENDING;
public FacetSortSpecification(){}
/**
 * @param statistic The name of a statistic specified in the {@link AnalyticsRequest}
 * which is wrapping the {@link FieldFacetRequest} being sorted.
 */
public FacetSortSpecification(String statistic) {
this.statistic = statistic;
}
public FacetSortSpecification(String statistic, FacetSortDirection direction) {
this(statistic);
this.direction = direction;
}
public String getStatistic() {
return statistic;
}
public void setStatistic(String statistic) {
this.statistic = statistic;
}
public FacetSortDirection getDirection() {
return direction;
}
public void setDirection(FacetSortDirection direction) {
this.direction = direction;
}
/** Parses "statName" or "statName direction" into a sort specification. */
public static FacetSortSpecification fromExternal(String spec){
String[] parts = spec.split(" ",2);
if( parts.length == 1 ){
return new FacetSortSpecification(parts[0]);
} else {
return new FacetSortSpecification(parts[0], FacetSortDirection.fromExternal(parts[1]));
}
}
@Override
public String toString() {
return "<SortSpec stat=" + statistic + " dir=" + direction + ">";
}
}
public FieldFacetRequest(SchemaField field) {
super(field);
this.limit = AnalyticsParams.DEFAULT_LIMIT;
this.hidden = AnalyticsParams.DEFAULT_HIDDEN;
}
public FacetSortDirection getDirection() {
return dir;
}
/**
 * Sets the sort direction from its external string form. Also propagates the
 * direction to an already-set sort specification so the two stay consistent
 * regardless of parameter order.
 */
public void setDirection(String dir) {
this.dir = FacetSortDirection.fromExternal(dir);
if (sort!=null) {
sort.setDirection(this.dir);
}
}
public FacetSortSpecification getSort() {
return sort;
}
public void setSort(FacetSortSpecification sort) {
this.sort = sort;
}
public boolean showsMissing() {
return missing;
}
/**
 * If there are missing values in the facet field, include the bucket
 * for the missing facet values in the facet response.
 * @param missing true/false if we calculate missing
 */
public void showMissing(boolean missing) {
this.missing = missing;
}
public int getLimit() {
return limit;
}
public void setLimit(int limit) {
this.limit = limit;
}
public int getOffset() {
return offset;
}
public void setOffset(int offset) {
this.offset = offset;
}
public boolean isHidden() {
return hidden;
}
public void setHidden(boolean hidden) {
this.hidden = hidden;
}
@Override
public String toString() {
return "<FieldFacetRequest field="+field.getName()+(sort==null?"":" sort=" + sort) + " limit=" + limit+" offset="+offset+">";
}
}

View File

@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Contains all of the specifications for a query facet.
*/
public class QueryFacetRequest implements FacetRequest {
private String name;
private List<String> queries;
private Set<String> dependencies;
public QueryFacetRequest() {
dependencies = new HashSet<String>();
}
public QueryFacetRequest(String name) {
this.name = name;
this.queries = new ArrayList<String>();
dependencies = new HashSet<String>();
}
public List<String> getQueries() {
return queries;
}
public void setQueries(List<String> queries) {
this.queries = queries;
}
public void addQuery(String query) {
queries.add(query);
}
public Set<String> getDependencies() {
return dependencies;
}
public void setDependencies(Set<String> dependencies) {
this.dependencies = dependencies;
}
public void addDependency(String dependency) {
dependencies.add(dependency);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}

View File

@ -0,0 +1,130 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.request;
import java.util.Arrays;
import java.util.EnumSet;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
import org.apache.solr.common.params.FacetParams.FacetRangeOther;
import org.apache.solr.schema.SchemaField;
/**
* Contains all of the specifications for a range facet.
*/
public class RangeFacetRequest extends AbstractFieldFacetRequest {
protected String start;
protected String end;
protected String[] gaps;
protected boolean hardEnd = false;
protected EnumSet<FacetRangeInclude> include;
protected boolean includeCalled = false;
protected EnumSet<FacetRangeOther> others;
protected boolean othersCalled = false;
public RangeFacetRequest(SchemaField field) {
super(field);
include = EnumSet.of(AnalyticsParams.DEFAULT_INCLUDE);
others = EnumSet.of(AnalyticsParams.DEFAULT_OTHER);
}
public RangeFacetRequest(SchemaField field, String start, String end, String[] gaps) {
super(field);
this.start = start;
this.end = end;
this.gaps = gaps;
}
public String getStart() {
return start;
}
public void setStart(String start) {
this.start = start;
}
public String getEnd() {
return end;
}
public void setEnd(String end) {
this.end = end;
}
public EnumSet<FacetRangeInclude> getInclude() {
return include;
}
public void setInclude(EnumSet<FacetRangeInclude> include) {
includeCalled = true;
this.include = include;
}
public void addInclude(FacetRangeInclude include) {
if (includeCalled) {
this.include.add(include);
} else {
includeCalled = true;
this.include = EnumSet.of(include);
}
}
public String[] getGaps() {
return gaps;
}
public void setGaps(String[] gaps) {
this.gaps = gaps;
}
public boolean isHardEnd() {
return hardEnd;
}
public void setHardEnd(boolean hardEnd) {
this.hardEnd = hardEnd;
}
public EnumSet<FacetRangeOther> getOthers() {
return others;
}
public void setOthers(EnumSet<FacetRangeOther> others) {
othersCalled = true;
this.others = others;
}
public void addOther(FacetRangeOther other) {
if (othersCalled) {
this.others.add(other);
} else {
othersCalled = true;
this.others = EnumSet.of(other);
}
}
@Override
public String toString() {
return "<RangeFacetRequest field="+field.getName() + " start=" + start + ", end=" + end + ", gap=" + Arrays.toString(gaps) + ", hardEnd=" + hardEnd +
", include=" + include + ", others=" + others +">";
}
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Request objects describing the statistics, expressions, and facets that make up an analytics request.
</p>
</body>
</html>

View File

@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.io.IOException;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.util.mutable.MutableValue;
/**
 * <code>AbstractDelegatingStatsCollector</code> objects wrap other StatsCollectors.
 * While they compute their own statistics they pass along all inputs and requests
 * to the delegates as well.
 */
public abstract class AbstractDelegatingStatsCollector implements StatsCollector{
protected final StatsCollector delegate;
protected final Set<String> statsList;
// Cached from the delegate on each setNextReader() call so subclasses can read
// the current document's value/function without delegating every time.
MutableValue value;
FunctionValues function;
/**
 * @param delegate The delegate computing statistics on the same set of values.
 */
public AbstractDelegatingStatsCollector(StatsCollector delegate) {
this.delegate = delegate;
this.statsList = delegate.getStatsList();
}
// Forwards to the delegate, then refreshes the cached value/function handles
// for the new segment.
public void setNextReader(AtomicReaderContext context) throws IOException {
delegate.setNextReader(context);
value = getValue();
function = getFunction();
}
/** @return the wrapped collector */
public StatsCollector delegate(){
return delegate;
}
public Set<String> getStatsList(){
return statsList;
}
public MutableValue getValue() {
return delegate.getValue();
}
public FunctionValues getFunction() {
return delegate.getFunction();
}
public void collect(int doc) {
delegate.collect(doc);
}
public String valueSourceString() {
return delegate.valueSourceString();
}
}

View File

@ -0,0 +1,76 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.solr.analytics.util.MedianCalculator;
/**
* <code>MedianStatsCollector</code> computes the median.
*/
public class MedianStatsCollector extends AbstractDelegatingStatsCollector {
  // Every collected (existing) value; the median is computed over this list
  // in compute(), so memory grows with the number of matching documents.
  private final List<Double> values = new ArrayList<Double>();
  protected double median;

  public MedianStatsCollector(StatsCollector delegate) {
    super(delegate);
  }

  /** @return the median of the values collected so far */
  public Double getMedian() {
    // Double.valueOf instead of the boxing constructor (same value, cached boxing).
    return Double.valueOf(MedianCalculator.getMedian(values));
  }

  @Override
  public Comparable getStat(String stat) {
    if (stat.equals("median")) {
      return Double.valueOf(median);
    }
    return delegate.getStat(stat);
  }

  // Added the missing @Override (this implements StatsCollector.compute()).
  @Override
  public void compute() {
    delegate.compute();
    median = getMedian();
  }

  @Override
  public void collect(int doc) {
    super.collect(doc);
    // Only record values that actually exist for this document.
    if (value.exists) {
      values.add(function.doubleVal(doc));
    }
  }
}
/**
 * Median collector for date fields: reports the median as a {@link Date}
 * built from the numeric median rather than as a number.
 */
class DateMedianStatsCollector extends MedianStatsCollector {
  public DateMedianStatsCollector(StatsCollector delegate) {
    super(delegate);
  }

  @Override
  public Comparable getStat(String stat) {
    return stat.equals("median") ? new Date((long) median) : delegate.getStat(stat);
  }
}

View File

@ -0,0 +1,113 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.io.IOException;
import java.util.Locale;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.FunctionValues.ValueFiller;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.util.mutable.MutableValue;
/**
* <code>MinMaxStatsCollector</code> computes the min, max, number of values and number of missing values.
*/
public class MinMaxStatsCollector implements StatsCollector{
  protected long missingCount = 0;
  protected long valueCount = 0;
  // min/max are lazily created from the first existing value seen.
  protected MutableValue max;
  protected MutableValue min;
  // Filled in place by valueFiller for the current document.
  protected MutableValue value;
  protected final Set<String> statsList;
  protected final ValueSource source;
  protected FunctionValues function;
  protected ValueFiller valueFiller;

  public MinMaxStatsCollector(ValueSource source, Set<String> statsList) {
    this.source = source;
    this.statsList = statsList;
  }

  // Re-binds the value source to the new segment.
  public void setNextReader(AtomicReaderContext context) throws IOException {
    function = source.getValues(null, context);
    valueFiller = function.getValueFiller();
    value = valueFiller.getValue();
  }

  public void collect(int doc) {
    valueFiller.fillValue(doc);
    if (value.exists) {
      valueCount += 1;
      if (max == null) max = value.duplicate();
      else if (!max.exists || value.compareTo(max) > 0) max.copy(value);
      if (min == null) min = value.duplicate();
      else if (!min.exists || value.compareTo(min) < 0) min.copy(value);
    } else {
      missingCount += 1;
    }
  }

  @Override
  public String toString() {
    return String.format(Locale.ROOT, "<min=%s max=%s c=%d m=%d>", min, max, valueCount, missingCount );
  }

  public Comparable getStat(String stat){
    if (stat.equals("min") && min != null) {
      return (Comparable)min.toObject();
    }
    // BUGFIX: this branch previously guarded on min!=null (copy-paste slip),
    // which could NPE on max.toObject() if max were null while min was not.
    if (stat.equals("max") && max != null) {
      return (Comparable)max.toObject();
    }
    if (stat.equals("count")) {
      return Long.valueOf(valueCount);
    }
    if (stat.equals("missing")) {
      return Long.valueOf(missingCount);
    }
    // Unknown statistic.
    return null;
  }

  public Set<String> getStatsList() {
    return statsList;
  }

  @Override
  public void compute() { }

  @Override
  public MutableValue getValue() {
    return value;
  }

  @Override
  public FunctionValues getFunction() {
    return function;
  }

  public String valueSourceString() {
    return source.toString();
  }

  /** @return the external name of a statistic over this source, e.g. "min(field)". */
  public String statString(String stat) {
    return stat + "(" + valueSourceString() + ")";
  }
}

View File

@ -0,0 +1,68 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.util.Set;
import org.apache.lucene.queries.function.ValueSource;
/**
* <code>NumericStatsCollector</code> computes the sum, sum of squares, mean and standard deviation.
*/
public class NumericStatsCollector extends MinMaxStatsCollector {
  protected double sum = 0;
  protected double sumOfSquares = 0;
  protected double mean = 0;
  protected double stddev = 0;

  public NumericStatsCollector(ValueSource source, Set<String> statsList) {
    super(source, statsList);
  }

  // Accumulates min/max/count via the superclass, then the running sums.
  // NOTE(review): doubleVal(doc) is added even when the document has no value
  // (super.collect counts it as missing) — presumably it returns 0 for missing
  // docs; verify against the value source.
  @Override
  public void collect(int doc) {
    super.collect(doc);
    double value = function.doubleVal(doc);
    sum += value;
    sumOfSquares += (value * value);
  }

  @Override
  public Comparable getStat(String stat) {
    if (stat.equals("sum")) {
      return Double.valueOf(sum);
    }
    if (stat.equals("sumofsquares")) {
      return Double.valueOf(sumOfSquares);
    }
    if (stat.equals("mean")) {
      return Double.valueOf(mean);
    }
    if (stat.equals("stddev")) {
      return Double.valueOf(stddev);
    }
    return super.getStat(stat);
  }

  /**
   * Derives the mean and (population-style) standard deviation from the
   * running sums. NOTE(review): sumOfSquares/n - mean*mean can go slightly
   * negative from floating-point error on near-constant data, yielding NaN;
   * only the n&lt;=1 case is guarded.
   */
  @Override
  public void compute(){
    super.compute();
    mean = (valueCount == 0) ? 0 : sum / valueCount;
    stddev = (valueCount <= 1) ? 0.0D : Math.sqrt((sumOfSquares / valueCount) - (mean * mean));
  }
}

View File

@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.solr.analytics.util.PercentileCalculator;
import com.google.common.collect.Iterables;
/**
* <code>PercentileStatsCollector</code> computes a given list of percentiles.
*/
@SuppressWarnings("rawtypes")
public class PercentileStatsCollector extends AbstractDelegatingStatsCollector{
  // All collected (existing) values; percentiles are computed over this list.
  public final List<Comparable> values = new ArrayList<Comparable>();
  // Matches e.g. "perc_25" or "percentile_25" (case-insensitive).
  public static final Pattern PERCENTILE_PATTERN = Pattern.compile("perc(?:entile)?_(\\d+)",Pattern.CASE_INSENSITIVE);
  protected final double[] percentiles;
  protected final String[] percentileNames;
  // Parallel to percentiles/percentileNames; null until compute() sees values.
  protected Comparable[] results;

  /**
   * @param delegate the StatsCollector this collector wraps
   * @param percentiles the percentiles to compute, as fractions in (0,1)
   * @param percentileNames the stat names corresponding to each percentile
   */
  public PercentileStatsCollector(StatsCollector delegate, double[] percentiles, String[] percentileNames) {
    super(delegate);
    this.percentiles = percentiles;
    this.percentileNames = percentileNames;
  }

  /**
   * Returns the computed percentile for a recognized stat name, or delegates.
   * Returns null for a percentile stat when no values were collected.
   */
  @Override
  public Comparable getStat(String stat) {
    for( int i=0; i < percentiles.length; i++ ){
      if (stat.equals(percentileNames[i])) {
        if (results!=null) {
          return results[i];
        } else {
          return null;
        }
      }
    }
    return delegate.getStat(stat);
  }

  /**
   * Computes the delegate's statistics, then the percentiles over the
   * collected values (results stays null when nothing was collected).
   */
  @Override
  public void compute(){
    delegate.compute();
    if (!values.isEmpty()) {
      results = Iterables.toArray(getPercentiles(),Comparable.class);
    } else {
      results = null;
    }
  }

  /** Computes the configured percentiles over the collected values. */
  @SuppressWarnings({ "unchecked"})
  protected List<Comparable> getPercentiles() {
    return PercentileCalculator.getPercentiles(values, percentiles);
  }

  /**
   * Collects the document's value (if it exists) for later percentile
   * computation, after letting the delegate collect it.
   * @param doc Document to collect from
   */
  @Override
  public void collect(int doc) {
    super.collect(doc);
    if (value.exists) {
      values.add((Comparable)value.toObject());
    }
  }
}

View File

@ -0,0 +1,70 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.io.IOException;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.util.mutable.MutableValue;
/**
* <code>StatsCollector</code> implementations reduce a list of Objects to a single value.
* Most implementations reduce a list to a statistic on that list.
*/
public interface StatsCollector {
  /**
   * Collect values from the value source and add to statistics.
   * @param doc Document to collect from
   */
  void collect(int doc);

  /**
   * @param context The context to read documents from.
   * @throws IOException if setting next reader fails
   */
  void setNextReader(AtomicReaderContext context) throws IOException;

  /**
   * @return The mutable value holding the last collected value.
   */
  MutableValue getValue();

  /**
   * @return The function values of the value source being collected.
   */
  FunctionValues getFunction();

  /**
   * @return The set of statistics being computed by the stats collector.
   */
  Set<String> getStatsList();

  /**
   * Return the value of the given statistic.
   * @param stat the stat
   * @return a comparable
   */
  Comparable getStat(String stat);

  /**
   * After all documents have been collected, this method should be
   * called to finalize the calculations of each statistic.
   */
  void compute();

  /**
   * @return The string representation of the value source.
   */
  String valueSourceString();
}

View File

@ -0,0 +1,649 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.search.FieldCache;
import org.apache.solr.analytics.expression.ExpressionFactory;
import org.apache.solr.analytics.request.AnalyticsRequest;
import org.apache.solr.analytics.request.ExpressionRequest;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.analytics.util.AnalyticsParsers;
import org.apache.solr.analytics.util.valuesource.AbsoluteValueDoubleFunction;
import org.apache.solr.analytics.util.valuesource.AddDoubleFunction;
import org.apache.solr.analytics.util.valuesource.ConcatStringFunction;
import org.apache.solr.analytics.util.valuesource.ConstDateSource;
import org.apache.solr.analytics.util.valuesource.ConstDoubleSource;
import org.apache.solr.analytics.util.valuesource.ConstStringSource;
import org.apache.solr.analytics.util.valuesource.DateFieldSource;
import org.apache.solr.analytics.util.valuesource.DateMathFunction;
import org.apache.solr.analytics.util.valuesource.DivDoubleFunction;
import org.apache.solr.analytics.util.valuesource.DualDoubleFunction;
import org.apache.solr.analytics.util.valuesource.FilterFieldSource;
import org.apache.solr.analytics.util.valuesource.LogDoubleFunction;
import org.apache.solr.analytics.util.valuesource.MultiDateFunction;
import org.apache.solr.analytics.util.valuesource.MultiDoubleFunction;
import org.apache.solr.analytics.util.valuesource.MultiplyDoubleFunction;
import org.apache.solr.analytics.util.valuesource.NegateDoubleFunction;
import org.apache.solr.analytics.util.valuesource.PowDoubleFunction;
import org.apache.solr.analytics.util.valuesource.ReverseStringFunction;
import org.apache.solr.analytics.util.valuesource.SingleDoubleFunction;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.StrField;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.schema.TrieDoubleField;
import org.apache.solr.schema.TrieFloatField;
import org.apache.solr.schema.TrieIntField;
import org.apache.solr.schema.TrieLongField;
import com.google.common.base.Supplier;
/**
 * <code>StatsCollectorSupplierFactory</code> parses the expressions of an
 * {@link AnalyticsRequest} into value-source trees and builds a
 * {@link Supplier} that produces fresh, identical arrays of
 * {@link StatsCollector}s for those sources.
 */
public class StatsCollectorSupplierFactory {
  // FunctionTypes: the kind of ValueSource an expression may resolve to.
  final static int NUMBER_TYPE = 0;
  final static int DATE_TYPE = 1;
  final static int STRING_TYPE = 2;
  final static int FIELD_TYPE = 3;
  final static int FILTER_TYPE = 4;

  /**
   * Builds a Supplier that will generate identical arrays of new StatsCollectors.
   *
   * @param schema The Schema being used.
   * @param request The AnalyticsRequest to generate a StatsCollector[] from.
   * @return A Supplier that will return an array of new StatsCollector.
   */
  @SuppressWarnings("unchecked")
  public static Supplier<StatsCollector[]> create(IndexSchema schema, AnalyticsRequest request) {
    // source string -> set of stat names requested on that source
    final Map<String, Set<String>> collectorStats = new HashMap<String, Set<String>>();
    // source string -> set of requested integer percentiles (1..99)
    final Map<String, Set<Integer>> collectorPercs = new HashMap<String, Set<Integer>>();
    // canonical source string -> built ValueSource
    final Map<String, ValueSource> collectorSources = new HashMap<String, ValueSource>();
    // Iterate through all expression request to make a list of ValueSource strings
    // and statistics that need to be calculated on those ValueSources.
    for (ExpressionRequest expRequest : request.getExpressions()) {
      String statExpression = expRequest.getExpressionString();
      Set<String> statistics = getStatistics(statExpression);
      if (statistics == null) {
        continue;
      }
      for (String statExp : statistics) {
        String stat;
        String operands;
        try {
          // Split "stat(operands)" on the first '(' and last ')'.
          stat = statExp.substring(0, statExp.indexOf('(')).trim();
          operands = statExp.substring(statExp.indexOf('(')+1, statExp.lastIndexOf(')')).trim();
        } catch (Exception e) {
          throw new SolrException(ErrorCode.BAD_REQUEST,"Unable to parse statistic: ["+statExpression+"]",e);
        }
        String[] arguments = ExpressionFactory.getArguments(operands);
        String source = arguments[0];
        if (stat.equals(AnalyticsParams.STAT_PERCENTILE)) {
          // The statistic is a percentile, extra parsing is required
          if (arguments.length<2) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"Too few arguments given for "+stat+"() in ["+statExp+"].");
          } else if (arguments.length>2) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"Too many arguments given for "+stat+"() in ["+statExp+"].");
          }
          // percentile(p, source): second argument is the actual source.
          source = arguments[1];
          Set<Integer> percs = collectorPercs.get(source);
          if (percs == null) {
            percs = new HashSet<Integer>();
            collectorPercs.put(source, percs);
          }
          try {
            int perc = Integer.parseInt(arguments[0]);
            if (perc>0 && perc<100) {
              percs.add(perc);
            } else {
              throw new SolrException(ErrorCode.BAD_REQUEST,"The percentile in ["+statExp+"] is not between 0 and 100, exculsive.");
            }
          } catch (NumberFormatException e) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"\""+arguments[0]+"\" cannot be converted into a percentile.",e);
          }
        } else if (arguments.length>1) {
          throw new SolrException(ErrorCode.BAD_REQUEST,"Too many arguments given for "+stat+"() in ["+statExp+"].");
        } else if (arguments.length==0) {
          throw new SolrException(ErrorCode.BAD_REQUEST,"No arguments given for "+stat+"() in ["+statExp+"].");
        }
        // Only unique ValueSources will be made; therefore statistics must be accumulated for
        // each ValueSource, even across different expression requests
        Set<String> stats = collectorStats.get(source);
        if (stats == null) {
          stats = new HashSet<String>();
          collectorStats.put(source, stats);
        }
        stats.add(stat);
      }
    }
    String[] keys = collectorStats.keySet().toArray(new String[0]);
    for (String sourceStr : keys) {
      // Build one ValueSource for each unique value source string
      ValueSource source = buildSourceTree(schema, sourceStr);
      if (source == null) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The statistic ["+sourceStr+"] could not be parsed.");
      }
      String builtString = source.toString();
      collectorSources.put(builtString,source);
      // Replace the user given string with the correctly built string
      if (!builtString.equals(sourceStr)) {
        Set<String> stats = collectorStats.remove(sourceStr);
        if (stats!=null) {
          collectorStats.put(builtString, stats);
        }
        Set<Integer> percs = collectorPercs.remove(sourceStr);
        if (percs!=null) {
          collectorPercs.put(builtString, percs);
        }
        for (ExpressionRequest er : request.getExpressions()) {
          er.setExpressionString(er.getExpressionString().replace(sourceStr, builtString));
        }
      }
    }
    if (collectorSources.size()==0) {
      // Nothing to collect: supplier of empty arrays.
      return new Supplier<StatsCollector[]>() {
        @Override
        public StatsCollector[] get() {
          return new StatsCollector[0];
        }
      };
    }
    // All information is stored in final arrays so that nothing
    // has to be computed when the Supplier's get() method is called.
    final Set<String>[] statsArr = collectorStats.values().toArray(new Set[0]);
    final ValueSource[] sourceArr = collectorSources.values().toArray(new ValueSource[0]);
    final boolean[] uniqueBools = new boolean[statsArr.length];
    final boolean[] medianBools = new boolean[statsArr.length];
    final boolean[] numericBools = new boolean[statsArr.length];
    final boolean[] dateBools = new boolean[statsArr.length];
    final double[][] percsArr = new double[statsArr.length][];
    final String[][] percsNames = new String[statsArr.length][];
    for (int count = 0; count < sourceArr.length; count++) {
      uniqueBools[count] = statsArr[count].contains(AnalyticsParams.STAT_UNIQUE);
      medianBools[count] = statsArr[count].contains(AnalyticsParams.STAT_MEDIAN);
      numericBools[count] = statsArr[count].contains(AnalyticsParams.STAT_SUM)||statsArr[count].contains(AnalyticsParams.STAT_SUM_OF_SQUARES)||statsArr[count].contains(AnalyticsParams.STAT_MEAN)||statsArr[count].contains(AnalyticsParams.STAT_STANDARD_DEVIATION);
      dateBools[count] = (sourceArr[count] instanceof DateFieldSource) | (sourceArr[count] instanceof MultiDateFunction) | (sourceArr[count] instanceof ConstDateSource);
      Set<Integer> ps = collectorPercs.get(sourceArr[count].toString());
      if (ps!=null) {
        percsArr[count] = new double[ps.size()];
        percsNames[count] = new String[ps.size()];
        int percCount = 0;
        for (int p : ps) {
          // Store percentiles as fractions, and the stat name used for lookup.
          percsArr[count][percCount] = p/100.0;
          percsNames[count][percCount++] = AnalyticsParams.STAT_PERCENTILE+"_"+p;
        }
      }
    }
    // Making the Supplier
    return new Supplier<StatsCollector[]>() {
      public StatsCollector[] get() {
        StatsCollector[] collectors = new StatsCollector[statsArr.length];
        for (int count = 0; count < statsArr.length; count++) {
          // Base collector depends on the source kind; decorators are then
          // layered on for unique/median/percentile stats.
          if(numericBools[count]){
            StatsCollector sc = new NumericStatsCollector(sourceArr[count], statsArr[count]);
            if(uniqueBools[count]) sc = new UniqueStatsCollector(sc);
            if(medianBools[count]) sc = new MedianStatsCollector(sc);
            if(percsArr[count]!=null) sc = new PercentileStatsCollector(sc,percsArr[count],percsNames[count]);
            collectors[count]=sc;
          } else if (dateBools[count]) {
            StatsCollector sc = new MinMaxStatsCollector(sourceArr[count], statsArr[count]);
            if(uniqueBools[count]) sc = new UniqueStatsCollector(sc);
            if(medianBools[count]) sc = new DateMedianStatsCollector(sc);
            if(percsArr[count]!=null) sc = new PercentileStatsCollector(sc,percsArr[count],percsNames[count]);
            collectors[count]=sc;
          } else {
            StatsCollector sc = new MinMaxStatsCollector(sourceArr[count], statsArr[count]);
            if(uniqueBools[count]) sc = new UniqueStatsCollector(sc);
            if(medianBools[count]) sc = new MedianStatsCollector(sc);
            if(percsArr[count]!=null) sc = new PercentileStatsCollector(sc,percsArr[count],percsNames[count]);
            collectors[count]=sc;
          }
        }
        return collectors;
      }
    };
  }

  /**
   * Finds the set of statistics that must be computed for the expression.
   * @param expression The string representation of an expression
   * @return The set of statistics (sum, mean, median, etc.) found in the expression,
   *         or null when the expression contains none
   */
  public static Set<String> getStatistics(String expression) {
    HashSet<String> set = new HashSet<String>();
    int firstParen = expression.indexOf('(');
    if (firstParen>0) {
      String topOperation = expression.substring(0,firstParen).trim();
      if (AnalyticsParams.ALL_STAT_SET.contains(topOperation)) {
        set.add(expression);
      } else if (!(topOperation.equals(AnalyticsParams.CONSTANT_NUMBER)||topOperation.equals(AnalyticsParams.CONSTANT_DATE)||topOperation.equals(AnalyticsParams.CONSTANT_STRING))) {
        // Not a stat and not a constant: recurse into the operation's arguments.
        String operands = expression.substring(firstParen+1, expression.lastIndexOf(')')).trim();
        String[] arguments = ExpressionFactory.getArguments(operands);
        for (String argument : arguments) {
          Set<String> more = getStatistics(argument);
          if (more!=null) {
            set.addAll(more);
          }
        }
      }
    }
    if (set.size()==0) {
      return null;
    }
    return set;
  }

  /**
   * Builds a Value Source from a given string
   *
   * @param schema The schema being used.
   * @param expression The string to be turned into an expression.
   * @return The completed ValueSource
   */
  private static ValueSource buildSourceTree(IndexSchema schema, String expression) {
    return buildSourceTree(schema,expression,FIELD_TYPE);
  }

  /**
   * Builds a Value Source from a given string and a given source type
   *
   * @param schema The schema being used.
   * @param expression The string to be turned into an expression.
   * @param sourceType The type of source that must be returned.
   * @return The completed ValueSource, or null when the expression's type
   *         does not match the requested sourceType
   */
  private static ValueSource buildSourceTree(IndexSchema schema, String expression, int sourceType) {
    int expressionType = getSourceType(expression);
    // FIELD_TYPE and FILTER_TYPE expressions may satisfy any requested type;
    // otherwise the expression's type must match the requested type exactly.
    if (sourceType != FIELD_TYPE && expressionType != FIELD_TYPE &&
        expressionType != FILTER_TYPE && expressionType != sourceType) {
      return null;
    }
    switch (expressionType) {
    case NUMBER_TYPE : return buildNumericSource(schema, expression);
    case DATE_TYPE : return buildDateSource(schema, expression);
    case STRING_TYPE : return buildStringSource(schema, expression);
    case FIELD_TYPE : return buildFieldSource(schema, expression, sourceType);
    case FILTER_TYPE : return buildFilterSource(schema, expression.substring(expression.indexOf('(')+1,expression.lastIndexOf(')')), sourceType);
    default : throw new SolrException(ErrorCode.BAD_REQUEST,expression+" is not a valid operation.");
    }
  }

  /**
   * Determines what type of value source the expression represents.
   *
   * @param expression The expression representing the desired ValueSource
   * @return NUMBER_TYPE, DATE_TYPE, STRING_TYPE, FILTER_TYPE, or FIELD_TYPE
   *         when the expression has no parentheses (i.e. is a plain field name)
   * @throws SolrException if the operation is not supported
   */
  private static int getSourceType(String expression) {
    int paren = expression.indexOf('(');
    if (paren<0) {
      return FIELD_TYPE;
    }
    String operation = expression.substring(0,paren).trim();
    if (AnalyticsParams.NUMERIC_OPERATION_SET.contains(operation)) {
      return NUMBER_TYPE;
    } else if (AnalyticsParams.DATE_OPERATION_SET.contains(operation)) {
      return DATE_TYPE;
    } else if (AnalyticsParams.STRING_OPERATION_SET.contains(operation)) {
      return STRING_TYPE;
    } else if (operation.equals(AnalyticsParams.FILTER)) {
      return FILTER_TYPE;
    }
    throw new SolrException(ErrorCode.BAD_REQUEST,"The operation \""+operation+"\" in ["+expression+"] is not supported.");
  }

  /**
   * Builds a value source for a given field, making sure that the field fits a given source type.
   * @param schema the schema
   * @param expressionString The name of the field to build a Field Source from.
   * @param sourceType FIELD_TYPE for any type of field, NUMBER_TYPE for numeric fields,
   * DATE_TYPE for date fields and STRING_TYPE for string fields.
   * @return a value source, or null when the field's type does not match sourceType
   */
  private static ValueSource buildFieldSource(IndexSchema schema, String expressionString, int sourceType) {
    SchemaField sf;
    try {
      sf = schema.getField(expressionString);
    } catch (SolrException e) {
      throw new SolrException(ErrorCode.BAD_REQUEST,"The field "+expressionString+" does not exist.",e);
    }
    FieldType type = sf.getType();
    // Each field source overrides description() to print just the field name,
    // so that source.toString() yields a canonical, user-readable string.
    if ( type instanceof TrieIntField) {
      if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
        return null;
      }
      return new IntFieldSource(expressionString, FieldCache.NUMERIC_UTILS_INT_PARSER) {
        public String description() {
          return field;
        }
      };
    } else if (type instanceof TrieLongField) {
      if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
        return null;
      }
      return new LongFieldSource(expressionString, FieldCache.NUMERIC_UTILS_LONG_PARSER) {
        public String description() {
          return field;
        }
      };
    } else if (type instanceof TrieFloatField) {
      if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
        return null;
      }
      return new FloatFieldSource(expressionString, FieldCache.NUMERIC_UTILS_FLOAT_PARSER) {
        public String description() {
          return field;
        }
      };
    } else if (type instanceof TrieDoubleField) {
      if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
        return null;
      }
      return new DoubleFieldSource(expressionString, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER) {
        public String description() {
          return field;
        }
      };
    } else if (type instanceof TrieDateField) {
      if (sourceType!=DATE_TYPE&&sourceType!=FIELD_TYPE) {
        return null;
      }
      return new DateFieldSource(expressionString, AnalyticsParsers.DEFAULT_DATE_PARSER) {
        public String description() {
          return field;
        }
      };
    } else if (type instanceof StrField) {
      if (sourceType!=STRING_TYPE&&sourceType!=FIELD_TYPE) {
        return null;
      }
      return new BytesRefFieldSource(expressionString) {
        public String description() {
          return field;
        }
      };
    }
    throw new SolrException(ErrorCode.BAD_REQUEST, type.toString()+" is not a supported field type in Solr Analytics.");
  }

  /**
   * Builds a filter (default-if-missing) source that wraps a given source,
   * substituting the given default value for documents missing a value.
   * @param schema the schema
   * @param expressionString The arguments of the filter: "source,defaultValue".
   * @param sourceType FIELD_TYPE for any type of field, NUMBER_TYPE for numeric fields,
   * DATE_TYPE for date fields and STRING_TYPE for string fields.
   * @return a value source, or null when the wrapped source cannot be built
   */
  @SuppressWarnings("deprecation")
  private static ValueSource buildFilterSource(IndexSchema schema, String expressionString, int sourceType) {
    String[] arguments = ExpressionFactory.getArguments(expressionString);
    if (arguments.length!=2) {
      throw new SolrException(ErrorCode.BAD_REQUEST,"Invalid arguments were given for \""+AnalyticsParams.FILTER+"\".");
    }
    ValueSource delegateSource = buildSourceTree(schema, arguments[0], sourceType);
    if (delegateSource==null) {
      return null;
    }
    Object defaultObject;
    Class<? extends ValueSource> type = delegateSource.getClass();
    ValueSource src = delegateSource;
    if (delegateSource instanceof FilterFieldSource) {
      // Nested filter: type-check the default against the innermost source.
      src = ((FilterFieldSource)delegateSource).getRootSource();
    }
    // Convert the default value string to match the wrapped source's type.
    if ( src instanceof IntFieldSource) {
      try {
        defaultObject = new Integer(arguments[1]);
      } catch (NumberFormatException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The filter value "+arguments[1]+" cannot be converted into an integer.",e);
      }
    } else if ( src instanceof LongFieldSource ) {
      try {
        defaultObject = new Long(arguments[1]);
      } catch (NumberFormatException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The filter value "+arguments[1]+" cannot be converted into a long.",e);
      }
    } else if ( src instanceof FloatFieldSource ) {
      try {
        defaultObject = new Float(arguments[1]);
      } catch (NumberFormatException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The filter value "+arguments[1]+" cannot be converted into a float.",e);
      }
    } else if ( src instanceof DoubleFieldSource || src instanceof SingleDoubleFunction ||
                src instanceof DualDoubleFunction|| src instanceof MultiDoubleFunction) {
      try {
        defaultObject = new Double(arguments[1]);
      } catch (NumberFormatException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The filter value "+arguments[1]+" cannot be converted into a double.",e);
      }
    } else if ( src instanceof DateFieldSource || src instanceof MultiDateFunction) {
      try {
        defaultObject = TrieDateField.parseDate(arguments[1]);
      } catch (ParseException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The filter value "+arguments[1]+" cannot be converted into a date.",e);
      }
    } else {
      // Fall back to treating the default as a plain string.
      defaultObject = arguments[1];
    }
    return new FilterFieldSource(delegateSource,defaultObject);
  }

  /**
   * Recursively parses and breaks down the expression string to build a numeric ValueSource.
   *
   * @param schema The schema to pull fields from.
   * @param expressionString The expression string to build a ValueSource from.
   * @return The value source represented by the given expressionString
   */
  private static ValueSource buildNumericSource(IndexSchema schema, String expressionString) {
    int paren = expressionString.indexOf('(');
    String[] arguments;
    String operands;
    if (paren<0) {
      // No parentheses: the expression is a plain field name.
      return buildFieldSource(schema,expressionString,NUMBER_TYPE);
    } else {
      try {
        operands = expressionString.substring(paren+1, expressionString.lastIndexOf(')')).trim();
      } catch (Exception e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"Missing closing parenthesis in ["+expressionString+"]");
      }
      arguments = ExpressionFactory.getArguments(operands);
    }
    String operation = expressionString.substring(0, paren).trim();
    if (operation.equals(AnalyticsParams.CONSTANT_NUMBER)) {
      if (arguments.length!=1) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The constant number declaration ["+expressionString+"] does not have exactly 1 argument.");
      }
      return new ConstDoubleSource(Double.parseDouble(arguments[0]));
    } else if (operation.equals(AnalyticsParams.NEGATE)) {
      if (arguments.length!=1) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The negate operation ["+expressionString+"] does not have exactly 1 argument.");
      }
      ValueSource argSource = buildNumericSource(schema, arguments[0]);
      if (argSource==null) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The operation \""+AnalyticsParams.NEGATE+"\" requires a numeric field or operation as argument. \""+arguments[0]+"\" is not a numeric field or operation.");
      }
      return new NegateDoubleFunction(argSource);
    } else if (operation.equals(AnalyticsParams.ABSOLUTE_VALUE)) {
      if (arguments.length!=1) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The absolute value operation ["+expressionString+"] does not have exactly 1 argument.");
      }
      ValueSource argSource = buildNumericSource(schema, arguments[0]);
      if (argSource==null) {
        // NOTE(review): this error message names NEGATE but this is the
        // ABSOLUTE_VALUE branch — likely a copy/paste slip; confirm and fix.
        throw new SolrException(ErrorCode.BAD_REQUEST,"The operation \""+AnalyticsParams.NEGATE+"\" requires a numeric field or operation as argument. \""+arguments[0]+"\" is not a numeric field or operation.");
      }
      return new AbsoluteValueDoubleFunction(argSource);
    } else if (operation.equals(AnalyticsParams.FILTER)) {
      return buildFilterSource(schema, operands, NUMBER_TYPE);
    }
    // Multi-argument numeric operations: build each argument recursively.
    List<ValueSource> subExpressions = new ArrayList<ValueSource>();
    for (String argument : arguments) {
      ValueSource argSource = buildNumericSource(schema, argument);
      if (argSource == null) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The operation \""+operation+"\" requires numeric fields or operations as arguments. \""+argument+"\" is not a numeric field or operation.");
      }
      subExpressions.add(argSource);
    }
    if (operation.equals(AnalyticsParams.ADD)) {
      return new AddDoubleFunction(subExpressions.toArray(new ValueSource[0]));
    } else if (operation.equals(AnalyticsParams.MULTIPLY)) {
      return new MultiplyDoubleFunction(subExpressions.toArray(new ValueSource[0]));
    } else if (operation.equals(AnalyticsParams.DIVIDE)) {
      if (subExpressions.size()!=2) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The divide operation ["+expressionString+"] does not have exactly 2 arguments.");
      }
      return new DivDoubleFunction(subExpressions.get(0),subExpressions.get(1));
    } else if (operation.equals(AnalyticsParams.POWER)) {
      if (subExpressions.size()!=2) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The power operation ["+expressionString+"] does not have exactly 2 arguments.");
      }
      return new PowDoubleFunction(subExpressions.get(0),subExpressions.get(1));
    } else if (operation.equals(AnalyticsParams.LOG)) {
      if (subExpressions.size()!=2) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The log operation ["+expressionString+"] does not have exactly 2 arguments.");
      }
      return new LogDoubleFunction(subExpressions.get(0), subExpressions.get(1));
    }
    // A date/string operation is valid syntax but not a numeric source.
    if (AnalyticsParams.DATE_OPERATION_SET.contains(operation)||AnalyticsParams.STRING_OPERATION_SET.contains(operation)) {
      return null;
    }
    throw new SolrException(ErrorCode.BAD_REQUEST,"The operation ["+expressionString+"] is not supported.");
  }

  /**
   * Recursively parses and breaks down the expression string to build a date ValueSource.
   *
   * @param schema The schema to pull fields from.
   * @param expressionString The expression string to build a ValueSource from.
   * @return The value source represented by the given expressionString
   */
  @SuppressWarnings("deprecation")
  private static ValueSource buildDateSource(IndexSchema schema, String expressionString) {
    int paren = expressionString.indexOf('(');
    String[] arguments;
    if (paren<0) {
      return buildFieldSource(schema, expressionString, DATE_TYPE);
    } else {
      arguments = ExpressionFactory.getArguments(expressionString.substring(paren+1, expressionString.lastIndexOf(')')).trim());
    }
    String operands = arguments[0];
    String operation = expressionString.substring(0, paren).trim();
    if (operation.equals(AnalyticsParams.CONSTANT_DATE)) {
      if (arguments.length!=1) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The constant date declaration ["+expressionString+"] does not have exactly 1 argument.");
      }
      try {
        return new ConstDateSource(TrieDateField.parseDate(operands));
      } catch (ParseException e) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"The constant "+operands+" cannot be converted into a date.",e);
      }
    } else if (operation.equals(AnalyticsParams.FILTER)) {
      return buildFilterSource(schema, operands, DATE_TYPE);
    }
    if (operation.equals(AnalyticsParams.DATE_MATH)) {
      List<ValueSource> subExpressions = new ArrayList<ValueSource>();
      boolean first = true;
      // date_math(date, mathString...): the first argument is a date source,
      // all following arguments are string sources describing the math.
      for (String argument : arguments) {
        ValueSource argSource;
        if (first) {
          first = false;
          argSource = buildDateSource(schema, argument);
          if (argSource == null) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"\""+AnalyticsParams.DATE_MATH+"\" requires the first argument be a date operation or field. ["+argument+"] is not a date operation or field.");
          }
        } else {
          argSource = buildStringSource(schema, argument);
          if (argSource == null) {
            throw new SolrException(ErrorCode.BAD_REQUEST,"\""+AnalyticsParams.DATE_MATH+"\" requires that all arguments except the first be string operations. ["+argument+"] is not a string operation.");
          }
        }
        subExpressions.add(argSource);
      }
      return new DateMathFunction(subExpressions.toArray(new ValueSource[0]));
    }
    // A numeric/string operation is valid syntax but not a date source.
    if (AnalyticsParams.NUMERIC_OPERATION_SET.contains(operation)||AnalyticsParams.STRING_OPERATION_SET.contains(operation)) {
      return null;
    }
    throw new SolrException(ErrorCode.BAD_REQUEST,"The operation ["+expressionString+"] is not supported.");
  }

  /**
   * Recursively parses and breaks down the expression string to build a string ValueSource.
   *
   * @param schema The schema to pull fields from.
   * @param expressionString The expression string to build a ValueSource from.
   * @return The value source represented by the given expressionString
   */
  private static ValueSource buildStringSource(IndexSchema schema, String expressionString) {
    int paren = expressionString.indexOf('(');
    String[] arguments;
    if (paren<0) {
      // NOTE(review): requests FIELD_TYPE (any field) rather than STRING_TYPE;
      // presumably intentional so numeric/date fields can feed string ops — confirm.
      return buildFieldSource(schema, expressionString, FIELD_TYPE);
    } else {
      arguments = ExpressionFactory.getArguments(expressionString.substring(paren+1, expressionString.lastIndexOf(')')).trim());
    }
    String operands = arguments[0];
    String operation = expressionString.substring(0, paren).trim();
    if (operation.equals(AnalyticsParams.CONSTANT_STRING)) {
      // Use the raw (untrimmed) text between the parentheses as the constant.
      operands = expressionString.substring(paren+1, expressionString.lastIndexOf(')'));
      return new ConstStringSource(operands);
    } else if (operation.equals(AnalyticsParams.FILTER)) {
      return buildFilterSource(schema,operands,FIELD_TYPE);
    } else if (operation.equals(AnalyticsParams.REVERSE)) {
      if (arguments.length!=1) {
        throw new SolrException(ErrorCode.BAD_REQUEST,"\""+AnalyticsParams.REVERSE+"\" requires exactly one argument. The number of arguments in "+expressionString+" is not 1.");
      }
      return new ReverseStringFunction(buildStringSource(schema, operands));
    }
    List<ValueSource> subExpressions = new ArrayList<ValueSource>();
    for (String argument : arguments) {
      subExpressions.add(buildSourceTree(schema, argument));
    }
    if (operation.equals(AnalyticsParams.CONCATENATE)) {
      return new ConcatStringFunction(subExpressions.toArray(new ValueSource[0]));
    }
    // Delegate to the numeric/date builders when the operation belongs to them.
    if (AnalyticsParams.NUMERIC_OPERATION_SET.contains(operation)) {
      return buildNumericSource(schema, expressionString);
    } else if (AnalyticsParams.DATE_OPERATION_SET.contains(operation)) {
      return buildDateSource(schema, expressionString);
    }
    throw new SolrException(ErrorCode.BAD_REQUEST,"The operation ["+expressionString+"] is not supported.");
  }
}

View File

@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.statistics;
import java.util.HashSet;
import java.util.Set;
/**
 * <code>UniqueStatsCollector</code> computes the number of unique values
 * observed for a field, reported as the "unique" statistic.
 */
public class UniqueStatsCollector extends AbstractDelegatingStatsCollector{
  // Distinct values seen so far; HashSet gives O(1) add.
  private final Set<Object> uniqueValues = new HashSet<Object>();

  public UniqueStatsCollector(StatsCollector delegate) {
    super(delegate);
  }

  /**
   * Collects the current value for the given document and, when the value
   * exists, records it in the set of unique values.
   */
  @Override
  public void collect(int doc) {
    super.collect(doc);
    if (value.exists) {
      uniqueValues.add(value.toObject());
    }
  }

  /**
   * Returns the "unique" count when requested; all other statistics are
   * delegated to the wrapped collector.
   */
  @Override
  public Comparable getStat(String stat) {
    if (stat.equals("unique")) {
      // Long.valueOf instead of the deprecated Long(long) boxing constructor.
      return Long.valueOf(uniqueValues.size());
    }
    return delegate.getStat(stat);
  }

  @Override
  public void compute() {
    delegate.compute();
  }
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Statistics collectors reduce a list of Objects to a single value. Most implementations reduce a list to a statistic on that list.
</p>
</body>
</html>

View File

@ -0,0 +1,114 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
import org.apache.solr.common.params.FacetParams.FacetRangeOther;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
 * Parameter names, defaults, and statistic/function names for the Solr
 * Analytics component.
 * <p>
 * Parameter-name constants hold the abbreviation(s) and the full name
 * separated by '|' (e.g. "o|olap"); request parsing accepts either form.
 */
public interface AnalyticsParams {
  // Full length Analytics Params
  public static final String ANALYTICS = "olap";
  public static final String REQUEST = "o|olap";
  public static final String EXPRESSION = "s|stat|statistic";
  public static final String HIDDEN_EXPRESSION = "hs|hiddenstat|hiddenstatistic";
  public static final String FIELD_FACET = "ff|fieldfacet";
  public static final String LIMIT = "l|limit";
  public static final String OFFSET = "off|offset";
  public static final String HIDDEN = "h|hidden";
  public static final String SHOW_MISSING = "sm|showmissing";
  public static final String SORT_STATISTIC ="ss|sortstat|sortstatistic";
  public static final String SORT_DIRECTION ="sd|sortdirection";
  // Range-facet parameters
  public static final String RANGE_FACET = "rf|rangefacet";
  public static final String START = "st|start";
  public static final String END = "e|end";
  public static final String GAP = "g|gap";
  public static final String HARDEND = "he|hardend";
  public static final String INCLUDE_BOUNDARY = "ib|includebound";
  public static final String OTHER_RANGE = "or|otherrange";
  // Query-facet parameters
  public static final String QUERY_FACET = "qf|queryfacet";
  // NOTE(review): "dependecy" looks like a typo for "dependency", but it is
  // part of the public request syntax — confirm no users rely on it before
  // changing the string.
  public static final String DEPENDENCY = "d|dependecy";
  public static final String QUERY = "q|query";
  //Defaults
  public static final boolean DEFAULT_ABBREVIATE_PREFIX = true;
  public static final String DEFAULT_SORT_DIRECTION = "ascending";
  // -1 means "no limit" on facet values returned.
  public static final int DEFAULT_LIMIT = -1;
  public static final boolean DEFAULT_HIDDEN = false;
  public static final boolean DEFAULT_HARDEND = false;
  public static final boolean DEFAULT_SHOW_MISSING = false;
  public static final FacetRangeInclude DEFAULT_INCLUDE = FacetRangeInclude.LOWER;
  public static final FacetRangeOther DEFAULT_OTHER = FacetRangeOther.NONE;
  // Statistic Function Names (Cannot share names with ValueSource & Expression Functions)
  public static final String STAT_COUNT = "count";
  public static final String STAT_MISSING = "missing";
  public static final String STAT_SUM = "sum";
  public static final String STAT_SUM_OF_SQUARES = "sumofsquares";
  public static final String STAT_STANDARD_DEVIATION = "stddev";
  public static final String STAT_MEAN = "mean";
  public static final String STAT_UNIQUE = "unique";
  public static final String STAT_MEDIAN = "median";
  public static final String STAT_PERCENTILE = "percentile";
  public static final String STAT_MIN = "min";
  public static final String STAT_MAX = "max";
  // List preserves declaration order; the set is a LinkedHashSet so iteration
  // order matches the list.
  public static final List<String> ALL_STAT_LIST = Collections.unmodifiableList(Lists.newArrayList(STAT_COUNT, STAT_MISSING, STAT_SUM, STAT_SUM_OF_SQUARES, STAT_STANDARD_DEVIATION, STAT_MEAN, STAT_UNIQUE, STAT_MEDIAN, STAT_PERCENTILE,STAT_MIN,STAT_MAX));
  public static final Set<String> ALL_STAT_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(ALL_STAT_LIST));
  // ValueSource & Expression Function Names (Cannot share names with Statistic Functions)
  // No specific type
  final static String FILTER = "filter";
  final static String RESULT = "result";
  final static String QUERY_RESULT = "qresult";
  // Numbers
  final static String CONSTANT_NUMBER = "const_num";
  final static String NEGATE = "neg";
  final static String ABSOLUTE_VALUE = "abs";
  final static String LOG = "log";
  final static String ADD = "add";
  final static String MULTIPLY = "mult";
  final static String DIVIDE = "div";
  final static String POWER = "pow";
  public static final Set<String> NUMERIC_OPERATION_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(Lists.newArrayList(CONSTANT_NUMBER,NEGATE,ABSOLUTE_VALUE,LOG,ADD,MULTIPLY,DIVIDE,POWER)));
  // Dates
  final static String CONSTANT_DATE = "const_date";
  final static String DATE_MATH = "date_math";
  public static final Set<String> DATE_OPERATION_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(Lists.newArrayList(CONSTANT_DATE,DATE_MATH)));
  //Strings
  final static String CONSTANT_STRING = "const_str";
  final static String REVERSE = "rev";
  final static String CONCATENATE = "concat";
  public static final Set<String> STRING_OPERATION_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(Lists.newArrayList(CONSTANT_STRING,REVERSE,CONCATENATE)));
  // Field Source Wrappers
}

View File

@ -0,0 +1,200 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util;
import java.io.IOException;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Date;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldCache.LongParser;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.schema.TrieDoubleField;
import org.apache.solr.schema.TrieFloatField;
import org.apache.solr.schema.TrieIntField;
import org.apache.solr.schema.TrieLongField;
/**
 * Class to hold the parsers used for Solr Analytics.
 * <p>
 * Each parser converts a term's {@link BytesRef} (or, for numeric doc values,
 * a raw long) into the human-readable string used in facet responses.
 */
public class AnalyticsParsers {

  /**
   * Returns a parser that will translate a BytesRef or long from DocValues into
   * a String that correctly represents the value.
   * @param class1 class of the FieldType of the field being faceted on.
   * @return A Parser
   */
  public static Parser getParser(Class<? extends FieldType> class1) {
    // Dispatch on the concrete Trie* field type; anything unrecognized falls
    // through to the plain string parser.
    if (class1.equals(TrieIntField.class)) {
      return AnalyticsParsers.INT_DOC_VALUES_PARSER;
    } else if (class1.equals(TrieLongField.class)) {
      return AnalyticsParsers.LONG_DOC_VALUES_PARSER;
    } else if (class1.equals(TrieFloatField.class)) {
      return AnalyticsParsers.FLOAT_DOC_VALUES_PARSER;
    } else if (class1.equals(TrieDoubleField.class)) {
      return AnalyticsParsers.DOUBLE_DOC_VALUES_PARSER;
    } else if (class1.equals(TrieDateField.class)) {
      return AnalyticsParsers.DATE_DOC_VALUES_PARSER;
    } else {
      return AnalyticsParsers.STRING_PARSER;
    }
  }

  /** Long Parser that takes in String representations of dates and
   *  converts them into longs
   */
  public final static LongParser DEFAULT_DATE_PARSER = new LongParser() {
    @SuppressWarnings("deprecation")
    @Override
    public long parseLong(BytesRef term) {
      try {
        return TrieDateField.parseDate(term.utf8ToString()).getTime();
      } catch (ParseException e) {
        // NOTE(review): parse failures are swallowed and reported as epoch 0
        // with only a stderr message; consider propagating or logging properly.
        System.err.println("Cannot parse date "+term.utf8ToString());
        return 0;
      }
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_DATE_PARSER";
    }
    @Override
    public TermsEnum termsEnum(Terms terms) throws IOException {
      return terms.iterator(null);
    }
  };

  /**
   * For use in classes that grab values by docValue.
   * Converts a BytesRef object into the correct readable text.
   */
  public static interface Parser {
    String parse(BytesRef bytes) throws IOException;
  }

  /**
   * Converts the long returned by NumericDocValues into the
   * correct number and return it as a string.
   */
  public static interface NumericParser extends Parser {
    String parseNum(long l);
  }

  /**
   * Converts the BytesRef or long to the correct int string.
   */
  public static final NumericParser INT_DOC_VALUES_PARSER = new NumericParser() {
    public String parse(BytesRef bytes) throws IOException {
      try {
        // Terms are stored prefix-coded; decode back to the original int.
        return ""+NumericUtils.prefixCodedToInt(bytes);
      } catch (NumberFormatException e) {
        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to an int.");
      }
    }
    @Override
    public String parseNum(long l) {
      return ""+(int)l;
    }
  };

  /**
   * Converts the BytesRef or long to the correct long string.
   */
  public static final NumericParser LONG_DOC_VALUES_PARSER = new NumericParser() {
    public String parse(BytesRef bytes) throws IOException {
      try {
        return ""+NumericUtils.prefixCodedToLong(bytes);
      } catch (NumberFormatException e) {
        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a long.");
      }
    }
    @Override
    public String parseNum(long l) {
      return ""+l;
    }
  };

  /**
   * Converts the BytesRef or long to the correct float string.
   */
  public static final NumericParser FLOAT_DOC_VALUES_PARSER = new NumericParser() {
    public String parse(BytesRef bytes) throws IOException {
      try {
        // Floats are indexed as sortable ints; undo both encodings.
        return ""+NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(bytes));
      } catch (NumberFormatException e) {
        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a float.");
      }
    }
    @Override
    public String parseNum(long l) {
      return ""+NumericUtils.sortableIntToFloat((int)l);
    }
  };

  /**
   * Converts the BytesRef or long to the correct double string.
   */
  public static final NumericParser DOUBLE_DOC_VALUES_PARSER = new NumericParser() {
    public String parse(BytesRef bytes) throws IOException {
      try {
        // Doubles are indexed as sortable longs; undo both encodings.
        return ""+NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(bytes));
      } catch (NumberFormatException e) {
        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a double.");
      }
    }
    @Override
    public String parseNum(long l) {
      return ""+NumericUtils.sortableLongToDouble(l);
    }
  };

  /**
   * Converts the BytesRef or long to the correct date string.
   */
  public static final NumericParser DATE_DOC_VALUES_PARSER = new NumericParser() {
    @SuppressWarnings("deprecation")
    public String parse(BytesRef bytes) throws IOException {
      try {
        // Dates are stored as prefix-coded millisecond timestamps.
        return TrieDateField.formatExternal(new Date(NumericUtils.prefixCodedToLong(bytes)));
      } catch (NumberFormatException e) {
        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a date.");
      }
    }
    @SuppressWarnings("deprecation")
    @Override
    public String parseNum(long l) {
      return ""+TrieDateField.formatExternal(new Date(l));
    }
  };

  /**
   * Converts the BytesRef to the correct string.
   */
  public static final Parser STRING_PARSER = new Parser() {
    public String parse(BytesRef bytes) {
      return bytes.utf8ToString();
    }
  };
}

View File

@ -0,0 +1,128 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util;
import java.util.List;
/**
 * Utility for computing the median of a list of numbers using a
 * quickselect-style partial sort (the input list is reordered in place,
 * but never fully sorted).
 */
public class MedianCalculator {

  /** Static-utility class; not meant to be instantiated. */
  private MedianCalculator() { }

  /**
   * Calculates the median of the given list of numbers.
   * <p>
   * The list is partially reordered in place, so callers must pass a
   * mutable list.
   *
   * @param list A list of {@link Comparable} {@link Number} objects
   * @return The median of the given list as a double (0 for an empty list).
   */
  public static <T extends Number & Comparable<T>> double getMedian(List<T> list) {
    int size = list.size() - 1;
    if (size == -1) {
      // Historical behavior: an empty list reports 0 rather than failing.
      return 0;
    }

    // Move the middle element(s) into their sorted position.
    select(list, .5 * size, 0, size);

    int firstIdx = (int) (Math.floor(.5 * size));
    // Even element count: average the two middle values; odd: both indices
    // coincide and the average degenerates to the single middle value.
    int secondIdx = (firstIdx <= size && size % 2 == 1) ? firstIdx + 1 : firstIdx;

    double result = list.get(firstIdx).doubleValue() * .5 + list.get(secondIdx).doubleValue() * .5;
    return result;
  }

  /**
   * Quickselect step: reorders list[begin..end] so that the element(s)
   * around the (possibly fractional) index {@code place} end up in their
   * sorted position.
   */
  private static <T extends Comparable<T>> void select(List<T> list, double place, int begin, int end) {
    T split;
    if (end - begin < 10) {
      // Small range: a random pivot is good enough.
      split = list.get((int) (Math.random() * (end - begin + 1)) + begin);
    } else {
      // Larger range: sampled pivot for better balance.
      split = split(list, begin, end);
    }

    Point result = partition(list, begin, end, split);

    if (place < result.low) {
      select(list, place, begin, result.low);
    } else if (place > result.high) {
      select(list, place, result.high, end);
    } else {
      // place straddles the pivot block; fix up whichever side is needed.
      if (result.low == (int) (Math.floor(place)) && result.low > begin) {
        select(list, result.low, begin, result.low);
      }
      if (result.high == (int) (Math.ceil(place)) && result.high < end) {
        select(list, result.high, result.high, end);
      }
    }
  }

  /**
   * Chooses a pivot by gathering ~sqrt(n) evenly spaced samples at the front
   * of the range and selecting their median recursively.
   */
  private static <T extends Comparable<T>> T split(List<T> list, int begin, int end) {
    T temp;
    int num = (end - begin + 1);
    int recursiveSize = (int) Math.sqrt((double) num);
    int step = num / recursiveSize;
    for (int i = 1; i < recursiveSize; i++) {
      int swapFrom = i * step + begin;
      int swapTo = i + begin;
      temp = list.get(swapFrom);
      list.set(swapFrom, list.get(swapTo));
      list.set(swapTo, temp);
    }
    recursiveSize--;
    select(list, recursiveSize / 2 + begin, begin, recursiveSize + begin);
    return list.get(recursiveSize / 2 + begin);
  }

  /**
   * Three-way partition of list[begin..end] around {@code indexElement};
   * returns the [low, high] index range bracketing the pivot block.
   */
  private static <T extends Comparable<T>> Point partition(List<T> list, int begin, int end, T indexElement) {
    T temp;
    int left, right;
    for (left = begin, right = end; left < right; left++, right--) {
      while (list.get(left).compareTo(indexElement) < 0) {
        left++;
      }
      while (right != begin - 1 && list.get(right).compareTo(indexElement) >= 0) {
        right--;
      }
      if (right <= left) {
        left--;
        right++;
        break;
      }
      temp = list.get(left);
      list.set(left, list.get(right));
      list.set(right, temp);
    }
    while (left != begin - 1 && list.get(left).compareTo(indexElement) >= 0) {
      left--;
    }
    while (right != end + 1 && list.get(right).compareTo(indexElement) <= 0) {
      right++;
    }
    // Gather elements equal to the pivot into one contiguous block.
    int rightMove = right + 1;
    while (rightMove < end + 1) {
      if (list.get(rightMove).equals(indexElement)) {
        temp = list.get(rightMove);
        list.set(rightMove, list.get(right));
        list.set(right, temp);
        do {
          right++;
        } while (list.get(right).equals(indexElement));
        if (rightMove <= right) {
          rightMove = right;
        }
      }
      rightMove++;
    }
    return new Point(left, right);
  }

  /**
   * Result of {@link #partition}: the index range [low, high] bracketing the
   * pivot block. Declared here so this class does not depend on the
   * package-private Point class defined in PercentileCalculator.java.
   */
  private static final class Point {
    final int low;
    final int high;

    Point(int low, int high) {
      this.low = low;
      this.high = high;
    }
  }
}

View File

@ -0,0 +1,177 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Utility for computing multiple percentile values of a list in one pass
 * using a multi-target quickselect (the input list is reordered in place,
 * but never fully sorted).
 */
public class PercentileCalculator {

  /** Static-utility class; not meant to be instantiated. */
  private PercentileCalculator() { }

  /**
   * Calculates a list of percentile values for a given list of objects and percentiles.
   * <p>
   * The list is partially reordered in place, so callers must pass a
   * mutable list.
   *
   * @param list The list of {@link Comparable} objects to calculate the percentiles of.
   * @param percents The array of percentiles (.01 to .99) to calculate.
   * @return a list of comparables in the same order as {@code percents},
   *         or {@code null} for an empty input list
   */
  public static <T extends Comparable<T>> List<T> getPercentiles(List<T> list, double[] percents) {
    int size = list.size();
    if (size == 0) {
      return null;
    }

    // Translate each fractional percentile into a target list index.
    int[] percs = new int[percents.length];
    for (int i = 0; i < percs.length; i++) {
      percs[i] = (int) Math.round(percents[i] * size - .5);
    }
    // Sorted copy drives the selection; 'percs' keeps the caller's order.
    int[] percentiles = Arrays.copyOf(percs, percs.length);
    Arrays.sort(percentiles);

    if (percentiles[0] < 0 || percentiles[percentiles.length - 1] > size - 1) {
      throw new IllegalArgumentException();
    }

    List<T> results = new ArrayList<T>(percs.length);
    distributeAndFind(list, percentiles, 0, percentiles.length - 1);

    for (int i = 0; i < percs.length; i++) {
      results.add(list.get(percs[i]));
    }
    return results;
  }

  /**
   * Recursively selects all target indices in percentiles[beginIdx..endIdx],
   * choosing the target nearest the midpoint first so each selection narrows
   * the range for the rest.
   */
  private static <T extends Comparable<T>> void distributeAndFind(List<T> list, int[] percentiles, int beginIdx, int endIdx) {
    if (endIdx < beginIdx) {
      return;
    }
    int middleIdxb = beginIdx;
    int middleIdxe = beginIdx;
    int begin = (beginIdx == 0) ? -1 : percentiles[beginIdx - 1];
    int end = (endIdx == percentiles.length - 1) ? list.size() : percentiles[endIdx + 1];
    double middle = (begin + end) / 2.0;

    // Find the run of equal target indices closest to the midpoint.
    for (int i = beginIdx; i <= endIdx; i++) {
      double value = Math.abs(percentiles[i] - middle) - Math.abs(percentiles[middleIdxb] - middle);
      if (percentiles[i] == percentiles[middleIdxb]) {
        middleIdxe = i;
      } else if (value < 0) {
        middleIdxb = i;
        do {
          middleIdxe = i;
          i++;
        } while (i <= endIdx && percentiles[middleIdxb] == percentiles[i]);
        break;
      }
    }

    int middlePlace = percentiles[middleIdxb];
    int beginPlace = begin + 1;
    int endPlace = end - 1;

    select(list, middlePlace, beginPlace, endPlace);
    distributeAndFind(list, percentiles, beginIdx, middleIdxb - 1);
    distributeAndFind(list, percentiles, middleIdxe + 1, endIdx);
  }

  /**
   * Quickselect step: reorders list[begin..end] so that the element at index
   * {@code place} ends up in its sorted position.
   */
  private static <T extends Comparable<T>> void select(List<T> list, int place, int begin, int end) {
    T split;
    if (end - begin < 10) {
      // Small range: a random pivot is good enough.
      split = list.get((int) (Math.random() * (end - begin + 1)) + begin);
    } else {
      // Larger range: sampled pivot for better balance.
      split = split(list, begin, end);
    }

    Point result = partition(list, begin, end, split);

    if (place <= result.low) {
      select(list, place, begin, result.low);
    } else if (place >= result.high) {
      select(list, place, result.high, end);
    }
  }

  /**
   * Chooses a pivot by gathering ~sqrt(n) evenly spaced samples at the front
   * of the range and selecting their median recursively.
   */
  private static <T extends Comparable<T>> T split(List<T> list, int begin, int end) {
    T temp;
    int num = (end - begin + 1);
    int recursiveSize = (int) Math.sqrt((double) num);
    int step = num / recursiveSize;
    for (int i = 1; i < recursiveSize; i++) {
      int swapFrom = i * step + begin;
      int swapTo = i + begin;
      temp = list.get(swapFrom);
      list.set(swapFrom, list.get(swapTo));
      list.set(swapTo, temp);
    }
    recursiveSize--;
    select(list, recursiveSize / 2 + begin, begin, recursiveSize + begin);
    return list.get(recursiveSize / 2 + begin);
  }

  /**
   * Three-way partition of list[begin..end] around {@code indexElement};
   * returns the [low, high] index range bracketing the pivot block.
   */
  private static <T extends Comparable<T>> Point partition(List<T> list, int begin, int end, T indexElement) {
    T temp;
    int left, right;
    for (left = begin, right = end; left <= right; left++, right--) {
      while (list.get(left).compareTo(indexElement) < 0) {
        left++;
      }
      while (right != begin - 1 && list.get(right).compareTo(indexElement) >= 0) {
        right--;
      }
      if (right <= left) {
        left--;
        right++;
        break;
      }
      temp = list.get(left);
      list.set(left, list.get(right));
      list.set(right, temp);
    }
    while (left > begin - 1 && list.get(left).compareTo(indexElement) >= 0) {
      left--;
    }
    while (right < end + 1 && list.get(right).compareTo(indexElement) <= 0) {
      right++;
    }
    // Gather elements equal to the pivot into one contiguous block.
    int rightMove = right + 1;
    while (rightMove < end + 1) {
      if (list.get(rightMove).equals(indexElement)) {
        temp = list.get(rightMove);
        list.set(rightMove, list.get(right));
        list.set(right, temp);
        do {
          right++;
        } while (list.get(right).equals(indexElement));
        if (rightMove <= right) {
          rightMove = right;
        }
      }
      rightMove++;
    }
    return new Point(left, right);
  }

  /**
   * Result of {@link #partition}: the index range [low, high] bracketing the
   * pivot block. Nested here so this class is self-contained rather than
   * depending on the package-scope Point declared at file level.
   */
  private static final class Point {
    final int low;
    final int high;

    Point(int low, int high) {
      this.low = low;
      this.high = high;
    }
  }
}
/**
 * Simple index pair (low, high) returned by the partition step of the
 * selection algorithms in this package.
 */
class Point {
  public int low;
  public int high;

  public Point(int lowIdx, int highIdx) {
    low = lowIdx;
    high = highIdx;
  }
}

View File

@ -0,0 +1,358 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util;
import java.util.ArrayList;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import org.apache.solr.analytics.request.RangeFacetRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
import org.apache.solr.common.params.FacetParams.FacetRangeOther;
import org.apache.solr.schema.DateField;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.TrieField;
import org.apache.solr.util.DateMathParser;
/**
 * Computes the concrete range buckets (gap ranges plus before/after/between
 * "other" ranges) for an analytics range facet, mirroring the semantics of
 * Solr's standard range faceting.
 *
 * @param <T> the comparable endpoint type of the underlying field
 */
@SuppressWarnings("deprecation")
public abstract class RangeEndpointCalculator<T extends Comparable<T>> {
  protected final SchemaField field;
  protected final RangeFacetRequest request;

  public RangeEndpointCalculator(final RangeFacetRequest request) {
    this.field = request.getField();
    this.request = request;
  }

  /**
   * Formats a Range endpoint for use as a range label name in the response.
   * Default Impl just uses toString()
   */
  public String formatValue(final T val) {
    return val.toString();
  }

  /**
   * Parses a String param into an Range endpoint value throwing
   * a useful exception if not possible
   */
  public final T getValue(final String rawval) {
    try {
      return parseVal(rawval);
    } catch (Exception e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't parse value "+rawval+" for field: " + field.getName(), e);
    }
  }

  /**
   * Parses a String param into an Range endpoint.
   * Can throw a low level format exception as needed.
   */
  protected abstract T parseVal(final String rawval) throws java.text.ParseException;

  /**
   * Parses a String param into a value that represents the gap and
   * can be included in the response, throwing
   * a useful exception if not possible.
   *
   * Note: uses Object as the return type instead of T for things like
   * Date where gap is just a DateMathParser string
   */
  public final Object getGap(final String gap) {
    try {
      return parseGap(gap);
    } catch (Exception e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't parse gap "+gap+" for field: " + field.getName(), e);
    }
  }

  /**
   * Parses a String param into a value that represents the gap and
   * can be included in the response.
   * Can throw a low level format exception as needed.
   *
   * Default Impl calls parseVal
   */
  protected Object parseGap(final String rawval) throws java.text.ParseException {
    return parseVal(rawval);
  }

  /**
   * Adds the String gap param to a low Range endpoint value to determine
   * the corresponding high Range endpoint value, throwing
   * a useful exception if not possible.
   */
  public final T addGap(T value, String gap) {
    try {
      return parseAndAddGap(value, gap);
    } catch (Exception e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't add gap "+gap+" to value " + value + " for field: " + field.getName(), e);
    }
  }

  /**
   * Adds the String gap param to a low Range endpoint value to determine
   * the corresponding high Range endpoint value.
   * Can throw a low level format exception as needed.
   */
  protected abstract T parseAndAddGap(T value, String gap) throws java.text.ParseException;

  /** A single computed facet range: label, bounds, and bound inclusiveness. */
  public static class FacetRange {
    public final String name;
    public final String lower;
    public final String upper;
    public final boolean includeLower;
    public final boolean includeUpper;

    public FacetRange(String name, String lower, String upper, boolean includeLower, boolean includeUpper) {
      this.name = name;
      this.lower = lower;
      this.upper = upper;
      this.includeLower = includeLower;
      this.includeUpper = includeUpper;
    }
  }

  /**
   * Builds the full list of ranges for this request: one range per gap step
   * from start to end, followed by any requested "other" ranges
   * (before/after/between).
   */
  public List<FacetRange> getRanges(){
    final T start = getValue(request.getStart());
    T end = getValue(request.getEnd()); // not final, hardend may change this

    if( end.compareTo(start) < 0 ){
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "range facet 'end' comes before 'start': "+end+" < "+start);
    }

    // explicitly return the gap. compute this early so we are more
    // likely to catch parse errors before attempting math
    final String[] gaps = request.getGaps();
    String gap = gaps[0];

    final EnumSet<FacetRangeInclude> include = request.getInclude();

    T low = start;
    List<FacetRange> ranges = new ArrayList<FacetRange>();

    int gapCounter = 0;
    while (low.compareTo(end) < 0) {
      // Each step may use the next gap in the list; the last gap repeats.
      if (gapCounter<gaps.length) {
        gap = gaps[gapCounter++];
      }
      T high = addGap(low,gap);
      if (end.compareTo(high) < 0) {
        if (request.isHardEnd()){
          // Clamp the final range at the requested end.
          high = end;
        } else {
          // Extend the requested end to the next gap boundary.
          end = high;
        }
      }
      if (high.compareTo(low) < 0) {
        throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "range facet infinite loop (is gap negative? did the math overflow?)");
      }
      if (high.compareTo(low) == 0) {
        throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high );
      }

      final boolean includeLower = (include.contains(FacetRangeInclude.ALL) ||
                                    include.contains(FacetRangeInclude.LOWER) ||
                                    (include.contains(FacetRangeInclude.EDGE) &&
                                     0 == low.compareTo(start)));
      final boolean includeUpper = (include.contains(FacetRangeInclude.ALL) ||
                                    include.contains(FacetRangeInclude.UPPER) ||
                                    (include.contains(FacetRangeInclude.EDGE) &&
                                     0 == high.compareTo(end)));

      final String lowS = formatValue(low);
      final String highS = formatValue(high);

      ranges.add( new FacetRange(lowS,lowS,highS,includeLower,includeUpper) );
      low = high;
    }

    final Set<FacetRangeOther> others = request.getOthers();
    if (null != others && 0 < others.size() ) {
      // no matter what other values are listed, we don't do
      // anything if "none" is specified.
      if( !others.contains(FacetRangeOther.NONE) ) {
        boolean all = others.contains(FacetRangeOther.ALL);

        if (all || others.contains(FacetRangeOther.BEFORE)) {
          // include upper bound if "outer" or if first gap doesn't already include it
          ranges.add( new FacetRange(FacetRangeOther.BEFORE.toString(),
                                     null, formatValue(start), false, include.contains(FacetRangeInclude.OUTER) || include.contains(FacetRangeInclude.ALL) ||
                                     !(include.contains(FacetRangeInclude.LOWER) || include.contains(FacetRangeInclude.EDGE)) ) );
        }
        if (all || others.contains(FacetRangeOther.AFTER)) {
          // include lower bound if "outer" or if last gap doesn't already include it
          ranges.add( new FacetRange(FacetRangeOther.AFTER.toString(),
                                     formatValue(end), null, include.contains(FacetRangeInclude.OUTER) || include.contains(FacetRangeInclude.ALL) ||
                                     !(include.contains(FacetRangeInclude.UPPER) || include.contains(FacetRangeInclude.EDGE)), false) );
        }
        if (all || others.contains(FacetRangeOther.BETWEEN)) {
          ranges.add( new FacetRange(FacetRangeOther.BETWEEN.toString(), formatValue(start), formatValue(end),
                                     include.contains(FacetRangeInclude.LOWER) || include.contains(FacetRangeInclude.EDGE) || include.contains(FacetRangeInclude.ALL),
                                     include.contains(FacetRangeInclude.UPPER) || include.contains(FacetRangeInclude.EDGE) || include.contains(FacetRangeInclude.ALL)) );
        }
      }
    }

    return ranges;
  }

  /**
   * Factory: picks the endpoint calculator matching the faceted field's type.
   *
   * @throws SolrException if the field type cannot be range-faceted
   */
  public static RangeEndpointCalculator<? extends Comparable<?>> create(RangeFacetRequest request){
    final SchemaField sf = request.getField();
    final FieldType ft = sf.getType();
    final RangeEndpointCalculator<?> calc;
    if (ft instanceof TrieField) {
      final TrieField trie = (TrieField)ft;
      switch (trie.getType()) {
        case FLOAT:
          calc = new FloatRangeEndpointCalculator(request);
          break;
        case DOUBLE:
          calc = new DoubleRangeEndpointCalculator(request);
          break;
        case INTEGER:
          calc = new IntegerRangeEndpointCalculator(request);
          break;
        case LONG:
          calc = new LongRangeEndpointCalculator(request);
          break;
        default:
          // Fixed message typo: "tried field" -> "trie field".
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on trie field of unexpected type:" + sf.getName());
      }
    } else if (ft instanceof DateField) {
      calc = new DateRangeEndpointCalculator(request, null);
    } else {
      throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on field:" + sf);
    }
    return calc;
  }

  public static class FloatRangeEndpointCalculator extends RangeEndpointCalculator<Float> {
    public FloatRangeEndpointCalculator(final RangeFacetRequest request) { super(request); }
    @Override
    protected Float parseVal(String rawval) {
      return Float.valueOf(rawval);
    }
    @Override
    public Float parseAndAddGap(Float value, String gap) {
      // valueOf instead of the deprecated Float(float) boxing constructor.
      return Float.valueOf(value.floatValue() + Float.parseFloat(gap));
    }
  }

  public static class DoubleRangeEndpointCalculator extends RangeEndpointCalculator<Double> {
    public DoubleRangeEndpointCalculator(final RangeFacetRequest request) { super(request); }
    @Override
    protected Double parseVal(String rawval) {
      return Double.valueOf(rawval);
    }
    @Override
    public Double parseAndAddGap(Double value, String gap) {
      // valueOf instead of the deprecated Double(double) boxing constructor.
      return Double.valueOf(value.doubleValue() + Double.parseDouble(gap));
    }
  }

  public static class IntegerRangeEndpointCalculator extends RangeEndpointCalculator<Integer> {
    public IntegerRangeEndpointCalculator(final RangeFacetRequest request) { super(request); }
    @Override
    protected Integer parseVal(String rawval) {
      return Integer.valueOf(rawval);
    }
    @Override
    public Integer parseAndAddGap(Integer value, String gap) {
      // valueOf instead of the deprecated Integer(int) boxing constructor.
      return Integer.valueOf(value.intValue() + Integer.parseInt(gap));
    }
  }

  public static class LongRangeEndpointCalculator extends RangeEndpointCalculator<Long> {
    public LongRangeEndpointCalculator(final RangeFacetRequest request) { super(request); }
    @Override
    protected Long parseVal(String rawval) {
      return Long.valueOf(rawval);
    }
    @Override
    public Long parseAndAddGap(Long value, String gap) {
      // valueOf instead of the deprecated Long(long) boxing constructor.
      return Long.valueOf(value.longValue() + Long.parseLong(gap));
    }
  }

  public static class DateRangeEndpointCalculator extends RangeEndpointCalculator<Date> {
    // Reference "now" for date math; null here means date math resolves NOW
    // at parse time — TODO confirm against DateField.parseMath semantics.
    private final Date now;

    public DateRangeEndpointCalculator(final RangeFacetRequest request, final Date now) {
      super(request);
      this.now = now;
      if (! (field.getType() instanceof DateField) ) {
        // Fixed message typo: "filed type" -> "field type".
        throw new IllegalArgumentException("SchemaField must use field type extending DateField");
      }
    }

    @Override
    @SuppressWarnings("deprecation")
    public String formatValue(Date val) {
      return ((DateField)field.getType()).toExternal(val);
    }

    @Override
    @SuppressWarnings("deprecation")
    protected Date parseVal(String rawval) {
      return ((DateField)field.getType()).parseMath(now, rawval);
    }

    @Override
    protected Object parseGap(final String rawval) {
      // Date gaps stay as DateMathParser strings (e.g. "+1DAY").
      return rawval;
    }

    @Override
    public Date parseAndAddGap(Date value, String gap) throws java.text.ParseException {
      final DateMathParser dmp = new DateMathParser();
      dmp.setNow(value);
      return dmp.parseMath(gap);
    }
  }
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Utilities used by the analytics component.
</p>
</body>
</html>

View File

@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>AbsoluteValueDoubleFunction</code> takes the absolute value of the double value of the source it contains.
*/
public class AbsoluteValueDoubleFunction extends SingleDoubleFunction {
  public final static String NAME = AnalyticsParams.ABSOLUTE_VALUE;

  public AbsoluteValueDoubleFunction(ValueSource source) {
    super(source);
  }

  /** @return the canonical name of this function. */
  protected String name() {
    return NAME;
  }

  @Override
  public String description() {
    return name()+"("+source.description()+")";
  }

  /** Returns the absolute value of the wrapped source's double value for this doc. */
  protected double func(int doc, FunctionValues vals) {
    // Math.abs replaces the manual sign flip (also normalizes -0.0 to +0.0).
    return Math.abs(vals.doubleVal(doc));
  }

  @Override
  public boolean equals(Object o) {
    // Null guard added: the equals contract requires x.equals(null) == false;
    // the original dereferenced o.getClass() and threw NPE.
    if (o == null || getClass() != o.getClass()) return false;
    AbsoluteValueDoubleFunction other = (AbsoluteValueDoubleFunction)o;
    return this.source.equals(other.source);
  }

  @Override
  public int hashCode() {
    // Added so hashCode stays consistent with the overridden equals
    // (same class, same wrapped source => equal hash).
    return getClass().hashCode() + source.hashCode();
  }
}

View File

@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
 * <code>AddDoubleFunction</code> returns the sum of its components.
*/
public class AddDoubleFunction extends MultiDoubleFunction {
  public final static String NAME = AnalyticsParams.ADD;

  public AddDoubleFunction(ValueSource[] sources) {
    super(sources);
  }

  /** @return the canonical name of this function. */
  @Override
  protected String name() {
    return NAME;
  }

  /** Sums the double value of every wrapped source for the given document. */
  @Override
  protected double func(int doc, FunctionValues[] valsArr) {
    double total = 0d;
    for (int i = 0; i < valsArr.length; i++) {
      total += valsArr[i].doubleVal(doc);
    }
    return total;
  }
}

View File

@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>ConcatStringFunction</code> concatenates the string values of its
* components in the order given.
*/
public class ConcatStringFunction extends MultiStringFunction {
  public final static String NAME = AnalyticsParams.CONCATENATE;

  public ConcatStringFunction(ValueSource[] sources) {
    super(sources);
  }

  /** @return the canonical name of this function. */
  protected String name() {
    return NAME;
  }

  /**
   * Concatenates the string value of each component, in order.
   * Returns null as soon as any component value is missing for the document.
   */
  @Override
  protected String func(int doc, FunctionValues[] valsArr) {
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < valsArr.length; i++) {
      String piece = valsArr[i].strVal(doc);
      if (piece == null) {
        return null;
      }
      result.append(piece);
    }
    return result.toString();
  }
}

View File

@ -0,0 +1,114 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.text.ParseException;
import java.util.Date;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.docvalues.FloatDocValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDate;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.schema.TrieDateField;
/**
* <code>ConstDateSource</code> returns a constant date for all documents
*/
public class ConstDateSource extends ConstDoubleSource {
  public final static String NAME = AnalyticsParams.CONSTANT_DATE;
  // The date is stored as its epoch-millis value in the superclass' double constant.
  // NOTE(review): this constructor declares ParseException but never throws it;
  // kept as-is since removing the clause could break callers that catch it.
  public ConstDateSource(Date constant) throws ParseException {
    super(constant.getTime());
  }
  // Accepts epoch milliseconds directly.
  public ConstDateSource(Long constant) {
    super(constant);
  }
  @SuppressWarnings("deprecation")
  @Override
  public String description() {
    return name()+"(" + TrieDateField.formatExternal(new Date(getLong())) + ")";
  }
  // Canonical name of this source, used in descriptions.
  protected String name() {
    return NAME;
  }
  /** Returns FunctionValues that present the constant date for every document,
   *  in numeric (epoch millis), Date, and external-string forms. */
  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    return new FloatDocValues(this) {
      @Override
      public float floatVal(int doc) {
        return getFloat();
      }
      @Override
      public int intVal(int doc) {
        return getInt();
      }
      @Override
      public long longVal(int doc) {
        return getLong();
      }
      @Override
      public double doubleVal(int doc) {
        return getDouble();
      }
      @Override
      public String toString(int doc) {
        return description();
      }
      @Override
      public Object objectVal(int doc) {
        // Object form is the Date itself, reconstructed from epoch millis.
        return new Date(longVal(doc));
      }
      @SuppressWarnings("deprecation")
      @Override
      public String strVal(int doc) {
        return TrieDateField.formatExternal(new Date(longVal(doc)));
      }
      @Override
      public boolean boolVal(int doc) {
        return getFloat() != 0.0f;
      }
      @Override
      public ValueFiller getValueFiller() {
        return new ValueFiller() {
          private final MutableValueDate mval = new MutableValueDate();
          @Override
          public MutableValue getValue() {
            return mval;
          }
          @Override
          public void fillValue(int doc) {
            // MutableValueDate stores the date as epoch millis; a constant always exists.
            mval.value = longVal(doc);
            mval.exists = true;
          }
        };
      }
    };
  }
}

View File

@ -0,0 +1,106 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.queries.function.valuesource.ConstNumberSource;
import org.apache.lucene.queries.function.valuesource.ConstValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>ConstDoubleSource</code> returns a constant double for all documents
*/
public class ConstDoubleSource extends ConstNumberSource {
  public final static String NAME = AnalyticsParams.CONSTANT_NUMBER;
  final double constant;

  public ConstDoubleSource(double constant) {
    this.constant = constant;
  }

  @Override
  public String description() {
    return name()+"(" + getFloat() + ")";
  }

  /** @return the canonical name of this source, used in descriptions. */
  protected String name() {
    return NAME;
  }

  /** Returns FunctionValues that yield the constant for every document. */
  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    return new DoubleDocValues(this) {
      @Override
      public double doubleVal(int doc) {
        return constant;
      }
      @Override
      public boolean exists(int doc) {
        // A constant exists for every document.
        return true;
      }
    };
  }

  @Override
  public int hashCode() {
    return (int)Double.doubleToLongBits(constant) * 31;
  }

  @Override
  public boolean equals(Object o) {
    // Bug fix: the original tested "o instanceof ConstValueSource" but then cast to
    // ConstDoubleSource, throwing ClassCastException for a plain ConstValueSource.
    if (!(o instanceof ConstDoubleSource)) return false;
    ConstDoubleSource other = (ConstDoubleSource)o;
    return this.constant == other.constant;
  }

  @Override
  public int getInt() {
    return (int)constant;
  }

  @Override
  public long getLong() {
    return (long)constant;
  }

  @Override
  public float getFloat() {
    return (float)constant;
  }

  @Override
  public double getDouble() {
    return constant;
  }

  @Override
  public Number getNumber() {
    // valueOf replaces the deprecated new Double(...) constructor.
    return Double.valueOf(constant);
  }

  @Override
  public boolean getBool() {
    return constant != 0.0f;
  }
}

View File

@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.valuesource.LiteralValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>ConstStringSource</code> returns a constant string for all documents
*/
public class ConstStringSource extends LiteralValueSource {
  public final static String NAME = AnalyticsParams.CONSTANT_STRING;

  public ConstStringSource(String string) {
    super(string);
  }

  /** Describes this source as name(literal). */
  @Override
  public String description() {
    return name()+"(" + string + ")";
  }

  /** @return the canonical name of this source. */
  protected String name() {
    return NAME;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) return true;
    if (!(o instanceof ConstStringSource)) return false;
    ConstStringSource other = (ConstStringSource) o;
    return getValue().equals(other.getValue());
  }
}

View File

@ -0,0 +1,127 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.text.ParseException;
import java.util.Date;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.docvalues.LongDocValues;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDate;
import org.apache.solr.schema.TrieDateField;
/**
* Extends {@link LongFieldSource} to have a field source that takes in
* and returns {@link Date} values while working with long values internally.
*/
public class DateFieldSource extends LongFieldSource {

  public DateFieldSource(String field) throws ParseException {
    super(field, null);
  }

  public DateFieldSource(String field, FieldCache.LongParser parser) {
    super(field, parser);
  }

  /** Converts an external date string to epoch millis.
   *  NOTE(review): assumes {@code parser} is non-null; the single-argument
   *  constructor leaves it null, which would NPE here — confirm callers. */
  public long externalToLong(String extVal) {
    return parser.parseLong(new BytesRef(extVal));
  }

  /** Wraps the stored long (epoch millis) as a {@link Date}. */
  public Object longToObject(long val) {
    return new Date(val);
  }

  /** Renders the stored long as an external date string. */
  @SuppressWarnings("deprecation")
  public String longToString(long val) {
    return TrieDateField.formatExternal((Date)longToObject(val));
  }

  /** Returns per-document values backed by the field cache; missing docs report
   *  {@code exists(doc) == false} and yield null object/string values. */
  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FieldCache.Longs arr = cache.getLongs(readerContext.reader(), field, parser, true);
    final Bits valid = cache.getDocsWithField(readerContext.reader(), field);
    return new LongDocValues(this) {
      @Override
      public long longVal(int doc) {
        return arr.get(doc);
      }
      @Override
      public boolean exists(int doc) {
        return valid.get(doc);
      }
      @Override
      public Object objectVal(int doc) {
        return exists(doc) ? longToObject(arr.get(doc)) : null;
      }
      @Override
      public String strVal(int doc) {
        return exists(doc) ? longToString(arr.get(doc)) : null;
      }
      @Override
      public ValueFiller getValueFiller() {
        return new ValueFiller() {
          private final MutableValueDate mval = new MutableValueDate();
          @Override
          public MutableValue getValue() {
            return mval;
          }
          @Override
          public void fillValue(int doc) {
            mval.value = arr.get(doc);
            mval.exists = exists(doc);
          }
        };
      }
    };
  }

  @Override
  public boolean equals(Object o) {
    // Null guard added: the equals contract requires x.equals(null) == false;
    // the original dereferenced o.getClass() and threw NPE.
    if (o == null || o.getClass() != this.getClass()) return false;
    DateFieldSource other = (DateFieldSource) o;
    if (parser==null) {
      return field.equals(other.field);
    } else {
      return field.equals(other.field) && parser.equals(other.parser);
    }
  }

  @Override
  public int hashCode() {
    int h = parser == null ? this.getClass().hashCode() : parser.getClass().hashCode();
    h += super.hashCode();
    return h;
  }
}

View File

@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.text.ParseException;
import java.util.Date;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.util.DateMathParser;
/**
* <code>DateMathFunction</code> returns a start date modified by a list of DateMath operations.
*/
public class DateMathFunction extends MultiDateFunction {
  public final static String NAME = AnalyticsParams.DATE_MATH;
  // NOTE(review): DateMathParser is not thread-safe and this single instance is
  // shared across all calls to func(); confirm this ValueSource is only used
  // from one thread at a time.
  final private DateMathParser parser;
  /**
   * @param sources A list of ValueSource objects. The first element in the list
   * should be a {@link DateFieldSource} or {@link ConstDateSource} object which
   * represents the starting date. The rest of the field should be {@link BytesRefFieldSource}
   * or {@link ConstStringSource} objects which contain the DateMath operations to perform on
   * the start date.
   */
  public DateMathFunction(ValueSource[] sources) {
    super(sources);
    parser = new DateMathParser();
  }
  @Override
  protected String name() {
    return NAME;
  }
  // Applies each date-math operation (sources[1..n]) in sequence to the start
  // date (sources[0]) and returns the result as epoch millis.
  @Override
  protected long func(int doc, FunctionValues[] valsArr) {
    long time = 0;
    Date date = (Date)valsArr[0].objectVal(doc);
    try {
      parser.setNow(date);
      for (int count = 1; count < valsArr.length; count++) {
        date = parser.parseMath(valsArr[count].strVal(doc));
        parser.setNow(date);
      }
      time = parser.getNow().getTime();
    } catch (ParseException e) {
      // NOTE(review): best-effort fallback — a malformed date-math string is only
      // printed to stderr and the last successfully-computed date is returned;
      // consider routing this through the standard logger instead.
      e.printStackTrace();
      time = date.getTime();
    }
    return time;
  }
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>DivDoubleFunction</code> returns the quotient of 'a' and 'b'.
*/
public class DivDoubleFunction extends DualDoubleFunction {
  public final static String NAME = AnalyticsParams.DIVIDE;

  /**
   * @param a the numerator.
   * @param b the denominator.
   */
  public DivDoubleFunction(ValueSource a, ValueSource b) {
    super(a, b);
  }

  /** @return the canonical name of this function. */
  protected String name() {
    return NAME;
  }

  /** Divides the numerator's value by the denominator's value for this document. */
  @Override
  protected double func(int doc, FunctionValues aVals, FunctionValues bVals) {
    final double numerator = aVals.doubleVal(doc);
    final double denominator = bVals.doubleVal(doc);
    return numerator / denominator;
  }
}

View File

@ -0,0 +1,95 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.search.IndexSearcher;
/**
* Abstract {@link ValueSource} implementation which wraps two ValueSources
* and applies an extendible double function to their values.
**/
public abstract class DualDoubleFunction extends ValueSource {
  protected final ValueSource a;
  protected final ValueSource b;

  public DualDoubleFunction(ValueSource a, ValueSource b) {
    this.a = a;
    this.b = b;
  }

  /** @return the canonical name of this function, used in descriptions. */
  protected abstract String name();

  /** Combines the two per-document values into this function's result. */
  protected abstract double func(int doc, FunctionValues aVals, FunctionValues bVals);

  @Override
  public String description() {
    return name() + "(" + a.description() + "," + b.description() + ")";
  }

  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues aVals = a.getValues(context, readerContext);
    final FunctionValues bVals = b.getValues(context, readerContext);
    return new DoubleDocValues(this) {
      @Override
      public double doubleVal(int doc) {
        return func(doc, aVals, bVals);
      }
      @Override
      public boolean exists(int doc) {
        // Non-short-circuit '&' kept as-is: both sides are always evaluated.
        return aVals.exists(doc) & bVals.exists(doc);
      }
      @Override
      public String toString(int doc) {
        return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')';
      }
    };
  }

  @Override
  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
    a.createWeight(context,searcher);
    b.createWeight(context,searcher);
  }

  @Override
  public boolean equals(Object o) {
    // Null guard added: the equals contract requires x.equals(null) == false;
    // the original dereferenced o.getClass() and threw NPE.
    if (o == null || getClass() != o.getClass()) return false;
    DualDoubleFunction other = (DualDoubleFunction)o;
    return this.a.equals(other.a)
        && this.b.equals(other.b);
  }

  @Override
  public int hashCode() {
    // Asymmetric mix so that f(a, b) and f(b, a) hash differently.
    int h = a.hashCode();
    h ^= (h << 13) | (h >>> 20);
    h += b.hashCode();
    h ^= (h << 23) | (h >>> 10);
    h += name().hashCode();
    return h;
  }
}

View File

@ -0,0 +1,156 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Date;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.schema.TrieDateField;
/**
 * <code>FilterFieldSource</code> wraps a field source to return missing values
* if the value is equal to the default value.
*/
public class FilterFieldSource extends ValueSource {
  public final static String NAME = AnalyticsParams.FILTER;
  // The sentinel value: documents whose value equals this are treated as missing.
  public final Object missValue;
  protected final ValueSource source;

  public FilterFieldSource(ValueSource source, Object missValue) {
    this.source = source;
    this.missValue = missValue;
  }

  /** @return the canonical name of this source. */
  protected String name() {
    return NAME;
  }

  @SuppressWarnings("deprecation")
  @Override
  public String description() {
    if (missValue.getClass().equals(Date.class)) {
      return name()+"("+source.description()+","+TrieDateField.formatExternal((Date)missValue)+")";
    } else {
      return name()+"("+source.description()+","+missValue.toString()+")";
    }
  }

  /** Delegates all value access to the wrapped source, but reports a document as
   *  non-existent when its value is null or equals {@link #missValue}. */
  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues vals = source.getValues(context, readerContext);
    return new FunctionValues() {
      @Override
      public byte byteVal(int doc) {
        return vals.byteVal(doc);
      }
      @Override
      public short shortVal(int doc) {
        return vals.shortVal(doc);
      }
      @Override
      public float floatVal(int doc) {
        return vals.floatVal(doc);
      }
      @Override
      public int intVal(int doc) {
        return vals.intVal(doc);
      }
      @Override
      public long longVal(int doc) {
        return vals.longVal(doc);
      }
      @Override
      public double doubleVal(int doc) {
        return vals.doubleVal(doc);
      }
      @Override
      public String strVal(int doc) {
        return vals.strVal(doc);
      }
      @Override
      public Object objectVal(int doc) {
        // Filtered-out values are surfaced as null.
        return exists(doc)? vals.objectVal(doc) : null;
      }
      @Override
      public boolean exists(int doc) {
        Object other = vals.objectVal(doc);
        return other!=null&&!missValue.equals(other);
      }
      @Override
      public String toString(int doc) {
        return NAME + '(' + vals.toString(doc) + ')';
      }
      @Override
      public ValueFiller getValueFiller() {
        return new ValueFiller() {
          private final ValueFiller delegateFiller = vals.getValueFiller();
          private final MutableValue mval = delegateFiller.getValue();
          @Override
          public MutableValue getValue() {
            return mval;
          }
          @Override
          public void fillValue(int doc) {
            delegateFiller.fillValue(doc);
            // Override the delegate's exists flag with the filtered notion of existence.
            mval.exists = exists(doc);
          }
        };
      }
    };
  }

  /** Unwraps nested filters and returns the innermost non-filter source. */
  public ValueSource getRootSource() {
    if (source instanceof FilterFieldSource) {
      return ((FilterFieldSource)source).getRootSource();
    } else {
      return source;
    }
  }

  @Override
  public boolean equals(Object o) {
    // Null guard added: the equals contract requires x.equals(null) == false;
    // the original dereferenced o.getClass() and threw NPE.
    if (o == null || getClass() != o.getClass()) return false;
    FilterFieldSource other = (FilterFieldSource)o;
    return this.source.equals(other.source) && this.missValue.equals(other.missValue);
  }

  @Override
  public int hashCode() {
    return source.hashCode()+name().hashCode();
  }
}

View File

@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>LogDoubleFunction</code> returns the log of a double value with a given base.
*/
public class LogDoubleFunction extends DualDoubleFunction {
  public final static String NAME = AnalyticsParams.LOG;

  public LogDoubleFunction(ValueSource a, ValueSource b) {
    super(a,b);
  }

  /** @return the canonical name of this function. */
  protected String name() {
    return NAME;
  }

  /** Computes log base b of a via the change-of-base identity ln(a)/ln(b). */
  @Override
  protected double func(int doc, FunctionValues aVals, FunctionValues bVals) {
    final double value = aVals.doubleVal(doc);
    final double base = bVals.doubleVal(doc);
    return Math.log(value) / Math.log(base);
  }
}

View File

@ -0,0 +1,134 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.LongDocValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDate;
/**
* Abstract {@link ValueSource} implementation which wraps multiple ValueSources
* and applies an extendible date function to their values.
**/
public abstract class MultiDateFunction extends ValueSource {
  protected final ValueSource[] sources;

  public MultiDateFunction(ValueSource[] sources) {
    this.sources = sources;
  }

  /** @return the canonical name of this function, used in descriptions. */
  abstract protected String name();

  /** Combines the per-document values of all sources into a date (epoch millis). */
  abstract protected long func(int doc, FunctionValues[] valsArr);

  @Override
  public String description() {
    StringBuilder sb = new StringBuilder();
    sb.append(name()).append('(');
    boolean firstTime=true;
    for (ValueSource source : sources) {
      if (firstTime) {
        firstTime=false;
      } else {
        sb.append(',');
      }
      sb.append(source);
    }
    sb.append(')');
    return sb.toString();
  }

  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues[] valsArr = new FunctionValues[sources.length];
    for (int i=0; i<sources.length; i++) {
      valsArr[i] = sources[i].getValues(context, readerContext);
    }
    return new LongDocValues(this) {
      @Override
      public long longVal(int doc) {
        return func(doc, valsArr);
      }
      @Override
      public boolean exists(int doc) {
        // The result exists only when every component value exists.
        boolean exists = true;
        for (FunctionValues val : valsArr) {
          exists = exists & val.exists(doc);
        }
        return exists;
      }
      @Override
      public String toString(int doc) {
        StringBuilder sb = new StringBuilder();
        sb.append(name()).append('(');
        boolean firstTime=true;
        for (FunctionValues vals : valsArr) {
          if (firstTime) {
            firstTime=false;
          } else {
            sb.append(',');
          }
          sb.append(vals.toString(doc));
        }
        sb.append(')');
        return sb.toString();
      }
      @Override
      public ValueFiller getValueFiller() {
        return new ValueFiller() {
          private final MutableValueDate mval = new MutableValueDate();
          @Override
          public MutableValue getValue() {
            return mval;
          }
          @Override
          public void fillValue(int doc) {
            // MutableValueDate stores the date as epoch millis.
            mval.value = longVal(doc);
            mval.exists = exists(doc);
          }
        };
      }
    };
  }

  @Override
  public boolean equals(Object o) {
    // Null guard added: the equals contract requires x.equals(null) == false;
    // the original dereferenced o.getClass() and threw NPE.
    if (o == null || getClass() != o.getClass()) return false;
    MultiDateFunction other = (MultiDateFunction)o;
    return this.name().equals(other.name())
        && Arrays.equals(this.sources, other.sources);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(sources) + name().hashCode();
  }
}

View File

@ -0,0 +1,120 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.search.IndexSearcher;
/**
 * Abstract {@link ValueSource} implementation which wraps multiple ValueSources
 * and applies an extensible double function to their values.
 **/
public abstract class MultiDoubleFunction extends ValueSource {
  protected final ValueSource[] sources;

  public MultiDoubleFunction(ValueSource[] sources) {
    this.sources = sources;
  }

  /** @return the function name used by {@link #description()} and toString(doc). */
  abstract protected String name();

  /** Combines the wrapped sources' values for {@code doc} into a single double. */
  abstract protected double func(int doc, FunctionValues[] valsArr);

  @Override
  public String description() {
    StringBuilder sb = new StringBuilder();
    sb.append(name()).append('(');
    boolean firstTime = true;
    for (ValueSource source : sources) {
      if (firstTime) {
        firstTime = false;
      } else {
        sb.append(',');
      }
      sb.append(source);
    }
    sb.append(')');
    return sb.toString();
  }

  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues[] valsArr = new FunctionValues[sources.length];
    for (int i = 0; i < sources.length; i++) {
      valsArr[i] = sources[i].getValues(context, readerContext);
    }
    return new DoubleDocValues(this) {
      @Override
      public double doubleVal(int doc) {
        return func(doc, valsArr);
      }

      @Override
      public boolean exists(int doc) {
        // The combined value only exists when every wrapped source has a value for this doc.
        for (FunctionValues val : valsArr) {
          if (!val.exists(doc)) {
            return false;
          }
        }
        return true;
      }

      @Override
      public String toString(int doc) {
        StringBuilder sb = new StringBuilder();
        sb.append(name()).append('(');
        boolean firstTime = true;
        for (FunctionValues vals : valsArr) {
          if (firstTime) {
            firstTime = false;
          } else {
            sb.append(',');
          }
          sb.append(vals.toString(doc));
        }
        sb.append(')');
        return sb.toString();
      }
    };
  }

  @Override
  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
    // Delegate weight creation to every wrapped source.
    for (ValueSource source : sources) {
      source.createWeight(context, searcher);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // Null check required by the Object.equals() contract; comparing classes
    // directly would otherwise throw a NullPointerException for a null argument.
    if (o == null || getClass() != o.getClass()) return false;
    MultiDoubleFunction other = (MultiDoubleFunction) o;
    return this.name().equals(other.name())
        && Arrays.equals(this.sources, other.sources);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(sources) + name().hashCode();
  }
}

View File

@ -0,0 +1,149 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.StrDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueStr;
/**
 * Abstract {@link ValueSource} implementation which wraps multiple ValueSources
 * and applies an extensible string function to their values.
 **/
public abstract class MultiStringFunction extends ValueSource {
  protected final ValueSource[] sources;

  public MultiStringFunction(ValueSource[] sources) {
    this.sources = sources;
  }

  /** @return the function name used by {@link #description()} and toString(doc). */
  abstract protected String name();

  /** Combines the wrapped sources' values for {@code doc}; may return null if absent. */
  abstract protected CharSequence func(int doc, FunctionValues[] valsArr);

  @Override
  public String description() {
    StringBuilder sb = new StringBuilder();
    sb.append(name()).append('(');
    boolean firstTime = true;
    for (ValueSource source : sources) {
      if (firstTime) {
        firstTime = false;
      } else {
        sb.append(',');
      }
      sb.append(source);
    }
    sb.append(')');
    return sb.toString();
  }

  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues[] valsArr = new FunctionValues[sources.length];
    for (int i = 0; i < sources.length; i++) {
      valsArr[i] = sources[i].getValues(context, readerContext);
    }
    return new StrDocValues(this) {
      @Override
      public String strVal(int doc) {
        CharSequence cs = func(doc, valsArr);
        return cs != null ? cs.toString() : null;
      }

      @Override
      public boolean exists(int doc) {
        // The combined value only exists when every wrapped source has a value for this doc.
        for (FunctionValues val : valsArr) {
          if (!val.exists(doc)) {
            return false;
          }
        }
        return true;
      }

      @Override
      public boolean bytesVal(int doc, BytesRef bytes) {
        CharSequence cs = func(doc, valsArr);
        if (cs != null) {
          // Reuse the already-computed value rather than evaluating func() a second time.
          bytes.copyChars(cs);
          return true;
        } else {
          bytes.bytes = BytesRef.EMPTY_BYTES;
          bytes.length = 0;
          bytes.offset = 0;
          return false;
        }
      }

      @Override
      public String toString(int doc) {
        StringBuilder sb = new StringBuilder();
        sb.append(name()).append('(');
        boolean firstTime = true;
        for (FunctionValues vals : valsArr) {
          if (firstTime) {
            firstTime = false;
          } else {
            sb.append(',');
          }
          sb.append(vals.toString(doc));
        }
        sb.append(')');
        return sb.toString();
      }

      @Override
      public ValueFiller getValueFiller() {
        return new ValueFiller() {
          private final MutableValueStr mval = new MutableValueStr();

          @Override
          public MutableValue getValue() {
            return mval;
          }

          @Override
          public void fillValue(int doc) {
            mval.exists = bytesVal(doc, mval.value);
          }
        };
      }
    };
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // Null check required by the Object.equals() contract; comparing classes
    // directly would otherwise throw a NullPointerException for a null argument.
    if (o == null || getClass() != o.getClass()) return false;
    MultiStringFunction other = (MultiStringFunction) o;
    return this.name().equals(other.name())
        && Arrays.equals(this.sources, other.sources);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(sources) + name().hashCode();
  }
}

View File

@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
 * <code>MultiplyDoubleFunction</code> returns the product of its components.
 */
public class MultiplyDoubleFunction extends MultiDoubleFunction {
  public final static String NAME = AnalyticsParams.MULTIPLY;

  public MultiplyDoubleFunction(ValueSource[] sources) {
    super(sources);
  }

  @Override
  protected String name() {
    return NAME;
  }

  @Override
  protected double func(int doc, FunctionValues[] valsArr) {
    // Multiply the per-document value of every wrapped source together.
    double result = 1d;
    for (int i = 0; i < valsArr.length; i++) {
      result *= valsArr[i].doubleVal(doc);
    }
    return result;
  }
}

View File

@ -0,0 +1,54 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
 * <code>NegateDoubleFunction</code> negates the double value of the source it contains.
 */
public class NegateDoubleFunction extends SingleDoubleFunction {
  public final static String NAME = AnalyticsParams.NEGATE;

  public NegateDoubleFunction(ValueSource source) {
    super(source);
  }

  @Override
  protected String name() {
    return NAME;
  }

  // NOTE: the description() override that used to live here was byte-identical to
  // SingleDoubleFunction.description() and has been removed as redundant.

  @Override
  protected double func(int doc, FunctionValues vals) {
    // Unary negation is equivalent to the original *-1 for all doubles (incl. NaN, ±0.0).
    return -vals.doubleVal(doc);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // Null check required by the Object.equals() contract; comparing classes
    // directly would otherwise throw a NullPointerException for a null argument.
    if (o == null || getClass() != o.getClass()) return false;
    NegateDoubleFunction other = (NegateDoubleFunction) o;
    return this.source.equals(other.source);
  }
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
 * <code>PowDoubleFunction</code> returns 'a' raised to the power of 'b'.
 */
public class PowDoubleFunction extends DualDoubleFunction {
  public final static String NAME = AnalyticsParams.POWER;

  /**
   * @param a the base.
   * @param b the exponent.
   */
  public PowDoubleFunction(ValueSource a, ValueSource b) {
    super(a, b);
  }

  @Override
  protected String name() {
    return NAME;
  }

  @Override
  protected double func(int doc, FunctionValues aVals, FunctionValues bVals) {
    final double base = aVals.doubleVal(doc);
    final double exponent = bVals.doubleVal(doc);
    return Math.pow(base, exponent);
  }
}

View File

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.solr.analytics.util.AnalyticsParams;
/**
* <code>ReverseStringFunction</code> reverses the string value of the source it contains.
*/
public class ReverseStringFunction extends SingleStringFunction {
public final static String NAME = AnalyticsParams.REVERSE;
public ReverseStringFunction(ValueSource source) {
super(source);
}
protected String name() {
return NAME;
}
protected CharSequence func(int doc, FunctionValues vals) {
String val = vals.strVal(doc);
return val != null ? StringUtils.reverse(val) : null;
}
}

View File

@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
/**
 * Abstract {@link ValueSource} implementation which wraps one ValueSource
 * and applies an extensible double function to its values.
 */
public abstract class SingleDoubleFunction extends ValueSource {
  protected final ValueSource source;

  public SingleDoubleFunction(ValueSource source) {
    this.source = source;
  }

  @Override
  public String description() {
    return name()+"("+source.description()+")";
  }

  /** @return the function name used by {@link #description()} and toString(doc). */
  abstract String name();

  /** Applies the function to the wrapped source's value for {@code doc}. */
  abstract double func(int doc, FunctionValues vals);

  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues vals = source.getValues(context, readerContext);
    return new DoubleDocValues(this) {
      @Override
      public double doubleVal(int doc) {
        return func(doc, vals);
      }

      @Override
      public boolean exists(int doc) {
        return vals.exists(doc);
      }

      @Override
      public String toString(int doc) {
        return name() + '(' + vals.toString(doc) + ')';
      }
    };
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // Null check required by the Object.equals() contract; comparing classes
    // directly would otherwise throw a NullPointerException for a null argument.
    if (o == null || getClass() != o.getClass()) return false;
    SingleDoubleFunction other = (SingleDoubleFunction) o;
    return this.source.equals(other.source);
  }

  @Override
  public int hashCode() {
    return source.hashCode()+name().hashCode();
  }
}

View File

@ -0,0 +1,120 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.StrDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueStr;
/**
 * Abstract {@link ValueSource} implementation which wraps one ValueSource
 * and applies an extensible string function to its values.
 */
public abstract class SingleStringFunction extends ValueSource {
  protected final ValueSource source;

  public SingleStringFunction(ValueSource source) {
    this.source = source;
  }

  @Override
  public String description() {
    return name()+"("+source.description()+")";
  }

  /** @return the function name used by {@link #description()} and toString(doc). */
  abstract String name();

  /** Applies the function to the wrapped source's value for {@code doc}; may return null. */
  abstract CharSequence func(int doc, FunctionValues vals);

  @Override
  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final FunctionValues vals = source.getValues(context, readerContext);
    return new StrDocValues(this) {
      @Override
      public String strVal(int doc) {
        CharSequence cs = func(doc, vals);
        return cs != null ? cs.toString() : null;
      }

      @Override
      public boolean bytesVal(int doc, BytesRef bytes) {
        CharSequence cs = func(doc, vals);
        if (cs != null) {
          // Reuse the already-computed value rather than evaluating func() a second time.
          bytes.copyChars(cs);
          return true;
        } else {
          bytes.bytes = BytesRef.EMPTY_BYTES;
          bytes.length = 0;
          bytes.offset = 0;
          return false;
        }
      }

      @Override
      public Object objectVal(int doc) {
        return strVal(doc);
      }

      @Override
      public boolean exists(int doc) {
        return vals.exists(doc);
      }

      @Override
      public String toString(int doc) {
        return name() + '(' + strVal(doc) + ')';
      }

      @Override
      public ValueFiller getValueFiller() {
        return new ValueFiller() {
          private final MutableValueStr mval = new MutableValueStr();

          @Override
          public MutableValue getValue() {
            return mval;
          }

          @Override
          public void fillValue(int doc) {
            mval.exists = bytesVal(doc, mval.value);
          }
        };
      }
    };
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // Null check required by the Object.equals() contract; comparing classes
    // directly would otherwise throw a NullPointerException for a null argument.
    if (o == null || getClass() != o.getClass()) return false;
    SingleStringFunction other = (SingleStringFunction) o;
    return this.source.equals(other.source);
  }

  @Override
  public int hashCode() {
    return source.hashCode()+name().hashCode();
  }
}

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
ValueSource functions and sources used by the analytics component.
</p>
</body>
</html>

View File

@ -17,9 +17,42 @@
package org.apache.solr.core;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
@ -40,6 +73,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.handler.SnapPuller;
import org.apache.solr.handler.admin.ShowFileRequestHandler;
import org.apache.solr.handler.component.AnalyticsComponent;
import org.apache.solr.handler.component.DebugComponent;
import org.apache.solr.handler.component.FacetComponent;
import org.apache.solr.handler.component.HighlightComponent;
@ -93,39 +127,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
/**
*
@ -1197,6 +1198,7 @@ public final class SolrCore implements SolrInfoMBean {
addIfNotPresent(components,StatsComponent.COMPONENT_NAME,StatsComponent.class);
addIfNotPresent(components,DebugComponent.COMPONENT_NAME,DebugComponent.class);
addIfNotPresent(components,RealTimeGetComponent.COMPONENT_NAME,RealTimeGetComponent.class);
addIfNotPresent(components,AnalyticsComponent.COMPONENT_NAME,AnalyticsComponent.class);
return components;
}
private <T> void addIfNotPresent(Map<String ,T> registry, String name, Class<? extends T> c){

View File

@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import java.io.IOException;
import org.apache.solr.analytics.plugin.AnalyticsStatisticsCollector;
import org.apache.solr.analytics.request.AnalyticsStats;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
/**
 * Search component that computes analytics (statistics, facets, expressions) over
 * the result set of a query when the analytics request parameter is enabled.
 */
public class AnalyticsComponent extends SearchComponent {
  public static final String COMPONENT_NAME = "analytics";

  // Collects timing/usage statistics about analytics requests, exposed via getStatistics().
  // (Original line ended in a stray double semicolon — an empty statement; removed.)
  private final AnalyticsStatisticsCollector analyticsCollector = new AnalyticsStatisticsCollector();

  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    // Analytics runs over the full result DocSet, so request it up front.
    if (rb.req.getParams().getBool(AnalyticsParams.ANALYTICS, false)) {
      rb.setNeedDocSet(true);
    }
  }

  @Override
  public void process(ResponseBuilder rb) throws IOException {
    if (rb.req.getParams().getBool(AnalyticsParams.ANALYTICS, false)) {
      SolrParams params = rb.req.getParams();
      AnalyticsStats s = new AnalyticsStats(rb.req, rb.getResults().docSet, params, analyticsCollector);
      rb.rsp.add("stats", s.execute());
    }
  }

  // TODO: distributed (sharded) support. distributedProcess/modifyRequest/
  // handleResponses/finishStage are not yet implemented; the component currently
  // only computes analytics over the local index.

  @Override
  public String getName() {
    return COMPONENT_NAME;
  }

  @Override
  public String getDescription() {
    return "Perform analytics";
  }

  @Override
  public String getSource() {
    return "$URL$";
  }

  @Override
  public String getVersion() {
    return getClass().getPackage().getSpecificationVersion();
  }

  @Override
  public NamedList getStatistics() {
    return analyticsCollector.getStatistics();
  }
}

View File

@ -17,27 +17,27 @@
package org.apache.solr.handler.component;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.util.RTimer;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.RTimer;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
*
@ -69,6 +69,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
names.add( HighlightComponent.COMPONENT_NAME );
names.add( StatsComponent.COMPONENT_NAME );
names.add( DebugComponent.COMPONENT_NAME );
names.add( AnalyticsComponent.COMPONENT_NAME );
return names;
}

View File

@ -0,0 +1,70 @@
o.ar.s.sum=sum(int_id)
o.ar.s.unique=unique(long_ld)
o.ar.s.su=add(sum(int_id),unique(long_ld))
o.ar.s.mean=mean(int_id)
o.ar.s.count=count(long_ld)
o.ar.s.median=median(int_id)
o.ar.s.mcm=add(mean(int_id),count(long_ld),median(int_id))
o.mr.s.sum=sum(int_id)
o.mr.s.unique=unique(long_ld)
o.mr.s.su=mult(sum(int_id),unique(long_ld))
o.mr.s.mean=mean(int_id)
o.mr.s.count=count(long_ld)
o.mr.s.median=median(int_id)
o.mr.s.mcm=mult(mean(int_id),count(long_ld),median(int_id))
o.dr.s.sum=sum(int_id)
o.dr.s.unique=unique(long_ld)
o.dr.s.su=div(sum(int_id),unique(long_ld))
o.dr.s.mean=mean(int_id)
o.dr.s.count=count(long_ld)
o.dr.s.mc=div(mean(int_id),count(long_ld))
o.pr.s.sum=sum(int_id)
o.pr.s.unique=unique(long_ld)
o.pr.s.su=pow(sum(int_id),unique(long_ld))
o.pr.s.mean=mean(int_id)
o.pr.s.count=count(long_ld)
o.pr.s.mc=pow(mean(int_id),count(long_ld))
o.nr.s.sum=sum(int_id)
o.nr.s.s=neg(sum(int_id))
o.nr.s.count=count(long_ld)
o.nr.s.c=neg(count(long_ld))
o.avr.s.sum=sum(int_id)
o.avr.s.s=abs(neg(sum(int_id)))
o.avr.s.count=count(long_ld)
o.avr.s.c=abs(neg(count(long_ld)))
o.cnr.s.c8=const_num(8)
o.cnr.s.c10=const_num(10)
o.dmr.s.median=median(date_dtd)
o.dmr.s.cme=const_str(+2YEARS)
o.dmr.s.dmme=date_math(median(date_dtd),const_str(+2YEARS))
o.dmr.s.max=max(date_dtd)
o.dmr.s.cma=const_str(+2MONTHS)
o.dmr.s.dmma=date_math(max(date_dtd),const_str(+2MONTHS))
o.cdr.s.cd1=const_date(1800-12-31T23:59:59Z)
o.cdr.s.cs1=const_str(1800-12-31T23:59:59Z)
o.cdr.s.cd2=const_date(1804-06-30T23:59:59Z)
o.cdr.s.cs2=const_str(1804-06-30T23:59:59Z)
o.csr.s.cs1=const_str(this is the first)
o.csr.s.cs2=const_str(this is the second)
o.csr.s.cs3=const_str(this is the third)
o.cr.s.csmin=const_str(this is the first)
o.cr.s.min=min(string_sd)
o.cr.s.ccmin=concat(const_str(this is the first),min(string_sd))
o.cr.s.csmax=const_str(this is the second)
o.cr.s.max=max(string_sd)
o.cr.s.ccmax=concat(const_str(this is the second),max(string_sd))
o.rr.s.min=min(string_sd)
o.rr.s.rmin=rev(min(string_sd))
o.rr.s.max=max(string_sd)
o.rr.s.rmax=rev(max(string_sd))

View File

@ -0,0 +1,66 @@
o.sr.s.mean=mean(int_id)
o.sr.s.median=median(int_id)
o.sr.s.count=count(int_id)
o.sr.s.percentile_20=percentile(20,int_id)
o.sr.ff=long_ld
o.sr.ff.long_ld.ss=mean
o.sr.ff.long_ld.sd=asc
o.sr.ff=float_fd
o.sr.ff.float_fd.ss=median
o.sr.ff.float_fd.sd=desc
o.sr.ff=double_dd
o.sr.ff.double_dd.ss=count
o.sr.ff.double_dd.sd=asc
o.sr.ff=string_sd
o.sr.ff.string_sd.ss=percentile_20
o.sr.ff.string_sd.sd=desc
o.lr.s.mean=mean(int_id)
o.lr.s.median=median(int_id)
o.lr.s.count=count(int_id)
o.lr.s.percentile_20=percentile(20,int_id)
o.lr.ff=long_ld
o.lr.ff.long_ld.ss=mean
o.lr.ff.long_ld.sd=asc
o.lr.ff.long_ld.limit=5
o.lr.ff=float_fd
o.lr.ff.float_fd.ss=median
o.lr.ff.float_fd.sd=desc
o.lr.ff.float_fd.limit=3
o.lr.ff=double_dd
o.lr.ff.double_dd.ss=count
o.lr.ff.double_dd.sd=asc
o.lr.ff.double_dd.limit=7
o.lr.ff=string_sd
o.lr.ff.string_sd.ss=percentile_20
o.lr.ff.string_sd.sd=desc
o.lr.ff.string_sd.limit=1
o.offAll.s.mean=mean(int_id)
o.offAll.ff=long_ld
o.offAll.ff.long_ld.ss=mean
o.offAll.ff.long_ld.sd=asc
o.offAll.ff.long_ld.limit=7
o.off0.s.mean=mean(int_id)
o.off0.ff=long_ld
o.off0.ff.long_ld.ss=mean
o.off0.ff.long_ld.sd=asc
o.off0.ff.long_ld.limit=2
o.off0.ff.long_ld.offset=0
o.off1.s.mean=mean(int_id)
o.off1.ff=long_ld
o.off1.ff.long_ld.ss=mean
o.off1.ff.long_ld.sd=asc
o.off1.ff.long_ld.limit=2
o.off1.ff.long_ld.offset=2
o.off2.s.mean=mean(int_id)
o.off2.ff=long_ld
o.off2.ff.long_ld.ss=mean
o.off2.ff.long_ld.sd=asc
o.off2.ff.long_ld.limit=3
o.off2.ff.long_ld.offset=4

View File

@ -0,0 +1,132 @@
o.sum.s.int=sum(int_id)
o.sum.s.long=sum(long_ld)
o.sum.s.float=sum(float_fd)
o.sum.s.double=sum(double_dd)
o.sum.ff=string_sd
o.sum.ff=date_dtd
o.mean.s.int=mean(int_id)
o.mean.s.long=mean(long_ld)
o.mean.s.float=mean(float_fd)
o.mean.s.double=mean(double_dd)
o.mean.ff=string_sd
o.mean.ff=date_dtd
o.sumOfSquares.s.int=sumofsquares(int_id)
o.sumOfSquares.s.long=sumofsquares(long_ld)
o.sumOfSquares.s.float=sumofsquares(float_fd)
o.sumOfSquares.s.double=sumofsquares(double_dd)
o.sumOfSquares.ff=string_sd
o.sumOfSquares.ff=date_dtd
o.stddev.s.int=stddev(int_id)
o.stddev.s.long=stddev(long_ld)
o.stddev.s.float=stddev(float_fd)
o.stddev.s.double=stddev(double_dd)
o.stddev.ff=string_sd
o.stddev.ff=date_dtd
o.median.s.int=median(int_id)
o.median.s.long=median(long_ld)
o.median.s.float=median(float_fd)
o.median.s.double=median(double_dd)
o.median.ff=string_sd
o.median.ff=date_dtd
o.percentile_20n.s.int=percentile(20,int_id)
o.percentile_20n.s.long=percentile(20,long_ld)
o.percentile_20n.s.float=percentile(20,float_fd)
o.percentile_20n.s.double=percentile(20,double_dd)
o.percentile_20n.ff=string_sd
o.percentile_20n.ff=date_dtd
o.percentile_20.s.str=percentile(20,string_sd)
o.percentile_20.s.date=percentile(20,date_dtd)
o.percentile_20.ff=int_id
o.percentile_20.ff=long_ld
o.percentile_60n.s.int=percentile(60,int_id)
o.percentile_60n.s.long=percentile(60,long_ld)
o.percentile_60n.s.float=percentile(60,float_fd)
o.percentile_60n.s.double=percentile(60,double_dd)
o.percentile_60n.ff=string_sd
o.percentile_60n.ff=date_dtd
o.percentile_60.s.str=percentile(60,string_sd)
o.percentile_60.s.date=percentile(60,date_dtd)
o.percentile_60.ff=int_id
o.percentile_60.ff=long_ld
o.minn.s.int=min(int_id)
o.minn.s.long=min(long_ld)
o.minn.s.float=min(float_fd)
o.minn.s.double=min(double_dd)
o.minn.ff=string_sd
o.minn.ff=date_dtd
o.min.s.str=min(string_sd)
o.min.s.date=min(date_dtd)
o.min.ff=int_id
o.min.ff=long_ld
o.maxn.s.int=max(int_id)
o.maxn.s.long=max(long_ld)
o.maxn.s.float=max(float_fd)
o.maxn.s.double=max(double_dd)
o.maxn.ff=string_sd
o.maxn.ff=date_dtd
o.max.s.str=max(string_sd)
o.max.s.date=max(date_dtd)
o.max.ff=int_id
o.max.ff=long_ld
o.countn.s.int=count(int_id)
o.countn.s.long=count(long_ld)
o.countn.s.float=count(float_fd)
o.countn.s.double=count(double_dd)
o.countn.ff=string_sd
o.countn.ff=date_dtd
o.count.s.str=count(string_sd)
o.count.s.date=count(date_dtd)
o.count.ff=int_id
o.count.ff=long_ld
o.uniquen.s.int=unique(int_id)
o.uniquen.s.long=unique(long_ld)
o.uniquen.s.float=unique(float_fd)
o.uniquen.s.double=unique(double_dd)
o.uniquen.ff=string_sd
o.uniquen.ff=date_dtd
o.unique.s.str=unique(string_sd)
o.unique.s.date=unique(date_dtd)
o.unique.ff=int_id
o.unique.ff=long_ld
o.missingn.s.int=missing(int_id)
o.missingn.s.long=missing(long_ld)
o.missingn.s.float=missing(float_fd)
o.missingn.s.double=missing(double_dd)
o.missingn.ff=string_sd
o.missingn.ff=date_dtd
o.missing.s.str=missing(string_sd)
o.missing.s.date=missing(date_dtd)
o.missing.ff=int_id
o.missing.ff=long_ld
o.multivalued.s.mean=mean(int_id)
o.multivalued.ff=long_ldm
o.multivalued.ff=string_sdm
o.multivalued.ff=date_dtdm
o.missingf.s.mean=mean(int_id)
o.missingf.ff=date_dtd
o.missingf.ff.date_dtd.dim=true
o.missingf.ff=string_sd
o.missingf.ff.string_sd.dim=true
o.missingf.ff.string_sd.sm=true
o.missingf.ff=date_dtdm
o.missingf.ff.date_dtdm.sm=true

View File

@ -0,0 +1,62 @@
o.ar.s.sum=sum(add(int_id,float_fd))
o.ar.s.sumc=sum(add_if_dd)
o.ar.s.mean=mean(add(long_ld,double_dd,float_fd))
o.ar.s.meanc=mean(add_ldf_dd)
o.mr.s.sum=sum(mult(int_id,float_fd))
o.mr.s.sumc=sum(mult_if_dd)
o.mr.s.mean=mean(mult(long_ld,double_dd,float_fd))
o.mr.s.meanc=mean(mult_ldf_dd)
o.dr.s.sum=sum(div(int_id,float_fd))
o.dr.s.sumc=sum(div_if_dd)
o.dr.s.mean=mean(div(long_ld,double_dd))
o.dr.s.meanc=mean(div_ld_dd)
o.pr.s.sum=sum(pow(int_id,float_fd))
o.pr.s.sumc=sum(pow_if_dd)
o.pr.s.mean=mean(pow(long_ld,double_dd))
o.pr.s.meanc=mean(pow_ld_dd)
o.nr.s.sum=sum(neg(int_id))
o.nr.s.sumc=sum(neg_i_dd)
o.nr.s.mean=mean(neg(long_ld))
o.nr.s.meanc=mean(neg_l_dd)
o.avr.s.sum=sum(abs(neg(int_id)))
o.avr.s.sumc=sum(int_id)
o.avr.s.mean=mean(abs(neg(long_ld)))
o.avr.s.meanc=mean(long_ld)
o.cnr.s.sum=sum(const_num(8))
o.cnr.s.sumc=sum(const_8_dd)
o.cnr.s.mean=mean(const_num(10))
o.cnr.s.meanc=mean(const_10_dd)
o.dmr.s.median=median(date_math(date_dtd,const_str(+2YEARS)))
o.dmr.s.medianc=median(dm_2y_dtd)
o.dmr.s.max=max(date_math(date_dtd,const_str(+2MONTHS)))
o.dmr.s.maxc=max(dm_2m_dtd)
o.cdr.s.median=median(const_date(1800-06-30T23:59:59Z))
o.cdr.s.medianc=median(const_00_dtd)
o.cdr.s.max=max(const_date(1804-06-30T23:59:59Z))
o.cdr.s.maxc=max(const_04_dtd)
o.csr.s.min=min(const_str(this is the first))
o.csr.s.minc=min(const_first_sd)
o.csr.s.max=max(const_str(this is the second))
o.csr.s.maxc=max(const_second_sd)
o.cr.s.min=min(concat(const_str(this is the first),string_sd))
o.cr.s.minc=min(concat_first_sd)
o.cr.s.max=max(concat(const_str(this is the second),string_sd))
o.cr.s.maxc=max(concat_second_sd)
o.rr.s.min=min(rev(string_sd))
o.rr.s.minc=min(rev_sd)
o.rr.s.max=max(rev(string_sd))
o.rr.s.maxc=max(rev_sd)
o.ms.s.min=min(miss_dd)
o.ms.s.max=max(miss_dd)

View File

@ -0,0 +1,74 @@
o.sr.s.int_id=sum(int_id)
o.sr.s.long_ld=sum(long_ld)
o.sr.s.float_fd=sum(float_fd)
o.sr.s.double_dd=sum(double_dd)
o.sosr.s.int_id=sumofsquares(int_id)
o.sosr.s.long_ld=sumofsquares(long_ld)
o.sosr.s.float_fd=sumofsquares(float_fd)
o.sosr.s.double_dd=sumofsquares(double_dd)
o.mr.s.int_id=mean(int_id)
o.mr.s.long_ld=mean(long_ld)
o.mr.s.float_fd=mean(float_fd)
o.mr.s.double_dd=mean(double_dd)
o.str.s.int_id=stddev(int_id)
o.str.s.long_ld=stddev(long_ld)
o.str.s.float_fd=stddev(float_fd)
o.str.s.double_dd=stddev(double_dd)
o.medr.s.int_id=median(int_id)
o.medr.s.long_ld=median(long_ld)
o.medr.s.float_fd=median(float_fd)
o.medr.s.double_dd=median(double_dd)
o.medr.s.date_dtd=median(date_dtd)
o.p2r.s.int_id=percentile(20,int_id)
o.p2r.s.long_ld=percentile(20,long_ld)
o.p2r.s.float_fd=percentile(20,float_fd)
o.p2r.s.double_dd=percentile(20,double_dd)
o.p2r.s.date_dtd=percentile(20,date_dtd)
o.p2r.s.string_sd=percentile(20,string_sd)
o.p6r.s.int_id=percentile(60,int_id)
o.p6r.s.long_ld=percentile(60,long_ld)
o.p6r.s.float_fd=percentile(60,float_fd)
o.p6r.s.double_dd=percentile(60,double_dd)
o.p6r.s.date_dtd=percentile(60,date_dtd)
o.p6r.s.string_sd=percentile(60,string_sd)
o.mir.s.int_id=min(int_id)
o.mir.s.long_ld=min(long_ld)
o.mir.s.float_fd=min(float_fd)
o.mir.s.double_dd=min(double_dd)
o.mir.s.date_dtd=min(date_dtd)
o.mir.s.string_sd=min(string_sd)
o.mar.s.int_id=max(int_id)
o.mar.s.long_ld=max(long_ld)
o.mar.s.float_fd=max(float_fd)
o.mar.s.double_dd=max(double_dd)
o.mar.s.date_dtd=max(date_dtd)
o.mar.s.string_sd=max(string_sd)
o.cr.s.int_id=count(int_id)
o.cr.s.long_ld=count(long_ld)
o.cr.s.float_fd=count(float_fd)
o.cr.s.double_dd=count(double_dd)
o.cr.s.date_dtd=count(date_dtd)
o.cr.s.string_sd=count(string_sd)
o.ur.s.int_id=unique(int_id)
o.ur.s.long_ld=unique(long_ld)
o.ur.s.float_fd=unique(float_fd)
o.ur.s.double_dd=unique(double_dd)
o.ur.s.date_dtd=unique(date_dtd)
o.ur.s.string_sd=unique(string_sd)
o.misr.s.int_id=missing(int_id)
o.misr.s.long_ld=missing(long_ld)
o.misr.s.float_fd=missing(float_fd)
o.misr.s.double_dd=missing(double_dd)
o.misr.s.date_dtd=missing(date_dtd)
o.misr.s.string_sd=missing(string_sd)

View File

@ -0,0 +1,45 @@
o.ir.s.sum=sum(int_id)
o.ir.s.mean=mean(int_id)
o.ir.s.median=median(int_id)
o.ir.s.percentile_8=percentile(8,int_id)
o.ir.ff=string_sd
o.ir.ff.string_sd.h=true
o.ir.qf=float1
o.ir.qf.float1.q=float_fd:[* TO 50]
o.ir.qf=float2
o.ir.qf.float2.q=float_fd:[* TO 30]
o.pr.s.sum=sum(int_id)
o.pr.s.mean=mean(int_id)
o.pr.s.median=median(int_id)
o.pr.s.q1=concat(const_str(float_fd:[), percentile(10,int_id), const_str( TO ), median(int_id), const_str(]))
o.pr.hs.q2=concat(const_str(float_fd:[), percentile(30,int_id), const_str( TO ), median(int_id), const_str(]))
o.pr.hs.q3=concat(const_str(float_fd:[), percentile(40,int_id), const_str( TO ), median(int_id), const_str(]))
o.pr.s.percentile_8=percentile(8,int_id)
o.pr.ff=string_sd
o.pr.ff.string_sd.h=true
o.pr.qf=float3
o.pr.qf.float3.q=result(q1)
o.pr.qf.float3.q=result(q2)
o.pr.qf.float3.q=result(q3)
o.pr.qf.float3.q=result(q1,string_sd,abc2)
o.pr.qf=float4
o.pr.qf.float4.d=float3
o.pr.qf.float4.q=qresult(q1,float3,result(q1))
o.lr.s.sum=sum(long_ld)
o.lr.s.mean=mean(long_ld)
o.lr.s.median=median(long_ld)
o.lr.s.percentile_8=percentile(8,long_ld)
o.lr.qf=string
o.lr.qf.string.q=string_sd:abc1
o.lr.qf.string.q=string_sd:abc2
o.fr.s.sum=sum(float_fd)
o.fr.s.mean=mean(float_fd)
o.fr.s.median=median(float_fd)
o.fr.s.percentile_8=percentile(8,float_fd)
o.fr.qf=lad
o.fr.qf.lad.q=long_ld:[20 TO *]
o.fr.qf.lad.q=long_ld:[30 TO *]
o.fr.qf.lad.q=double_dd:[* TO 50]

View File

@ -0,0 +1,170 @@
o.ri.s.sum=sum(int_id)
o.ri.s.mean=mean(int_id)
o.ri.s.median=median(int_id)
o.ri.s.count=count(int_id)
o.ri.s.sumOfSquares=sumofsquares(int_id)
o.ri.rf=long_ld
o.ri.rf.long_ld.st=5
o.ri.rf.long_ld.e=30
o.ri.rf.long_ld.g=5
o.ri.rf.long_ld.ib=lower
o.ri.rf.long_ld.or=all
o.ri.rf=double_dd
o.ri.rf.double_dd.st=3
o.ri.rf.double_dd.e=39
o.ri.rf.double_dd.g=7
o.ri.rf.double_dd.ib=upper
o.ri.rf.double_dd.ib=outer
o.ri.rf.double_dd.or=all
o.ri.rf=date_dtd
o.ri.rf.date_dtd.st=1007-01-01T23:59:59Z
o.ri.rf.date_dtd.e=1044-01-01T23:59:59Z
o.ri.rf.date_dtd.g=+7YEARS
o.ri.rf.date_dtd.ib=lower
o.ri.rf.date_dtd.ib=edge
o.ri.rf.date_dtd.ib=outer
o.ri.rf.date_dtd.or=all
o.rf.s.sum=sum(float_fd)
o.rf.s.mean=mean(float_fd)
o.rf.s.median=median(float_fd)
o.rf.s.count=count(float_fd)
o.rf.s.sumOfSquares=sumofsquares(float_fd)
o.rf.rf=long_ld
o.rf.rf.long_ld.st=0
o.rf.rf.long_ld.e=29
o.rf.rf.long_ld.g=4
o.rf.rf.long_ld.ib=all
o.rf.rf.long_ld.or=all
o.rf.rf=double_dd
o.rf.rf.double_dd.st=4
o.rf.rf.double_dd.e=47
o.rf.rf.double_dd.g=11
o.rf.rf.double_dd.ib=edge
o.rf.rf.double_dd.or=all
o.rf.rf=date_dtd
o.rf.rf.date_dtd.st=1004-01-01T23:59:59Z
o.rf.rf.date_dtd.e=1046-01-01T23:59:59Z
o.rf.rf.date_dtd.g=+5YEARS
o.rf.rf.date_dtd.ib=upper
o.rf.rf.date_dtd.ib=edge
o.rf.rf.date_dtd.or=all
o.hi.s.sum=sum(int_id)
o.hi.s.mean=mean(int_id)
o.hi.s.median=median(int_id)
o.hi.s.count=count(int_id)
o.hi.s.sumOfSquares=sumofsquares(int_id)
o.hi.rf=long_ld
o.hi.rf.long_ld.st=5
o.hi.rf.long_ld.e=30
o.hi.rf.long_ld.g=5
o.hi.rf.long_ld.he=true
o.hi.rf.long_ld.ib=lower
o.hi.rf.long_ld.or=all
o.hi.rf=double_dd
o.hi.rf.double_dd.st=3
o.hi.rf.double_dd.e=39
o.hi.rf.double_dd.g=7
o.hi.rf.double_dd.he=true
o.hi.rf.double_dd.ib=upper
o.hi.rf.double_dd.ib=outer
o.hi.rf.double_dd.or=all
o.hi.rf=date_dtd
o.hi.rf.date_dtd.st=1007-01-01T23:59:59Z
o.hi.rf.date_dtd.e=1044-01-01T23:59:59Z
o.hi.rf.date_dtd.g=+7YEARS
o.hi.rf.date_dtd.he=true
o.hi.rf.date_dtd.ib=lower
o.hi.rf.date_dtd.ib=edge
o.hi.rf.date_dtd.ib=outer
o.hi.rf.date_dtd.or=all
o.hf.s.sum=sum(float_fd)
o.hf.s.mean=mean(float_fd)
o.hf.s.median=median(float_fd)
o.hf.s.count=count(float_fd)
o.hf.s.sumOfSquares=sumofsquares(float_fd)
o.hf.rf=long_ld
o.hf.rf.long_ld.st=0
o.hf.rf.long_ld.e=29
o.hf.rf.long_ld.g=4
o.hf.rf.long_ld.he=true
o.hf.rf.long_ld.ib=all
o.hf.rf.long_ld.or=all
o.hf.rf=double_dd
o.hf.rf.double_dd.st=4
o.hf.rf.double_dd.e=47
o.hf.rf.double_dd.g=11
o.hf.rf.double_dd.he=true
o.hf.rf.double_dd.ib=edge
o.hf.rf.double_dd.or=all
o.hf.rf=date_dtd
o.hf.rf.date_dtd.st=1004-01-01T23:59:59Z
o.hf.rf.date_dtd.e=1046-01-01T23:59:59Z
o.hf.rf.date_dtd.g=+5YEARS
o.hf.rf.date_dtd.he=true
o.hf.rf.date_dtd.ib=upper
o.hf.rf.date_dtd.ib=edge
o.hf.rf.date_dtd.or=all
o.mi.s.sum=sum(int_id)
o.mi.s.mean=mean(int_id)
o.mi.s.median=median(int_id)
o.mi.s.count=count(int_id)
o.mi.s.sumOfSquares=sumofsquares(int_id)
o.mi.rf=long_ld
o.mi.rf.long_ld.st=5
o.mi.rf.long_ld.e=30
o.mi.rf.long_ld.g=4,2,6,3
o.mi.rf.long_ld.ib=lower
o.mi.rf.long_ld.or=all
o.mi.rf=double_dd
o.mi.rf.double_dd.st=3
o.mi.rf.double_dd.e=39
o.mi.rf.double_dd.g=3,1,7
o.mi.rf.double_dd.ib=upper
o.mi.rf.double_dd.ib=outer
o.mi.rf.double_dd.or=all
o.mi.rf=date_dtd
o.mi.rf.date_dtd.st=1007-01-01T23:59:59Z
o.mi.rf.date_dtd.e=1044-01-01T23:59:59Z
o.mi.rf.date_dtd.g=+2YEARS,+7YEARS
o.mi.rf.date_dtd.ib=lower
o.mi.rf.date_dtd.ib=edge
o.mi.rf.date_dtd.ib=outer
o.mi.rf.date_dtd.or=all
o.mf.s.sum=sum(float_fd)
o.mf.s.mean=mean(float_fd)
o.mf.s.median=median(float_fd)
o.mf.s.count=count(float_fd)
o.mf.s.sumOfSquares=sumofsquares(float_fd)
o.mf.rf=long_ld
o.mf.rf.long_ld.st=0
o.mf.rf.long_ld.e=29
o.mf.rf.long_ld.g=1,4
o.mf.rf.long_ld.ib=all
o.mf.rf.long_ld.or=all
o.mf.rf=double_dd
o.mf.rf.double_dd.st=4
o.mf.rf.double_dd.e=47
o.mf.rf.double_dd.g=2,3,11
o.mf.rf.double_dd.ib=edge
o.mf.rf.double_dd.or=all
o.mf.rf=date_dtd
o.mf.rf.date_dtd.st=1004-01-01T23:59:59Z
o.mf.rf.date_dtd.e=1046-01-01T23:59:59Z
o.mf.rf.date_dtd.g=+4YEARS,+5YEARS
o.mf.rf.date_dtd.ib=upper
o.mf.rf.date_dtd.ib=edge
o.mf.rf.date_dtd.or=all
o.pf.s.mean=mean(float_fd)
o.pf.hs.min=min(date_dtd)
o.pf.hs.max=max(date_dtd)
o.pf.hs.gap=const_str(+5YEARS)
o.pf.rf=date_dtd
o.pf.rf.date_dtd.st=result(min)
o.pf.rf.date_dtd.e=result(max)
o.pf.rf.date_dtd.g=result(gap)

View File

@ -0,0 +1,285 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>Add Request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>unique(long(long_ld))</expression>
<name>unique</name>
</statistic>
<statistic>
<expression>add(sum(int(int_id)),unique(long(long_ld)))</expression>
<name>add sum and unique</name>
</statistic>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>add(mean(int(int_id)),count(long(long_ld)),median(int(int_id)))</expression>
<name>add mean and count and median</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Multiply Request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>unique(long(long_ld))</expression>
<name>unique</name>
</statistic>
<statistic>
<expression>mult(sum(int(int_id)),unique(long(long_ld)))</expression>
<name>multiply sum and unique</name>
</statistic>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>mult(mean(int(int_id)),count(long(long_ld)),median(int(int_id)))</expression>
<name>multiply mean and count and median</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Divide Request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>unique(long(long_ld))</expression>
<name>unique</name>
</statistic>
<statistic>
<expression>div(sum(int(int_id)),unique(long(long_ld)))</expression>
<name>divide sum by unique</name>
</statistic>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>div(mean(int(int_id)),count(long(long_ld)))</expression>
<name>divide mean by count</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Power Request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>unique(long(long_ld))</expression>
<name>unique</name>
</statistic>
<statistic>
<expression>pow(sum(int(int_id)),unique(long(long_ld)))</expression>
<name>power sum by unique</name>
</statistic>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>pow(mean(int(int_id)),count(long(long_ld)))</expression>
<name>power mean by count</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Negate Request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>neg(sum(int(int_id)))</expression>
<name>negate of sum</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>neg(count(long(long_ld)))</expression>
<name>negate of count</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Const Num Request</name>
<statistic>
<expression>const_num(8)</expression>
<name>constant 8</name>
</statistic>
<statistic>
<expression>const_num(10)</expression>
<name>constant 10</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Date Math Request</name>
<statistic>
<expression>median(date(date_dtd))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>const_str(+2YEARS)</expression>
<name>constant str median</name>
</statistic>
<statistic>
<expression>date_math(median(date(date_dtd)),const_str(+2YEARS))</expression>
<name>date math median</name>
</statistic>
<statistic>
<expression>max(date(date_dtd))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>const_str(+2MONTHS)</expression>
<name>constant str max</name>
</statistic>
<statistic>
<expression>date_math(max(date(date_dtd)),const_str(+2MONTHS))</expression>
<name>date math max</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Constant Date Request</name>
<statistic>
<expression>const_str(1800-12-31T23:59:59Z)</expression>
<name>const str 1</name>
</statistic>
<statistic>
<expression>const_date(1800-12-31T23:59:59Z)</expression>
<name>const date 1</name>
</statistic>
<statistic>
<expression>const_str(1804-06-30T23:59:59Z)</expression>
<name>const str 2</name>
</statistic>
<statistic>
<expression>const_date(1804-06-30T23:59:59Z)</expression>
<name>const date 2</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Constant String Request</name>
<statistic>
<expression>const_str(this is the first)</expression>
<name>const str 1</name>
</statistic>
<statistic>
<expression>const_str(this is the second)</expression>
<name>const str 2</name>
</statistic>
<statistic>
<expression>const_str(this is the third)</expression>
<name>const str 3</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Concatenate Request</name>
<statistic>
<expression>const_str(this is the first)</expression>
<name>const str min</name>
</statistic>
<statistic>
<expression>min(str(string_sd))</expression>
<name>min</name>
</statistic>
<statistic>
<expression>concat(const_str(this is the first),min(str(string_sd)))</expression>
<name>concat const and min</name>
</statistic>
<statistic>
<expression>const_str(this is the second)</expression>
<name>const str max</name>
</statistic>
<statistic>
<expression>max(str(string_sd))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>concat(const_str(this is the second),max(str(string_sd)))</expression>
<name>concat const and max</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Reverse Request</name>
<statistic>
<expression>min(str(string_sd))</expression>
<name>min</name>
</statistic>
<statistic>
<expression>rev(min(str(string_sd)))</expression>
<name>reverse min</name>
</statistic>
<statistic>
<expression>max(str(string_sd))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>rev(max(str(string_sd)))</expression>
<name>reverse max</name>
</statistic>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,101 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>sort request</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(int(int_id))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>perc(20,int(int_id))</expression>
<name>perc_20</name>
</statistic>
<fieldFacet>
<field>long_ld</field>
<sortSpecification>
<statName>mean</statName>
<direction>asc</direction>
</sortSpecification>
</fieldFacet>
<fieldFacet>
<field>float_fd</field>
<sortSpecification>
<statName>median</statName>
<direction>desc</direction>
</sortSpecification>
</fieldFacet>
<fieldFacet>
<field>double_dd</field>
<sortSpecification>
<statName>count</statName>
<direction>asc</direction>
</sortSpecification>
</fieldFacet>
<fieldFacet>
<field>string_sd</field>
<sortSpecification>
<statName>perc_20</statName>
<direction>desc</direction>
</sortSpecification>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>limit request</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(int(int_id))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>perc(20,int(int_id))</expression>
<name>perc_20</name>
</statistic>
<fieldFacet limit="5">
<field>long_ld</field>
<sortSpecification>
<statName>mean</statName>
<direction>asc</direction>
</sortSpecification>
</fieldFacet>
<fieldFacet limit="3">
<field>float_fd</field>
<sortSpecification>
<statName>median</statName>
<direction>desc</direction>
</sortSpecification>
</fieldFacet>
<fieldFacet limit="7">
<field>double_dd</field>
<sortSpecification>
<statName>count</statName>
<direction>asc</direction>
</sortSpecification>
</fieldFacet>
<fieldFacet limit="1">
<field>string_sd</field>
<sortSpecification>
<statName>perc_20</statName>
<direction>desc</direction>
</sortSpecification>
</fieldFacet>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,496 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>sum</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>sum(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>sum(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>sum(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>mean</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>mean(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>mean(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>mean(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>sumOfSquares</name>
<statistic>
<expression>sumofsquares(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>sumofsquares(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>sumofsquares(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>sumofsquares(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>stddev</name>
<statistic>
<expression>stddev(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>stddev(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>stddev(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>stddev(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>median</name>
<statistic>
<expression>median(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>median(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>median(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>median(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>perc_20 numeric</name>
<statistic>
<expression>perc(20,int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>perc(20,long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>perc(20,float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>perc(20,double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>perc_20</name>
<statistic>
<expression>perc(20,str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>perc(20,date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>perc_60 numeric</name>
<statistic>
<expression>perc(60,int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>perc(60,long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>perc(60,float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>perc(60,double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>perc_60</name>
<statistic>
<expression>perc(60,str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>perc(60,date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>min numeric</name>
<statistic>
<expression>min(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>min(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>min(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>min(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>min</name>
<statistic>
<expression>min(str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>min(date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>max numeric</name>
<statistic>
<expression>max(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>max(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>max(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>max(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>max</name>
<statistic>
<expression>max(str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>max(date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>count numeric</name>
<statistic>
<expression>count(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>count(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>count(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>count</name>
<statistic>
<expression>count(str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>count(date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>unique numeric</name>
<statistic>
<expression>unique(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>unique(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>unique(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>unique(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>unique</name>
<statistic>
<expression>unique(str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>unique(date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>missing numeric</name>
<statistic>
<expression>missing(int(int_id))</expression>
<name>int</name>
</statistic>
<statistic>
<expression>missing(long(long_ld))</expression>
<name>long</name>
</statistic>
<statistic>
<expression>missing(float(float_fd))</expression>
<name>float</name>
</statistic>
<statistic>
<expression>missing(double(double_dd))</expression>
<name>double</name>
</statistic>
<fieldFacet>
<field>string_sd</field>
</fieldFacet>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>missing</name>
<statistic>
<expression>missing(str(string_sd))</expression>
<name>str</name>
</statistic>
<statistic>
<expression>missing(date(date_dtd))</expression>
<name>date</name>
</statistic>
<fieldFacet>
<field>int_id</field>
</fieldFacet>
<fieldFacet>
<field>long_ld</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>multivalued</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<fieldFacet>
<field>long_ldm</field>
</fieldFacet>
<fieldFacet>
<field>string_sdm</field>
</fieldFacet>
<fieldFacet>
<field>date_dtdm</field>
</fieldFacet>
</analyticsRequest>
<analyticsRequest>
<name>missing facet</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<fieldFacet>
<field>date_dtd</field>
</fieldFacet>
<fieldFacet showMissing="true">
<field>string_sd</field>
</fieldFacet>
<fieldFacet showMissing="true">
<field>date_dtdm</field>
</fieldFacet>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,246 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>Add Request</name>
<statistic>
<expression>sum(add(int(int_id),float(float_fd)))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>sum(double(add_if_dd))</expression>
<name>sum calced</name>
</statistic>
<statistic>
<expression>mean(add(long(long_ld),double(double_dd),float(float_fd)))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>mean(double(add_ldf_dd))</expression>
<name>mean calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Multiply Request</name>
<statistic>
<expression>sum(mult(int(int_id),float(float_fd)))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>sum(double(mult_if_dd))</expression>
<name>sum calced</name>
</statistic>
<statistic>
<expression>mean(mult(long(long_ld),double(double_dd),float(float_fd)))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>mean(double(mult_ldf_dd))</expression>
<name>mean calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Divide Request</name>
<statistic>
<expression>sum(div(int(int_id),float(float_fd)))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>sum(double(div_if_dd))</expression>
<name>sum calced</name>
</statistic>
<statistic>
<expression>mean(div(long(long_ld),double(double_dd)))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>mean(double(div_ld_dd))</expression>
<name>mean calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Power Request</name>
<statistic>
<expression>sum(pow(int(int_id),float(float_fd)))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>sum(double(pow_if_dd))</expression>
<name>sum calced</name>
</statistic>
<statistic>
<expression>mean(pow(long(long_ld),double(double_dd)))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>mean(double(pow_ld_dd))</expression>
<name>mean calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Negate Request</name>
<statistic>
<expression>sum(neg(int(int_id)))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>sum(double(neg_i_dd))</expression>
<name>sum calced</name>
</statistic>
<statistic>
<expression>mean(neg(long(long_ld)))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>mean(double(neg_l_dd))</expression>
<name>mean calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Const Num Request</name>
<statistic>
<expression>sum(const_num(8))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>sum(double(const_8_dd))</expression>
<name>sum calced</name>
</statistic>
<statistic>
<expression>mean(const_num(10))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>mean(double(const_10_dd))</expression>
<name>mean calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Date Math Request</name>
<statistic>
<expression>median(date_math(date(date_dtd),const_str(+2YEARS)))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>median(date(dm_2y_dtd))</expression>
<name>median calced</name>
</statistic>
<statistic>
<expression>max(date_math(date(date_dtd),const_str(+2MONTHS)))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>max(date(dm_2m_dtd))</expression>
<name>max calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Constant Date Request</name>
<statistic>
<expression>median(const_date(1800-06-30T23:59:59Z))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>median(date(const_00_dtd))</expression>
<name>median calced</name>
</statistic>
<statistic>
<expression>max(const_date(1804-06-30T23:59:59Z))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>max(date(const_04_dtd))</expression>
<name>max calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Constant String Request</name>
<statistic>
<expression>min(const_str(this is the first))</expression>
<name>min</name>
</statistic>
<statistic>
<expression>min(str(const_first_sd))</expression>
<name>min calced</name>
</statistic>
<statistic>
<expression>max(const_str(this is the second))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>max(str(const_second_sd))</expression>
<name>max calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Concatenate Request</name>
<statistic>
<expression>min(concat(const_str(this is the first),str(string_sd)))</expression>
<name>min</name>
</statistic>
<statistic>
<expression>min(str(concat_first_sd))</expression>
<name>min calced</name>
</statistic>
<statistic>
<expression>max(concat(const_str(this is the second),str(string_sd)))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>max(str(concat_second_sd))</expression>
<name>max calced</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Reverse Request</name>
<statistic>
<expression>min(rev(str(string_sd)))</expression>
<name>min</name>
</statistic>
<statistic>
<expression>min(str(rev_sd))</expression>
<name>min calced</name>
</statistic>
<statistic>
<expression>max(rev(str(string_sd)))</expression>
<name>max</name>
</statistic>
<statistic>
<expression>max(str(rev_sd))</expression>
<name>max calced</name>
</statistic>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,310 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>Sum Request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>sum(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>sum(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>sum(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>SumOfSquares Request</name>
<statistic>
<expression>sumofsquares(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>sumofsquares(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>sumofsquares(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>sumofsquares(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Mean Request</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>mean(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>mean(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>mean(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Stddev Request</name>
<statistic>
<expression>stddev(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>stddev(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>stddev(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>stddev(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Median Request</name>
<statistic>
<expression>median(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>median(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>median(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>median(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Perc 20 Request</name>
<statistic>
<expression>perc(20,int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>perc(20,long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>perc(20,float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>perc(20,double(double_dd))</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>perc(20,date(date_dtd))</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>perc(20,str(string_sd))</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Perc 60 Request</name>
<statistic>
<expression>perc(60,int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>perc(60,long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>perc(60,float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>perc(60,double(double_dd))</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>perc(60,date(date_dtd))</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>perc(60,str(string_sd))</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Min Request</name>
<statistic>
<expression>min(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>min(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>min(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>min(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>min(date(date_dtd))</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>min(str(string_sd))</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Max Request</name>
<statistic>
<expression>max(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>max(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>max(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>max(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>max(date(date_dtd))</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>max(str(string_sd))</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Unique Request</name>
<statistic>
<expression>unique(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>unique(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>unique(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>unique(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>unique(date(date_dtd))</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>unique(str(string_sd))</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Count Request</name>
<statistic>
<expression>count(int(int_id))</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>count(long(long_ld))</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>count(float(float_fd))</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>count(double(double_dd))</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>count(date(date_dtd))</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>count(str(string_sd))</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
<analyticsRequest>
<name>Missing Request</name>
<statistic>
<expression>missing(int{int_id})</expression>
<name>int_id</name>
</statistic>
<statistic>
<expression>missing(long{long_ld})</expression>
<name>long_ld</name>
</statistic>
<statistic>
<expression>missing(float{float_fd})</expression>
<name>float_fd</name>
</statistic>
<statistic>
<expression>missing(double{double_dd})</expression>
<name>double_dd</name>
</statistic>
<statistic>
<expression>missing(date{date_dtd})</expression>
<name>date_dtd</name>
</statistic>
<statistic>
<expression>missing(str{string_sd})</expression>
<name>string_sd</name>
</statistic>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,94 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>int request</name>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>perc(8,int(int_id))</expression>
<name>perc_8</name>
</statistic>
<queryFacet>
<name>float1</name>
<query>float_fd:[* TO 50]</query>
</queryFacet>
<queryFacet>
<name>float2</name>
<query>float_fd:[* TO 30]</query>
</queryFacet>
</analyticsRequest>
<analyticsRequest>
<name>long request</name>
<statistic>
<expression>sum(long(long_ld))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>mean(long(long_ld))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>median(long(long_ld))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>perc(8,long(long_ld))</expression>
<name>perc_8</name>
</statistic>
<queryFacet>
<name>string</name>
<query>string_sd:abc1</query>
<query>string_sd:abc2</query>
</queryFacet>
</analyticsRequest>
<analyticsRequest>
<name>float request</name>
<statistic>
<expression>sum(float(float_fd))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>mean(float(float_fd))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>median(float(float_fd))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>perc(8,float(float_fd))</expression>
<name>perc_8</name>
</statistic>
<queryFacet>
<name>long and double</name>
<query>long_ld:[20 TO *]</query>
<query>long_ld:[30 TO *]</query>
<query>double_dd:[* TO 50]</query>
</queryFacet>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,319 @@
<?xml version="1.0" encoding="UTF-8"?>
<analyticsRequestEnvelope stats="true" olap="true">
<analyticsRequest>
<name>regular int</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(int(int_id))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>sumofsquares(int(int_id))</expression>
<name>sumOfSquares</name>
</statistic>
<rangeFacet hardend="false">
<field>long_ld</field>
<start>5</start>
<end>30</end>
<gap>5</gap>
<includeBoundary>lower</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>double_dd</field>
<start>3</start>
<end>39</end>
<gap>7</gap>
<includeBoundary>upper</includeBoundary>
<includeBoundary>outer</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>date_dtd</field>
<start>1007-01-01T23:59:59Z</start>
<end>1044-01-01T23:59:59Z</end>
<gap>+7YEARS</gap>
<includeBoundary>lower</includeBoundary>
<includeBoundary>edge</includeBoundary>
<includeBoundary>outer</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
</analyticsRequest>
<analyticsRequest>
<name>regular float</name>
<statistic>
<expression>mean(float(float_fd))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>sum(float(float_fd))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>median(float(float_fd))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(float(float_fd))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>sumofsquares(float(float_fd))</expression>
<name>sumOfSquares</name>
</statistic>
<rangeFacet hardend="false">
<field>long_ld</field>
<start>0</start>
<end>29</end>
<gap>4</gap>
<includeBoundary>all</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>double_dd</field>
<start>4</start>
<end>47</end>
<gap>11</gap>
<includeBoundary>edge</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>date_dtd</field>
<start>1004-01-01T23:59:59Z</start>
<end>1046-01-01T23:59:59Z</end>
<gap>+5YEARS</gap>
<includeBoundary>upper</includeBoundary>
<includeBoundary>edge</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
</analyticsRequest>
<analyticsRequest>
<name>hardend int</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(int(int_id))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>sumofsquares(int(int_id))</expression>
<name>sumOfSquares</name>
</statistic>
<rangeFacet hardend="true">
<field>long_ld</field>
<start>5</start>
<end>30</end>
<gap>5</gap>
<includeBoundary>lower</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="true">
<field>double_dd</field>
<start>3</start>
<end>39</end>
<gap>7</gap>
<includeBoundary>upper</includeBoundary>
<includeBoundary>outer</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="true">
<field>date_dtd</field>
<start>1007-01-01T23:59:59Z</start>
<end>1044-01-01T23:59:59Z</end>
<gap>+7YEARS</gap>
<includeBoundary>lower</includeBoundary>
<includeBoundary>edge</includeBoundary>
<includeBoundary>outer</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
</analyticsRequest>
<analyticsRequest>
<name>hardend float</name>
<statistic>
<expression>mean(float(float_fd))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>sum(float(float_fd))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>median(float(float_fd))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(float(float_fd))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>sumofsquares(float(float_fd))</expression>
<name>sumOfSquares</name>
</statistic>
<rangeFacet hardend="true">
<field>long_ld</field>
<start>0</start>
<end>29</end>
<gap>4</gap>
<includeBoundary>all</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="true">
<field>double_dd</field>
<start>4</start>
<end>47</end>
<gap>11</gap>
<includeBoundary>edge</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="true">
<field>date_dtd</field>
<start>1004-01-01T23:59:59Z</start>
<end>1046-01-01T23:59:59Z</end>
<gap>+5YEARS</gap>
<includeBoundary>upper</includeBoundary>
<includeBoundary>edge</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
</analyticsRequest>
<analyticsRequest>
<name>multigap int</name>
<statistic>
<expression>mean(int(int_id))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>sum(int(int_id))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>median(int(int_id))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(int(int_id))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>sumofsquares(int(int_id))</expression>
<name>sumOfSquares</name>
</statistic>
<rangeFacet hardend="false">
<field>long_ld</field>
<start>5</start>
<end>30</end>
<gap>4</gap>
<gap>2</gap>
<gap>6</gap>
<gap>3</gap>
<includeBoundary>lower</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>double_dd</field>
<start>3</start>
<end>39</end>
<gap>3</gap>
<gap>1</gap>
<gap>7</gap>
<includeBoundary>upper</includeBoundary>
<includeBoundary>outer</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>date_dtd</field>
<start>1007-01-01T23:59:59Z</start>
<end>1044-01-01T23:59:59Z</end>
<gap>+2YEARS</gap>
<gap>+7YEARS</gap>
<includeBoundary>lower</includeBoundary>
<includeBoundary>edge</includeBoundary>
<includeBoundary>outer</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
</analyticsRequest>
<analyticsRequest>
<name>multigap float</name>
<statistic>
<expression>mean(float(float_fd))</expression>
<name>mean</name>
</statistic>
<statistic>
<expression>sum(float(float_fd))</expression>
<name>sum</name>
</statistic>
<statistic>
<expression>median(float(float_fd))</expression>
<name>median</name>
</statistic>
<statistic>
<expression>count(float(float_fd))</expression>
<name>count</name>
</statistic>
<statistic>
<expression>sumofsquares(float(float_fd))</expression>
<name>sumOfSquares</name>
</statistic>
<rangeFacet hardend="false">
<field>long_ld</field>
<start>0</start>
<end>29</end>
<gap>1</gap>
<gap>4</gap>
<includeBoundary>all</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>double_dd</field>
<start>4</start>
<end>47</end>
<gap>2</gap>
<gap>3</gap>
<gap>11</gap>
<includeBoundary>edge</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
<rangeFacet hardend="false">
<field>date_dtd</field>
<start>1004-01-01T23:59:59Z</start>
<end>1046-01-01T23:59:59Z</end>
<gap>+4YEARS</gap>
<gap>+5YEARS</gap>
<includeBoundary>upper</includeBoundary>
<includeBoundary>edge</includeBoundary>
<otherRange>all</otherRange>
</rangeFacet>
</analyticsRequest>
</analyticsRequestEnvelope>

View File

@ -0,0 +1,94 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- The Solr schema file. This file should be named "schema.xml" and
should be located where the classloader for the Solr webapp can find it.
This schema is used for testing, and as such has everything and the
kitchen sink thrown in. See example/solr/conf/schema.xml for a
more concise example.
-->
<schema name="schema-docValues" version="1.5">
<types>
<!-- field type definitions... note that the "name" attribute is
just a label to be used by field definitions. The "class"
attribute and any other attributes determine the real type and
behavior of the fieldtype.
-->
<!-- numeric field types that store and index the text
value verbatim (and hence don't sort correctly or support range queries.)
These are provided more for backward compatibility, allowing one
to create a schema that matches an existing lucene index.
-->
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
seconds part (.999) is optional.
-->
<fieldtype name="date" class="solr.TrieDateField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldtype name="boolean" class="solr.BoolField" />
<fieldtype name="string" class="solr.StrField" />
<fieldType name="uuid" class="solr.UUIDField" />
</types>
<fields>
<field name="id" type="string" required="true" />
<field name="floatdv" type="float" indexed="false" stored="false" docValues="true" default="1" />
<field name="intdv" type="int" indexed="false" stored="false" docValues="true" default="2" />
<field name="doubledv" type="double" indexed="false" stored="false" docValues="true" default="3" />
<field name="longdv" type="long" indexed="false" stored="false" docValues="true" default="4" />
<field name="datedv" type="date" indexed="false" stored="false" docValues="true" default="1995-12-31T23:59:59.999Z" />
<field name="stringdv" type="string" indexed="false" stored="false" docValues="true" default="solr" />
<field name="stringdvm" type="string" indexed="false" stored="false" docValues="true" default="solr" multiValued="true" />
<dynamicField name="*_i" type="int" indexed="true" stored="true" docValues="false" multiValued="false" />
<dynamicField name="*_id" type="int" indexed="true" stored="true" docValues="true" multiValued="false" />
<dynamicField name="*_idm" type="int" indexed="true" stored="true" docValues="true" multiValued="true" />
<dynamicField name="*_l" type="long" indexed="true" stored="true" docValues="false" multiValued="false" />
<dynamicField name="*_ld" type="long" indexed="true" stored="true" docValues="true" multiValued="false" />
<dynamicField name="*_ldm" type="long" indexed="true" stored="true" docValues="true" multiValued="true" />
<dynamicField name="*_f" type="float" indexed="true" stored="true" docValues="false" multiValued="false" />
<dynamicField name="*_fd" type="float" indexed="true" stored="true" docValues="true" multiValued="false" />
<dynamicField name="*_fdm" type="float" indexed="true" stored="true" docValues="true" multiValued="true" />
<dynamicField name="*_d" type="double" indexed="true" stored="true" docValues="false" multiValued="false" />
<dynamicField name="*_dd" type="double" indexed="true" stored="true" docValues="true" multiValued="false" />
<dynamicField name="*_ddm" type="double" indexed="true" stored="true" docValues="true" multiValued="true" />
<dynamicField name="*_dt" type="date" indexed="true" stored="true" docValues="false" multiValued="false" />
<dynamicField name="*_dtd" type="date" indexed="true" stored="true" docValues="true" multiValued="false" />
<dynamicField name="*_dtdm" type="date" indexed="true" stored="true" docValues="true" multiValued="true" />
<dynamicField name="*_s" type="string" indexed="true" stored="true" docValues="false" multiValued="false"/>
<dynamicField name="*_sd" type="string" indexed="true" stored="true" docValues="true" multiValued="false"/>
<dynamicField name="*_sdm" type="string" indexed="true" stored="true" docValues="true" multiValued="true" />
</fields>
<uniqueKey>id</uniqueKey>
</schema>

View File

@ -0,0 +1,156 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Scanner;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.analytics.util.MedianCalculator;
import org.apache.solr.analytics.util.PercentileCalculator;
import org.apache.solr.request.SolrQueryRequest;
import com.google.common.collect.ObjectArrays;
import org.apache.solr.util.ExternalPaths;
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
public class AbstractAnalyticsStatsTest extends SolrTestCaseJ4 {
  /** Base query parameters prepended to every analytics test request (see {@link #request}). */
  protected static final String[] BASEPARMS = new String[]{ "q", "*:*", "indent", "true", "olap", "true", "rows", "0" };

  /**
   * Per-type default field values, keyed by type name. Used by {@link #calculateMissing}
   * to count entries that carry the schema default (i.e. "missing" values).
   * Populated by subclasses.
   */
  protected static final HashMap<String,Object> defaults = new HashMap<String,Object>();

  /**
   * Extracts a single statistic value from the raw XML response text by string scanning.
   * <p>
   * Looks for the {@code <lst name="request">} section, then the first
   * {@code <type name="name">...</type>} element after it, and parses the element text
   * according to {@code type}.
   * <p>
   * NOTE(review): assumes both markers are present in {@code response}; if either is
   * absent, {@code indexOf} returns -1 and the substring arithmetic will misbehave —
   * acceptable for a test helper, but callers must pass names that exist in the response.
   *
   * @param response the full XML response from Solr
   * @param request  the analytics request name to locate
   * @param type     the XML element/type name ("double", "int", "long", "float", or other for raw text)
   * @param name     the statistic name attribute to locate
   * @return the parsed value as the matching boxed numeric type, or the raw String for any other type
   */
  public Object getStatResult(String response, String request, String type, String name) {
    String cat = "\n <lst name=\""+request+"\">";
    String begin = "<"+type+" name=\""+name+"\">";
    String end = "</"+type+">";
    int beginInt = response.indexOf(begin, response.indexOf(cat))+begin.length();
    int endInt = response.indexOf(end, beginInt);
    String resultStr = response.substring(beginInt, endInt);
    if (type.equals("double")) {
      return Double.parseDouble(resultStr);
    } else if (type.equals("int")) {
      return Integer.parseInt(resultStr);
    } else if (type.equals("long")) {
      return Long.parseLong(resultStr);
    } else if (type.equals("float")) {
      return Float.parseFloat(resultStr);
    } else {
      return resultStr;
    }
  }

  /**
   * Computes the expected value of a numeric statistic over {@code list} for comparison
   * against the analytics component's output.
   *
   * @param list the raw field values
   * @param stat one of "median", "mean", "sum", "sumOfSquares", "stddev"
   * @return the expected statistic value
   * @throws IllegalArgumentException if {@code stat} is not one of the supported names
   */
  public <T extends Number & Comparable<T>> Double calculateNumberStat(ArrayList<T> list, String stat) {
    Double result;
    if (stat.equals("median")) {
      result = MedianCalculator.getMedian(list);
    } else if (stat.equals("mean")) {
      double d = 0;
      for (T element : list) {
        d += element.doubleValue();
      }
      result = Double.valueOf(d/list.size());
    } else if (stat.equals("sum")) {
      double d = 0;
      for (T element : list) {
        d += element.doubleValue();
      }
      result = Double.valueOf(d);
    } else if (stat.equals("sumOfSquares")) {
      double d = 0;
      for (T element : list) {
        d += element.doubleValue()*element.doubleValue();
      }
      result = Double.valueOf(d);
    } else if (stat.equals("stddev")) {
      double sum = 0;
      double sumSquares = 0;
      for (T element : list) {
        sum += element.doubleValue();
        sumSquares += element.doubleValue()*element.doubleValue();
      }
      // Population std dev: sqrt(E[x^2] - E[x]^2).
      // Use a double for the size so size*size cannot overflow int for large lists.
      double size = list.size();
      result = Math.sqrt(sumSquares/size - sum*sum/(size*size));
    } else {
      throw new IllegalArgumentException();
    }
    return result;
  }

  /**
   * Computes the expected value of a non-numeric-specific statistic over {@code list}.
   * <p>
   * Supported: "perc_N" (the N-th percentile), "count", "unique", "max", "min".
   * NOTE: "max"/"min" sort {@code list} in place as a side effect.
   *
   * @param list the raw field values (may be sorted in place)
   * @param stat the statistic name
   * @return the expected value, or {@code null} for an unrecognized stat
   */
  public <T extends Comparable<T>> Object calculateStat(ArrayList<T> list, String stat) {
    Object result;
    // substring(5) below assumes "perc_" is a prefix, so check startsWith, not contains.
    if (stat.startsWith("perc_")) {
      double[] perc = new double[]{Double.parseDouble(stat.substring(5))/100};
      result = PercentileCalculator.getPercentiles(list, perc).get(0);
    } else if (stat.equals("count")) {
      result = Long.valueOf(list.size());
    } else if (stat.equals("unique")) {
      HashSet<T> set = new HashSet<T>();
      set.addAll(list);
      result = Long.valueOf((long)set.size());
    } else if (stat.equals("max")) {
      Collections.sort(list);
      result = list.get(list.size()-1);
    } else if (stat.equals("min")) {
      Collections.sort(list);
      result = list.get(0);
    } else {
      result = null;
    }
    return result;
  }

  /**
   * Counts how many entries in {@code list} equal the default value registered for
   * {@code type} in {@link #defaults}; such entries represent "missing" field values.
   *
   * @param list the raw field values
   * @param type the type key into {@link #defaults}
   * @return the number of entries equal to the type's default
   */
  @SuppressWarnings("unchecked")
  public <T extends Comparable<T>> Long calculateMissing(ArrayList<T> list, String type) {
    T def = (T)defaults.get(type);
    long miss = 0;
    for (T element : list) {
      if (element.compareTo(def)==0) {
        miss++;
      }
    }
    return Long.valueOf(miss);
  }

  /**
   * Builds a Solr query request from {@link #BASEPARMS} plus the given extra parameters.
   */
  public static SolrQueryRequest request(String...args){
    return SolrTestCaseJ4.req( ObjectArrays.concat(BASEPARMS, args,String.class) );
  }

  /**
   * Reads a "key=value"-per-line request file into a flat String array of
   * alternating keys and values. Blank lines and lines starting with '#' are skipped.
   *
   * @param fileName path relative to {@code ExternalPaths.SOURCE_HOME}
   * @return alternating key/value entries
   * @throws FileNotFoundException if the file does not exist
   */
  public static String[] fileToStringArr(String fileName) throws FileNotFoundException {
    Scanner file = new Scanner(new File(ExternalPaths.SOURCE_HOME, fileName), "UTF-8");
    try {
      ArrayList<String> strList = new ArrayList<String>();
      while (file.hasNextLine()) {
        String line = file.nextLine().trim();
        if (StringUtils.isBlank(line) || line.startsWith("#")) {
          continue;
        }
        String[] param = line.split("=");
        strList.add(param[0]);
        strList.add(param[1]);
      }
      return strList.toArray(new String[0]);
    } finally {
      // Close the Scanner (and the underlying file handle) even on parse errors.
      file.close();
    }
  }
}

View File

@ -0,0 +1,483 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Verifies un-faceted analytics statistics (sum, mean, stddev, percentiles,
 * min/max, unique, count, missing) against values recomputed locally from the
 * same generated documents.
 */
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
public class NoFacetTest extends AbstractAnalyticsStatsTest {
  static String fileName = "core/src/test-files/analytics/requestFiles/noFacets.txt";

  // Moduli used to generate per-field value distributions.
  static public final int INT = 71;
  static public final int LONG = 36;
  static public final int FLOAT = 93;
  static public final int DOUBLE = 49;
  static public final int DATE = 12;
  static public final int STRING = 28;
  static public final int NUM_LOOPS = 100;

  //INT
  static ArrayList<Integer> intTestStart;
  static long intMissing = 0;
  //LONG
  static ArrayList<Long> longTestStart;
  static long longMissing = 0;
  //FLOAT
  static ArrayList<Float> floatTestStart;
  static long floatMissing = 0;
  //DOUBLE
  static ArrayList<Double> doubleTestStart;
  static long doubleMissing = 0;
  //DATE
  static ArrayList<String> dateTestStart;
  static long dateMissing = 0;
  //STRING
  static ArrayList<String> stringTestStart;
  static long stringMissing = 0;

  static String response;

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-basic.xml","schema-analytics.xml");
    h.update("<delete><query>*:*</query></delete>");
    // Default ("missing") values per field type; valueOf replaces the
    // deprecated boxing constructors used previously.
    defaults.put("int_id", Integer.valueOf(0));
    defaults.put("long_ld", Long.valueOf(0));
    defaults.put("float_fd", Float.valueOf(0));
    defaults.put("double_dd", Double.valueOf(0));
    defaults.put("date_dtd", "1800-12-31T23:59:59Z");
    defaults.put("string_sd", "str0");

    intTestStart = new ArrayList<>();
    longTestStart = new ArrayList<>();
    floatTestStart = new ArrayList<>();
    doubleTestStart = new ArrayList<>();
    dateTestStart = new ArrayList<>();
    stringTestStart = new ArrayList<>();

    for (int j = 0; j < NUM_LOOPS; ++j) {
      int i = j%INT;
      long l = j%LONG;
      float f = j%FLOAT;
      double d = j%DOUBLE;
      String dt = (1800+j%DATE) + "-12-31T23:59:59Z";
      String s = "str" + (j%STRING);
      List<String> fields = new ArrayList<>();
      fields.add("id"); fields.add("1000"+j);
      // A zero/default value means the field is left off the document and
      // counted as "missing" instead.
      if( i != 0 ){
        fields.add("int_id"); fields.add("" + i);
        intTestStart.add(i);
      } else intMissing++;
      if( l != 0L ){
        fields.add("long_ld"); fields.add("" + l);
        longTestStart.add(l);
      } else longMissing++;
      if( f != 0.0f ){
        fields.add("float_fd"); fields.add("" + f);
        floatTestStart.add(f);
      } else floatMissing++;
      if( d != 0.0d ){
        fields.add("double_dd"); fields.add("" + d);
        doubleTestStart.add(d);
      } else doubleMissing++;
      if( (j%DATE) != 0 ){
        fields.add("date_dtd"); fields.add(dt);
        dateTestStart.add(dt);
      } else dateMissing++;
      if( (j%STRING) != 0 ){
        fields.add("string_sd"); fields.add(s);
        stringTestStart.add(s);
      } else stringMissing++;
      fields.add("int_i"); fields.add("" + i);
      fields.add("long_l"); fields.add("" + l);
      fields.add("float_f"); fields.add("" + f);
      fields.add("double_d"); fields.add("" + d);
      assertU(adoc(fields.toArray(new String[0])));
      if (usually()) {
        assertU(commit()); // to have several segments
      }
    }
    assertU(commit());
    //Sort ascending tests
    response = h.query(request(fileToStringArr(fileName)));
  }

  /** Sum over each numeric field must match the locally computed sum. */
  @Test
  public void sumTest() throws Exception {
    //Int
    Double intResult = (Double)getStatResult(response, "sr", "double", "int_id");
    Double intTest = (Double)calculateNumberStat(intTestStart, "sum");
    assertEquals(intResult,intTest);
    //Long
    Double longResult = (Double)getStatResult(response, "sr", "double", "long_ld");
    Double longTest = (Double)calculateNumberStat(longTestStart, "sum");
    assertEquals(longResult,longTest);
    //Float
    Double floatResult = (Double)getStatResult(response, "sr", "double", "float_fd");
    Double floatTest = (Double)calculateNumberStat(floatTestStart, "sum");
    assertEquals(floatResult,floatTest);
    //Double
    Double doubleResult = (Double)getStatResult(response, "sr", "double", "double_dd");
    Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "sum");
    assertEquals(doubleResult,doubleTest);
  }

  /** Sum-of-squares over each numeric field. */
  @Test
  public void sumOfSquaresTest() throws Exception {
    //Int
    Double intResult = (Double)getStatResult(response, "sosr", "double", "int_id");
    Double intTest = (Double)calculateNumberStat(intTestStart, "sumOfSquares");
    assertEquals(intResult,intTest);
    //Long
    Double longResult = (Double)getStatResult(response, "sosr", "double", "long_ld");
    Double longTest = (Double)calculateNumberStat(longTestStart, "sumOfSquares");
    assertEquals(longResult,longTest);
    //Float
    Double floatResult = (Double)getStatResult(response, "sosr", "double", "float_fd");
    Double floatTest = (Double)calculateNumberStat(floatTestStart, "sumOfSquares");
    assertEquals(floatResult,floatTest);
    //Double
    Double doubleResult = (Double)getStatResult(response, "sosr", "double", "double_dd");
    Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "sumOfSquares");
    assertEquals(doubleResult,doubleTest);
  }

  /** Mean over each numeric field. */
  @Test
  public void meanTest() throws Exception {
    //Int
    Double intResult = (Double)getStatResult(response, "mr", "double", "int_id");
    Double intTest = (Double)calculateNumberStat(intTestStart, "mean");
    assertEquals(intResult,intTest);
    //Long
    Double longResult = (Double)getStatResult(response, "mr", "double", "long_ld");
    Double longTest = (Double)calculateNumberStat(longTestStart, "mean");
    assertEquals(longResult,longTest);
    //Float
    Double floatResult = (Double)getStatResult(response, "mr", "double", "float_fd");
    Double floatTest = (Double)calculateNumberStat(floatTestStart, "mean");
    assertEquals(floatResult,floatTest);
    //Double
    Double doubleResult = (Double)getStatResult(response, "mr", "double", "double_dd");
    Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "mean");
    assertEquals(doubleResult,doubleTest);
  }

  /** Standard deviation; compared within a small epsilon because the server
   *  and the local recomputation may accumulate rounding differently. */
  @Test
  public void stddevTest() throws Exception {
    //Int
    Double intResult = (Double)getStatResult(response, "str", "double", "int_id");
    Double intTest = (Double)calculateNumberStat(intTestStart, "stddev");
    assertTrue(Math.abs(intResult-intTest)<.00000000001);
    //Long
    Double longResult = (Double)getStatResult(response, "str", "double", "long_ld");
    Double longTest = (Double)calculateNumberStat(longTestStart, "stddev");
    assertTrue(Math.abs(longResult-longTest)<.00000000001);
    //Float
    Double floatResult = (Double)getStatResult(response, "str", "double", "float_fd");
    Double floatTest = (Double)calculateNumberStat(floatTestStart, "stddev");
    assertTrue(Math.abs(floatResult-floatTest)<.00000000001);
    //Double
    Double doubleResult = (Double)getStatResult(response, "str", "double", "double_dd");
    Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "stddev");
    assertTrue(Math.abs(doubleResult-doubleTest)<.00000000001);
  }

  /** Median over each numeric field. */
  @Test
  public void medianTest() throws Exception {
    //Int
    Double intResult = (Double)getStatResult(response, "medr", "double", "int_id");
    Double intTest = (Double)calculateNumberStat(intTestStart, "median");
    assertEquals(intResult,intTest);
    //Long
    Double longResult = (Double)getStatResult(response, "medr", "double", "long_ld");
    Double longTest = (Double)calculateNumberStat(longTestStart, "median");
    assertEquals(longResult,longTest);
    //Float
    Double floatResult = (Double)getStatResult(response, "medr", "double", "float_fd");
    Double floatTest = (Double)calculateNumberStat(floatTestStart, "median");
    assertEquals(floatResult,floatTest);
    //Double
    Double doubleResult = (Double)getStatResult(response, "medr", "double", "double_dd");
    Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "median");
    assertEquals(doubleResult,doubleTest);
  }

  /** 20th percentile across all supported field types. */
  @Test
  public void perc20Test() throws Exception {
    //Int 20
    Integer intResult = (Integer)getStatResult(response, "p2r", "int", "int_id");
    Integer intTest = (Integer)calculateStat(intTestStart, "perc_20");
    assertEquals(intResult,intTest);
    //Long 20
    Long longResult = (Long)getStatResult(response, "p2r", "long", "long_ld");
    Long longTest = (Long)calculateStat(longTestStart, "perc_20");
    assertEquals(longResult,longTest);
    //Float 20
    Float floatResult = (Float)getStatResult(response, "p2r", "float", "float_fd");
    Float floatTest = (Float)calculateStat(floatTestStart, "perc_20");
    assertEquals(floatResult,floatTest);
    //Double 20
    Double doubleResult = (Double)getStatResult(response, "p2r", "double", "double_dd");
    Double doubleTest = (Double)calculateStat(doubleTestStart, "perc_20");
    assertEquals(doubleResult,doubleTest);
    //Date 20
    String dateResult = (String)getStatResult(response, "p2r", "date", "date_dtd");
    String dateTest = (String)calculateStat(dateTestStart, "perc_20");
    assertEquals(dateResult,dateTest);
    //String 20
    String stringResult = (String)getStatResult(response, "p2r", "str", "string_sd");
    String stringTest = (String)calculateStat(stringTestStart, "perc_20");
    assertEquals(stringResult,stringTest);
  }

  /** 60th percentile across all supported field types. */
  @Test
  public void perc60Test() throws Exception {
    //Int 60
    Integer intResult = (Integer)getStatResult(response, "p6r", "int", "int_id");
    Integer intTest = (Integer)calculateStat(intTestStart, "perc_60");
    assertEquals(intResult,intTest);
    //Long 60
    Long longResult = (Long)getStatResult(response, "p6r", "long", "long_ld");
    Long longTest = (Long)calculateStat(longTestStart, "perc_60");
    assertEquals(longResult,longTest);
    //Float 60
    Float floatResult = (Float)getStatResult(response, "p6r", "float", "float_fd");
    Float floatTest = (Float)calculateStat(floatTestStart, "perc_60");
    assertEquals(floatResult,floatTest);
    //Double 60
    Double doubleResult = (Double)getStatResult(response, "p6r", "double", "double_dd");
    Double doubleTest = (Double)calculateStat(doubleTestStart, "perc_60");
    assertEquals(doubleResult,doubleTest);
    //Date 60
    String dateResult = (String)getStatResult(response, "p6r", "date", "date_dtd");
    String dateTest = (String)calculateStat(dateTestStart, "perc_60");
    assertEquals(dateResult,dateTest);
    //String 60
    String stringResult = (String)getStatResult(response, "p6r", "str", "string_sd");
    String stringTest = (String)calculateStat(stringTestStart, "perc_60");
    assertEquals(stringResult,stringTest);
  }

  /** Minimum across all supported field types. */
  @Test
  public void minTest() throws Exception {
    //Int
    Integer intResult = (Integer)getStatResult(response, "mir", "int", "int_id");
    Integer intTest = (Integer)calculateStat(intTestStart, "min");
    assertEquals(intResult,intTest);
    //Long
    Long longResult = (Long)getStatResult(response, "mir", "long", "long_ld");
    Long longTest = (Long)calculateStat(longTestStart, "min");
    assertEquals(longResult,longTest);
    //Float
    Float floatResult = (Float)getStatResult(response, "mir", "float", "float_fd");
    Float floatTest = (Float)calculateStat(floatTestStart, "min");
    assertEquals(floatResult,floatTest);
    //Double
    Double doubleResult = (Double)getStatResult(response, "mir", "double", "double_dd");
    Double doubleTest = (Double)calculateStat(doubleTestStart, "min");
    assertEquals(doubleResult,doubleTest);
    //Date
    String dateResult = (String)getStatResult(response, "mir", "date", "date_dtd");
    String dateTest = (String)calculateStat(dateTestStart, "min");
    assertEquals(dateResult,dateTest);
    //String
    String stringResult = (String)getStatResult(response, "mir", "str", "string_sd");
    String stringTest = (String)calculateStat(stringTestStart, "min");
    assertEquals(stringResult,stringTest);
  }

  /** Maximum across all supported field types. */
  @Test
  public void maxTest() throws Exception {
    //Int
    Integer intResult = (Integer)getStatResult(response, "mar", "int", "int_id");
    Integer intTest = (Integer)calculateStat(intTestStart, "max");
    assertEquals(intResult,intTest);
    //Long
    Long longResult = (Long)getStatResult(response, "mar", "long", "long_ld");
    Long longTest = (Long)calculateStat(longTestStart, "max");
    assertEquals(longResult,longTest);
    //Float
    Float floatResult = (Float)getStatResult(response, "mar", "float", "float_fd");
    Float floatTest = (Float)calculateStat(floatTestStart, "max");
    assertEquals(floatResult,floatTest);
    //Double
    Double doubleResult = (Double)getStatResult(response, "mar", "double", "double_dd");
    Double doubleTest = (Double)calculateStat(doubleTestStart, "max");
    assertEquals(doubleResult,doubleTest);
    //Date
    String dateResult = (String)getStatResult(response, "mar", "date", "date_dtd");
    String dateTest = (String)calculateStat(dateTestStart, "max");
    assertEquals(dateResult,dateTest);
    //String
    String stringResult = (String)getStatResult(response, "mar", "str", "string_sd");
    String stringTest = (String)calculateStat(stringTestStart, "max");
    assertEquals(stringResult,stringTest);
  }

  /** Distinct-value count across all supported field types. */
  @Test
  public void uniqueTest() throws Exception {
    //Int
    Long intResult = (Long)getStatResult(response, "ur", "long", "int_id");
    Long intTest = (Long)calculateStat(intTestStart, "unique");
    assertEquals(intResult,intTest);
    //Long
    Long longResult = (Long)getStatResult(response, "ur", "long", "long_ld");
    Long longTest = (Long)calculateStat(longTestStart, "unique");
    assertEquals(longResult,longTest);
    //Float
    Long floatResult = (Long)getStatResult(response, "ur", "long", "float_fd");
    Long floatTest = (Long)calculateStat(floatTestStart, "unique");
    assertEquals(floatResult,floatTest);
    //Double
    Long doubleResult = (Long)getStatResult(response, "ur", "long", "double_dd");
    Long doubleTest = (Long)calculateStat(doubleTestStart, "unique");
    assertEquals(doubleResult,doubleTest);
    //Date
    Long dateResult = (Long)getStatResult(response, "ur", "long", "date_dtd");
    Long dateTest = (Long)calculateStat(dateTestStart, "unique");
    assertEquals(dateResult,dateTest);
    //String
    Long stringResult = (Long)getStatResult(response, "ur", "long", "string_sd");
    Long stringTest = (Long)calculateStat(stringTestStart, "unique");
    assertEquals(stringResult,stringTest);
  }

  /** Non-missing value count across all supported field types. */
  @Test
  public void countTest() throws Exception {
    //Int
    Long intResult = (Long)getStatResult(response, "cr", "long", "int_id");
    Long intTest = (Long)calculateStat(intTestStart, "count");
    assertEquals(intResult,intTest);
    //Long
    Long longResult = (Long)getStatResult(response, "cr", "long", "long_ld");
    Long longTest = (Long)calculateStat(longTestStart, "count");
    assertEquals(longResult,longTest);
    //Float
    Long floatResult = (Long)getStatResult(response, "cr", "long", "float_fd");
    Long floatTest = (Long)calculateStat(floatTestStart, "count");
    assertEquals(floatResult,floatTest);
    //Double
    Long doubleResult = (Long)getStatResult(response, "cr", "long", "double_dd");
    Long doubleTest = (Long)calculateStat(doubleTestStart, "count");
    assertEquals(doubleResult,doubleTest);
    //Date
    Long dateResult = (Long)getStatResult(response, "cr", "long", "date_dtd");
    Long dateTest = (Long)calculateStat(dateTestStart, "count");
    assertEquals(dateResult,dateTest);
    //String
    Long stringResult = (Long)getStatResult(response, "cr", "long", "string_sd");
    Long stringTest = (Long)calculateStat(stringTestStart, "count");
    assertEquals(stringResult,stringTest);
  }

  /** Missing-value counts must match the tallies recorded while indexing. */
  @Test
  public void missingDefaultTest() throws Exception {
    //Int
    long intResult = (Long)getStatResult(response, "misr", "long", "int_id");
    assertEquals(intMissing,intResult);
    //Long
    long longResult = (Long)getStatResult(response, "misr", "long", "long_ld");
    assertEquals(longMissing,longResult);
    //Float
    long floatResult = (Long)getStatResult(response, "misr", "long", "float_fd");
    assertEquals(floatMissing,floatResult);
    //Double
    long doubleResult = (Long)getStatResult(response, "misr", "long", "double_dd");
    assertEquals(doubleMissing,doubleResult);
    //Date
    long dateResult = (Long)getStatResult(response, "misr", "long", "date_dtd");
    assertEquals(dateMissing,dateResult);
    //String
    long stringResult = (Long)getStatResult(response, "misr", "long", "string_sd");
    assertEquals(stringMissing, stringResult);
  }
}

View File

@ -0,0 +1,270 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.expression;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Scanner;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.util.DateMathParser;
import org.apache.solr.util.ExternalPaths;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ObjectArrays;
/**
 * Verifies analytics expression evaluation (add, multiply, divide, power,
 * negate, abs, constants, date math, concat, reverse) by recombining the
 * component statistics from the same response and comparing.
 */
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
public class ExpressionTest extends SolrTestCaseJ4 {
  static String fileName = "core/src/test-files/analytics/requestFiles/expressions.txt";
  protected static final String[] BASEPARMS = new String[]{ "q", "*:*", "indent", "true", "stats", "true", "olap", "true", "rows", "0" };
  protected static final HashMap<String,Object> defaults = new HashMap<>();

  // Moduli used to generate per-field value distributions.
  static public final int INT = 71;
  static public final int LONG = 36;
  static public final int FLOAT = 93;
  static public final int DOUBLE = 49;
  static public final int DATE = 12;
  static public final int STRING = 28;
  static public final int NUM_LOOPS = 100;

  static String response;

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-basic.xml","schema-analytics.xml");
    h.update("<delete><query>*:*</query></delete>");
    for (int j = 0; j < NUM_LOOPS; ++j) {
      int i = j%INT;
      long l = j%LONG;
      float f = j%FLOAT;
      double d = j%DOUBLE;
      String dt = (1800+j%DATE) + "-12-31T23:59:59Z";
      String s = "str" + (j%STRING);
      assertU(adoc("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
          "double_dd", "" + d, "date_dtd", dt, "string_sd", s));
      if (usually()) {
        commit(); // to have several segments
      }
    }
    assertU(commit());
    //Sort ascending tests
    response = h.query(request(fileToStringArr(fileName)));
  }

  /** add(sum, unique) and add(mean, count, median). */
  @Test
  public void addTest() throws Exception {
    double sumResult = (Double)getStatResult(response, "ar", "double", "sum");
    double uniqueResult = ((Long)getStatResult(response, "ar", "long", "unique")).doubleValue();
    double result = (Double)getStatResult(response, "ar", "double", "su");
    assertTrue(sumResult+uniqueResult==result);

    double meanResult = (Double)getStatResult(response, "ar", "double", "mean");
    double medianResult = (Double)getStatResult(response, "ar", "double", "median");
    double countResult = ((Long)getStatResult(response, "ar", "long", "count")).doubleValue();
    result = (Double)getStatResult(response, "ar", "double", "mcm");
    assertTrue(meanResult+countResult+medianResult==result);
  }

  /** mult(sum, unique) and mult(mean, count, median). */
  @Test
  public void multiplyTest() throws Exception {
    double sumResult = (Double)getStatResult(response, "mr", "double", "sum");
    double uniqueResult = ((Long)getStatResult(response, "mr", "long", "unique")).doubleValue();
    double result = (Double)getStatResult(response, "mr", "double", "su");
    assertTrue(sumResult*uniqueResult==result);

    double meanResult = (Double)getStatResult(response, "mr", "double", "mean");
    double medianResult = (Double)getStatResult(response, "mr", "double", "median");
    double countResult = ((Long)getStatResult(response, "mr", "long", "count")).doubleValue();
    result = (Double)getStatResult(response, "mr", "double", "mcm");
    assertTrue(meanResult*countResult*medianResult==result);
  }

  /** div(sum, unique) and div(mean, count). */
  @Test
  public void divideTest() throws Exception {
    double sumResult = (Double)getStatResult(response, "dr", "double", "sum");
    double uniqueResult = ((Long)getStatResult(response, "dr", "long", "unique")).doubleValue();
    double result = (Double)getStatResult(response, "dr", "double", "su");
    assertTrue(sumResult/uniqueResult==result);

    double meanResult = (Double)getStatResult(response, "dr", "double", "mean");
    double countResult = ((Long)getStatResult(response, "dr", "long", "count")).doubleValue();
    result = (Double)getStatResult(response, "dr", "double", "mc");
    assertTrue(meanResult/countResult==result);
  }

  /** pow(sum, unique) and pow(mean, count). */
  @Test
  public void powerTest() throws Exception {
    double sumResult = (Double)getStatResult(response, "pr", "double", "sum");
    double uniqueResult = ((Long)getStatResult(response, "pr", "long", "unique")).doubleValue();
    double result = (Double)getStatResult(response, "pr", "double", "su");
    assertTrue(Math.pow(sumResult,uniqueResult)==result);

    double meanResult = (Double)getStatResult(response, "pr", "double", "mean");
    double countResult = ((Long)getStatResult(response, "pr", "long", "count")).doubleValue();
    result = (Double)getStatResult(response, "pr", "double", "mc");
    assertTrue(Math.pow(meanResult,countResult)==result);
  }

  /** neg(sum) and neg(count). */
  @Test
  public void negateTest() throws Exception {
    double sumResult = (Double)getStatResult(response, "nr", "double", "sum");
    double result = (Double)getStatResult(response, "nr", "double", "s");
    assertTrue(-1*sumResult==result);

    double countResult = ((Long)getStatResult(response, "nr", "long", "count")).doubleValue();
    result = (Double)getStatResult(response, "nr", "double", "c");
    assertTrue(-1*countResult==result);
  }

  /** abs(sum) and abs(count) — inputs are non-negative, so identity. */
  @Test
  public void absoluteValueTest() throws Exception {
    double sumResult = (Double)getStatResult(response, "avr", "double", "sum");
    double result = (Double)getStatResult(response, "avr", "double", "s");
    assertTrue(sumResult==result);

    double countResult = ((Long)getStatResult(response, "avr", "long", "count")).doubleValue();
    result = (Double)getStatResult(response, "avr", "double", "c");
    assertTrue(countResult==result);
  }

  /** Constant numeric expressions. */
  @Test
  public void constantNumberTest() throws Exception {
    double result = (Double)getStatResult(response, "cnr", "double", "c8");
    assertTrue(8==result);
    result = (Double)getStatResult(response, "cnr", "double", "c10");
    assertTrue(10==result);
  }

  /** Date-math expressions, validated against DateMathParser. */
  @SuppressWarnings("deprecation")
  @Test
  public void dateMathTest() throws Exception {
    String math = (String)getStatResult(response, "dmr", "str", "cme");
    DateMathParser date = new DateMathParser();
    date.setNow(TrieDateField.parseDate((String)getStatResult(response, "dmr", "date", "median")));
    String dateMath = (String)getStatResult(response, "dmr", "date", "dmme");
    assertTrue(TrieDateField.parseDate(dateMath).equals(date.parseMath(math)));

    math = (String)getStatResult(response, "dmr", "str", "cma");
    date = new DateMathParser();
    date.setNow(TrieDateField.parseDate((String)getStatResult(response, "dmr", "date", "max")));
    dateMath = (String)getStatResult(response, "dmr", "date", "dmma");
    assertTrue(TrieDateField.parseDate(dateMath).equals(date.parseMath(math)));
  }

  /** Constant date expressions round-trip through both date and str types. */
  @Test
  public void constantDateTest() throws Exception {
    String date = (String)getStatResult(response, "cdr", "date", "cd1");
    String str = (String)getStatResult(response, "cdr", "str", "cs1");
    assertTrue(date.equals(str));
    date = (String)getStatResult(response, "cdr", "date", "cd2");
    str = (String)getStatResult(response, "cdr", "str", "cs2");
    assertTrue(date.equals(str));
  }

  /** Constant string expressions. */
  @Test
  public void constantStringTest() throws Exception {
    String str = (String)getStatResult(response, "csr", "str", "cs1");
    assertTrue(str.equals("this is the first"));
    str = (String)getStatResult(response, "csr", "str", "cs2");
    assertTrue(str.equals("this is the second"));
    str = (String)getStatResult(response, "csr", "str", "cs3");
    assertTrue(str.equals("this is the third"));
  }

  /** concat(constant, min/max) expressions. */
  @Test
  public void concatenateTest() throws Exception {
    StringBuilder builder = new StringBuilder();
    builder.append((String)getStatResult(response, "cr", "str", "csmin"));
    builder.append((String)getStatResult(response, "cr", "str", "min"));
    String concat = (String)getStatResult(response, "cr", "str", "ccmin");
    assertTrue(concat.equals(builder.toString()));

    builder.setLength(0);
    builder.append((String)getStatResult(response, "cr", "str", "csmax"));
    builder.append((String)getStatResult(response, "cr", "str", "max"));
    concat = (String)getStatResult(response, "cr", "str", "ccmax");
    assertTrue(concat.equals(builder.toString()));
  }

  /** rev(min/max) expressions. */
  @Test
  public void reverseTest() throws Exception {
    StringBuilder builder = new StringBuilder();
    builder.append((String)getStatResult(response, "rr", "str", "min"));
    String rev = (String)getStatResult(response, "rr", "str", "rmin");
    assertTrue(rev.equals(builder.reverse().toString()));

    builder.setLength(0);
    builder.append((String)getStatResult(response, "rr", "str", "max"));
    rev = (String)getStatResult(response, "rr", "str", "rmax");
    assertTrue(rev.equals(builder.reverse().toString()));
  }

  /**
   * Pulls a single named value of the given XML type out of the raw response
   * text and parses it to the corresponding Java type (String for "date"/"str").
   */
  public Object getStatResult(String response, String request, String type, String name) {
    String cat = "\n <lst name=\""+request+"\">";
    String begin = "<"+type+" name=\""+name+"\">";
    String end = "</"+type+">";
    int beginInt = response.indexOf(begin, response.indexOf(cat))+begin.length();
    int endInt = response.indexOf(end, beginInt);
    String resultStr = response.substring(beginInt, endInt);
    if (type.equals("double")) {
      return Double.parseDouble(resultStr);
    } else if (type.equals("int")) {
      return Integer.parseInt(resultStr);
    } else if (type.equals("long")) {
      return Long.parseLong(resultStr);
    } else if (type.equals("float")) {
      return Float.parseFloat(resultStr);
    } else {
      return resultStr;
    }
  }

  // Builds a SolrQueryRequest from BASEPARMS plus the supplied key/value pairs.
  public static SolrQueryRequest request(String...args){
    return SolrTestCaseJ4.req( ObjectArrays.concat(BASEPARMS, args,String.class) );
  }

  /**
   * Reads a "key=value"-per-line request file into alternating key/value
   * entries, skipping lines too short to hold a pair.
   */
  public static String[] fileToStringArr(String fileName) throws FileNotFoundException {
    // try-with-resources: the original leaked the Scanner's file handle.
    try (Scanner file = new Scanner(new File(ExternalPaths.SOURCE_HOME, fileName), "UTF-8")) {
      ArrayList<String> strList = new ArrayList<>();
      while (file.hasNextLine()) {
        String line = file.nextLine();
        if (line.length()<2) {
          continue;
        }
        String[] param = line.split("=");
        strList.add(param[0]);
        strList.add(param[1]);
      }
      return strList.toArray(new String[0]);
    }
  }
}

View File

@ -0,0 +1,257 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.facet;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.analytics.util.MedianCalculator;
import org.apache.solr.analytics.util.PercentileCalculator;
import org.apache.solr.request.SolrQueryRequest;
import com.google.common.collect.ObjectArrays;
import org.apache.solr.util.ExternalPaths;
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
public class AbstractAnalyticsFacetTest extends SolrTestCaseJ4 {
protected static final HashMap<String,Object> defaults = new HashMap<String,Object>();
protected String latestType = "";
/**
 * Extracts the XML fragment for one facet value from a raw response string.
 * First narrows the response to the {@code requestName} request and the
 * {@code facetType} list inside it, then pulls out the {@code facet} sub-list.
 * Returns the fragment plus a trailing space, or just " " if the facet is
 * absent. NOTE(review): relies on the exact indentation of Solr's XML writer
 * (the literal spaces in the search strings) — brittle against format changes.
 */
public String getFacetXML(String response, String requestName, String facetType, String facet) {
String cat = "\n <lst name=\""+requestName+"\">";
String begin = " <lst name=\""+facetType+"\">\n";
String end = "\n </lst>";
// Locate the facet-type list within this request's section.
int beginInt = response.indexOf(begin, response.indexOf(cat))+begin.length();
int endInt = response.indexOf(end, beginInt);
String fieldStr = response.substring(beginInt, endInt);
// Now locate the individual facet value within that list.
begin = " <lst name=\""+facet+"\">";
end = "\n </lst>";
beginInt = fieldStr.indexOf(begin);
endInt = fieldStr.indexOf(end, beginInt);
String facetStr = "";
if (beginInt>=0) {
facetStr = fieldStr.substring(beginInt+begin.length(),endInt);
}
// Trailing space keeps downstream substring parsing from hitting index -1.
return facetStr+" ";
}
/** Increments the element at {@code idx} in place. */
public static void increment(List<Long> list, int idx){
  // Single set() replaces the original remove-then-insert pair, which shifted
  // the tail of the list twice for no benefit.
  list.set(idx, list.get(idx) + 1);
}
/**
 * Drops key/value pairs whose value equals one of the "default"/"missing"
 * sentinels used by the test data generator; returns the surviving pairs as a
 * flat alternating key/value array.
 */
public static String[] filter(String...args){
  ArrayList<String> kept = new ArrayList<String>();
  for (int pair = 0; pair < args.length; pair += 2) {
    String value = args[pair+1];
    boolean isDefault =
        value.equals("0") || value.equals("0.0") ||
        value.equals("1800-12-31T23:59:59Z") || value.equals("str0") ||
        value.equals("this is the firststr0") ||
        value.equals("this is the secondstr0");
    if (!isDefault) {
      kept.add(args[pair]);
      kept.add(value);
    }
  }
  return kept.toArray(new String[0]);
}
// Records the field-type key most recently under test; read back when a
// "missing" statistic needs the matching default value from the defaults map.
protected void setLatestType(String latestType) {
this.latestType = latestType;
}
/**
 * Scans a facet XML fragment for every {@code <type name="name">value</type>}
 * element and parses the values into a list of the matching Java type
 * (String for anything other than double/int/long/float).
 * Raw types are deliberate: the element type is selected at runtime.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public ArrayList xmlToList(String facit, String type, String name) {
ArrayList list;
if (type.equals("double")) {
list = new ArrayList<Double>();
} else if (type.equals("int")) {
list = new ArrayList<Integer>();
} else if (type.equals("long")) {
list = new ArrayList<Long>();
} else if (type.equals("float")) {
list = new ArrayList<Float>();
} else {
list = new ArrayList<String>();
}
String find = "<"+type+" name=\""+name+"\">";
String endS = "</"+type+">";
// indexOf returns -1 when absent, so findAt == find.length()-1 < find.length()
// and the loop is skipped; each iteration advances past the previous match.
int findAt = facit.indexOf(find)+find.length();
while (findAt>find.length()) {
int end = facit.indexOf(endS, findAt);
if (type.equals("double")) {
list.add(Double.parseDouble(facit.substring(findAt, end)));
} else if (type.equals("int")) {
list.add(Integer.parseInt(facit.substring(findAt, end)));
} else if (type.equals("long")) {
list.add(Long.parseLong(facit.substring(findAt, end)));
} else if (type.equals("float")) {
list.add(Float.parseFloat(facit.substring(findAt, end)));
} else {
list.add(facit.substring(findAt, end));
}
findAt = facit.indexOf(find, end)+find.length();
}
return list;
}
/**
 * Computes a numeric statistic for each facet-value list in {@code lists}.
 *
 * @param lists one list of values per facet value
 * @param stat one of "median", "mean", "sum", "sumOfSquares", "stddev"
 * @return a list of doubles, one per input list, in the same order
 * @throws IllegalArgumentException for an unknown stat name
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <T extends Number & Comparable<T>> ArrayList calculateNumberStat(ArrayList<ArrayList<T>> lists, String stat) {
  ArrayList result;
  if (stat.equals("median")) {
    result = new ArrayList<Double>();
    for (List<T> list : lists) {
      result.add(MedianCalculator.getMedian(list));
    }
  } else if (stat.equals("mean")) {
    result = new ArrayList<Double>();
    for (List<T> list : lists) {
      double d = 0;
      for (T element : list) {
        d += element.doubleValue();
      }
      result.add(d/list.size());
    }
  } else if (stat.equals("sum")) {
    result = new ArrayList<Double>();
    for (Collection<T> list : lists) {
      double d = 0;
      for (T element : list) {
        d += element.doubleValue();
      }
      result.add(d);
    }
  } else if (stat.equals("sumOfSquares")) {
    result = new ArrayList<Double>();
    for (List<T> list : lists) {
      double d = 0;
      for (T element : list) {
        d += element.doubleValue()*element.doubleValue();
      }
      result.add(d);
    }
  } else if (stat.equals("stddev")) {
    result = new ArrayList<Double>();
    for (List<T> list : lists) {
      double sum = 0;
      double sumSquares = 0;
      for (T element : list) {
        sum += element.doubleValue();
        sumSquares += element.doubleValue()*element.doubleValue();
      }
      // Population stddev: sqrt(E[x^2] - E[x]^2). The original round-tripped
      // through Double.toString/parseDouble, which is exact and thus a no-op.
      result.add(Math.sqrt(sumSquares/list.size()-sum*sum/(list.size()*list.size())));
    }
  } else {
    throw new IllegalArgumentException();
  }
  return result;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public <T extends Comparable<T>> ArrayList calculateStat(ArrayList<ArrayList<T>> lists, String stat) {
  // Non-numeric aggregates over each inner list. Returns null for a stat name
  // no branch recognizes (callers are expected to pass a known one).
  ArrayList result;
  if (stat.contains("perc_")) {
    // "perc_NN" -> NN-th percentile of every non-empty list.
    double[] quantile = new double[]{ Double.parseDouble(stat.substring(5)) / 100 };
    result = new ArrayList<T>();
    for (List<T> vals : lists) {
      if (vals.size() == 0) continue;
      result.add(PercentileCalculator.getPercentiles(vals, quantile).get(0));
    }
  } else if (stat.equals("count")) {
    result = new ArrayList<Long>();
    for (List<T> vals : lists) {
      result.add((long) vals.size());
    }
  } else if (stat.equals("missing")) {
    // Delegates to calculateMissing using the most recently processed type.
    result = new ArrayList<Long>();
    for (ArrayList<T> vals : lists) {
      if (vals.size() == 0) continue;
      result.add(calculateMissing(vals, latestType));
    }
  } else if (stat.equals("unique")) {
    result = new ArrayList<Long>();
    for (List<T> vals : lists) {
      HashSet<T> distinct = new HashSet<T>(vals);
      result.add((long) distinct.size());
    }
  } else if (stat.equals("max")) {
    result = new ArrayList<T>();
    for (List<T> vals : lists) {
      if (vals.size() == 0) continue;
      Collections.sort(vals); // NOTE: mutates the caller's list, as before
      result.add(vals.get(vals.size() - 1));
    }
  } else if (stat.equals("min")) {
    result = new ArrayList<T>();
    for (List<T> vals : lists) {
      if (vals.size() == 0) continue;
      Collections.sort(vals); // NOTE: mutates the caller's list, as before
      result.add(vals.get(0));
    }
  } else {
    result = null;
  }
  return result;
}
@SuppressWarnings("unchecked")
public <T extends Comparable<T>> Long calculateMissing(ArrayList<T> list, String type) {
  // Counts elements equal to the default value registered for this field
  // type in 'defaults'; those stand in for "missing" values in these tests.
  T placeholder = (T) defaults.get(type);
  long count = 0;
  for (T value : list) {
    if (value.compareTo(placeholder) == 0) {
      count++;
    }
  }
  return Long.valueOf(count);
}
/** Builds a Solr request from {@link #BASEPARMS} followed by the given name/value pairs. */
public static SolrQueryRequest request(String...args){
  return SolrTestCaseJ4.req( ObjectArrays.concat(BASEPARMS, args,String.class) );
}
// Baseline params for every analytics test request: match all docs, return no
// rows, and enable the olap (analytics) component.
public static final String[] BASEPARMS = new String[]{ "q", "*:*", "indent", "true", "olap", "true", "rows", "0" };
/**
 * Reads a request file of {@code name=value} lines (relative to the source home)
 * and flattens it into an alternating {@code [name, value, name, value, ...]}
 * array suitable for {@link #request(String...)}.
 *
 * @param fileName path of the request file relative to {@code ExternalPaths.SOURCE_HOME}
 * @return alternating parameter names and values
 * @throws FileNotFoundException if the request file does not exist
 */
public static String[] fileToStringArr(String fileName) throws FileNotFoundException {
  // try-with-resources: the original leaked the Scanner (and its file handle).
  try (Scanner file = new Scanner(new File(ExternalPaths.SOURCE_HOME, fileName), "UTF-8")) {
    ArrayList<String> strList = new ArrayList<String>();
    while (file.hasNextLine()) {
      String line = file.nextLine();
      if (line.length() < 2) {
        continue; // skip blank/filler lines
      }
      // limit=2: split only on the first '=' so values containing '='
      // (e.g. range queries) are not truncated.
      String[] param = line.split("=", 2);
      strList.add(param[0]);
      strList.add(param[1]);
    }
    return strList.toArray(new String[0]);
  }
}
}

View File

@ -0,0 +1,190 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.facet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.junit.BeforeClass;
import org.junit.Test;
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
/**
 * Exercises the extra field-facet options of the analytics component — limit,
 * offset, and sort — using the requests in fieldFacetExtras.txt. The expected
 * values are rebuilt in memory while indexing in {@link #beforeClass()}.
 */
public class FieldFacetExtrasTest extends AbstractAnalyticsFacetTest {
  // Request file (relative to source home) describing the facet requests.
  static String fileName = "core/src/test-files/analytics/requestFiles/fieldFacetExtras.txt";
  // Moduli used to generate field values; each field cycles with its own period.
  public static final int INT = 21;
  public static final int LONG = 22;
  public static final int FLOAT = 23;
  public static final int DOUBLE = 24;
  public static final int DATE = 25;
  public static final int STRING = 26;
  public static final int NUM_LOOPS = 100;
  //INT
  // Expected per-facet-bucket int_id values, keyed by bucket index of the
  // facet field (long, float, double, string respectively).
  static ArrayList<ArrayList<Integer>> intLongTestStart;
  static ArrayList<ArrayList<Integer>> intFloatTestStart;
  static ArrayList<ArrayList<Integer>> intDoubleTestStart;
  static ArrayList<ArrayList<Integer>> intStringTestStart;
  // Raw XML response for the whole request file, queried once for all tests.
  static String response;
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-basic.xml","schema-analytics.xml");
    h.update("<delete><query>*:*</query></delete>");
    //INT
    intLongTestStart = new ArrayList<ArrayList<Integer>>();
    intFloatTestStart = new ArrayList<ArrayList<Integer>>();
    intDoubleTestStart = new ArrayList<ArrayList<Integer>>();
    intStringTestStart = new ArrayList<ArrayList<Integer>>();
    for (int j = 0; j < NUM_LOOPS; ++j) {
      int i = j%INT;
      long l = j%LONG;
      float f = j%FLOAT;
      double d = j%DOUBLE;
      int dt = j%DATE;
      int s = j%STRING;
      assertU(adoc("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
          "double_dd", "" + d, "date_dtd", (1800+dt) + "-12-31T23:59:59.999Z", "string_sd", "abc" + s));
      //Long
      if (j-LONG<0) {
        ArrayList<Integer> list1 = new ArrayList<Integer>();
        list1.add(i);
        intLongTestStart.add(list1);
      } else {
        intLongTestStart.get((int)l).add(i);
      }
      //Float
      if (j-FLOAT<0) {
        ArrayList<Integer> list1 = new ArrayList<Integer>();
        list1.add(i);
        intFloatTestStart.add(list1);
      } else {
        intFloatTestStart.get((int)f).add(i);
      }
      //Double
      if (j-DOUBLE<0) {
        ArrayList<Integer> list1 = new ArrayList<Integer>();
        list1.add(i);
        intDoubleTestStart.add(list1);
      } else {
        intDoubleTestStart.get((int)d).add(i);
      }
      //String
      if (j-STRING<0) {
        ArrayList<Integer> list1 = new ArrayList<Integer>();
        list1.add(i);
        intStringTestStart.add(list1);
      } else {
        intStringTestStart.get(s).add(i);
      }
      if (usually()) {
        commit(); // to have several segments
      }
    }
    assertU(commit());
    response = h.query(request(fileToStringArr(fileName)));
  }
  /** Each 'lr' facet was requested with a limit; only that many buckets should return. */
  @SuppressWarnings("unchecked")
  @Test
  public void limitTest() throws Exception {
    String longLimit = getFacetXML(response, "lr", "fieldFacets", "long_ld");
    Collection<Double> lon = (ArrayList<Double>)xmlToList(longLimit, "double", "mean");
    assertEquals(lon.size(),5);
    String floatLimit = getFacetXML(response, "lr", "fieldFacets", "float_fd");
    Collection<Double> flo = (ArrayList<Double>)xmlToList(floatLimit, "double", "median");
    assertEquals(flo.size(),3);
    String doubleLimit = getFacetXML(response, "lr", "fieldFacets", "double_dd");
    Collection<Long> doub = (ArrayList<Long>)xmlToList(doubleLimit, "long", "count");
    assertEquals(doub.size(),7);
    String stringLimit = getFacetXML(response, "lr", "fieldFacets", "string_sd");
    Collection<Integer> string = (ArrayList<Integer>)xmlToList(stringLimit, "int", "percentile_20");
    assertEquals(string.size(),1);
  }
  /**
   * The 'off0'/'off1'/'off2' facets page through the long_ld buckets with
   * offsets; stitched together they must equal the unpaged 'offAll' result.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void offsetTest() throws Exception {
    String xml;
    Collection<Double> lon;
    List<Double> all = new ArrayList<Double>();
    xml = getFacetXML(response, "off0", "fieldFacets", "long_ld");
    lon = (ArrayList<Double>)xmlToList(xml, "double", "mean");
    assertEquals(lon.size(),2);
    assertArrayEquals(new Double[]{ 1.5, 2.0 }, lon.toArray(new Double[0]));
    all.addAll(lon);
    xml = getFacetXML(response, "off1", "fieldFacets", "long_ld");
    lon = (ArrayList<Double>)xmlToList(xml, "double", "mean");
    assertEquals(lon.size(),2);
    assertArrayEquals(new Double[]{ 3.0, 4.0 }, lon.toArray(new Double[0]));
    all.addAll(lon);
    xml = getFacetXML(response, "off2", "fieldFacets", "long_ld");
    lon = (ArrayList<Double>)xmlToList(xml, "double", "mean");
    assertEquals(lon.size(),3);
    assertArrayEquals(new Double[]{ 5.0, 5.75, 6.0 }, lon.toArray(new Double[0]));
    all.addAll(lon);
    xml = getFacetXML(response, "offAll", "fieldFacets", "long_ld");
    lon = (ArrayList<Double>)xmlToList(xml, "double", "mean");
    assertEquals(lon.size(),7);
    assertArrayEquals(all.toArray(new Double[0]), lon.toArray(new Double[0]));
  }
  /**
   * The 'sr' facets are requested sorted by their stat value; recompute the
   * stat locally, sort (ascending or descending to match the request file —
   * TODO confirm directions against fieldFacetExtras.txt), and compare order.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void sortTest() throws Exception {
    String longSort = getFacetXML(response, "sr", "fieldFacets", "long_ld");
    Collection<Double> lon = (ArrayList<Double>)xmlToList(longSort, "double", "mean");
    ArrayList<Double> longTest = calculateNumberStat(intLongTestStart, "mean");
    Collections.sort(longTest);
    assertEquals(longTest,lon);
    String floatSort = getFacetXML(response, "sr", "fieldFacets", "float_fd");
    Collection<Double> flo = (ArrayList<Double>)xmlToList(floatSort, "double", "median");
    ArrayList<Double> floatTest = calculateNumberStat(intFloatTestStart, "median");
    Collections.sort(floatTest,Collections.reverseOrder());
    assertEquals(floatTest,flo);
    String doubleSort = getFacetXML(response, "sr", "fieldFacets", "double_dd");
    Collection<Long> doub = (ArrayList<Long>)xmlToList(doubleSort, "long", "count");
    ArrayList<Long> doubleTest = (ArrayList<Long>)calculateStat(intDoubleTestStart, "count");
    Collections.sort(doubleTest);
    assertEquals(doubleTest,doub);
    String stringSort = getFacetXML(response, "sr", "fieldFacets", "string_sd");
    Collection<Integer> string = (ArrayList<Integer>)xmlToList(stringSort, "int", "percentile_20");
    ArrayList<Integer> stringTest = (ArrayList<Integer>)calculateStat(intStringTestStart, "perc_20");
    Collections.sort(stringTest,Collections.reverseOrder());
    assertEquals(stringTest,string);
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,131 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.facet;
import java.util.ArrayList;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.junit.BeforeClass;
import org.junit.Test;
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
/**
 * Exercises query facets of the analytics component using the requests in
 * queryFacets.txt; expected values are rebuilt in memory while indexing.
 */
public class QueryFacetTest extends AbstractAnalyticsFacetTest {
  // Request file (relative to source home) describing the query facet requests.
  static String fileName = "core/src/test-files/analytics/requestFiles/queryFacets.txt";
  // Moduli used to generate field values; each field cycles with its own period.
  public final int INT = 71;
  public final int LONG = 36;
  public final int FLOAT = 93;
  public final int DOUBLE = 49;
  public final int DATE = 12;
  public final int STRING = 28;
  public final int NUM_LOOPS = 100;
  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig-basic.xml","schema-analytics.xml");
  }
  @SuppressWarnings("unchecked")
  @Test
  public void queryTest() throws Exception {
    h.update("<delete><query>*:*</query></delete>");
    //INT
    ArrayList<ArrayList<Integer>> int1TestStart = new ArrayList<ArrayList<Integer>>();
    int1TestStart.add(new ArrayList<Integer>());
    ArrayList<ArrayList<Integer>> int2TestStart = new ArrayList<ArrayList<Integer>>();
    int2TestStart.add(new ArrayList<Integer>());
    //LONG
    ArrayList<ArrayList<Long>> longTestStart = new ArrayList<ArrayList<Long>>();
    longTestStart.add(new ArrayList<Long>());
    longTestStart.add(new ArrayList<Long>());
    //FLOAT
    ArrayList<ArrayList<Float>> floatTestStart = new ArrayList<ArrayList<Float>>();
    floatTestStart.add(new ArrayList<Float>());
    floatTestStart.add(new ArrayList<Float>());
    floatTestStart.add(new ArrayList<Float>());
    for (int j = 0; j < NUM_LOOPS; ++j) {
      int i = j%INT;
      long l = j%LONG;
      float f = j%FLOAT;
      double d = j%DOUBLE;
      int dt = j%DATE;
      int s = j%STRING;
      // Hoisted out of the three uses below; Integer.toString(s) replaces the
      // deprecated-boxing idiom new Integer(s).toString().
      char sChar = Integer.toString(s).charAt(0);
      assertU(adoc("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
          "double_dd", "" + d, "date_dtd", (1800+dt) + "-12-31T23:59:59.999Z", "string_sd", "abc" + sChar));
      // Mirror each query facet's filter so the expected buckets match.
      if (f<=50) {
        int1TestStart.get(0).add(i);
      }
      if (f<=30) {
        int2TestStart.get(0).add(i);
      }
      if (sChar=='1') {
        longTestStart.get(0).add(l);
      }
      if (sChar=='2') {
        longTestStart.get(1).add(l);
      }
      if (l>=20) {
        floatTestStart.get(0).add(f);
      }
      if (l>=30) {
        floatTestStart.get(1).add(f);
      }
      if (d<=50) {
        floatTestStart.get(2).add(f);
      }
      if (usually()) {
        commit(); // to have several segments
      }
    }
    assertU(commit());
    //Query ascending tests
    String response = h.query(request(fileToStringArr(fileName)));
    //Int One
    String int1Query = getFacetXML(response, "ir", "queryFacets", "float1");
    ArrayList<Double> int1 = (ArrayList<Double>)xmlToList(int1Query, "double", "sum");
    ArrayList<Double> int1Test = calculateNumberStat(int1TestStart, "sum");
    assertEquals(int1,int1Test);
    //Int Two
    String int2Query = getFacetXML(response, "ir", "queryFacets", "float2");
    ArrayList<Integer> int2 = (ArrayList<Integer>)xmlToList(int2Query, "int", "percentile_8");
    ArrayList<Integer> int2Test = (ArrayList<Integer>)calculateStat(int2TestStart, "perc_8");
    assertEquals(int2,int2Test);
    //Long
    String long1Query = getFacetXML(response, "lr", "queryFacets", "string");
    ArrayList<Double> long1 = (ArrayList<Double>)xmlToList(long1Query, "double", "median");
    ArrayList<Double> long1Test = calculateNumberStat(longTestStart, "median");
    assertEquals(long1,long1Test);
    //Float
    String float1Query = getFacetXML(response, "fr", "queryFacets", "lad");
    ArrayList<Double> float1 = (ArrayList<Double>)xmlToList(float1Query, "double", "mean");
    ArrayList<Double> float1Test = calculateNumberStat(floatTestStart, "mean");
    assertEquals(float1,float1Test);
  }
}

View File

@ -0,0 +1,467 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.facet;
import java.util.ArrayList;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.junit.BeforeClass;
import org.junit.Test;
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
public class RangeFacetTest extends AbstractAnalyticsFacetTest {
static String fileName = "core/src/test-files/analytics/requestFiles/rangeFacets.txt";
public static final int INT = 71;
public static final int LONG = 36;
public static final int FLOAT = 93;
public static final int DOUBLE = 48;
public static final int DATE = 52;
public static final int STRING = 28;
public static final int NUM_LOOPS = 100;
//INT
static ArrayList<ArrayList<Integer>> intLongTestStart;
static ArrayList<ArrayList<Integer>> intDoubleTestStart;
static ArrayList<ArrayList<Integer>> intDateTestStart;
//FLOAT
static ArrayList<ArrayList<Float>> floatLongTestStart;
static ArrayList<ArrayList<Float>> floatDoubleTestStart;
static ArrayList<ArrayList<Float>> floatDateTestStart;
static String response;
@BeforeClass
/**
 * Indexes NUM_LOOPS docs with cyclic field values and records, per facet
 * bucket (one inner list per distinct long/double/date value), the int_id
 * and float_fd values that landed there. Queries rangeFacets.txt once.
 */
public static void beforeClass() throws Exception {
  initCore("solrconfig-basic.xml","schema-analytics.xml");
  h.update("<delete><query>*:*</query></delete>");
  //INT
  intLongTestStart = new ArrayList<ArrayList<Integer>>();
  intDoubleTestStart = new ArrayList<ArrayList<Integer>>();
  intDateTestStart = new ArrayList<ArrayList<Integer>>();
  //FLOAT
  floatLongTestStart = new ArrayList<ArrayList<Float>>();
  floatDoubleTestStart = new ArrayList<ArrayList<Float>>();
  floatDateTestStart = new ArrayList<ArrayList<Float>>();
  for (int j = 0; j < NUM_LOOPS; ++j) {
    int i = j%INT;
    long l = j%LONG;
    float f = j%FLOAT;
    double d = j%DOUBLE;
    int dt = j%DATE;
    int s = j%STRING;
    assertU(adoc("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
        "double_dd", "" + d, "date_dtd", (1000+dt) + "-01-01T23:59:59Z", "string_sd", "abc" + s));
    //Longs
    // First LONG docs create the buckets (one per distinct value); later docs
    // append to the existing bucket for their value.
    if (j-LONG<0) {
      ArrayList<Integer> list1 = new ArrayList<Integer>();
      list1.add(i);
      intLongTestStart.add(list1);
      ArrayList<Float> list2 = new ArrayList<Float>();
      list2.add(f);
      floatLongTestStart.add(list2);
    } else {
      intLongTestStart.get((int)l).add(i);
      floatLongTestStart.get((int)l).add(f);
    }
    //Doubles
    if (j-DOUBLE<0) {
      ArrayList<Integer> list1 = new ArrayList<Integer>();
      list1.add(i);
      intDoubleTestStart.add(list1);
      ArrayList<Float> list2 = new ArrayList<Float>();
      list2.add(f);
      floatDoubleTestStart.add(list2);
    } else {
      intDoubleTestStart.get((int)d).add(i);
      floatDoubleTestStart.get((int)d).add(f);
    }
    //Dates
    if (j-DATE<0) {
      ArrayList<Integer> list1 = new ArrayList<Integer>();
      list1.add(i);
      intDateTestStart.add(list1);
      ArrayList<Float> list2 = new ArrayList<Float>();
      list2.add(f);
      floatDateTestStart.add(list2);
    } else {
      intDateTestStart.get(dt).add(i);
      floatDateTestStart.get(dt).add(f);
    }
    if (usually()) {
      assertU(commit()); // to have several segments
    }
  }
  assertU(commit());
  response = h.query(request(fileToStringArr(fileName)));
}
@SuppressWarnings("unchecked")
/**
 * Range facets with a soft end (hardend=false, first boolean to
 * transformLists): expected buckets are rebuilt with the same
 * start/end/gap and include-flag combination each request used.
 * NOTE(review): several result lists are declared with an element type that
 * does not match the requested XML type (e.g. ArrayList&lt;Double&gt; holding
 * parsed "long" values) — this only works through raw/unchecked assignment.
 */
@Test
public void rangeTest() throws Exception {
  //Int Long
  String intLongRange = getFacetXML(response, "ri", "rangeFacets", "long_ld");
  ArrayList<Double> intLong = (ArrayList<Double>)xmlToList(intLongRange, "long", "count");
  ArrayList<Long> intLongTest = calculateStat(transformLists(intLongTestStart, 5, 30, 5
      , false, true, false, false, false), "count");
  assertEquals(intLong,intLongTest);
  //Int Double
  String intDoubleRange = getFacetXML(response, "ri", "rangeFacets", "double_dd");
  ArrayList<Double> intDouble = (ArrayList<Double>)xmlToList(intDoubleRange, "double", "mean");
  ArrayList<Double> intDoubleTest = calculateNumberStat(transformLists(intDoubleTestStart, 3, 39, 7
      , false, false, true, false, true), "mean");
  assertEquals(intDouble,intDoubleTest);
  //Int Date
  String intDateRange = getFacetXML(response, "ri", "rangeFacets", "date_dtd");
  ArrayList<Long> intDate = (ArrayList<Long>)xmlToList(intDateRange, "long", "count");
  ArrayList<Long> intDateTest = (ArrayList<Long>)calculateStat(transformLists(intDateTestStart, 7, 44, 7
      , false, true, false, true, true), "count");
  assertEquals(intDate,intDateTest);
  //Float Long
  String floatLongRange = getFacetXML(response, "rf", "rangeFacets", "long_ld");
  ArrayList<Double> floatLong = (ArrayList<Double>)xmlToList(floatLongRange, "double", "median");
  ArrayList<Double> floatLongTest = calculateNumberStat(transformLists(floatLongTestStart, 0, 29, 4
      , false, true, true, true, true), "median");
  assertEquals(floatLong,floatLongTest);
  //Float Double
  String floatDoubleRange = getFacetXML(response, "rf", "rangeFacets", "double_dd");
  ArrayList<Long> floatDouble = (ArrayList<Long>)xmlToList(floatDoubleRange, "long", "count");
  ArrayList<Long> floatDoubleTest = (ArrayList<Long>)calculateStat(transformLists(floatDoubleTestStart, 4, 47, 11
      , false, false, false, true, false), "count");
  assertEquals(floatDouble,floatDoubleTest);
  //Float Date
  String floatDateRange = getFacetXML(response, "rf", "rangeFacets", "date_dtd");
  ArrayList<Double> floatDate = (ArrayList<Double>)xmlToList(floatDateRange, "double", "sumOfSquares");
  ArrayList<Double> floatDateTest = calculateNumberStat(transformLists(floatDateTestStart, 4, 46, 5
      , false, false, true, true, false), "sumOfSquares");
  assertEquals(floatDate,floatDateTest);
}
@SuppressWarnings("unchecked")
/**
 * Same fields and ranges as {@link #rangeTest()} but with hardend=true
 * (first boolean to transformLists), i.e. the last bucket is clipped at
 * 'end' instead of being extended to a full gap.
 */
@Test
public void hardendRangeTest() throws Exception {
  //Int Long
  String intLongRange = getFacetXML(response, "hi", "rangeFacets", "long_ld");
  ArrayList<Double> intLong = (ArrayList<Double>)xmlToList(intLongRange, "double", "sum");
  ArrayList<Double> intLongTest = calculateNumberStat(transformLists(intLongTestStart, 5, 30, 5
      , true, true, false, false, false), "sum");
  assertEquals(intLong,intLongTest);
  //Int Double
  String intDoubleRange = getFacetXML(response, "hi", "rangeFacets", "double_dd");
  ArrayList<Double> intDouble = (ArrayList<Double>)xmlToList(intDoubleRange, "double", "mean");
  ArrayList<Double> intDoubleTest = calculateNumberStat(transformLists(intDoubleTestStart, 3, 39, 7
      , true, false, true, false, true), "mean");
  assertEquals(intDouble,intDoubleTest);
  //Int Date
  String intDateRange = getFacetXML(response, "hi", "rangeFacets", "date_dtd");
  ArrayList<Long> intDate = (ArrayList<Long>)xmlToList(intDateRange, "long", "count");
  ArrayList<Long> intDateTest = (ArrayList<Long>)calculateStat(transformLists(intDateTestStart, 7, 44, 7
      , true, true, false, true, true), "count");
  assertEquals(intDate,intDateTest);
  //Float Long
  String floatLongRange = getFacetXML(response, "hf", "rangeFacets", "long_ld");
  ArrayList<Double> floatLong = (ArrayList<Double>)xmlToList(floatLongRange, "double", "median");
  ArrayList<Double> floatLongTest = calculateNumberStat(transformLists(floatLongTestStart, 0, 29, 4
      , true, true, true, true, true), "median");
  assertEquals(floatLong,floatLongTest);
  //Float Double
  String floatDoubleRange = getFacetXML(response, "hf", "rangeFacets", "double_dd");
  ArrayList<Long> floatDouble = (ArrayList<Long>)xmlToList(floatDoubleRange, "long", "count");
  ArrayList<Long> floatDoubleTest = (ArrayList<Long>)calculateStat(transformLists(floatDoubleTestStart, 4, 47, 11
      , true, false, false, true, false), "count");
  assertEquals(floatDouble,floatDoubleTest);
  //Float Date
  String floatDateRange = getFacetXML(response, "hf", "rangeFacets", "date_dtd");
  ArrayList<Double> floatDate = (ArrayList<Double>)xmlToList(floatDateRange, "double", "sumOfSquares");
  ArrayList<Double> floatDateTest = calculateNumberStat(transformLists(floatDateTestStart, 4, 46, 5
      , true, false, true, true, false), "sumOfSquares");
  assertEquals(floatDate,floatDateTest);
}
@SuppressWarnings("unchecked")
/**
 * Range facets whose gap is a comma-separated list (e.g. "4,2,6,3"): each
 * gap is consumed in turn and the last one repeats. Expected buckets are
 * rebuilt with the String-gap transformLists overload using the same
 * start/end and include-flag combination each request used.
 */
@Test
public void multiGapTest() throws Exception {
  //Int Long
  String intLongRange = getFacetXML(response, "mi", "rangeFacets", "long_ld");
  ArrayList<Double> intLong = (ArrayList<Double>)xmlToList(intLongRange, "double", "sum");
  ArrayList<Double> intLongTest = calculateNumberStat(transformLists(intLongTestStart, 5, 30, "4,2,6,3"
      , false, true, false, false, false), "sum");
  assertEquals(intLong,intLongTest);
  //Int Double
  String intDoubleRange = getFacetXML(response, "mi", "rangeFacets", "double_dd");
  ArrayList<Double> intDouble = (ArrayList<Double>)xmlToList(intDoubleRange, "double", "mean");
  ArrayList<Double> intDoubleTest = calculateNumberStat(transformLists(intDoubleTestStart, 3, 39, "3,1,7"
      , false, false, true, false, true), "mean");
  assertEquals(intDouble,intDoubleTest);
  //Int Date
  String intDateRange = getFacetXML(response, "mi", "rangeFacets", "date_dtd");
  ArrayList<Long> intDate = (ArrayList<Long>)xmlToList(intDateRange, "long", "count");
  ArrayList<Long> intDateTest = (ArrayList<Long>)calculateStat(transformLists(intDateTestStart, 7, 44, "2,7"
      , false, true, false, true, true), "count");
  assertEquals(intDate,intDateTest);
  //Float Long
  String floatLongRange = getFacetXML(response, "mf", "rangeFacets", "long_ld");
  ArrayList<Double> floatLong = (ArrayList<Double>)xmlToList(floatLongRange, "double", "median");
  // stray ";;" (empty statement) removed from the end of the next statement
  ArrayList<Double> floatLongTest = calculateNumberStat(transformLists(floatLongTestStart, 0, 29, "1,4"
      , false, true, true, true, true), "median");
  assertEquals(floatLong,floatLongTest);
  //Float Double
  String floatDoubleRange = getFacetXML(response, "mf", "rangeFacets", "double_dd");
  ArrayList<Long> floatDouble = (ArrayList<Long>)xmlToList(floatDoubleRange, "long", "count");
  ArrayList<Long> floatDoubleTest = (ArrayList<Long>)calculateStat(transformLists(floatDoubleTestStart, 4, 47, "2,3,11"
      , false, false, false, true, false), "count");
  assertEquals(floatDouble,floatDoubleTest);
  //Float Date
  String floatDateRange = getFacetXML(response, "mf", "rangeFacets", "date_dtd");
  ArrayList<Double> floatDate = (ArrayList<Double>)xmlToList(floatDateRange, "double", "sumOfSquares");
  ArrayList<Double> floatDateTest = calculateNumberStat(transformLists(floatDateTestStart, 4, 46, "4,5"
      , false, false, true, true, false), "sumOfSquares");
  assertEquals(floatDate,floatDateTest);
}
/**
 * Constant-gap convenience overload. A fixed-width bucketing is exactly a
 * multi-gap bucketing with a single gap value (bigGap=0, last=gap, same
 * end-rounding and iteration), so delegate to the String-gap implementation
 * instead of duplicating ~90 lines of bucketing logic.
 *
 * @param listsStart per-value source lists (index == field value)
 * @param start      first value included in the bucketed range
 * @param end        last value of the range (rounded up to a full gap unless hardend)
 * @param gap        constant bucket width
 * @param hardend    clip the final bucket at 'end' instead of padding to a full gap
 * @param incLow     buckets include their lower bound
 * @param incUp      buckets include their upper bound
 * @param incEdge    include the range edges when the corresponding bound is excluded
 * @param incOut     the before/after buckets include the range boundary values
 * @return the per-bucket lists, followed by before/after/between lists when non-empty
 */
private <T> ArrayList<ArrayList<T>> transformLists(ArrayList<ArrayList<T>> listsStart, int start, int end, int gap
    , boolean hardend, boolean incLow, boolean incUp, boolean incEdge, boolean incOut) {
  return transformLists(listsStart, start, end, Integer.toString(gap), hardend, incLow, incUp, incEdge, incOut);
}
/**
 * Rebuilds the expected range-facet buckets from the raw per-value lists.
 * The gap string is comma-separated bucket widths; each width is consumed
 * in turn and the last one repeats for the remaining buckets. Output is the
 * per-bucket lists in range order, then (when non-empty) a "before" list,
 * an "after" list, and a "between" list covering the whole range — these
 * presumably mirror the facet.range other=before/after/between buckets of
 * the analytics response (TODO confirm against the request files).
 *
 * @param listsStart per-value source lists (index == field value)
 * @param start      first value of the bucketed range
 * @param end        last value of the range (rounded up to a full final gap unless hardend)
 * @param gapString  comma-separated bucket widths, e.g. "4,2,6,3"
 * @param hardend    clip the final bucket at 'end' instead of padding it
 * @param incLow     buckets include their lower bound
 * @param incUp      buckets include their upper bound
 * @param incEdge    include a range edge when the matching bound is excluded
 * @param incOut     before/after lists include the range boundary values
 */
private <T> ArrayList<ArrayList<T>> transformLists(ArrayList<ArrayList<T>> listsStart, int start, int end, String gapString
    , boolean hardend, boolean incLow, boolean incUp, boolean incEdge, boolean incOut) {
  String[] stringGaps = gapString.split(",");
  int[] gaps = new int[stringGaps.length];
  for (int i = 0; i<gaps.length; i++) {
    gaps[i] = Integer.parseInt(stringGaps[i]);
  }
  // bigGap = sum of all gaps but the last; the last gap repeats, so the end
  // is rounded up to a whole multiple of it unless hardend is set.
  int bigGap = 0;
  int last = gaps[gaps.length-1];
  for (int i = 0; i<gaps.length-1; i++) {
    bigGap += gaps[i];
  }
  int off = (end-start-bigGap)%last;
  if (!hardend && off>0) {
    end+=last-off;
  }
  ArrayList<ArrayList<T>> lists = new ArrayList<ArrayList<T>>();
  ArrayList<T> between = new ArrayList<T>();
  int gap = 0;
  int gapCounter = 0;
  // Four copies of the bucket walk, differing only in whether the inner loop
  // includes the bucket's lower bound (start at i vs i+1) and upper bound
  // (j<= vs j<). 'gap' is advanced through gaps[] once per bucket; after the
  // array is exhausted the last gap keeps being reused.
  if (incLow && incUp) {
    for (int i = start; i<end && i<listsStart.size(); i+=gap) {
      if (gapCounter<gaps.length) {
        gap = gaps[gapCounter++];
      }
      ArrayList<T> list = new ArrayList<T>();
      for (int j = i; j<=i+gap && j<=end && j<listsStart.size(); j++) {
        list.addAll(listsStart.get(j));
      }
      lists.add(list);
    }
    for (int i = start; i<listsStart.size() && i<=end; i++) {
      between.addAll(listsStart.get(i));
    }
  } else if (incLow && !incUp) {
    for (int i = start; i<end && i<listsStart.size(); i+=gap) {
      if (gapCounter<gaps.length) {
        gap = gaps[gapCounter++];
      }
      ArrayList<T> list = new ArrayList<T>();
      for (int j = i; j<i+gap && j<end && j<listsStart.size(); j++) {
        list.addAll(listsStart.get(j));
      }
      lists.add(list);
    }
    for (int i = start; i<listsStart.size() && i<end; i++) {
      between.addAll(listsStart.get(i));
    }
  } else if (!incLow && incUp) {
    for (int i = start; i<end && i<listsStart.size(); i+=gap) {
      if (gapCounter<gaps.length) {
        gap = gaps[gapCounter++];
      }
      ArrayList<T> list = new ArrayList<T>();
      for (int j = i+1; j<=i+gap && j<=end && j<listsStart.size(); j++) {
        list.addAll(listsStart.get(j));
      }
      lists.add(list);
    }
    for (int i = start+1; i<listsStart.size() && i<=end; i++) {
      between.addAll(listsStart.get(i));
    }
  } else {
    for (int i = start; i<end && i<listsStart.size(); i+=gap) {
      if (gapCounter<gaps.length) {
        gap = gaps[gapCounter++];
      }
      ArrayList<T> list = new ArrayList<T>();
      for (int j = i+1; j<i+gap && j<end && j<listsStart.size(); j++) {
        list.addAll(listsStart.get(j));
      }
      lists.add(list);
    }
    for (int i = start+1; i<listsStart.size() && i<end; i++) {
      between.addAll(listsStart.get(i));
    }
  }
  // incEdge re-adds the excluded range edges to the first/last bucket.
  if (incEdge && !incLow && start>=0) {
    lists.get(0).addAll(listsStart.get(start));
    between.addAll(listsStart.get(start));
  }
  if (incEdge && !incUp && end<listsStart.size()) {
    lists.get(lists.size()-1).addAll(listsStart.get(end));
    between.addAll(listsStart.get(end));
  }
  // Values outside [start,end]; incOut (or an edge excluded by all flags)
  // decides whether the boundary value itself counts as outside.
  ArrayList<T> before = new ArrayList<T>();
  ArrayList<T> after = new ArrayList<T>();
  if (incOut || !(incLow||incEdge)) {
    for (int i = 0; i<=start; i++) {
      before.addAll(listsStart.get(i));
    }
  } else {
    for (int i = 0; i<start; i++) {
      before.addAll(listsStart.get(i));
    }
  }
  if (incOut || !(incUp||incEdge)) {
    for (int i = end; i<listsStart.size(); i++) {
      after.addAll(listsStart.get(i));
    }
  }
  else {
    for (int i = end+1; i<listsStart.size(); i++) {
      after.addAll(listsStart.get(i));
    }
  }
  if (before.size()>0) {
    lists.add(before);
  }
  if (after.size()>0) {
    lists.add(after);
  }
  if (between.size()>0) {
    lists.add(between);
  }
  return lists;
}
}

View File

@ -0,0 +1,231 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.util.valuesource;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.solr.analytics.AbstractAnalyticsStatsTest;
import org.apache.solr.analytics.facet.AbstractAnalyticsFacetTest;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
@SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Appending","Asserting"})
public class FunctionTest extends AbstractAnalyticsStatsTest {
static String fileName = "core/src/test-files/analytics/requestFiles/functions.txt";
static public final int INT = 71;
static public final int LONG = 36;
static public final int FLOAT = 93;
static public final int DOUBLE = 49;
static public final int DATE = 12;
static public final int STRING = 28;
static public final int NUM_LOOPS = 100;
static String response;
@BeforeClass
/**
 * Indexes NUM_LOOPS docs where each function result (add, mult, div, pow,
 * neg, date math, concat, reverse, constants) is also precomputed and stored
 * in its own field; tests then compare the stat over the expression with the
 * stat over the precomputed field. Queries functions.txt once.
 */
public static void beforeClass() throws Exception {
  initCore("solrconfig-basic.xml","schema-analytics.xml");
  h.update("<delete><query>*:*</query></delete>");
  for (int j = 0; j < NUM_LOOPS; ++j) {
    // +1 keeps values non-zero (they are used as divisors and pow bases).
    int i = j%INT+1;
    long l = j%LONG+1;
    float f = j%FLOAT+1;
    double d = j%DOUBLE+1;
    double d0 = j%DOUBLE;
    String dt = (1800+j%DATE) + "-06-30T23:59:59Z";
    String s = "str" + (j%STRING);
    // Precomputed expression results, one per generated field below.
    double add_if = (double)i+f;
    double add_ldf = (double)l+d+f;
    double mult_if = (double)i*f;
    double mult_ldf = (double)l*d*f;
    double div_if = (double)i/f;
    double div_ld = (double)l/d;
    double pow_if = Math.pow(i,f);
    double pow_ld = Math.pow(l,d);
    double neg_i = (double)i*-1;
    double neg_l = (double)l*-1;
    String dm_2y = (1802+j%DATE) + "-06-30T23:59:59Z"; // date + 2 years
    String dm_2m = (1800+j%DATE) + "-08-30T23:59:59Z"; // date + 2 months
    String concat_first = "this is the first"+s;
    String concat_second = "this is the second"+s;
    String rev = new StringBuilder(s).reverse().toString();
    assertU(adoc(AbstractAnalyticsFacetTest.filter("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
        "double_dd", "" + d, "date_dtd", dt, "string_sd", s,
        "add_if_dd", ""+add_if, "add_ldf_dd", ""+add_ldf, "mult_if_dd", ""+mult_if, "mult_ldf_dd", ""+mult_ldf,
        "div_if_dd", ""+div_if, "div_ld_dd", ""+div_ld, "pow_if_dd", ""+pow_if, "pow_ld_dd", ""+pow_ld,
        "neg_i_dd", ""+neg_i, "neg_l_dd", ""+neg_l, "const_8_dd", "8", "const_10_dd", "10", "dm_2y_dtd", dm_2y, "dm_2m_dtd", dm_2m,
        "const_00_dtd", "1800-06-30T23:59:59Z", "const_04_dtd", "1804-06-30T23:59:59Z", "const_first_sd", "this is the first", "const_second_sd", "this is the second",
        "concat_first_sd", concat_first, "concat_second_sd", concat_second, "rev_sd", rev, "miss_dd", ""+d0 )));
    if (usually()) {
      commit(); // to have several segments
    }
  }
  assertU(commit());
  response = h.query(request(fileToStringArr(fileName)));
}
@Test
public void addTest() throws Exception {
double result = (Double)getStatResult(response, "ar", "double", "sum");
double calculated = (Double)getStatResult(response, "ar", "double", "sumc");
assertTrue(result==calculated);
result = (Double)getStatResult(response, "ar", "double", "mean");
calculated = (Double)getStatResult(response, "ar", "double", "meanc");
assertTrue(result==calculated);
}
@Test
public void multiplyTest() throws Exception {
double result = (Double)getStatResult(response, "mr", "double", "sum");
double calculated = (Double)getStatResult(response, "mr", "double", "sumc");
assertTrue(result==calculated);
result = (Double)getStatResult(response, "mr", "double", "mean");
calculated = (Double)getStatResult(response, "mr", "double", "meanc");
assertTrue(result==calculated);
}
@Test
public void divideTest() throws Exception {
Double result = (Double)getStatResult(response, "dr", "double", "sum");
Double calculated = (Double)getStatResult(response, "dr", "double", "sumc");
assertTrue(result.equals(calculated));
result = (Double)getStatResult(response, "dr", "double", "mean");
calculated = (Double)getStatResult(response, "dr", "double", "meanc");
assertTrue(result.equals(calculated));
}
@Test
public void powerTest() throws Exception {
double result = (Double)getStatResult(response, "pr", "double", "sum");
double calculated = (Double)getStatResult(response, "pr", "double", "sumc");
assertTrue(result==calculated);
result = (Double)getStatResult(response, "pr", "double", "mean");
calculated = (Double)getStatResult(response, "pr", "double", "meanc");
assertTrue(result==calculated);
}
@Test
public void negateTest() throws Exception {
double result = (Double)getStatResult(response, "nr", "double", "sum");
double calculated = (Double)getStatResult(response, "nr", "double", "sumc");
assertTrue(result==calculated);
result = (Double)getStatResult(response, "nr", "double", "mean");
calculated = (Double)getStatResult(response, "nr", "double", "meanc");
assertTrue(result==calculated);
}
@Test
public void absoluteValueTest() throws Exception {
double result = (Double)getStatResult(response, "avr", "double", "sum");
double calculated = (Double)getStatResult(response, "avr", "double", "sumc");
assertTrue(result==calculated);
result = (Double)getStatResult(response, "avr", "double", "mean");
calculated = (Double)getStatResult(response, "avr", "double", "meanc");
assertTrue(result==calculated);
}
@Test
public void constantNumberTest() throws Exception {
double result = (Double)getStatResult(response, "cnr", "double", "sum");
double calculated = (Double)getStatResult(response, "cnr", "double", "sumc");
assertTrue(result==calculated);
result = (Double)getStatResult(response, "cnr", "double", "mean");
calculated = (Double)getStatResult(response, "cnr", "double", "meanc");
assertTrue(result==calculated);
}
@Test
public void dateMathTest() throws Exception {
String result = (String)getStatResult(response, "dmr", "date", "median");
String calculated = (String)getStatResult(response, "dmr", "date", "medianc");
assertTrue(result.equals(calculated));
result = (String)getStatResult(response, "dmr", "date", "max");
calculated = (String)getStatResult(response, "dmr", "date", "maxc");
assertTrue(result.equals(calculated));
}
@Test
public void constantDateTest() throws Exception {
String result = (String)getStatResult(response, "cdr", "date", "median");
String calculated = (String)getStatResult(response, "cdr", "date", "medianc");
assertTrue(result.equals(calculated));
result = (String)getStatResult(response, "cdr", "date", "max");
calculated = (String)getStatResult(response, "cdr", "date", "maxc");
assertTrue(result.equals(calculated));
}
@Test
public void constantStringTest() throws Exception {
String result = (String)getStatResult(response, "csr", "str", "min");
String calculated = (String)getStatResult(response, "csr", "str", "minc");
assertTrue(result.equals(calculated));
result = (String)getStatResult(response, "csr", "str", "max");
calculated = (String)getStatResult(response, "csr", "str", "maxc");
assertTrue(result.equals(calculated));
}
@Test
public void concatenateTest() throws Exception {
String result = (String)getStatResult(response, "cr", "str", "min");
String calculated = (String)getStatResult(response, "cr", "str", "minc");
assertTrue(result.equals(calculated));
result = (String)getStatResult(response, "cr", "str", "max");
calculated = (String)getStatResult(response, "cr", "str", "maxc");
assertTrue(result.equals(calculated));
}
@Test
public void reverseTest() throws Exception {
String result = (String)getStatResult(response, "rr", "str", "min");
String calculated = (String)getStatResult(response, "rr", "str", "minc");
assertTrue(result.equals(calculated));
result = (String)getStatResult(response, "rr", "str", "max");
calculated = (String)getStatResult(response, "rr", "str", "maxc");
assertTrue(result.equals(calculated));
}
@Test
public void missingTest() throws Exception {
double min = (Double)getStatResult(response, "ms", "double", "min");
double max = (Double)getStatResult(response, "ms", "double", "max");
Assert.assertEquals((Double)48.0,(Double)max);
Assert.assertEquals((Double)1.0,(Double)min);
}
}