add missing eol-style

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1003877 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2010-10-02 20:32:16 +00:00
parent fd11477ece
commit 2579edf45f
10 changed files with 1405 additions and 1405 deletions

View File

@@ -1,224 +1,224 @@
package org.apache.lucene.search.cache;
/**
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldCache.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.OpenBitSet;
import static org.hamcrest.CoreMatchers.*;
public class TestEntryCreators extends LuceneTestCase {
protected IndexReader reader;
private static final int NUM_DOCS = 500 * RANDOM_MULTIPLIER;
private Directory directory;
static class NumberTypeTester {
String funcName;
Class<? extends CachedArrayCreator> creator;
Class<? extends Parser> parser;
String field;
Number[] values;
public NumberTypeTester( String f, String func, Class<? extends CachedArrayCreator> creator, Class<? extends Parser> parser ) {
field = f;
funcName = func;
this.creator = creator;
this.parser = parser;
values = new Number[NUM_DOCS];
}
public String toString()
{
return field;
}
}
private NumberTypeTester[] typeTests;
@Override
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
RandomIndexWriter writer= new RandomIndexWriter(random, directory);
typeTests = new NumberTypeTester[] {
new NumberTypeTester( "theRandomByte", "getBytes", ByteValuesCreator.class, ByteParser.class ),
new NumberTypeTester( "theRandomShort", "getShorts", ShortValuesCreator.class, ShortParser.class ),
new NumberTypeTester( "theRandomInt", "getInts", IntValuesCreator.class, IntParser.class ),
new NumberTypeTester( "theRandomLong", "getLongs", LongValuesCreator.class, LongParser.class ),
new NumberTypeTester( "theRandomFloat", "getFloats", FloatValuesCreator.class, FloatParser.class ),
new NumberTypeTester( "theRandomDouble", "getDoubles", DoubleValuesCreator.class, DoubleParser.class ),
};
for (int i = 0; i < NUM_DOCS; i++){
Document doc = new Document();
// Test the valid bits
for( NumberTypeTester tester : typeTests ) {
if (random.nextInt(20) != 17 && i > 1) {
tester.values[i] = 10 + random.nextInt( 20 ); // get some field overlap
doc.add(newField(tester.field, String.valueOf(tester.values[i]),
Field.Store.NO, Field.Index.NOT_ANALYZED ));
}
}
writer.addDocument(doc);
}
reader = writer.getReader();
writer.close();
}
@Override
public void tearDown() throws Exception {
reader.close();
directory.close();
super.tearDown();
}
public void testKeys() throws IOException {
// Check that the keys are unique for different fields
EntryKey key_1 = new ByteValuesCreator( "field1", null ).getCacheKey();
EntryKey key_2 = new ByteValuesCreator( "field2", null ).getCacheKey();
assertThat("different fields should have a different key", key_1, not(key_2) );
key_1 = new ByteValuesCreator( "field1", null ).getCacheKey();
key_2 = new ShortValuesCreator( "field1", null ).getCacheKey();
assertThat( "same field different type should have different key", key_1, not( key_2 ) );
key_1 = new ByteValuesCreator( "ff", null ).getCacheKey();
key_2 = new ByteValuesCreator( "ff", null ).getCacheKey();
assertThat( "same args should have same key", key_1, is( key_2 ) );
key_1 = new ByteValuesCreator( "ff", null, ByteValuesCreator.OPTION_CACHE_BITS ^ ByteValuesCreator.OPTION_CACHE_VALUES ).getCacheKey();
key_2 = new ByteValuesCreator( "ff", null ).getCacheKey();
assertThat( "different options should share same key", key_1, is( key_2 ) );
key_1 = new IntValuesCreator( "ff", FieldCache.DEFAULT_INT_PARSER ).getCacheKey();
key_2 = new IntValuesCreator( "ff", FieldCache.NUMERIC_UTILS_INT_PARSER ).getCacheKey();
assertThat( "diferent parser should have same key", key_1, is( key_2 ) );
}
private CachedArray getWithReflection( FieldCache cache, NumberTypeTester tester, int flags ) throws IOException
{
try {
Method getXXX = cache.getClass().getMethod( tester.funcName, IndexReader.class, String.class, EntryCreator.class );
Constructor constructor = tester.creator.getConstructor( String.class, tester.parser, Integer.TYPE );
CachedArrayCreator creator = (CachedArrayCreator)constructor.newInstance( tester.field, null, flags );
return (CachedArray) getXXX.invoke(cache, reader, tester.field, creator );
}
catch( Exception ex ) {
throw new RuntimeException( "Reflection failed", ex );
}
}
public void testCachedArrays() throws IOException
{
FieldCache cache = FieldCache.DEFAULT;
// Check the Different CachedArray Types
CachedArray last = null;
CachedArray justbits = null;
String field;
for( NumberTypeTester tester : typeTests ) {
justbits = getWithReflection( cache, tester, CachedArrayCreator.OPTION_CACHE_BITS );
assertNull( "should not get values : "+tester, justbits.getRawArray() );
assertNotNull( "should get bits : "+tester, justbits.valid );
last = getWithReflection( cache, tester, CachedArrayCreator.CACHE_VALUES_AND_BITS );
assertEquals( "should use same cached object : "+tester, justbits, last );
assertNull( "Validate=false shoudl not regenerate : "+tester, justbits.getRawArray() );
last = getWithReflection( cache, tester, CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE );
assertEquals( "should use same cached object : "+tester, justbits, last );
assertNotNull( "Validate=true should add the Array : "+tester, justbits.getRawArray() );
checkCachedArrayValuesAndBits( tester, last );
}
// Now switch the parser (for the same type) and expect an error
cache.purgeAllCaches();
int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE;
field = "theRandomInt";
last = cache.getInts(reader, field, new IntValuesCreator( field, FieldCache.DEFAULT_INT_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[2], last );
try {
cache.getInts(reader, field, new IntValuesCreator( field, FieldCache.NUMERIC_UTILS_INT_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
field = "theRandomLong";
last = cache.getLongs(reader, field, new LongValuesCreator( field, FieldCache.DEFAULT_LONG_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[3], last );
try {
cache.getLongs(reader, field, new LongValuesCreator( field, FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
field = "theRandomFloat";
last = cache.getFloats(reader, field, new FloatValuesCreator( field, FieldCache.DEFAULT_FLOAT_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[4], last );
try {
cache.getFloats(reader, field, new FloatValuesCreator( field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
field = "theRandomDouble";
last = cache.getDoubles(reader, field, new DoubleValuesCreator( field, FieldCache.DEFAULT_DOUBLE_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[5], last );
try {
cache.getDoubles(reader, field, new DoubleValuesCreator( field, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
}
private void checkCachedArrayValuesAndBits( NumberTypeTester tester, CachedArray cachedVals )
{
// for( int i=0; i<NUM_DOCS; i++ ) {
// System.out.println( i + "] "+ tester.values[i] + " :: " + cachedVals.valid.get(i) );
// }
int numDocs =0;
Set<Number> distinctTerms = new HashSet<Number>();
for( int i=0; i<NUM_DOCS; i++ ) {
Number v = tester.values[i];
boolean isValid = cachedVals.valid.get(i);
if( v != null ) {
numDocs++;
distinctTerms.add( v );
assertTrue( "Valid bit should be true ("+i+"="+tester.values[i]+") "+tester, isValid );
}
else {
assertFalse( "Valid bit should be false ("+i+") "+tester, isValid );
}
}
assertEquals( "Cached numTerms does not match : "+tester, distinctTerms.size(), cachedVals.numTerms );
assertEquals( "Cached numDocs does not match : "+tester, numDocs, cachedVals.numDocs );
assertEquals( "Ordinal should match numDocs : "+tester, numDocs, ((OpenBitSet)cachedVals.valid).cardinality() );
}
}
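
The reflection in getWithReflection() only exists so one test loop can cover all six numeric types; for a single type the direct call is simpler. A minimal sketch of the non-reflective form, for the int case, using the same field name and flags this test already exercises:

// Direct equivalent of the reflective lookup: build a creator from
// (field, parser, flags) and pass it to the matching FieldCache getter.
FieldCache cache = FieldCache.DEFAULT;
int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE;
CachedArray ints = cache.getInts(reader, "theRandomInt",
    new IntValuesCreator("theRandomInt", FieldCache.DEFAULT_INT_PARSER, flags));
assertNotNull(ints.getRawArray()); // VALIDATE caches the value array...
assertNotNull(ints.valid);         // ...and the valid bits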

View File

@@ -1,278 +1,278 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.DocSet;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.request.SimpleFacets;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.FieldType;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.index.Term;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
*
* @since solr 4.0
*/
public class PivotFacetComponent extends SearchComponent
{
public static final String COMPONENT_NAME = "pivot";
static final String PIVOT_KEY = "facet_pivot";
/**
* Designed to be overridden by subclasses that provide different faceting implementations.
* TODO: Currently this is returning a SimpleFacets object, but those capabilities would
* be better as an extracted abstract class or interface.
*/
protected SimpleFacets getFacetImplementation(SolrQueryRequest req,
DocSet docs,
SolrParams params) {
return new SimpleFacets(req, docs, params);
}
@Override
public void prepare(ResponseBuilder rb) throws IOException
{
if (rb.req.getParams().getBool(FacetParams.FACET,false)) {
rb.setNeedDocSet( true );
rb.doFacets = true;
}
}
public void process(ResponseBuilder rb) throws IOException {
if (!rb.doFacets) return;
SolrParams params = rb.req.getParams();
String[] pivots = params.getParams(FacetParams.FACET_PIVOT); // example: author,type (for types by author / types within author)
if (pivots == null) return;
int minMatch = params.getInt( FacetParams.FACET_PIVOT_MINCOUNT, 1 );
SimpleOrderedMap<List<NamedList<Object>>> pivotResponse = new SimpleOrderedMap<List<NamedList<Object>>>();
for (String pivot : pivots) {
String[] fields = pivot.split(","); // only support two levels for now
if( fields.length < 2 ) {
throw new SolrException( ErrorCode.BAD_REQUEST,
"Pivot Facet needs at least two fields: "+pivot );
}
DocSet docs = rb.getResults().docSet;
String field = fields[0];
String subField = fields[1];
Deque<String> fnames = new LinkedList<String>();
for( int i=fields.length-1; i>1; i-- ) {
fnames.push( fields[i] );
}
SimpleFacets sf = getFacetImplementation(rb.req, rb.getResults().docSet, rb.req.getParams());
NamedList<Integer> superFacets = sf.getTermCounts(field);
pivotResponse.add(pivot, doPivots(superFacets, field, subField, fnames, rb, docs, minMatch));
}
NamedList facetCounts = (NamedList) rb.rsp.getValues().get("facet_counts");
if (facetCounts == null) {
facetCounts = new NamedList();
rb.rsp.add("facet_counts", facetCounts);
}
facetCounts.add( PIVOT_KEY, pivotResponse);
}
/**
* Recursive function to do all the pivots
*/
protected List<NamedList<Object>> doPivots( NamedList<Integer> superFacets, String field, String subField, Deque<String> fnames, ResponseBuilder rb, DocSet docs, int minMatch ) throws IOException
{
SolrIndexSearcher searcher = rb.req.getSearcher();
// TODO: optimize to avoid converting to an external string and then having to convert back to internal below
FieldType ftype = null;
// SimpleFacets sf = getFacetImplementation(rb.req, docs, rb.req.getParams());
String nextField = fnames.poll();
List<NamedList<Object>> values = new ArrayList<NamedList<Object>>( superFacets.size() );
for (Map.Entry<String, Integer> kv : superFacets) {
// Only sub-facet if the parent facet has a positive count - there still may not be any values for the sub-field though
if (kv.getValue() > minMatch ) {
SimpleOrderedMap<Object> pivot = new SimpleOrderedMap<Object>();
pivot.add( "field", field );
pivot.add( "value", kv.getKey() );
pivot.add( "count", kv.getValue() );
if( subField == null ) {
values.add( pivot );
}
else {
String s = kv.getKey();
if( ftype == null ) {
ftype = searcher.getSchema().getField(field).getType();
}
Query query = new TermQuery(new Term(field, ftype.toInternal(s)));
DocSet subset = searcher.getDocSet(query, docs);
SimpleFacets sf = getFacetImplementation(rb.req, subset, rb.req.getParams());
NamedList<Integer> nl = sf.getTermCounts(subField);
if (nl.size() > minMatch ) {
pivot.add( "pivot", doPivots( nl, subField, nextField, fnames, rb, subset, minMatch ) );
values.add( pivot ); // only add response if there are some counts
}
}
}
}
// put the field back on the stack
fnames.push( nextField );
return values;
}
@Override
public int distributedProcess(ResponseBuilder rb) throws IOException {
if (!rb.doFacets) {
return ResponseBuilder.STAGE_DONE;
}
if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
SolrParams params = rb.req.getParams();
String[] pivots = params.getParams(FacetParams.FACET_PIVOT);
for ( ShardRequest sreq : rb.outgoing ) {
if (( sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS ) != 0
&& sreq.shards != null && sreq.shards.length == 1 ) {
sreq.params.set( FacetParams.FACET, "true" );
sreq.params.set( FacetParams.FACET_PIVOT, pivots );
sreq.params.set( FacetParams.FACET_PIVOT_MINCOUNT, 1 ); // keep this at 1 regardless so that it accumulates everything
}
}
}
return ResponseBuilder.STAGE_DONE;
}
@Override
public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
if (!rb.doFacets) return;
if ((sreq.purpose & ShardRequest.PURPOSE_GET_FACETS)!=0) {
SimpleOrderedMap<List<NamedList<Object>>> tf = rb._pivots;
if ( null == tf ) {
tf = new SimpleOrderedMap<List<NamedList<Object>>>();
rb._pivots = tf;
}
for (ShardResponse srsp: sreq.responses) {
int shardNum = rb.getShardNum(srsp.getShard());
NamedList facet_counts = (NamedList)srsp.getSolrResponse().getResponse().get("facet_counts");
// handle facet trees from shards
SimpleOrderedMap<List<NamedList<Object>>> shard_pivots =
(SimpleOrderedMap<List<NamedList<Object>>>)facet_counts.get( PIVOT_KEY );
if ( shard_pivots != null ) {
for (int j=0; j< shard_pivots.size(); j++) {
// TODO -- accumulate the results from each shard
// The following code worked to accumulate facets for a previous
// two-level patch... it is here for reference until someone can upgrade it
/**
String shard_tree_name = (String) shard_pivots.getName( j );
SimpleOrderedMap<NamedList> shard_tree = (SimpleOrderedMap<NamedList>)shard_pivots.getVal( j );
SimpleOrderedMap<NamedList> facet_tree = tf.get( shard_tree_name );
if ( null == facet_tree) {
facet_tree = new SimpleOrderedMap<NamedList>();
tf.add( shard_tree_name, facet_tree );
}
for( int o = 0; o < shard_tree.size() ; o++ ) {
String shard_outer = (String) shard_tree.getName( o );
NamedList shard_innerList = (NamedList) shard_tree.getVal( o );
NamedList tree_innerList = (NamedList) facet_tree.get( shard_outer );
if ( null == tree_innerList ) {
tree_innerList = new NamedList();
facet_tree.add( shard_outer, tree_innerList );
}
for ( int i = 0 ; i < shard_innerList.size() ; i++ ) {
String shard_term = (String) shard_innerList.getName( i );
long shard_count = ((Number) shard_innerList.getVal(i)).longValue();
int tree_idx = tree_innerList.indexOf( shard_term, 0 );
if ( -1 == tree_idx ) {
tree_innerList.add( shard_term, shard_count );
} else {
long tree_count = ((Number) tree_innerList.getVal( tree_idx )).longValue();
tree_innerList.setVal( tree_idx, shard_count + tree_count );
}
} // innerList loop
} // outer loop
**/
} // each tree loop
}
}
}
return ;
}
@Override
public void finishStage(ResponseBuilder rb) {
if (!rb.doFacets || rb.stage != ResponseBuilder.STAGE_GET_FIELDS) return;
// wait until STAGE_GET_FIELDS
// so that "result" is already stored in the response (for aesthetics)
SimpleOrderedMap<List<NamedList<Object>>> tf = rb._pivots;
// get 'facet_counts' from the response
NamedList facetCounts = (NamedList) rb.rsp.getValues().get("facet_counts");
if (facetCounts == null) {
facetCounts = new NamedList();
rb.rsp.add("facet_counts", facetCounts);
}
facetCounts.add( PIVOT_KEY, tf );
rb._pivots = null;
}
public String getDescription() {
return "Handle Pivot (multi-level) Faceting";
}
public String getSourceId() {
return "$Id: $";
}
public String getSource() {
return "$URL: $";
}
public String getVersion() {
return "$Revision: $";
}
}
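
As the javadoc on getFacetImplementation() says, subclassing is the intended way to swap in a different faceting implementation. A minimal sketch of such an override; MyPivotFacetComponent and CustomSimpleFacets are hypothetical names:

public class MyPivotFacetComponent extends PivotFacetComponent {
  @Override
  protected SimpleFacets getFacetImplementation(SolrQueryRequest req,
                                                DocSet docs,
                                                SolrParams params) {
    // CustomSimpleFacets is an assumed SimpleFacets subclass with a
    // different counting strategy; the pivot recursion above is inherited.
    return new CustomSimpleFacets(req, docs, params);
  }
}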

View File

@@ -1,57 +1,57 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
public class MutableValueLong extends MutableValue {
public long value;
@Override
public Object toObject() {
return value;
}
@Override
public void copy(MutableValue source) {
value = ((MutableValueLong)source).value;
}
@Override
public MutableValue duplicate() {
MutableValueLong v = new MutableValueLong();
v.value = this.value;
return v;
}
@Override
public boolean equalsSameType(Object other) {
return value == ((MutableValueLong)other).value;
}
@Override
public int compareSameType(Object other) {
long b = ((MutableValueLong)other).value;
if (value<b) return -1;
else if (value>b) return 1;
else return 0;
}
@Override
public int hashCode() {
return (int)value + (int)(value>>32);
}
}

View File

@@ -1,100 +1,100 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.function;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache.DocTerms;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.SolrException;
/**
* Uses a field's value as a term and finds that term's document frequency in another field.
*
* @since solr 4.0
*/
public class JoinDocFreqValueSource extends FieldCacheSource {
public static final String NAME = "joindf";
protected final String qfield;
public JoinDocFreqValueSource(String field, String qfield) {
super(field);
this.qfield = qfield;
}
public String description() {
return NAME + "(" + field +":("+qfield+"))";
}
public DocValues getValues(Map context, final IndexReader reader) throws IOException
{
final DocTerms terms = cache.getTerms(reader, field, true );
return new DocValues() {
public int intVal(int doc)
{
try {
BytesRef ref = new BytesRef();
terms.getTerm(doc, ref);
int v = reader.docFreq( qfield, ref );
//System.out.println( NAME+"["+ref.utf8ToString()+"="+v+"]" );
return v;
}
catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in function "+description()+" : doc="+doc, e);
}
}
public float floatVal(int doc) {
return (float)intVal(doc);
}
public long longVal(int doc) {
return (long)intVal(doc);
}
public double doubleVal(int doc) {
return (double)intVal(doc);
}
public String strVal(int doc) {
return intVal(doc) + "";
}
public String toString(int doc) {
return description() + '=' + intVal(doc);
}
};
}
public boolean equals(Object o) {
if (o.getClass() != JoinDocFreqValueSource.class) return false;
JoinDocFreqValueSource other = (JoinDocFreqValueSource)o;
if( !qfield.equals( other.qfield ) ) return false;
return super.equals(other);
}
public int hashCode() {
return qfield.hashCode() + super.hashCode();
};
}
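
To make the lookup direction concrete: each document's term in field is read from the FieldCache, and its document frequency is measured against qfield. A sketch with hypothetical field names, exception handling elided:

// "ref" stores terms that are looked up in the "id" field; in a request this
// value source would surface as the function query joindf(ref,id), assuming
// the usual function-parser registration.
ValueSource vs = new JoinDocFreqValueSource("ref", "id");
DocValues dv = vs.getValues(new HashMap(), reader); // context map is unused here
int df = dv.intVal(0); // docFreq in "id" of the term doc 0 holds in "ref"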

View File

@@ -1,37 +1,37 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj;
import org.apache.solr.common.SolrDocument;
/**
* A callback interface for streaming responses
*
* @since solr 4.0
*/
public abstract class StreamingResponseCallback {
/*
* Called for each SolrDocument in the response
*/
public abstract void streamSolrDocument( SolrDocument doc );
/*
* Called at the beginning of each DocList (and SolrDocumentList)
*/
public abstract void streamDocListInfo( long numFound, long start, Float maxScore );
}
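
A minimal concrete callback, as a sketch: it reports progress instead of accumulating documents, which is the point of streaming. The "id" field name is illustrative:

StreamingResponseCallback cb = new StreamingResponseCallback() {
  @Override
  public void streamDocListInfo(long numFound, long start, Float maxScore) {
    System.out.println("expecting " + numFound + " docs, offset " + start);
  }
  @Override
  public void streamSolrDocument(SolrDocument doc) {
    System.out.println("doc: " + doc.getFieldValue("id"));
  }
};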

View File

@@ -1,89 +1,89 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj.impl;
import org.apache.solr.client.solrj.StreamingResponseCallback;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.JavaBinCodec;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
/**
* A BinaryResponseParser that sends callback events rather than building
* a large response
*
* @version $Id: StreamingBinaryResponseParser.java 990180 2010-08-27 15:40:17Z rmuir $
* @since solr 4.0
*/
public class StreamingBinaryResponseParser extends BinaryResponseParser {
final StreamingResponseCallback callback;
public StreamingBinaryResponseParser( StreamingResponseCallback cb )
{
this.callback = cb;
}
@Override
public NamedList<Object> processResponse(InputStream body, String encoding) {
try {
JavaBinCodec codec = new JavaBinCodec() {
public SolrDocument readSolrDocument(FastInputStream dis) throws IOException {
SolrDocument doc = super.readSolrDocument(dis);
callback.streamSolrDocument( doc );
return null;
}
public SolrDocumentList readSolrDocumentList(FastInputStream dis) throws IOException {
SolrDocumentList solrDocs = new SolrDocumentList();
List list = (List) readVal(dis);
solrDocs.setNumFound((Long) list.get(0));
solrDocs.setStart((Long) list.get(1));
solrDocs.setMaxScore((Float) list.get(2));
callback.streamDocListInfo(
solrDocs.getNumFound(),
solrDocs.getStart(),
solrDocs.getMaxScore() );
// Read the Array
tagByte = dis.readByte();
if( (tagByte >>> 5) != (ARR >>> 5) ) {
throw new RuntimeException( "doclist must have an array" );
}
int sz = readSize(dis);
for (int i = 0; i < sz; i++) {
// must be a SolrDocument
readVal( dis );
}
return solrDocs;
}
};
return (NamedList<Object>) codec.unmarshal(body);
}
catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "parsing error", e);
}
}
}
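
Wiring the two classes together, as a sketch: cb is the callback sketched under StreamingResponseCallback above, and body is assumed to be an InputStream holding a javabin query response:

StreamingBinaryResponseParser parser = new StreamingBinaryResponseParser(cb);
NamedList<Object> rsp = parser.processResponse(body, null); // encoding unused for binary
// The streamed documents were delivered to cb, not kept in rsp: the
// readSolrDocument(...) override above returns null after invoking the callback.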

View File

@@ -1,74 +1,74 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj.response;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.List;
public class PivotField implements Serializable
{
final String _field;
final Object _value;
final int _count;
final List<PivotField> _pivot;
public PivotField( String f, Object v, int count, List<PivotField> pivot )
{
_field = f;
_value = v;
_count = count;
_pivot = pivot;
}
public String getField() {
return _field;
}
public Object getValue() {
return _value;
}
public int getCount() {
return _count;
}
public List<PivotField> getPivot() {
return _pivot;
}
@Override
public String toString()
{
return _field + ":" + _value + " ["+_count+"] "+_pivot;
}
public void write( PrintStream out, int indent )
{
for( int i=0; i<indent; i++ ) {
out.print( " " );
}
out.println( _field + "=" + _value + " ("+_count+")" );
if( _pivot != null ) {
for( PivotField p : _pivot ) {
p.write( out, indent+1 );
}
}
}
}
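
A consumption sketch, assuming a SolrJ QueryResponse (rsp) that exposes the "facet_pivot" section built by PivotFacetComponent as NamedList<List<PivotField>> via a getFacetPivot() accessor; the "author,type" key matches the facet.pivot parameter from the earlier example:

NamedList<List<PivotField>> pivots = rsp.getFacetPivot(); // assumed accessor
for (PivotField p : pivots.get("author,type")) {
  p.write(System.out, 0); // recursive pretty-print defined above
}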

View File

@@ -1,330 +1,330 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr;
import org.apache.noggit.ObjectBuilder;
import org.apache.solr.common.util.StrUtils;
import java.util.*;
public class JSONTestUtil {
public static String match(String input, String pathAndExpected) throws Exception {
int pos = pathAndExpected.indexOf("==");
String path = pos>=0 ? pathAndExpected.substring(0,pos) : null;
String expected = pos>=0 ? pathAndExpected.substring(pos+2) : pathAndExpected;
return match(path, input, expected);
}
public static String match(String path, String input, String expected) throws Exception {
Object inputObj = ObjectBuilder.fromJSON(input);
Object expectObj = ObjectBuilder.fromJSON(expected);
return matchObj(path, inputObj, expectObj);
}
/**
public static Object fromJSON(String json) {
try {
Object out = ObjectBuilder.fromJSON(json);
} finally {
}
**/
public static String matchObj(String path, Object input, Object expected) throws Exception {
CollectionTester tester = new CollectionTester(input);
if (!tester.seek(path)) {
return "Path not found: " + path;
}
if (expected != null && !tester.match(expected)) {
return tester.err + " @ " + tester.getPath();
}
return null;
}
}
/** Tests simple object graphs, like those generated by the noggit JSON parser */
class CollectionTester {
public Object valRoot;
public Object val;
public Object expectedRoot;
public Object expected;
public List<Object> path;
public String err;
public CollectionTester(Object val) {
this.val = val;
this.valRoot = val;
path = new ArrayList<Object>();
}
public String getPath() {
StringBuilder sb = new StringBuilder();
boolean first=true;
for (Object seg : path) {
if (seg==null) break;
if (!first) sb.append('/');
else first=false;
if (seg instanceof Integer) {
sb.append('[');
sb.append(seg);
sb.append(']');
} else {
sb.append(seg.toString());
}
}
return sb.toString();
}
void setPath(Object lastSeg) {
path.set(path.size()-1, lastSeg);
}
Object popPath() {
return path.remove(path.size()-1);
}
void pushPath(Object lastSeg) {
path.add(lastSeg);
}
void setErr(String msg) {
err = msg;
}
public boolean match(Object expected) {
this.expectedRoot = expected;
this.expected = expected;
return match();
}
boolean match() {
if (expected == null) {
// null only matches null; return early so expected.equals() below can't NPE
if (val == null) return true;
setErr("mismatch: null!='" + val + "'");
return false;
}
if (expected instanceof List) {
return matchList();
}
if (expected instanceof Map) {
return matchMap();
}
// generic fallback
if (!expected.equals(val)) {
setErr("mismatch: '" + expected + "'!='" + val + "'");
return false;
}
// setErr("unknown expected type " + expected.getClass().getName());
return true;
}
boolean matchList() {
List expectedList = (List)expected;
List v = asList();
if (v == null) return false;
int a = 0;
int b = 0;
pushPath(null);
for (;;) {
if (a >= expectedList.size() && b >=v.size()) {
break;
}
if (a >= expectedList.size() || b >=v.size()) {
popPath();
setErr("List size mismatch");
return false;
}
expected = expectedList.get(a);
val = v.get(b);
setPath(b);
if (!match()) return false;
a++; b++;
}
popPath();
return true;
}
private static Set<String> reserved = new HashSet<String>(Arrays.asList("_SKIP_","_MATCH_","_ORDERED_","_UNORDERED_"));
boolean matchMap() {
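// The expected map may carry control keys that steer the comparison:
//   _SKIP_      comma-separated keys to ignore in the actual map
//   _MATCH_     comma-separated whitelist; only these keys are compared
//   _ORDERED_   require expected keys to appear in this order in the actual map
//   _UNORDERED_ compare without regard to key order (the default)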
Map<String,Object> expectedMap = (Map<String,Object>)expected;
Map<String,Object> v = asMap();
if (v == null) return false;
boolean ordered = false;
String skipList = (String)expectedMap.get("_SKIP_");
String matchList = (String)expectedMap.get("_MATCH_");
Object orderedStr = expectedMap.get("_ORDERED_");
Object unorderedStr = expectedMap.get("_UNORDERED_");
if (orderedStr != null) ordered = true;
if (unorderedStr != null) ordered = false;
Set<String> match = null;
if (matchList != null) {
match = new HashSet<String>(StrUtils.splitSmart(matchList,",",false));
}
Set<String> skips = null;
if (skipList != null) {
skips = new HashSet<String>(StrUtils.splitSmart(skipList,",",false));
}
Set<String> keys = match != null ? match : expectedMap.keySet();
Iterator<Map.Entry<String,Object>> iter = ordered ? v.entrySet().iterator() : null;
int numExpected=0;
pushPath(null);
for (String expectedKey : keys) {
if (reserved.contains(expectedKey)) continue;
numExpected++;
setPath(expectedKey);
if (!v.containsKey(expectedKey)) {
popPath();
setErr("expected key '" + expectedKey + "'");
return false;
}
expected = expectedMap.get(expectedKey);
if (ordered) {
Map.Entry<String,Object> entry;
String foundKey;
for(;;) {
if (!iter.hasNext()) {
popPath();
setErr("expected key '" + expectedKey + "' in ordered map");
return false;
}
entry = iter.next();
foundKey = entry.getKey();
if (skips != null && skips.contains(foundKey)) continue;
if (match != null && !match.contains(foundKey)) continue;
break;
}
if (!entry.getKey().equals(expectedKey)) {
popPath();
setErr("expected key '" + expectedKey + "' instead of '"+entry.getKey()+"' in ordered map");
return false;
}
val = entry.getValue();
} else {
if (skips != null && skips.contains(expectedKey)) continue;
val = v.get(expectedKey);
}
if (!match()) return false;
}
popPath();
// now check if there were any extra keys in the value (as long as there wasn't a specific list to include)
if (match == null) {
int skipped = 0;
if (skips != null) {
for (String skipStr : skips)
if (v.containsKey(skipStr)) skipped++;
}
if (numExpected != (v.size() - skipped)) {
HashSet<String> set = new HashSet<String>(v.keySet());
set.removeAll(expectedMap.keySet());
setErr("unexpected map keys " + set);
return false;
}
}
return true;
}
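// Seeks into the value graph along a "/key/key/[idx]" style path; returns false if any segment is absent.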
public boolean seek(String seekPath) {
if (seekPath == null) return true;
if (seekPath.startsWith("/")) {
seekPath = seekPath.substring(1);
}
if (seekPath.endsWith("/")) {
seekPath = seekPath.substring(0,seekPath.length()-1);
}
List<String> pathList = StrUtils.splitSmart(seekPath, "/", false);
return seek(pathList);
}
List asList() {
// TODO: handle native arrays
if (val instanceof List) {
return (List)val;
}
setErr("expected List");
return null;
}
Map<String,Object> asMap() {
// TODO: handle NamedList
if (val instanceof Map) {
return (Map<String,Object>)val;
}
setErr("expected Map");
return null;
}
public boolean seek(List<String> seekPath) {
if (seekPath.size() == 0) return true;
String seg = seekPath.get(0);
if (seg.charAt(0)=='[') {
List listVal = asList();
if (listVal==null) return false;
int arrIdx = Integer.parseInt(seg.substring(1, seg.length()-1));
if (arrIdx >= listVal.size()) return false;
val = listVal.get(arrIdx);
pushPath(arrIdx);
} else {
Map<String,Object> mapVal = asMap();
if (mapVal==null) return false;
// use containsKey rather than get to handle null values
if (!mapVal.containsKey(seg)) return false;
val = mapVal.get(seg);
pushPath(seg);
}
// recurse after removing head of the path
return seek(seekPath.subList(1,seekPath.size()));
}
}
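/*
 * A usage sketch (illustrative, not part of this commit): match() returns null
 * on success and an error description on failure. The JSON literal and expected
 * values below are made up for the example.
 */
class JSONTestUtilUsageSketch {
  public static void main(String[] args) throws Exception {
    String json = "{\"response\":{\"numFound\":2,\"docs\":[{\"id\":\"1\"},{\"id\":\"2\"}]}}";
    // path and expected value packed into one "path==expected" string
    System.out.println(JSONTestUtil.match(json, "/response/numFound==2"));        // null: matches
    // [n] segments index into lists
    System.out.println(JSONTestUtil.match(json, "/response/docs/[1]/id==\"2\"")); // null: matches
    // a mismatch reports the offending values and the path where they diverged
    System.out.println(JSONTestUtil.match(json, "/response/numFound==3"));        // "mismatch: ... @ response/numFound"
  }
}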
@@ -1,165 +1,165 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.response;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.util.DateUtil;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.SolrPluginUtils;
import org.junit.*;
import java.io.StringWriter;
import static org.junit.Assert.*;
public class TestCSVResponseWriter extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore("solrconfig.xml","schema12.xml");
createIndex();
}
public static void createIndex() {
assertU(adoc("id","1", "foo_i","-1", "foo_s","hi", "foo_l","12345678987654321", "foo_b","false", "foo_f","1.414","foo_d","-1.0E300","foo_dt","2000-01-02T03:04:05Z"));
assertU(adoc("id","2", "v_ss","hi", "v_ss","there", "v2_ss","nice", "v2_ss","output"));
assertU(commit());
}
@Test
public void testCSVOutput() throws Exception {
// test our basic types, and that fields come back in the requested order
assertEquals("id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt\n1,hi,-1,12345678987654321,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n"
, h.query(req("q","id:1", "wt","csv", "fl","id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt")));
// test retrieving score, csv.header
assertEquals("1,0.0,hi\n"
, h.query(req("q","id:1^0", "wt","csv", "csv.header","false", "fl","id,score,foo_s")));
// test multivalued
assertEquals("2,\"hi,there\"\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "fl","id,v_ss")));
// test separator change
assertEquals("2|\"hi|there\"\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.separator","|", "fl","id,v_ss")));
// test mv separator change
assertEquals("2,hi|there\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "fl","id,v_ss")));
// test mv separator change for a single field
assertEquals("2,hi|there,nice:output\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "f.v2_ss.csv.separator",":", "fl","id,v_ss,v2_ss")));
// test retrieving fields from index
String result = h.query(req("q","*:*", "wt","csv", "csv.header","true", "fl","*,score"));
for (String field : "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss,score".split(",")) {
assertTrue(result.indexOf(field) >= 0);
}
// test null values
assertEquals("2,,hi|there\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "fl","id,foo_s,v_ss")));
// test alternate null value
assertEquals("2,NULL,hi|there\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "csv.null","NULL","fl","id,foo_s,v_ss")));
// test alternate newline
assertEquals("2,\"hi,there\"\r\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.newline","\r\n", "fl","id,v_ss")));
// test alternate encapsulator
assertEquals("2,'hi,there'\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.encapsulator","'", "fl","id,v_ss")));
// test using escape instead of encapsulator
assertEquals("2,hi\\,there\n"
, h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.escape","\\", "fl","id,v_ss")));
// test multiple lines
assertEquals("1,,hi\n2,\"hi,there\",\n"
, h.query(req("q","id:[1 TO 2]", "wt","csv", "csv.header","false", "fl","id,v_ss,foo_s")));
// now test SolrDocumentList
SolrDocument d = new SolrDocument();
SolrDocument d1 = d;
d.addField("id","1");
d.addField("foo_i",-1);
d.addField("foo_s","hi");
d.addField("foo_l","12345678987654321L");
d.addField("foo_b",false);
d.addField("foo_f",1.414f);
d.addField("foo_d",-1.0E300);
d.addField("foo_dt", DateUtil.parseDate("2000-01-02T03:04:05Z"));
d.addField("score", "2.718");
d = new SolrDocument();
SolrDocument d2 = d;
d.addField("id","2");
d.addField("v_ss","hi");
d.addField("v_ss","there");
d.addField("v2_ss","nice");
d.addField("v2_ss","output");
d.addField("score", "89.83");
SolrDocumentList sdl = new SolrDocumentList();
sdl.add(d1);
sdl.add(d2);
SolrQueryRequest req = req("q","*:*");
SolrQueryResponse rsp = new SolrQueryResponse();
rsp.add("response", sdl);
QueryResponseWriter w = new CSVResponseWriter();
SolrPluginUtils.setReturnFields("id,foo_s", rsp);
StringWriter buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,foo_s\n1,hi\n2,\n", buf.toString());
// try scores
SolrPluginUtils.setReturnFields("id,score,foo_s", rsp);
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,score,foo_s\n1,2.718,hi\n2,89.83,\n", buf.toString());
// get field values from docs... should be ordered and not include score unless requested
SolrPluginUtils.setReturnFields("*", rsp);
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss\n" +
"1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z,,\n" +
"2,,,,,,,,\"hi,there\",\"nice,output\"\n",
buf.toString());
// get field values and scores - just check that the scores are there... we don't guarantee where
SolrPluginUtils.setReturnFields("*,score", rsp);
buf = new StringWriter();
w.write(buf, req, rsp);
String s = buf.toString();
assertTrue(s.indexOf("score") >=0 && s.indexOf("2.718") > 0 && s.indexOf("89.83") > 0 );
}
}
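/*
 * For reference (illustrative, not part of this commit): the same options are
 * plain request parameters over HTTP, and per-field overrides use the
 * f.<field>. prefix. Against a hypothetical core URL, a request like
 *
 *   .../select?q=id:2&wt=csv&csv.header=false&csv.mv.separator=%7C&f.v2_ss.csv.separator=:&fl=id,v_ss,v2_ss
 *
 * should, given the documents indexed above, return:
 *
 *   2,hi|there,nice:output
 */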
@@ -1,53 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import org.apache.solr.SolrTestCaseJ4;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestSolrQueryParser extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore("solrconfig.xml", "schema12.xml");
createIndex();
}
public static void createIndex() {
String v;
v="how now brown cow";
assertU(adoc("id","1", "text",v, "text_np",v));
v="now cow";
assertU(adoc("id","2", "text",v, "text_np",v));
assertU(commit());
}
@Test
public void testPhrase() {
// should generate a phrase of "now cow" and match only one doc
assertQ(req("q","text:now-cow", "indent","true")
,"//*[@numFound='1']"
);
// should generate a query of (now OR cow) and match both docs
assertQ(req("q","text_np:now-cow", "indent","true")
,"//*[@numFound='2']"
);
}
}
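/*
 * Background note (hedged): the differing behavior above is presumably driven by
 * the fieldType configuration in schema12.xml, where the type behind "text"
 * auto-generates phrase queries from multi-token terms while the type behind
 * "text_np" has that behavior disabled (the autoGeneratePhraseQueries toggle),
 * so "now-cow" analyzes to two tokens that are OR'd together instead of forming
 * the phrase "now cow".
 */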