LUCENE-3850: Removed some of the rawtypes compiler warnings

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1298336 13f79535-47bb-0310-9956-ffa450edef68
Martijn van Groningen 2012-03-08 10:10:56 +00:00
parent 8d84214891
commit c83f75f6ac
3 changed files with 45 additions and 30 deletions
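
At its core, the change gives each static factory a type parameter and routes the concrete collector through a raw-type cast, so the unavoidable unchecked conversion is confined to a single @SuppressWarnings("unchecked") inside the factory instead of leaking rawtypes warnings to every caller. A minimal, self-contained sketch of that pattern; the names Collector, LongImpl and BytesImpl are illustrative only and not part of this commit:

// Illustrative sketch of the pattern applied in this commit: a generic
// factory picks a concrete subclass and bridges the otherwise
// inconvertible generic types by casting through the raw type.
abstract class Collector<T> {

  @SuppressWarnings("unchecked")
  static <T> Collector<T> create(boolean useLong) {
    if (useLong) {
      // Collector<Long> is not convertible to Collector<T>, so cast via
      // the raw type; the resulting unchecked warning is suppressed above.
      return (Collector) new LongImpl();
    }
    return (Collector) new BytesImpl();
  }
}

class LongImpl extends Collector<Long> {}
class BytesImpl extends Collector<String> {}

Callers can then bind the group value type at the call site and never handle a raw collector type themselves.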


@@ -19,8 +19,6 @@ package org.apache.lucene.search.grouping.dv;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValues.Type; // javadocs
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.grouping.AbstractFirstPassGroupingCollector;
import org.apache.lucene.util.BytesRef;
@@ -38,25 +36,42 @@ public abstract class DVFirstPassGroupingCollector<GROUP_VALUE_TYPE> extends Abs
final boolean diskResident;
final DocValues.Type valueType;
public static DVFirstPassGroupingCollector create(Sort groupSort, int topNGroups, String groupField, DocValues.Type type, boolean diskResident) throws IOException {
/**
* Constructs a {@link DVFirstPassGroupingCollector}.
* Selects and constructs the most optimal first pass collector implementation for grouping by {@link DocValues}.
*
* @param groupField The field to group by
* @param topNGroups The maximum number of top groups to return. Typically this equals offset + rows.
* @param diskResident Whether the values to group by should be disk resident
* @param type The {@link org.apache.lucene.index.DocValues.Type} which is used to select a concrete implementation.
* @param groupSort The sort used for the groups
* @return the most optimal first pass collector implementation for grouping by {@link DocValues}
* @throws IOException If I/O related errors occur
*/
@SuppressWarnings("unchecked")
public static <T> DVFirstPassGroupingCollector<T> create(Sort groupSort, int topNGroups, String groupField, DocValues.Type type, boolean diskResident) throws IOException {
switch (type) {
case VAR_INTS:
case FIXED_INTS_8:
case FIXED_INTS_16:
case FIXED_INTS_32:
case FIXED_INTS_64:
return new Lng(groupSort, topNGroups, groupField, diskResident, type);
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new Lng(groupSort, topNGroups, groupField, diskResident, type);
case FLOAT_32:
case FLOAT_64:
return new Dbl(groupSort, topNGroups, groupField, diskResident, type);
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new Dbl(groupSort, topNGroups, groupField, diskResident, type);
case BYTES_FIXED_STRAIGHT:
case BYTES_FIXED_DEREF:
case BYTES_VAR_STRAIGHT:
case BYTES_VAR_DEREF:
return new BR(groupSort, topNGroups, groupField, diskResident, type);
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new BR(groupSort, topNGroups, groupField, diskResident, type);
case BYTES_VAR_SORTED:
case BYTES_FIXED_SORTED:
return new SortedBR(groupSort, topNGroups, groupField, diskResident, type);
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new SortedBR(groupSort, topNGroups, groupField, diskResident, type);
default:
throw new IllegalArgumentException(String.format("ValueType %s not supported", type));
}

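With the parameterized create in place, a caller can bind the group value type directly instead of working with the raw collector. A hedged usage fragment, reusing the query and the BYTES_VAR_SORTED case exercised by TestGrouping further down; the field name "author", the indexSearcher, and the omitted IOException handling are assumptions taken from that test context:

// Hypothetical caller of the new generic first-pass factory.
Sort groupSort = Sort.RELEVANCE;
DVFirstPassGroupingCollector<BytesRef> c1 =
    DVFirstPassGroupingCollector.create(groupSort, 10, "author", DocValues.Type.BYTES_VAR_SORTED, false);
indexSearcher.search(new TermQuery(new Term("content", "random")), c1);
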

@@ -19,12 +19,12 @@ package org.apache.lucene.search.grouping.dv;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValues.Type; // javadocs
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.grouping.AbstractSecondPassGroupingCollector;
import org.apache.lucene.search.grouping.SearchGroup;
import org.apache.lucene.util.SentinelIntSet;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SentinelIntSet;
import java.io.IOException;
import java.util.Collection;
@@ -54,10 +54,10 @@ public abstract class DVSecondPassGroupingCollector<GROUP_VALUE> extends Abstrac
* @throws IOException If I/O related errors occur
*/
@SuppressWarnings("unchecked")
public static DVSecondPassGroupingCollector create(String groupField,
public static <T> DVSecondPassGroupingCollector<T> create(String groupField,
boolean diskResident,
DocValues.Type type,
Collection<SearchGroup> searchGroups,
Collection<SearchGroup<T>> searchGroups,
Sort groupSort,
Sort withinGroupSort,
int maxDocsPerGroup,
@@ -71,21 +71,21 @@ public abstract class DVSecondPassGroupingCollector<GROUP_VALUE> extends Abstrac
case FIXED_INTS_32:
case FIXED_INTS_64:
// Type erasure b/c otherwise we have inconvertible types...
return new Lng(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
return (DVSecondPassGroupingCollector) new Lng(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
case FLOAT_32:
case FLOAT_64:
// Type erasure b/c otherwise we have inconvertible types...
return new Dbl(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
return (DVSecondPassGroupingCollector) new Dbl(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
case BYTES_FIXED_STRAIGHT:
case BYTES_FIXED_DEREF:
case BYTES_VAR_STRAIGHT:
case BYTES_VAR_DEREF:
// Type erasure b/c otherwise we have inconvertible types...
return new BR(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
return (DVSecondPassGroupingCollector) new BR(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
case BYTES_VAR_SORTED:
case BYTES_FIXED_SORTED:
// Type erasure b/c otherwise we have inconvertible types...
return new SortedBR(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
return (DVSecondPassGroupingCollector) new SortedBR(groupField, type, diskResident, (Collection) searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
default:
throw new IllegalArgumentException(String.format("ValueType %s not supported", type));
}
@@ -192,7 +192,7 @@ public abstract class DVSecondPassGroupingCollector<GROUP_VALUE> extends Abstrac
private final BytesRef spare = new BytesRef();
private final SentinelIntSet ordSet;
@SuppressWarnings("unchecked")
@SuppressWarnings({"unchecked","rawtypes"})
SortedBR(String groupField, DocValues.Type valueType, boolean diskResident, Collection<SearchGroup<BytesRef>> searchGroups, Sort groupSort, Sort withinGroupSort, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException {
super(groupField, valueType, diskResident, searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
ordSet = new SentinelIntSet(groupMap.size(), -1);

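The second-pass factory mirrors the same shape, so the two passes chain together without raw types. A hypothetical fragment continuing the first-pass sketch above; the concrete argument values (offset 0, top 5 docs per group, no within-group sort) are illustrative and simply mirror the calls in TestGrouping:

// Hypothetical two-pass flow with the parameterized factories; c1,
// groupSort and indexSearcher come from the first-pass sketch above.
// Note: getTopGroups may return null if fewer groups were collected.
Collection<SearchGroup<BytesRef>> topGroups = c1.getTopGroups(0, true);
DVSecondPassGroupingCollector<BytesRef> c2 =
    DVSecondPassGroupingCollector.create("author", false, DocValues.Type.BYTES_VAR_SORTED,
        topGroups, groupSort, null, 5, true, false, true);
indexSearcher.search(new TermQuery(new Term("content", "random")), c2);
TopGroups<BytesRef> result = c2.getTopGroups(0);
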

@@ -126,13 +126,13 @@ public class TestGrouping extends LuceneTestCase {
w.close();
final Sort groupSort = Sort.RELEVANCE;
final AbstractFirstPassGroupingCollector c1 = createRandomFirstPassCollector(groupField, groupSort, 10, canUseIDV);
final AbstractFirstPassGroupingCollector<?> c1 = createRandomFirstPassCollector(groupField, groupSort, 10, canUseIDV);
indexSearcher.search(new TermQuery(new Term("content", "random")), c1);
final AbstractSecondPassGroupingCollector c2 = createSecondPassCollector(c1, groupField, groupSort, null, 0, 5, true, false, true);
final AbstractSecondPassGroupingCollector<?> c2 = createSecondPassCollector(c1, groupField, groupSort, null, 0, 5, true, false, true);
indexSearcher.search(new TermQuery(new Term("content", "random")), c2);
final TopGroups groups = c2.getTopGroups(0);
final TopGroups<?> groups = c2.getTopGroups(0);
assertEquals(7, groups.totalHitCount);
assertEquals(7, groups.totalGroupedHitCount);
@@ -142,7 +142,7 @@ public class TestGrouping extends LuceneTestCase {
// the later a document is added the higher this docId
// value
GroupDocs group = groups.groups[0];
GroupDocs<?> group = groups.groups[0];
compareGroupValue("author3", group);
assertEquals(2, group.scoreDocs.length);
assertEquals(5, group.scoreDocs[0].doc);
@@ -179,14 +179,14 @@ public class TestGrouping extends LuceneTestCase {
}
}
private AbstractFirstPassGroupingCollector createRandomFirstPassCollector(String groupField, Sort groupSort, int topDocs, boolean canUseIDV) throws IOException {
AbstractFirstPassGroupingCollector selected;
private AbstractFirstPassGroupingCollector<?> createRandomFirstPassCollector(String groupField, Sort groupSort, int topDocs, boolean canUseIDV) throws IOException {
AbstractFirstPassGroupingCollector<?> selected;
if (canUseIDV && random.nextBoolean()) {
boolean diskResident = random.nextBoolean();
selected = DVFirstPassGroupingCollector.create(groupSort, topDocs, groupField, Type.BYTES_VAR_SORTED, diskResident);
} else if (random.nextBoolean()) {
ValueSource vs = new BytesRefFieldSource(groupField);
selected = new FunctionFirstPassGroupingCollector(vs, new HashMap(), groupSort, topDocs);
selected = new FunctionFirstPassGroupingCollector(vs, new HashMap<Object, Object>(), groupSort, topDocs);
} else {
selected = new TermFirstPassGroupingCollector(groupField, groupSort, topDocs);
}
@@ -196,20 +196,20 @@ public class TestGrouping extends LuceneTestCase {
return selected;
}
private AbstractFirstPassGroupingCollector createFirstPassCollector(String groupField, Sort groupSort, int topDocs, AbstractFirstPassGroupingCollector firstPassGroupingCollector) throws IOException {
private AbstractFirstPassGroupingCollector<?> createFirstPassCollector(String groupField, Sort groupSort, int topDocs, AbstractFirstPassGroupingCollector firstPassGroupingCollector) throws IOException {
if (DVFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
boolean diskResident = random.nextBoolean();
return DVFirstPassGroupingCollector.create(groupSort, topDocs, groupField, Type.BYTES_VAR_SORTED, diskResident);
} else if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
ValueSource vs = new BytesRefFieldSource(groupField);
return new FunctionFirstPassGroupingCollector(vs, new HashMap(), groupSort, topDocs);
return new FunctionFirstPassGroupingCollector(vs, new HashMap<Object, Object>(), groupSort, topDocs);
} else {
return new TermFirstPassGroupingCollector(groupField, groupSort, topDocs);
}
}
@SuppressWarnings("unchecked")
private AbstractSecondPassGroupingCollector createSecondPassCollector(AbstractFirstPassGroupingCollector firstPassGroupingCollector,
private <T> AbstractSecondPassGroupingCollector<T> createSecondPassCollector(AbstractFirstPassGroupingCollector firstPassGroupingCollector,
String groupField,
Sort groupSort,
Sort sortWithinGroup,
@@ -225,11 +225,11 @@ public class TestGrouping extends LuceneTestCase {
return DVSecondPassGroupingCollector.create(groupField, diskResident, Type.BYTES_VAR_SORTED, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
} else if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
Collection<SearchGroup<BytesRef>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields);
return new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields);
return (AbstractSecondPassGroupingCollector) new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields);
} else {
ValueSource vs = new BytesRefFieldSource(groupField);
Collection<SearchGroup<MutableValue>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields);
return new FunctionSecondPassGroupingCollector(searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap());
return (AbstractSecondPassGroupingCollector) new FunctionSecondPassGroupingCollector(searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap());
}
}
@@ -1062,8 +1062,8 @@ public class TestGrouping extends LuceneTestCase {
}
// Get block grouping result:
sBlocks.search(query, c4);
@SuppressWarnings("unchecked")
final TopGroups<BytesRef> tempTopGroupsBlocks = c3.getTopGroups(docSort, groupOffset, docOffset, docOffset+docsPerGroup, fillFields);
@SuppressWarnings({"unchecked","rawtypes"})
final TopGroups<BytesRef> tempTopGroupsBlocks = (TopGroups<BytesRef>) c3.getTopGroups(docSort, groupOffset, docOffset, docOffset+docsPerGroup, fillFields);
final TopGroups<BytesRef> groupsResultBlocks;
if (doAllGroups && tempTopGroupsBlocks != null) {
assertEquals((int) tempTopGroupsBlocks.totalGroupCount, allGroupsCollector2.getGroupCount());
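
On the test side the remaining warnings are removed differently: raw declarations become unbounded wildcards, which silences rawtypes without committing the test to a specific group value type. A tiny illustrative contrast using plain java.util types rather than the grouping classes:

// Illustrative only: the raw declaration triggers a rawtypes warning under
// -Xlint, the unbounded wildcard does not, and both allow the same reads.
java.util.List raw = new java.util.ArrayList<String>();          // warning: [rawtypes]
java.util.List<?> wildcard = new java.util.ArrayList<String>();  // no warning
Object first = wildcard.isEmpty() ? null : wildcard.get(0);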