mirror of https://github.com/apache/lucene.git
LUCENE-1257: Remove the rest of the unchecked warnings and some unneeded casts. I added a TODO where I do not understand the code and do not know for sure what is inside the collections. This could be fixed some time later, but the core code now compiles without any unchecked warnings.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@828011 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
1ae5f89cfb
commit
33be3177a1
|
@ -49,8 +49,8 @@ public abstract class BaseCharFilter extends CharFilter {
|
|||
return currentOff;
|
||||
}
|
||||
for (int i = pcmList.size() - 1; i >= 0; i--) {
|
||||
if (currentOff >= ((OffCorrectMap) pcmList.get(i)).off) {
|
||||
return currentOff + ((OffCorrectMap) pcmList.get(i)).cumulativeDiff;
|
||||
if (currentOff >= pcmList.get(i).off) {
|
||||
return currentOff + pcmList.get(i).cumulativeDiff;
|
||||
}
|
||||
}
|
||||
return currentOff;
|
||||
|
@ -58,7 +58,7 @@ public abstract class BaseCharFilter extends CharFilter {
|
|||
|
||||
protected int getLastCumulativeDiff() {
|
||||
return pcmList == null || pcmList.isEmpty() ?
|
||||
0 : ((OffCorrectMap)pcmList.get(pcmList.size() - 1)).cumulativeDiff;
|
||||
0 : pcmList.get(pcmList.size() - 1).cumulativeDiff;
|
||||
}
|
||||
|
||||
protected void addOffCorrectMap(int off, int cumulativeDiff) {
|
||||
|
|
|
@ -102,7 +102,7 @@ public final class Document implements java.io.Serializable {
|
|||
public final void removeField(String name) {
|
||||
Iterator<Fieldable> it = fields.iterator();
|
||||
while (it.hasNext()) {
|
||||
Fieldable field = (Fieldable)it.next();
|
||||
Fieldable field = it.next();
|
||||
if (field.name().equals(name)) {
|
||||
it.remove();
|
||||
return;
|
||||
|
@ -122,7 +122,7 @@ public final class Document implements java.io.Serializable {
|
|||
public final void removeFields(String name) {
|
||||
Iterator<Fieldable> it = fields.iterator();
|
||||
while (it.hasNext()) {
|
||||
Fieldable field = (Fieldable)it.next();
|
||||
Fieldable field = it.next();
|
||||
if (field.name().equals(name)) {
|
||||
it.remove();
|
||||
}
|
||||
|
@ -196,7 +196,7 @@ public final class Document implements java.io.Serializable {
|
|||
if (result.size() == 0)
|
||||
return NO_FIELDS;
|
||||
|
||||
return (Field[])result.toArray(new Field[result.size()]);
|
||||
return result.toArray(new Field[result.size()]);
|
||||
}
|
||||
|
||||
|
||||
|
@ -221,7 +221,7 @@ public final class Document implements java.io.Serializable {
|
|||
if (result.size() == 0)
|
||||
return NO_FIELDABLES;
|
||||
|
||||
return (Fieldable[])result.toArray(new Fieldable[result.size()]);
|
||||
return result.toArray(new Fieldable[result.size()]);
|
||||
}
|
||||
|
||||
|
||||
|
@ -244,7 +244,7 @@ public final class Document implements java.io.Serializable {
|
|||
if (result.size() == 0)
|
||||
return NO_STRINGS;
|
||||
|
||||
return (String[])result.toArray(new String[result.size()]);
|
||||
return result.toArray(new String[result.size()]);
|
||||
}
|
||||
|
||||
private final static byte[][] NO_BYTES = new byte[0][];
|
||||
|
@ -268,7 +268,7 @@ public final class Document implements java.io.Serializable {
|
|||
if (result.size() == 0)
|
||||
return NO_BYTES;
|
||||
|
||||
return (byte[][])result.toArray(new byte[result.size()][]);
|
||||
return result.toArray(new byte[result.size()][]);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -29,6 +29,8 @@ import org.apache.lucene.util.ArrayUtil;
|
|||
/** This is just a "splitter" class: it lets you wrap two
|
||||
* DocFieldConsumer instances as a single consumer. */
|
||||
|
||||
// TODO: Fix the unchecked collections, I do not understand the whole code here -- Uwe
|
||||
@SuppressWarnings("unchecked")
|
||||
final class DocFieldConsumers extends DocFieldConsumer {
|
||||
final DocFieldConsumer one;
|
||||
final DocFieldConsumer two;
|
||||
|
|
|
@ -400,10 +400,12 @@ final class DocumentsWriter {
|
|||
|
||||
/* Returns Collection of files in use by this instance,
|
||||
* including any flushed segments. */
|
||||
@SuppressWarnings("unchecked")
|
||||
synchronized List<String> openFiles() {
|
||||
return ( List<String>) ((ArrayList<String>) openFiles).clone();
|
||||
return (List<String>) ((ArrayList<String>) openFiles).clone();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
synchronized List<String> closedFiles() {
|
||||
return (List<String>) ((ArrayList<String>) closedFiles).clone();
|
||||
}
|
||||
|
|
|
@ -27,6 +27,8 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.Iterator;
|
||||
|
||||
// TODO: Fix the unchecked collections, I do not understand the whole code here -- Uwe
|
||||
@SuppressWarnings("unchecked")
|
||||
final class FreqProxTermsWriter extends TermsHashConsumer {
|
||||
|
||||
public TermsHashConsumerPerThread addThread(TermsHashPerThread perThread) {
|
||||
|
|
|
@ -35,6 +35,8 @@ import org.apache.lucene.search.Similarity;
|
|||
* merges all of these together into a single _X.nrm file.
|
||||
*/
|
||||
|
||||
// TODO: Fix the unchecked collections, I do not understand the whole code here -- Uwe
|
||||
@SuppressWarnings("unchecked")
|
||||
final class NormsWriter extends InvertedDocEndConsumer {
|
||||
|
||||
private static final byte defaultNorm = Similarity.encodeNorm(1.0f);
|
||||
|
|
|
@ -36,6 +36,8 @@ import org.apache.lucene.util.ArrayUtil;
|
|||
* under each term.
|
||||
*/
|
||||
|
||||
// TODO: Fix the unchecked collections, I do not understand the whole code here -- Uwe
|
||||
@SuppressWarnings("unchecked")
|
||||
final class TermsHash extends InvertedDocConsumer {
|
||||
|
||||
final TermsHashConsumer consumer;
|
||||
|
|
|
@ -406,10 +406,10 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@Override @SuppressWarnings("unchecked")
|
||||
public Object clone() {
|
||||
BooleanQuery clone = (BooleanQuery)super.clone();
|
||||
clone.clauses = (ArrayList<BooleanClause>)this.clauses.clone();
|
||||
clone.clauses = (ArrayList<BooleanClause>) this.clauses.clone();
|
||||
return clone;
|
||||
}
|
||||
|
||||
|
|
|
@ -214,10 +214,10 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
|
|||
|
||||
/** Create a shallow copy of us -- used in rewriting if necessary
|
||||
* @return a copy of us (but reuse, don't copy, our subqueries) */
|
||||
@Override
|
||||
@Override @SuppressWarnings("unchecked")
|
||||
public Object clone() {
|
||||
DisjunctionMaxQuery clone = (DisjunctionMaxQuery)super.clone();
|
||||
clone.disjuncts = (ArrayList<Query>)this.disjuncts.clone();
|
||||
clone.disjuncts = (ArrayList<Query>) this.disjuncts.clone();
|
||||
return clone;
|
||||
}
|
||||
|
||||
|
|
|
@ -45,7 +45,7 @@ public class FieldDoc extends ScoreDoc {
|
|||
* @see Sort
|
||||
* @see Searcher#search(Query,Filter,int,Sort)
|
||||
*/
|
||||
public Comparable[] fields;
|
||||
public Comparable<?>[] fields;
|
||||
|
||||
/** Expert: Creates one of these objects with empty sort information. */
|
||||
public FieldDoc (int doc, float score) {
|
||||
|
@ -53,7 +53,7 @@ public class FieldDoc extends ScoreDoc {
|
|||
}
|
||||
|
||||
/** Expert: Creates one of these objects with the given sort information. */
|
||||
public FieldDoc (int doc, float score, Comparable[] fields) {
|
||||
public FieldDoc (int doc, float score, Comparable<?>[] fields) {
|
||||
super (doc, score);
|
||||
this.fields = fields;
|
||||
}
|
||||
|
|
|
@ -167,7 +167,9 @@ extends PriorityQueue<FieldDoc> {
|
|||
break;
|
||||
}
|
||||
case SortField.CUSTOM:{
|
||||
c = docA.fields[i].compareTo (docB.fields[i]);
|
||||
// TODO: Use FieldComparator? This does not make sense!
|
||||
@SuppressWarnings("unchecked") final int temp =
|
||||
c = ((Comparable) docA.fields[i]).compareTo((Comparable) docB.fields[i]);
|
||||
break;
|
||||
}
|
||||
default:{
|
||||
|
|
|
@ -199,7 +199,7 @@ class MultiSearcherThread extends Thread {
|
|||
private int nDocs;
|
||||
private TopDocs docs;
|
||||
private int i;
|
||||
private PriorityQueue hq;
|
||||
private PriorityQueue<?> hq;
|
||||
private int[] starts;
|
||||
private IOException ioe;
|
||||
private Sort sort;
|
||||
|
@ -266,7 +266,7 @@ class MultiSearcherThread extends Thread {
|
|||
scoreDoc.doc += starts[i]; // convert doc
|
||||
//it would be so nice if we had a thread-safe insert
|
||||
synchronized (hq) {
|
||||
if (scoreDoc == hq.insertWithOverflow(scoreDoc))
|
||||
if (scoreDoc == ((HitQueue) hq).insertWithOverflow(scoreDoc))
|
||||
break;
|
||||
} // no more scores > minScore
|
||||
}
|
||||
|
|
|
@ -932,12 +932,12 @@ public abstract class Similarity implements Serializable {
|
|||
private static final class MethodSupport implements Serializable {
|
||||
final boolean overridesCollectionIDF, overridesTermIDF;
|
||||
|
||||
MethodSupport(Class clazz) {
|
||||
overridesCollectionIDF = isMethodOverridden(clazz, "idf", C_IDF_METHOD_PARAMS);
|
||||
overridesTermIDF = isMethodOverridden(clazz, "idf", T_IDF_METHOD_PARAMS);
|
||||
MethodSupport(Class<? extends Similarity> clazz) {
|
||||
overridesCollectionIDF = isMethodOverridden(clazz, "idf", Collection.class, Searcher.class);
|
||||
overridesTermIDF = isMethodOverridden(clazz, "idf", Term.class, Searcher.class);
|
||||
}
|
||||
|
||||
private static boolean isMethodOverridden(Class clazz, String name, Class[] params) {
|
||||
private static boolean isMethodOverridden(Class<?> clazz, String name, Class... params) {
|
||||
try {
|
||||
return clazz.getMethod(name, params).getDeclaringClass() != Similarity.class;
|
||||
} catch (NoSuchMethodException e) {
|
||||
|
@ -945,18 +945,14 @@ public abstract class Similarity implements Serializable {
|
|||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
/** @deprecated Remove this when old API is removed! */
|
||||
private static final Class[] T_IDF_METHOD_PARAMS = new Class[]{Term.class, Searcher.class};
|
||||
|
||||
/** @deprecated Remove this when old API is removed! */
|
||||
private static final Class[] C_IDF_METHOD_PARAMS = new Class[]{Collection.class, Searcher.class};
|
||||
}
|
||||
|
||||
/** @deprecated Remove this when old API is removed! */
|
||||
private static final IdentityHashMap<Class<? extends Similarity>,MethodSupport> knownMethodSupport = new IdentityHashMap();
|
||||
private static final IdentityHashMap<Class<? extends Similarity>,MethodSupport> knownMethodSupport
|
||||
= new IdentityHashMap<Class<? extends Similarity>,MethodSupport>();
|
||||
|
||||
/** @deprecated Remove this when old API is removed! */
|
||||
private static MethodSupport getSupportedMethods(Class clazz) {
|
||||
private static MethodSupport getSupportedMethods(Class<? extends Similarity> clazz) {
|
||||
MethodSupport supportedMethods;
|
||||
synchronized(knownMethodSupport) {
|
||||
supportedMethods = (MethodSupport) knownMethodSupport.get(clazz);
|
||||
|
|
|
@ -131,7 +131,7 @@ public class PayloadSpanUtil {
|
|||
}
|
||||
}
|
||||
|
||||
final List<Query>[] disjunctLists = new List[maxPosition + 1];
|
||||
@SuppressWarnings("unchecked") final List<Query>[] disjunctLists = new List[maxPosition + 1];
|
||||
int distinctPositions = 0;
|
||||
|
||||
for (int i = 0; i < termArrays.size(); ++i) {
|
||||
|
|
|
@ -271,7 +271,7 @@ public final class FieldCacheSanityChecker {
|
|||
* returned by obj.getFieldCacheKey()
|
||||
*/
|
||||
private List getAllDecendentReaderKeys(Object seed) {
|
||||
List all = new ArrayList(17); // will grow as we iter
|
||||
List<Object> all = new ArrayList<Object>(17); // will grow as we iter
|
||||
all.add(seed);
|
||||
for (int i = 0; i < all.size(); i++) {
|
||||
Object obj = all.get(i);
|
||||
|
|
Loading…
Reference in New Issue