mirror of https://github.com/apache/lucene.git
- LUCENE-726: removed the use of a deprecated method
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@479458 13f79535-47bb-0310-9956-ffa450edef68
parent 694dceaa33
commit fe47121a67
CHANGES.txt
@@ -35,6 +35,10 @@ Changes in runtime behavior
     StringIndexOutOfBoundsException was thrown.
     (Michael Busch via Erik Hatcher)
 
+ 7. LUCENE-726: Removed the use of deprecated doc.fields() method and
+    Enumeration.
+    (Michael Busch via Otis Gospodnetic)
+
 New features
 
  1. LUCENE-503: New ThaiAnalyzer and ThaiWordFilter in contrib/analyzers

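Every source file touched by this commit follows the same pattern: the deprecated Document.fields(), which returns a java.util.Enumeration, is replaced by walking the List returned by Document.getFields() with a java.util.Iterator. A minimal before/after sketch of that pattern (the println inside each loop is a placeholder for the real per-field work, not code from this commit):

import java.util.Enumeration;
import java.util.Iterator;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;

class FieldIterationSketch {

  // Old style: the deprecated Document.fields() returns an Enumeration.
  static void enumerateFields(Document doc) {
    Enumeration fields = doc.fields();
    while (fields.hasMoreElements()) {
      Fieldable field = (Fieldable) fields.nextElement();
      System.out.println(field.name());   // placeholder for per-field work
    }
  }

  // New style: Document.getFields() returns a List, walked with an Iterator.
  static void iterateFields(Document doc) {
    Iterator fieldIterator = doc.getFields().iterator();
    while (fieldIterator.hasNext()) {
      Fieldable field = (Fieldable) fieldIterator.next();
      System.out.println(field.name());   // placeholder for per-field work
    }
  }
}

The raw Iterator and the explicit cast to Fieldable match the pre-generics style used throughout the diffs below.
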
DocumentWriter.java
@@ -33,6 +33,7 @@ import java.io.StringReader;
 import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.Hashtable;
+import java.util.Iterator;
 
 final class DocumentWriter {
   private Analyzer analyzer;
@@ -128,9 +129,9 @@ final class DocumentWriter {
   // Tokenizes the fields of a document into Postings.
   private final void invertDocument(Document doc)
           throws IOException {
-    Enumeration fields = doc.fields();
-    while (fields.hasMoreElements()) {
-      Fieldable field = (Fieldable) fields.nextElement();
+    Iterator fieldIterator = doc.getFields().iterator();
+    while (fieldIterator.hasNext()) {
+      Fieldable field = (Fieldable) fieldIterator.next();
       String fieldName = field.name();
       int fieldNumber = fieldInfos.fieldNumber(fieldName);
 

FieldsWriter.java
@@ -18,11 +18,10 @@ package org.apache.lucene.index;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.util.Enumeration;
 import java.util.Iterator;
 import java.util.zip.Deflater;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexOutput;
@@ -54,17 +53,17 @@ final class FieldsWriter
     indexStream.writeLong(fieldsStream.getFilePointer());
 
     int storedCount = 0;
-    Enumeration fields = doc.fields();
-    while (fields.hasMoreElements()) {
-      Fieldable field = (Fieldable) fields.nextElement();
+    Iterator fieldIterator = doc.getFields().iterator();
+    while (fieldIterator.hasNext()) {
+      Fieldable field = (Fieldable) fieldIterator.next();
       if (field.isStored())
         storedCount++;
     }
     fieldsStream.writeVInt(storedCount);
 
-    fields = doc.fields();
-    while (fields.hasMoreElements()) {
-      Fieldable field = (Fieldable) fields.nextElement();
+    fieldIterator = doc.getFields().iterator();
+    while (fieldIterator.hasNext()) {
+      Fieldable field = (Fieldable) fieldIterator.next();
       // if the field as an instanceof FieldsReader.FieldForMerge, we're in merge mode
       // and field.binaryValue() already returns the compressed value for a field
       // with isCompressed()==true, so we disable compression in that case

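In FieldsWriter the fields are walked twice, first to count the stored fields for the writeVInt() header and then to write them, so the new code obtains a fresh Iterator from doc.getFields() before each pass, just as the old code called doc.fields() twice. A minimal sketch of that two-pass shape, assuming a hypothetical writeField() helper in place of the real serialization code:

import java.util.Iterator;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;

class TwoPassSketch {

  void addDocument(Document doc) {
    // First pass: count the fields that will actually be stored.
    int storedCount = 0;
    Iterator fieldIterator = doc.getFields().iterator();
    while (fieldIterator.hasNext()) {
      Fieldable field = (Fieldable) fieldIterator.next();
      if (field.isStored())
        storedCount++;
    }
    System.out.println("stored fields: " + storedCount);  // stands in for fieldsStream.writeVInt(storedCount)

    // Second pass: a fresh Iterator is needed, since the first one is exhausted.
    fieldIterator = doc.getFields().iterator();
    while (fieldIterator.hasNext()) {
      Fieldable field = (Fieldable) fieldIterator.next();
      if (field.isStored())
        writeField(field);                                 // hypothetical helper, not part of this commit
    }
  }

  // Hypothetical stand-in for the real per-field serialization in FieldsWriter.
  private void writeField(Fieldable field) {
    System.out.println(field.name() + " = " + field.stringValue());
  }
}
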
ParallelReader.java
@@ -155,9 +155,9 @@ public class ParallelReader extends IndexReader {
       }
     }
     if (include) {
-      Enumeration fields = reader.document(n, fieldSelector).fields();
-      while (fields.hasMoreElements()) {
-        result.add((Fieldable)fields.nextElement());
+      Iterator fieldIterator = reader.document(n, fieldSelector).getFields().iterator();
+      while (fieldIterator.hasNext()) {
+        result.add((Fieldable)fieldIterator.next());
       }
     }
   }