LUCENE-3863: rename DocValues.type() to DocValues.getType()

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1299495 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2012-03-12 00:15:49 +00:00
parent 402496c8dd
commit b075b7466c
24 changed files with 40 additions and 40 deletions
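
For callers, the rename is purely mechanical: every type() call on a DocValues instance or on a DocValues.Source becomes getType(), with no change in behavior or return type. Below is a minimal caller-side sketch of the migrated pattern, assuming the trunk API at this revision; the reader variable, field name, and class name are illustrative placeholders, not part of this commit.

// Illustrative sketch only: "reader" is assumed to be an AtomicReader and
// "field" an indexed doc values field; both are placeholders.
import java.io.IOException;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DocValues;

public class GetTypeMigrationSketch {
  static void printType(AtomicReader reader, String field) throws IOException {
    DocValues dv = reader.docValues(field);
    if (dv != null) {
      // before this commit: dv.type() and dv.getSource().type()
      DocValues.Type type = dv.getType();                    // renamed accessor
      DocValues.Type sourceType = dv.getSource().getType();  // Source is renamed too
      System.out.println(field + " -> " + type + " / " + sourceType);
    }
  }
}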

View File

@@ -43,8 +43,8 @@ class MemoryIndexNormDocValues extends DocValues {
   }
 
   @Override
-  public Type type() {
-    return source.type();
+  public Type getType() {
+    return source.getType();
   }
 
   @Override

View File

@@ -235,7 +235,7 @@ class Lucene3xNormsProducer extends PerDocProducer {
   }
 
   @Override
-  public Type type() {
+  public Type getType() {
     return Type.FIXED_INTS_8;
   }

View File

@@ -363,7 +363,7 @@ public final class Bytes {
   }
 
   @Override
-  public Type type() {
+  public Type getType() {
     return type;
   }

View File

@@ -84,7 +84,7 @@ class FixedDerefBytesImpl {
   @Override
   public Source getDirectSource()
       throws IOException {
-    return new DirectFixedDerefSource(cloneData(), cloneIndex(), size, type());
+    return new DirectFixedDerefSource(cloneData(), cloneIndex(), size, getType());
   }
 
   @Override

View File

@@ -287,7 +287,7 @@ class FixedStraightBytesImpl {
   @Override
   public Source getDirectSource() throws IOException {
-    return new DirectFixedStraightSource(cloneData(), size, type());
+    return new DirectFixedStraightSource(cloneData(), size, getType());
   }
 
   @Override

View File

@@ -84,7 +84,7 @@ public class Floats {
   @Override
   protected boolean tryBulkMerge(DocValues docValues) {
     // only bulk merge if value type is the same otherwise size differs
-    return super.tryBulkMerge(docValues) && docValues.type() == template.type();
+    return super.tryBulkMerge(docValues) && docValues.getType() == template.getType();
   }
 
   @Override

View File

@@ -121,7 +121,7 @@ public final class Ints {
   @Override
   protected boolean tryBulkMerge(DocValues docValues) {
     // only bulk merge if value type is the same otherwise size differs
-    return super.tryBulkMerge(docValues) && docValues.type() == template.type();
+    return super.tryBulkMerge(docValues) && docValues.getType() == template.getType();
   }
 }

View File

@@ -210,7 +210,7 @@ class PackedIntValues {
   @Override
-  public Type type() {
+  public Type getType() {
     return Type.VAR_INTS;
   }

View File

@@ -105,7 +105,7 @@ class VarDerefBytesImpl {
   @Override
   public Source getDirectSource()
       throws IOException {
-    return new DirectVarDerefSource(cloneData(), cloneIndex(), type());
+    return new DirectVarDerefSource(cloneData(), cloneIndex(), getType());
   }
 }

View File

@@ -166,7 +166,7 @@ final class VarSortedBytesImpl {
   @Override
   public Source getDirectSource() throws IOException {
-    return new DirectSortedSource(cloneData(), cloneIndex(), comparator, type());
+    return new DirectSortedSource(cloneData(), cloneIndex(), comparator, getType());
   }
 }

View File

@@ -248,7 +248,7 @@ class VarStraightBytesImpl {
   @Override
   public Source getDirectSource()
       throws IOException {
-    return new DirectVarStraightSource(cloneData(), cloneIndex(), type());
+    return new DirectVarStraightSource(cloneData(), cloneIndex(), getType());
  }
 }

View File

@@ -200,7 +200,7 @@ public class SimpleTextPerDocProducer extends PerDocProducerBase {
   }
 
   @Override
-  public Type type() {
+  public Type getType() {
     return type;
   }

View File

@@ -1167,7 +1167,7 @@ public class CheckIndex {
       if (docValues == null) {
         throw new RuntimeException("field: " + fieldName + " omits docvalues but should have them!");
       }
-      DocValues.Type type = docValues.type();
+      DocValues.Type type = docValues.getType();
       if (type != expectedType) {
         throw new RuntimeException("field: " + fieldName + " has type: " + type + " but fieldInfos says:" + expectedType);
       }

View File

@@ -90,7 +90,7 @@ public abstract class DocValues implements Closeable {
   /**
    * Returns the {@link Type} of this {@link DocValues} instance
    */
-  public abstract Type type();
+  public abstract Type getType();
 
   /**
    * Closes this {@link DocValues} instance. This method should only be called
@@ -191,7 +191,7 @@ public abstract class DocValues implements Closeable {
     *
     * @return the {@link Type} of this source.
     */
-    public Type type() {
+    public Type getType() {
      return type;
    }
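
This file is the heart of the change: the abstract accessor on DocValues and the concrete one on its nested Source class are both renamed, so every subclass touched elsewhere in this patch simply overrides getType(), and a stored-type Source keeps returning its type field through the new name. Below is a rough sketch of that Source-side pattern as implied by the @@ -191 hunk alone; the field and constructor layout are assumptions for illustration, not the exact trunk class.

// Sketch of the pattern shown in the @@ -191 hunk above; not the real
// org.apache.lucene.index.DocValues.Source, just an illustration of the rename.
import org.apache.lucene.index.DocValues.Type;

abstract class SourceSketch {
  protected final Type type;   // assumed field, as implied by "return type;" above

  protected SourceSketch(Type type) {
    this.type = type;
  }

  /** Returns the Type of this source (was type() before this commit). */
  public Type getType() {
    return type;
  }
}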

View File

@@ -144,7 +144,7 @@ public class MultiDocValues extends DocValues {
         }
         final DocValues d = puller.pull(r, field);
         if (d != null) {
-          TypePromoter incoming = TypePromoter.create(d.type(), d.getValueSize());
+          TypePromoter incoming = TypePromoter.create(d.getType(), d.getValueSize());
           promotedType[0] = promotedType[0].promote(incoming);
         } else if (puller.stopLoadingOnNull(r, field)){
           promotedType[0] = TypePromoter.getIdentityPromoter(); // set to identity to return null
@@ -203,8 +203,8 @@ public class MultiDocValues extends DocValues {
   }
 
   @Override
-  public Type type() {
-    return emptySource.type();
+  public Type getType() {
+    return emptySource.getType();
   }
 
   @Override
@@ -230,8 +230,8 @@ public class MultiDocValues extends DocValues {
   }
 
   @Override
-  public Type type() {
-    return emptyFixedSource.type();
+  public Type getType() {
+    return emptyFixedSource.getType();
   }
 
   @Override
@@ -519,7 +519,7 @@ public class MultiDocValues extends DocValues {
     @Override
     public SortedSource asSortedSource() {
-      if (type() == Type.BYTES_FIXED_SORTED || type() == Type.BYTES_VAR_SORTED) {
+      if (getType() == Type.BYTES_FIXED_SORTED || getType() == Type.BYTES_VAR_SORTED) {
       }
       return super.asSortedSource();
@@ -586,7 +586,7 @@ public class MultiDocValues extends DocValues {
   }
 
   @Override
-  public Type type() {
+  public Type getType() {
     return type;
   }

View File

@@ -173,7 +173,7 @@ final class SegmentMerger {
   // returns an updated typepromoter (tracking type and size) given a previous one,
   // and a newly encountered docvalues
   private TypePromoter mergeDocValuesType(TypePromoter previous, DocValues docValues) {
-    TypePromoter incoming = TypePromoter.create(docValues.type(), docValues.getValueSize());
+    TypePromoter incoming = TypePromoter.create(docValues.getType(), docValues.getValueSize());
     if (previous == null) {
       previous = TypePromoter.getIdentityPromoter();
     }

View File

@@ -1640,7 +1640,7 @@ public abstract class FieldComparator<T> {
         // This means segment has doc values, but they are
         // not able to provide a sorted source; consider
         // this a hard error:
-        throw new IllegalStateException("DocValues exist for field \"" + field + "\", but not as a sorted source: type=" + dv.getSource().type() + " reader=" + context.reader());
+        throw new IllegalStateException("DocValues exist for field \"" + field + "\", but not as a sorted source: type=" + dv.getSource().getType() + " reader=" + context.reader());
       }
     }

View File

@@ -188,7 +188,7 @@ public class TestDocValues extends LuceneTestCase {
       DocValues r = Ints.getValues(dir, "test", 2, Type.VAR_INTS, newIOContext(random));
       Source source = getSource(r);
       assertEquals(i + " with min: " + minMax[i][0] + " max: " + minMax[i][1],
-          expectedTypes[i], source.type());
+          expectedTypes[i], source.getType());
       assertEquals(minMax[i][0], source.getInt(0));
       assertEquals(minMax[i][1], source.getInt(1));
@@ -368,7 +368,7 @@ public class TestDocValues extends LuceneTestCase {
     DocValues r = Ints.getValues(dir, "test", NUM_VALUES + additionalDocs, type, newIOContext(random));
     for (int iter = 0; iter < 2; iter++) {
       Source s = getSource(r);
-      assertEquals(type, s.type());
+      assertEquals(type, s.getType());
       for (int i = 0; i < NUM_VALUES; i++) {
         final long v = s.getInt(i);
         assertEquals("index " + i, values[i], v);

View File

@@ -83,7 +83,7 @@ public class TestCustomNorms extends LuceneTestCase {
     assertNotNull(normValues);
     Source source = normValues.getSource();
     assertTrue(source.hasArray());
-    assertEquals(Type.FLOAT_32, normValues.type());
+    assertEquals(Type.FLOAT_32, normValues.getType());
     float[] norms = (float[]) source.getArray();
     for (int i = 0; i < open.maxDoc(); i++) {
       Document document = open.document(i);

View File

@@ -432,7 +432,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       if (val == Type.VAR_INTS) {
       }
       Source source = docValues.getSource();
-      switch (source.type()) {
+      switch (source.getType()) {
       case FIXED_INTS_8:
       {
         assertTrue(source.hasArray());
@@ -492,7 +492,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       }
         break;
       default:
-        fail("unexpected value " + source.type());
+        fail("unexpected value " + source.getType());
       }
       r.close();
     }
@@ -515,7 +515,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       // getArray()
       Source source = docValues.getSource();
-      switch (source.type()) {
+      switch (source.getType()) {
       case BYTES_FIXED_STRAIGHT: {
        BytesRef ref = new BytesRef();
        if (source.hasArray()) {
@@ -529,7 +529,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
      }
        break;
      default:
-        fail("unexpected value " + source.type());
+        fail("unexpected value " + source.getType());
      }
      r.close();
      w.close();
@@ -549,7 +549,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     case 2:
       return values.getDirectSource();
     case 1:
-      if(values.type() == Type.BYTES_VAR_SORTED || values.type() == Type.BYTES_FIXED_SORTED) {
+      if(values.getType() == Type.BYTES_VAR_SORTED || values.getType() == Type.BYTES_FIXED_SORTED) {
        return values.getSource().asSortedSource();
      }
    default:

View File

@@ -562,7 +562,7 @@ public class TestDuelingCodecs extends LuceneTestCase {
   public void assertDocValues(DocValues leftDocValues, DocValues rightDocValues) throws Exception {
     assertNotNull(info, leftDocValues);
     assertNotNull(info, rightDocValues);
-    assertEquals(info, leftDocValues.type(), rightDocValues.type());
+    assertEquals(info, leftDocValues.getType(), rightDocValues.getType());
     assertEquals(info, leftDocValues.getValueSize(), rightDocValues.getValueSize());
     assertDocValuesSource(leftDocValues.getDirectSource(), rightDocValues.getDirectSource());
     assertDocValuesSource(leftDocValues.getSource(), rightDocValues.getSource());
@@ -572,8 +572,8 @@ public class TestDuelingCodecs extends LuceneTestCase {
    * checks source API
    */
   public void assertDocValuesSource(DocValues.Source left, DocValues.Source right) throws Exception {
-    DocValues.Type leftType = left.type();
-    assertEquals(info, leftType, right.type());
+    DocValues.Type leftType = left.getType();
+    assertEquals(info, leftType, right.getType());
     switch(leftType) {
       case VAR_INTS:
       case FIXED_INTS_8:

View File

@@ -96,7 +96,7 @@ public class TestNorms extends LuceneTestCase {
     assertNotNull(normValues);
     Source source = normValues.getSource();
     assertTrue(source.hasArray());
-    assertEquals(Type.FIXED_INTS_8, normValues.type());
+    assertEquals(Type.FIXED_INTS_8, normValues.getType());
     byte[] norms = (byte[]) source.getArray();
     for (int i = 0; i < open.maxDoc(); i++) {
       Document document = open.document(i);
@@ -155,7 +155,7 @@ public class TestNorms extends LuceneTestCase {
     assertNotNull(normValues);
     Source source = normValues.getSource();
     assertTrue(source.hasArray());
-    assertEquals(Type.FIXED_INTS_8, normValues.type());
+    assertEquals(Type.FIXED_INTS_8, normValues.getType());
     byte[] norms = (byte[]) source.getArray();
     for (int i = 0; i < mergedReader.maxDoc(); i++) {
       Document document = mergedReader.document(i);

View File

@@ -349,7 +349,7 @@ public class TestTypePromotion extends LuceneTestCase {
     DocValues docValues = children[0].reader().docValues("promote");
     assertNotNull(docValues);
     assertValues(TestType.Byte, dir, values);
-    assertEquals(Type.BYTES_VAR_STRAIGHT, docValues.type());
+    assertEquals(Type.BYTES_VAR_STRAIGHT, docValues.getType());
     reader.close();
     dir.close();
   }

View File

@@ -45,7 +45,7 @@ public class NumericIndexDocValueSource extends ValueSource {
   public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     final Source source = readerContext.reader().docValues(field)
         .getSource();
-    Type type = source.type();
+    Type type = source.getType();
     switch (type) {
     case FLOAT_32:
     case FLOAT_64: