clean up some nocommits

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1414722 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2012-11-28 14:23:28 +00:00
parent ad50720096
commit fdce30e099
16 changed files with 199 additions and 80 deletions

View File

@ -45,6 +45,9 @@ public final class SimpleTextCodec extends Codec {
// TODO: need a plain-text impl (using the above)
private final NormsFormat normsFormat = new SimpleTextNormsFormat();
private final LiveDocsFormat liveDocs = new SimpleTextLiveDocsFormat();
// nocommit rename
private final SimpleDocValuesFormat simpleDVFormat = new SimpleTextSimpleDocValuesFormat();
public SimpleTextCodec() {
super("SimpleText");
@ -90,11 +93,8 @@ public final class SimpleTextCodec extends Codec {
return liveDocs;
}
// nocommit;
private final SimpleDocValuesFormat nocommit = new SimpleTextSimpleDocValuesFormat();
@Override
public SimpleDocValuesFormat simpleDocValuesFormat() {
// nocommit
return nocommit;
return simpleDVFormat;
}
}
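
Illustrative only, not part of this patch: the renamed simpleDVFormat field is reached through the codec lookup that TestDuelingCodecs (further down in this diff) already uses; the snippet below is a minimal sketch of that path.

// Sketch: look the codec up by name and ask it for the doc values format wired in above.
Codec codec = Codec.forName("SimpleText");
SimpleDocValuesFormat dvFormat = codec.simpleDocValuesFormat();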

View File

@ -268,7 +268,6 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
};
}
// nocommit
@Override
public SortedDocValuesConsumer addSortedField(FieldInfo field, final int valueCount, boolean fixedLength, final int maxLength) throws IOException {
assert fieldSeen(field.name);
@ -466,7 +465,6 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
field.ordPattern = stripPrefix(ORDPATTERN);
field.dataStartFilePointer = data.getFilePointer();
data.seek(data.getFilePointer() + (9+field.pattern.length()+field.maxLength) * field.numValues + (1+field.ordPattern.length())*maxDoc);
// nocommit: we need to seek past the data section!!!!
} else {
throw new AssertionError();
}
@ -491,9 +489,9 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
@Override
public long get(int docID) {
try {
// nocommit bounds check docID? spooky
// because if we don't you can maybe get
// value from the wrong field ...
if (docID < 0 || docID >= maxDoc) {
throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc-1) + "; got " + docID);
}
in.seek(field.dataStartFilePointer + (1+field.pattern.length())*docID);
SimpleTextUtil.readLine(in, scratch);
//System.out.println("parsing delta: " + scratch.utf8ToString());
@ -544,9 +542,9 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
@Override
public void get(int docID, BytesRef result) {
try {
// nocommit bounds check docID? spooky
// because if we don't you can maybe get
// value from the wrong field ...
if (docID < 0 || docID >= maxDoc) {
throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc-1) + "; got " + docID);
}
in.seek(field.dataStartFilePointer + (9+field.pattern.length() + field.maxLength)*docID);
SimpleTextUtil.readLine(in, scratch);
assert StringHelper.startsWith(scratch, LENGTH);
@ -554,7 +552,6 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
try {
len = decoder.parse(new String(scratch.bytes, scratch.offset + LENGTH.length, scratch.length - LENGTH.length, "UTF-8")).intValue();
} catch (ParseException pe) {
// nocommit add message
CorruptIndexException e = new CorruptIndexException("failed to parse int length");
e.initCause(pe);
throw e;
@ -564,7 +561,6 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
result.length = len;
in.readBytes(result.bytes, 0, len);
} catch (IOException ioe) {
// nocommit should .get() just throw IOE...
throw new RuntimeException(ioe);
}
}
@ -602,6 +598,9 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
return new SortedDocValues() {
@Override
public int getOrd(int docID) {
if (docID < 0 || docID >= maxDoc) {
throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc-1) + "; got " + docID);
}
try {
in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + docID * (1 + field.ordPattern.length()));
SimpleTextUtil.readLine(in, scratch);
@ -613,7 +612,6 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
throw e;
}
} catch (IOException ioe) {
// nocommit should .get() just throw IOE...
throw new RuntimeException(ioe);
}
}
@ -621,6 +619,9 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
@Override
public void lookupOrd(int ord, BytesRef result) {
try {
if (ord < 0 || ord >= field.numValues) {
throw new IndexOutOfBoundsException("ord must be 0 .. " + (field.numValues-1) + "; got " + ord);
}
in.seek(field.dataStartFilePointer + ord * (9 + field.pattern.length() + field.maxLength));
SimpleTextUtil.readLine(in, scratch);
assert StringHelper.startsWith(scratch, LENGTH);
@ -637,7 +638,6 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
result.length = len;
in.readBytes(result.bytes, 0, len);
} catch (IOException ioe) {
// nocommit should .get() just throw IOE...
throw new RuntimeException(ioe);
}
}
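
The bounds checks added above matter because each reader computes its seek target purely from the docID and fixed per-entry widths, so an unchecked docID would seek straight into another field's data region. A minimal sketch of that addressing follows; the helper names are assumptions and the constants simply mirror the factors used by the readers above (the sorted case follows the same idea with an extra numValues * entrySize offset).

// Sketch only, not the actual SimpleText reader class.
static long numericEntryOffset(long dataStart, int patternLength, int docID, int maxDoc) {
  checkDocID(docID, maxDoc);
  // one formatted value plus newline per document, as in the numeric get(docID) above
  return dataStart + (1 + patternLength) * (long) docID;
}

static long binaryEntryOffset(long dataStart, int patternLength, int maxLength, int docID, int maxDoc) {
  checkDocID(docID, maxDoc);
  // fixed per-entry overhead plus formatted length plus padded payload, mirroring
  // the (9 + pattern.length() + maxLength) factor in get(docID, BytesRef) above
  return dataStart + (9 + patternLength + maxLength) * (long) docID;
}

static void checkDocID(int docID, int maxDoc) {
  if (docID < 0 || docID >= maxDoc) {
    throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc - 1) + "; got " + docID);
  }
}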

View File

@ -163,13 +163,19 @@ public abstract class AtomicReader extends IndexReader {
*/
public abstract DocValues docValues(String field) throws IOException;
// nocommit javadocs
/** Returns {@link NumericDocValues} for this field, or
* null if no {@link NumericDocValues} were indexed for
* this field. */
public abstract NumericDocValues getNumericDocValues(String field) throws IOException;
// nocommit javadocs
/** Returns {@link BinaryDocValues} for this field, or
* null if no {@link BinaryDocValues} were indexed for
* this field. */
public abstract BinaryDocValues getBinaryDocValues(String field) throws IOException;
// nocommit javadocs
/** Returns {@link SortedDocValues} for this field, or
* null if no {@link SortedDocValues} were indexed for
* this field. */
public abstract SortedDocValues getSortedDocValues(String field) throws IOException;
/**

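A hedged usage sketch of the accessors documented above: each getter returns null when the field indexed no doc values of that type, so callers check before use. The helper and field names below are assumptions, not part of this patch.

// Usage sketch only.
static void readDocValues(AtomicReader reader, int docID) throws IOException {
  NumericDocValues prices = reader.getNumericDocValues("price");
  if (prices != null) {
    long price = prices.get(docID);
  }
  BinaryDocValues titles = reader.getBinaryDocValues("title");
  if (titles != null) {
    BytesRef scratch = new BytesRef();
    titles.get(docID, scratch);          // fills scratch with the stored bytes
  }
  SortedDocValues categories = reader.getSortedDocValues("category");
  if (categories != null) {
    int ord = categories.getOrd(docID);  // per-document ordinal into the sorted value space
  }
}
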
View File

@ -19,9 +19,8 @@ package org.apache.lucene.index;
import org.apache.lucene.util.BytesRef;
// nocommit need marker interface?
public abstract class BinaryDocValues {
// nocommit throws IOE or not?
public abstract void get(int docID, BytesRef result);
public static final byte[] MISSING = new byte[0];

View File

@ -606,7 +606,6 @@ public class CheckIndex {
// Test Term Vectors
segInfoStat.termVectorStatus = testTermVectors(fieldInfos, info, reader, nf);
// nocommit re-enable
segInfoStat.docValuesStatus = testDocValues(info, fieldInfos, reader);
// Rethrow the first exception we encountered

View File

@ -18,7 +18,6 @@ package org.apache.lucene.index;
*/
public abstract class NumericDocValues {
// nocommit throws IOE or not?
public abstract long get(int docID);
public abstract long minValue();

View File

@ -90,19 +90,19 @@ public final class SlowCompositeReaderWrapper extends AtomicReader {
@Override
public NumericDocValues getNumericDocValues(String field) throws IOException {
// nocommit todo
ensureOpen();
return null;
}
@Override
public BinaryDocValues getBinaryDocValues(String field) throws IOException {
// nocommit todo
ensureOpen();
return null;
}
@Override
public SortedDocValues getSortedDocValues(String field) throws IOException {
// nocommit todo
ensureOpen();
return null;
}

View File

@ -23,16 +23,11 @@ import java.util.Comparator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
// nocommit need marker interface?
public abstract class SortedDocValues extends BinaryDocValues {
// nocommit throws IOE or not?
public abstract int getOrd(int docID);
// nocommit throws IOE or not?
public abstract void lookupOrd(int ord, BytesRef result);
// nocommit throws IOE or not?
// nocommit .getUniqueValueCount?
public abstract int getValueCount();
@Override
@ -249,7 +244,13 @@ public abstract class SortedDocValues extends BinaryDocValues {
}
}
// nocommit javadocs
/** If {@code key} exists, returns its ordinal, else
* returns {@code -insertionPoint-1}, like {@code
* Arrays.binarySearch}.
*
* @param key Key to look up
* @param spare Spare BytesRef
**/
public int lookupTerm(BytesRef key, BytesRef spare) {
int low = 0;

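A short usage sketch for the lookupTerm contract documented above; the helper name and key value are assumptions.

// Sketch only: interpreting lookupTerm's Arrays.binarySearch-style return value.
static void findTerm(SortedDocValues sortedValues, BytesRef key) {
  BytesRef spare = new BytesRef();
  int ord = sortedValues.lookupTerm(key, spare);
  if (ord >= 0) {
    sortedValues.lookupOrd(ord, spare);  // exact hit: spare now holds the matching term's bytes
  } else {
    int insertionPoint = -ord - 1;       // ordinal of the first term that sorts after key
  }
}
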
View File

@ -44,7 +44,6 @@ import org.apache.lucene.util.RamUsageEstimator;
*
* @lucene.internal
*/
// nocommit abstract class...?
public interface FieldCache {
public static abstract class Bytes {

View File

@ -405,8 +405,6 @@ class FieldCacheImpl implements FieldCache {
return (byte) ramInstance.get(docID);
}
};
// nocommit should we throw exc if parser isn't
// null? if setDocsWithField is true?
} else {
int maxDoc = reader.maxDoc();
@ -489,8 +487,6 @@ class FieldCacheImpl implements FieldCache {
return (short) ramInstance.get(docID);
}
};
// nocommit should we throw exc if parser isn't
// null? if setDocsWithField is true?
} else {
int maxDoc = reader.maxDoc();
final short[] values;
@ -570,8 +566,6 @@ class FieldCacheImpl implements FieldCache {
return (int) ramInstance.get(docID);
}
};
// nocommit should we throw exc if parser isn't
// null? if setDocsWithField is true?
} else {
final int[] values;
final IntParser parser = (IntParser) key.custom;
@ -723,8 +717,6 @@ class FieldCacheImpl implements FieldCache {
return Float.intBitsToFloat((int) ramInstance.get(docID));
}
};
// nocommit should we throw exc if parser isn't
// null? if setDocsWithField is true?
} else {
final float[] values;
final FloatParser parser = (FloatParser) key.custom;
@ -813,8 +805,6 @@ class FieldCacheImpl implements FieldCache {
return ramInstance.get(docID);
}
};
// nocommit should we throw exc if parser isn't
// null? if setDocsWithField is true?
} else {
final long[] values;
final LongParser parser = (LongParser) key.custom;
@ -903,8 +893,6 @@ class FieldCacheImpl implements FieldCache {
return Double.longBitsToDouble(ramInstance.get(docID));
}
};
// nocommit should we throw exc if parser isn't
// null? if setDocsWithField is true?
} else {
final double[] values;
final DoubleParser parser = (DoubleParser) key.custom;
@ -954,12 +942,16 @@ class FieldCacheImpl implements FieldCache {
private final PackedInts.Reader termOrdToBytesOffset;
private final PackedInts.Reader docToTermOrd;
private final int numOrd;
private final int maxLength;
private final boolean isFixedLength;
public SortedDocValuesImpl(PagedBytes.Reader bytes, PackedInts.Reader termOrdToBytesOffset, PackedInts.Reader docToTermOrd, int numOrd) {
public SortedDocValuesImpl(PagedBytes.Reader bytes, PackedInts.Reader termOrdToBytesOffset, PackedInts.Reader docToTermOrd, int numOrd, int maxLength, boolean isFixedLength) {
this.bytes = bytes;
this.docToTermOrd = docToTermOrd;
this.termOrdToBytesOffset = termOrdToBytesOffset;
this.numOrd = numOrd;
this.maxLength = maxLength;
this.isFixedLength = isFixedLength;
}
@Override
@ -989,15 +981,13 @@ class FieldCacheImpl implements FieldCache {
}
@Override
public int maxLength() {
// nocommit hmm
throw new UnsupportedOperationException();
public boolean isFixedLength() {
return isFixedLength;
}
@Override
public boolean isFixedLength() {
// nocommit hmm
throw new UnsupportedOperationException();
public int maxLength() {
return maxLength;
}
@Override
@ -1188,7 +1178,7 @@ class FieldCacheImpl implements FieldCache {
termCountHardLimit = maxDoc+1;
}
// nocommit use Uninvert?
// TODO: use Uninvert?
if (terms != null) {
// Try for coarse estimate for number of bits; this
// should be an underestimate most of the time, which
@ -1222,7 +1212,10 @@ class FieldCacheImpl implements FieldCache {
int termOrd = 0;
// nocommit use Uninvert?
int sameLength = -2;
int maxLength = -1;
// TODO: use Uninvert?
if (terms != null) {
final TermsEnum termsEnum = terms.iterator(null);
@ -1233,6 +1226,12 @@ class FieldCacheImpl implements FieldCache {
if (term == null) {
break;
}
if (sameLength == -2) {
sameLength = term.length;
} else if (sameLength != term.length) {
sameLength = -1;
}
maxLength = Math.max(maxLength, term.length);
if (termOrd >= termCountHardLimit) {
break;
}
@ -1262,7 +1261,7 @@ class FieldCacheImpl implements FieldCache {
}
// maybe an int-only impl?
return new SortedDocValuesImpl(bytes.freeze(true), termOrdToBytesOffset.getMutable(), docToTermOrd.getMutable(), termOrd);
return new SortedDocValuesImpl(bytes.freeze(true), termOrdToBytesOffset.getMutable(), docToTermOrd.getMutable(), termOrd, maxLength, sameLength >= 0);
}
}
}
@ -1270,10 +1269,14 @@ class FieldCacheImpl implements FieldCache {
private static class BinaryDocValuesImpl extends BinaryDocValues {
private final PagedBytes.Reader bytes;
private final PackedInts.Reader docToOffset;
private final int maxLength;
private final boolean isFixedLength;
public BinaryDocValuesImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset) {
public BinaryDocValuesImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset, int maxLength, boolean isFixedLength) {
this.bytes = bytes;
this.docToOffset = docToOffset;
this.maxLength = maxLength;
this.isFixedLength = isFixedLength;
}
@Override
@ -1295,14 +1298,12 @@ class FieldCacheImpl implements FieldCache {
@Override
public boolean isFixedLength() {
// nocommit hmm
throw new UnsupportedOperationException();
return isFixedLength;
}
@Override
public int maxLength() {
// nocommit hmm
throw new UnsupportedOperationException();
return maxLength;
}
}
@ -1363,6 +1364,9 @@ class FieldCacheImpl implements FieldCache {
// pointer==0 means not set
bytes.copyUsingLengthPrefix(new BytesRef());
int sameLength = -2;
int maxLength = -1;
if (terms != null) {
int termCount = 0;
final TermsEnum termsEnum = terms.iterator(null);
@ -1379,6 +1383,12 @@ class FieldCacheImpl implements FieldCache {
if (term == null) {
break;
}
if (sameLength == -2) {
sameLength = term.length;
} else if (sameLength != term.length) {
sameLength = -1;
}
maxLength = Math.max(maxLength, term.length);
final long pointer = bytes.copyUsingLengthPrefix(term);
docs = termsEnum.docs(null, docs, 0);
while (true) {
@ -1392,7 +1402,7 @@ class FieldCacheImpl implements FieldCache {
}
// maybe an int-only impl?
return new BinaryDocValuesImpl(bytes.freeze(true), docToOffset.getMutable());
return new BinaryDocValuesImpl(bytes.freeze(true), docToOffset.getMutable(), maxLength, sameLength >= 0);
}
}
}
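
The sameLength/maxLength bookkeeping added to both uninvert loops above follows one small pattern; here is a standalone sketch of just that logic, with an assumed method signature.

// Sketch of the detection used above: sameLength is -2 until the first term is seen,
// then that term's length, and -1 as soon as any later term differs. A final value
// >= 0 therefore means every term had the same length (isFixedLength).
static boolean allSameLength(Iterable<BytesRef> terms) {
  int sameLength = -2;
  int maxLength = -1;
  for (BytesRef term : terms) {
    if (sameLength == -2) {
      sameLength = term.length;
    } else if (sameLength != term.length) {
      sameLength = -1;
    }
    maxLength = Math.max(maxLength, term.length);  // reported via maxLength() in the impls above
  }
  return sameLength >= 0;
}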

View File

@ -30,7 +30,6 @@ import java.util.TreeSet;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
@ -65,10 +64,6 @@ public class TestDuelingCodecs extends LuceneTestCase {
// as this gives the best overall coverage. when we have more
// codecs we should probably pick 2 from Codec.availableCodecs()
// TODO: it would also be nice to support preflex, but it doesn't
// support a lot of the current feature set (docvalues, statistics)
// so this would make assertEquals complicated.
leftCodec = Codec.forName("SimpleText");
rightCodec = new RandomCodec(random());
leftDir = newDirectory();
@ -522,6 +517,7 @@ public class TestDuelingCodecs extends LuceneTestCase {
}
for (String field : leftFields) {
// nocommit cutover to per-segment comparison
DocValues leftNorms = MultiDocValues.getNormDocValues(leftReader, field);
DocValues rightNorms = MultiDocValues.getNormDocValues(rightReader, field);
if (leftNorms != null && rightNorms != null) {
@ -609,6 +605,7 @@ public class TestDuelingCodecs extends LuceneTestCase {
assertEquals(info, leftValues, rightValues);
for (String field : leftValues) {
// nocommit cutover to per-segment comparison
DocValues leftDocValues = MultiDocValues.getDocValues(leftReader, field);
DocValues rightDocValues = MultiDocValues.getDocValues(rightReader, field);
if (leftDocValues != null && rightDocValues != null) {

View File

@ -32,6 +32,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
@ -427,4 +428,46 @@ public class TestFieldCache extends LuceneTestCase {
}
assertFalse(failed.get());
}
public void testMaxFixedLength() throws Exception {
Directory d = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
int fixedLength = random().nextBoolean() ? -1 : _TestUtil.nextInt(random(), 1, 10);
int numTerms = atLeast(10);
int actualFixedLength = -2;
int actualMaxLength = -1;
for(int termIDX=0;termIDX<numTerms;termIDX++) {
Document doc = new Document();
String termString;
if (fixedLength == -1) {
termString = _TestUtil.randomRealisticUnicodeString(random());
} else {
termString = _TestUtil.randomRealisticUnicodeString(random(), fixedLength, fixedLength);
}
BytesRef term = new BytesRef(termString);
actualMaxLength = Math.max(actualMaxLength, term.length);
if (actualFixedLength == -2) {
actualFixedLength = term.length;
} else if (actualFixedLength != term.length) {
actualFixedLength = -1;
}
doc.add(newField("term", termString, StringField.TYPE_NOT_STORED));
w.addDocument(doc);
}
w.forceMerge(1);
IndexReader r = w.getReader();
w.close();
AtomicReader subR = r.leaves().get(0).reader();
BinaryDocValues values = FieldCache.DEFAULT.getTerms(subR, "term");
assertEquals(actualFixedLength >= 0, values.isFixedLength());
assertEquals(actualMaxLength, values.maxLength());
SortedDocValues sortedValues = FieldCache.DEFAULT.getTermsIndex(subR, "term");
assertEquals(actualFixedLength >= 0, sortedValues.isFixedLength());
assertEquals(actualMaxLength, sortedValues.maxLength());
// No field cache insanity because close purges FC entries:
r.close();
d.close();
}
}

View File

@ -48,7 +48,6 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.StorableField;
@ -131,9 +130,6 @@ public class TestSort extends LuceneTestCase {
dirs.add(indexStore);
RandomIndexWriter writer = new RandomIndexWriter(random(), indexStore, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
// nocommit remove:
((LogMergePolicy) writer.w.getConfig().getMergePolicy()).setUseCompoundFile(false);
final DocValues.Type stringDVType;
if (dvStringSorted) {
// Index sorted
@ -253,9 +249,7 @@ public class TestSort extends LuceneTestCase {
}
String numFixed = getRandomCharString(fixedLen, 48, 52);
// nocommit shouldn't this be tracer_fixed? how is
// this passing?
doc.add (new Field ("fixed_tracer", numFixed, onlyStored));
doc.add (new Field ("tracer_fixed", numFixed, onlyStored));
//doc.add (new Field ("contents", Integer.toString(i), Field.Store.NO, Field.Index.ANALYZED));
doc.add(new StringField("string_fixed", numFixed, Field.Store.NO));
doc.add(new SortedBytesDocValuesField("string_fixed", new BytesRef(numFixed), true));
@ -272,8 +266,6 @@ public class TestSort extends LuceneTestCase {
writer.addDocument (doc);
}
// nocommit
//writer.forceMerge(1);
//System.out.println(writer.getSegmentCount());
writer.close();
IndexReader reader = DirectoryReader.open(indexStore);

View File

@ -25,11 +25,12 @@ import org.junit.runner.Result;
public class TestFailIfDirectoryNotClosed extends WithNestedTests {
public TestFailIfDirectoryNotClosed() {
super(true);
// nocommit true
super(false);
}
public static class Nested1 extends WithNestedTests.AbstractNestedTest {
public void testDummy() {
public void testDummy() throws Exception {
Directory dir = newDirectory();
System.out.println(dir.toString());
}

View File

@ -0,0 +1,76 @@
package org.apache.lucene.util.junitcompat;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
public class TestFailOnFieldCacheInsanity extends WithNestedTests {
public TestFailOnFieldCacheInsanity() {
super(true);
}
public static class Nested1 extends WithNestedTests.AbstractNestedTest {
private Directory d;
private IndexReader r;
private AtomicReader subR;
private void makeIndex() throws Exception {
d = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), d);
Document doc = new Document();
doc.add(newField("ints", "1", StringField.TYPE_NOT_STORED));
w.addDocument(doc);
w.forceMerge(1);
r = w.getReader();
w.close();
subR = r.leaves().get(0).reader();
}
public void testDummy() throws Exception {
makeIndex();
assertNotNull(FieldCache.DEFAULT.getTermsIndex(subR, "ints"));
assertNotNull(FieldCache.DEFAULT.getTerms(subR, "ints"));
// NOTE: do not close reader/directory, else it
// purges FC entries
}
}
@Test
public void testFailOnFieldCacheInsanity() {
Result r = JUnitCore.runClasses(Nested1.class);
boolean insane = false;
for(Failure f : r.getFailures()) {
if (f.getMessage().indexOf("Insane") != -1) {
insane = true;
}
}
Assert.assertTrue(insane);
}
}

View File

@ -739,17 +739,14 @@ public class MemoryIndex {
return new FieldInfos(fieldInfos.values().toArray(new FieldInfo[fieldInfos.size()]));
}
// nocommit todo
public NumericDocValues getNumericDocValues(String field) {
return null;
}
// nocommit todo
public BinaryDocValues getBinaryDocValues(String field) {
return null;
}
// nocommit todo
public SortedDocValues getSortedDocValues(String field) {
return null;
}