LUCENE-4540: revert

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1432096 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2013-01-11 15:35:19 +00:00
parent faad008d7f
commit 0ec7cb6d71
3 changed files with 3 additions and 72 deletions

View File

@ -152,11 +152,6 @@ New Features
* LUCENE-4515: MemoryIndex now supports adding the same field multiple
  times. (Simon Willnauer)
* LUCENE-4540: Added an experimental Norm.setPackedLong, which allows
the use of VAR_INTS-encoded norms. This can be useful for cases where
you only need a few bits per-document, or where you might want exact
document length, and so on. (Robert Muir)
* LUCENE-4489: Added consumeAllTokens option to LimitTokenCountFilter
  (hossman, Robert Muir)

View File

@ -115,15 +115,6 @@ public final class Norm {
setType(Type.FIXED_INTS_64);
this.field.setLongValue(norm);
}
/**
* Sets a packed long norm value.
* @lucene.experimental
*/
public void setPackedLong(long norm) {
setType(Type.VAR_INTS);
this.field.setLongValue(norm);
}
/**
* Sets a byte norm value

View File

@ -22,7 +22,6 @@ import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
@ -31,12 +30,14 @@ import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.ExactSimScorer;
import org.apache.lucene.search.similarities.Similarity.SimWeight;
import org.apache.lucene.search.similarities.Similarity.SloppySimScorer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
/**
*
@ -86,39 +87,6 @@ public class TestCustomNorms extends LuceneTestCase {
dir.close();
docs.close();
}
public void testPackedNorms() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
config.setSimilarity(new PackedNormSimilarity());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
int num = _TestUtil.nextInt(random(), 1, 1000);
for (int i = 0; i < num; i++) {
Document doc = new Document();
doc.add(new StringField("len", Integer.toString(i), Field.Store.YES));
StringBuilder sb = new StringBuilder();
for (int j = 0; j < i; j++) {
sb.append(" token");
}
doc.add(new TextField("content", sb.toString(), Field.Store.NO));
writer.addDocument(doc);
}
DirectoryReader ir = writer.getReader();
writer.close();
for (AtomicReaderContext context : ir.leaves()) {
AtomicReader reader = context.reader();
DocValues norms = reader.normValues("content");
assertNotNull(norms);
Source source = norms.getSource();
assertEquals(Type.VAR_INTS, source.getType());
for (int i = 0; i < reader.maxDoc(); i++) {
assertEquals(source.getInt(i), Long.parseLong(reader.document(i).get("len")));
}
}
ir.close();
dir.close();
}
public void testExceptionOnRandomType() throws IOException {
Directory dir = newDirectory();
@ -334,28 +302,5 @@ public class TestCustomNorms extends LuceneTestCase {
throw new UnsupportedOperationException();
}
}
class PackedNormSimilarity extends Similarity {
@Override
public void computeNorm(FieldInvertState state, Norm norm) {
norm.setPackedLong(state.getLength());
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
throw new UnsupportedOperationException();
}
@Override
public ExactSimScorer exactSimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public SloppySimScorer sloppySimScorer(SimWeight weight, AtomicReaderContext context) throws IOException {
throw new UnsupportedOperationException();
}
}
}