allow the _all field to also be stored

This commit is contained in:
kimchy 2010-03-17 13:17:48 +02:00
parent d8ef200b4b
commit 4c13a9d548
14 changed files with 345 additions and 35 deletions
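
In short: the _all field was previously hard-wired to Field.Store.NO. This change threads a store setting through JsonAllFieldMapper and its Builder, builds the stored text via the new AllEntries.buildText() when storing is enabled, serializes the setting in toJson, and renames AllEntries.finishTexts() to reset() so entries can be replayed. The new lucene-fast-vector-highlighter dependency fits the same theme: fast vector highlighting reconstructs fragments from a stored field plus term vectors with positions and offsets.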

View File

@@ -4,6 +4,7 @@
<root url="jar://$GRADLE_REPOSITORY$/org.apache.lucene/lucene-core/jars/lucene-core-3.0.1.jar!/" />
<root url="jar://$GRADLE_REPOSITORY$/org.apache.lucene/lucene-analyzers/jars/lucene-analyzers-3.0.1.jar!/" />
<root url="jar://$GRADLE_REPOSITORY$/org.apache.lucene/lucene-queries/jars/lucene-queries-3.0.1.jar!/" />
<root url="jar://$GRADLE_REPOSITORY$/org.apache.lucene/lucene-fast-vector-highlighter/jars/lucene-fast-vector-highlighter-3.0.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>

View File

@@ -47,6 +47,7 @@ dependencies {
compile 'org.apache.lucene:lucene-core:3.0.1'
compile 'org.apache.lucene:lucene-analyzers:3.0.1'
compile 'org.apache.lucene:lucene-queries:3.0.1'
compile 'org.apache.lucene:lucene-fast-vector-highlighter:3.0.1'
compile('jgroups:jgroups:2.9.0.GA') { transitive = false }
compile('org.jboss.netty:netty:3.1.5.GA') { transitive = false }

View File

@@ -20,13 +20,16 @@
package org.elasticsearch.index.mapper.json;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.util.json.JsonBuilder;
import org.elasticsearch.util.lucene.Lucene;
import org.elasticsearch.util.lucene.all.AllAnalyzer;
import java.io.IOException;
@@ -61,6 +64,10 @@ public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFieldMapper
return this;
}
@Override public Builder store(Field.Store store) {
return super.store(store);
}
@Override public Builder termVector(Field.TermVector termVector) {
return super.termVector(termVector);
}
@@ -74,7 +81,7 @@ public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFieldMapper
}
@Override public JsonAllFieldMapper build(BuilderContext context) {
return new JsonAllFieldMapper(name, termVector, omitNorms, omitTermFreqAndPositions,
return new JsonAllFieldMapper(name, store, termVector, omitNorms, omitTermFreqAndPositions,
indexAnalyzer, searchAnalyzer, enabled);
}
}
@@ -82,13 +89,15 @@ public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFieldMapper
private boolean enabled;
private AllAnalyzer allAnalyzer;
public JsonAllFieldMapper() {
this(Defaults.NAME, Defaults.TERM_VECTOR, Defaults.OMIT_NORMS, Defaults.OMIT_TERM_FREQ_AND_POSITIONS, null, null, Defaults.ENABLED);
this(Defaults.NAME, Defaults.STORE, Defaults.TERM_VECTOR, Defaults.OMIT_NORMS, Defaults.OMIT_TERM_FREQ_AND_POSITIONS, null, null, Defaults.ENABLED);
}
protected JsonAllFieldMapper(String name, Field.TermVector termVector, boolean omitNorms, boolean omitTermFreqAndPositions,
protected JsonAllFieldMapper(String name, Field.Store store, Field.TermVector termVector, boolean omitNorms, boolean omitTermFreqAndPositions,
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, boolean enabled) {
super(new Names(name, name, name, name), Field.Index.ANALYZED, Field.Store.NO, termVector, 1.0f, omitNorms, omitTermFreqAndPositions,
super(new Names(name, name, name, name), Field.Index.ANALYZED, store, termVector, 1.0f, omitNorms, omitTermFreqAndPositions,
indexAnalyzer, searchAnalyzer);
this.enabled = enabled;
}
@@ -101,14 +110,30 @@ public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFieldMapper
if (!enabled) {
return null;
}
Analyzer analyzer = indexAnalyzer();
// reset the entries
jsonContext.allEntries().reset();
Analyzer analyzer = findAnalyzer(jsonContext.docMapper());
TokenStream tokenStream = allTokenStream(names.indexName(), jsonContext.allEntries(), analyzer);
if (stored()) {
// TODO when it's possible to pass char[] to a field, we can optimize
Field field = new Field(names.indexName(), jsonContext.allEntries().buildText(), store, index, termVector);
field.setTokenStream(tokenStream);
return field;
} else {
return new Field(names.indexName(), tokenStream, termVector);
}
}
private Analyzer findAnalyzer(DocumentMapper docMapper) {
Analyzer analyzer = indexAnalyzer;
if (analyzer == null) {
analyzer = jsonContext.docMapper().indexAnalyzer();
analyzer = docMapper.indexAnalyzer();
if (analyzer == null) {
analyzer = Lucene.STANDARD_ANALYZER;
}
}
return new Field(names.indexName(), allTokenStream(names.indexName(), jsonContext.allEntries().finishTexts(), analyzer), termVector);
return analyzer;
}
@Override public Void value(Fieldable field) {
@@ -138,6 +163,7 @@ public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFieldMapper
@Override public void toJson(JsonBuilder builder, Params params) throws IOException {
builder.startObject(JSON_TYPE);
builder.field("enabled", enabled);
builder.field("store", store.name().toLowerCase());
builder.field("termVector", termVector.name().toLowerCase());
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
builder.field("indexAnalyzer", indexAnalyzer.name());
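
The heart of the change is the stored() branch in parse(...) above: when _all is stored, the field keeps the concatenated text for retrieval while indexing still consumes the shared token stream. A minimal sketch of that Lucene 3.x pattern (storedAllField and tokens are illustrative names, not part of the commit):

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.document.Field;

    class StoredAllFieldSketch {
        // Store `text` for retrieval, but index the terms produced by `tokens`.
        static Field storedAllField(String text, TokenStream tokens) {
            Field field = new Field("_all", text,
                    Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO);
            field.setTokenStream(tokens); // indexing reads the stream, not the stored text
            return field;
        }
    }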

View File

@@ -24,6 +24,7 @@ import org.apache.lucene.document.Document;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.util.Nullable;
import org.elasticsearch.util.Preconditions;
@@ -58,9 +59,9 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
private JsonAllFieldMapper allFieldMapper = new JsonAllFieldMapper();
private Analyzer indexAnalyzer;
private NamedAnalyzer indexAnalyzer;
private Analyzer searchAnalyzer;
private NamedAnalyzer searchAnalyzer;
private final JsonObjectMapper rootObjectMapper;
@@ -107,7 +108,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
return this;
}
public Builder indexAnalyzer(Analyzer indexAnalyzer) {
public Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return this;
}
@@ -116,7 +117,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
return indexAnalyzer != null;
}
public Builder searchAnalyzer(Analyzer searchAnalyzer) {
public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return this;
}
@@ -393,6 +394,6 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
}
@Override public void toJson(JsonBuilder builder, Params params) throws IOException {
rootObjectMapper.toJson(builder, params);
rootObjectMapper.toJson(builder, params, allFieldMapper);
}
}
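
Note the Analyzer-to-NamedAnalyzer switch in the builder: keeping the analyzer's name around is what lets mappers emit "indexAnalyzer"/"searchAnalyzer" entries when serializing (see the indexAnalyzer.name() check in JsonAllFieldMapper.toJson above).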

View File

@@ -411,6 +411,10 @@ public class JsonObjectMapper implements JsonMapper, JsonIncludeInAllMapper {
}
@Override public void toJson(JsonBuilder builder, Params params) throws IOException {
toJson(builder, params, null);
}
public void toJson(JsonBuilder builder, Params params, JsonMapper... additionalMappers) throws IOException {
builder.startObject(name);
builder.field("type", JSON_TYPE);
builder.field("dynamic", dynamic);
@@ -434,6 +438,11 @@ public class JsonObjectMapper implements JsonMapper, JsonIncludeInAllMapper {
mapper.toJson(builder, params);
}
}
if (additionalMappers != null) {
for (JsonMapper mapper : additionalMappers) {
mapper.toJson(builder, params);
}
}
if (!mappers.isEmpty()) {
builder.startObject("properties");
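
The new varargs overload is what routes the _all mapper through serialization: JsonDocumentMapper.toJson passes allFieldMapper here, so the built mapping source now round-trips the enabled and store settings. The reparse-with-store test below depends on exactly this.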

View File

@@ -0,0 +1,28 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.io;
import java.io.Reader;
/**
* @author kimchy (shay.banon)
*/
public abstract class CharSequenceReader extends Reader implements CharSequence {
}
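
CharSequenceReader is a small but load-bearing marker type: because each entry's reader is also a CharSequence, AllEntries.buildText() below can pass it straight to Writer.append(CharSequence) instead of draining the reader character by character.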

View File

@@ -28,11 +28,32 @@ import java.util.Arrays;
/**
A class similar to {@link java.io.CharArrayWriter} that allows retrieving the underlying <tt>char[]</tt> buffer.
*
* @author kimchy (Shay Banon)
* @author kimchy (shay.banon)
*/
@NotThreadSafe
public class FastCharArrayWriter extends Writer {
/**
A thread local based cache of {@link FastCharArrayWriter}.
*/
public static class Cached {
private static final ThreadLocal<FastCharArrayWriter> cache = new ThreadLocal<FastCharArrayWriter>() {
@Override protected FastCharArrayWriter initialValue() {
return new FastCharArrayWriter();
}
};
/**
Returns the cached thread local writer, with its internal buffer cleared.
*/
public static FastCharArrayWriter cached() {
FastCharArrayWriter os = cache.get();
os.reset();
return os;
}
}
/**
* The buffer where data is stored.
*/
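
A usage sketch of the new thread-local cache (a hypothetical call site; AllEntries.buildText() below follows the same pattern):

    import org.elasticsearch.util.io.FastCharArrayWriter;

    class CachedWriterSketch {
        public static void main(String[] args) throws Exception {
            // Reuse a per-thread buffer instead of allocating one per document.
            FastCharArrayWriter writer = FastCharArrayWriter.Cached.cached(); // returned reset
            writer.write("first entry");
            writer.append(' ').append("second entry");
            System.out.println(writer); // prints: first entry second entry
            // Do not hand the instance to another thread: it is thread-local state.
        }
    }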

View File

@@ -27,10 +27,11 @@ import java.io.Reader;
/**
A character stream whose source is a string. It is <b>not thread safe</b>.
*
* @author kimchy (Shay Banon)
@author kimchy (shay.banon)
*/
@NotThreadSafe
public class FastStringReader extends Reader {
public class FastStringReader extends CharSequenceReader {
private String str;
private int length;
@@ -55,6 +56,18 @@ public class FastStringReader extends Reader {
throw new IOException("Stream closed");
}
@Override public int length() {
return length;
}
@Override public char charAt(int index) {
return str.charAt(index);
}
@Override public CharSequence subSequence(int start, int end) {
return str.subSequence(start, end);
}
/**
* Reads a single character.
*

View File

@@ -0,0 +1,59 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.lucene.all;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Fieldable;
import java.io.IOException;
import java.io.Reader;
/**
An analyzer for the _all field that wraps a delegate analyzer.
*
* @author kimchy (shay.banon)
*/
public class AllAnalyzer extends Analyzer {
private final Analyzer analyzer;
public AllAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
@Override public TokenStream tokenStream(String fieldName, Reader reader) {
AllEntries allEntries = (AllEntries) reader;
return new AllTokenFilter(analyzer.tokenStream(fieldName, reader), allEntries);
}
@Override public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException {
AllEntries allEntries = (AllEntries) reader;
return new AllTokenFilter(analyzer.reusableTokenStream(fieldName, reader), allEntries);
}
@Override public int getPositionIncrementGap(String fieldName) {
return analyzer.getPositionIncrementGap(fieldName);
}
@Override public int getOffsetGap(Fieldable field) {
return analyzer.getOffsetGap(field);
}
}
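
How the pieces fit together: AllEntries extends Reader, so it is handed directly to the analyzer as its input, and both tokenStream methods cast it back to recover per-entry boosts for AllTokenFilter. A minimal sketch of the wiring (illustrative, not committed code):

    import org.apache.lucene.analysis.TokenStream;
    import org.elasticsearch.util.lucene.Lucene;
    import org.elasticsearch.util.lucene.all.AllAnalyzer;
    import org.elasticsearch.util.lucene.all.AllEntries;

    class AllAnalyzerSketch {
        static TokenStream allStream() {
            AllEntries entries = new AllEntries();
            entries.addText("field1", "something", 1.0f);
            entries.addText("field2", "else", 2.0f); // boosted entry
            entries.reset(); // position on the first entry before tokenizing
            // AllEntries is itself the Reader passed to the analyzer.
            return new AllAnalyzer(Lucene.STANDARD_ANALYZER).tokenStream("_all", entries);
        }
    }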

View File

@@ -20,6 +20,9 @@
package org.elasticsearch.util.lucene.all;
import com.google.common.collect.Lists;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.util.io.CharSequenceReader;
import org.elasticsearch.util.io.FastCharArrayWriter;
import org.elasticsearch.util.io.FastStringReader;
import java.io.IOException;
@@ -37,10 +40,10 @@ public class AllEntries extends Reader {
public static class Entry {
private final String name;
private final Reader reader;
private final CharSequenceReader reader;
private final float boost;
public Entry(String name, Reader reader, float boost) {
public Entry(String name, CharSequenceReader reader, float boost) {
this.name = name;
this.reader = reader;
this.boost = boost;
@@ -54,7 +57,7 @@ public class AllEntries extends Reader {
return this.boost;
}
public Reader reader() {
public CharSequenceReader reader() {
return this.reader;
}
}
@@ -79,13 +82,31 @@ public class AllEntries extends Reader {
itsSeparatorTime = false;
}
public AllEntries finishTexts() {
public void reset() {
try {
for (Entry entry : entries) {
entry.reader().reset();
}
} catch (IOException e) {
throw new ElasticSearchIllegalStateException("should not happen");
}
it = entries.iterator();
if (it.hasNext()) {
current = it.next();
itsSeparatorTime = true;
}
return this;
}
public String buildText() {
reset();
FastCharArrayWriter writer = FastCharArrayWriter.Cached.cached();
for (Entry entry : entries) {
writer.append(entry.reader());
writer.append(' ');
}
reset();
return writer.toString();
}
public List<Entry> entries() {
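
buildText() is deliberately bracketed by reset() calls: the first rewinds every entry reader before concatenation, the second rewinds them again so the very same entries can still be streamed through the analyzer for indexing. Without the second reset, a stored _all field would index an already-drained stream.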

View File

@@ -0,0 +1,62 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.deps.lucene;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.util.lucene.Lucene;
import org.testng.annotations.Test;
import static org.elasticsearch.util.lucene.DocumentBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (shay.banon)
*/
@Test
public class VectorHighlighterTests {
@Test public void testVectorHighlighter() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.addDocument(doc().add(field("_id", "1")).add(field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)).build());
IndexReader reader = indexWriter.getReader();
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
assertThat(topDocs.totalHits, equalTo(1));
FastVectorHighlighter highlighter = new FastVectorHighlighter();
String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))),
reader, topDocs.scoreDocs[0].doc, "content", 30);
System.out.println(fragment);
}
}
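
The test exercises the lucene-fast-vector-highlighter dependency added above; note the field is indexed with TermVector.WITH_POSITIONS_OFFSETS, which the highlighter requires. As committed it only prints the fragment; a stricter variant might end with assertions such as (hypothetical, relying on the test's existing static imports):

    assertThat(fragment, notNullValue());
    assertThat(fragment, containsString("bad"));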

View File

@@ -27,10 +27,10 @@ import org.elasticsearch.index.mapper.json.JsonDocumentMapper;
import org.elasticsearch.index.mapper.json.JsonDocumentMapperParser;
import org.elasticsearch.util.lucene.all.AllEntries;
import org.elasticsearch.util.lucene.all.AllTokenFilter;
import org.hamcrest.MatcherAssert;
import org.testng.annotations.Test;
import static org.elasticsearch.util.io.Streams.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
@@ -46,9 +46,9 @@ public class SimpleAllMapperTests {
Document doc = docMapper.parse(json).doc();
Field field = doc.getField("_all");
AllEntries allEntries = ((AllTokenFilter) field.tokenStreamValue()).allEntries();
MatcherAssert.assertThat(allEntries.fields().size(), equalTo(2));
MatcherAssert.assertThat(allEntries.fields().contains("name.last"), equalTo(true));
MatcherAssert.assertThat(allEntries.fields().contains("simple1"), equalTo(true));
assertThat(allEntries.fields().size(), equalTo(2));
assertThat(allEntries.fields().contains("name.last"), equalTo(true));
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
}
@Test public void testSimpleAllMappersWithReparse() throws Exception {
@@ -63,8 +63,43 @@ public class SimpleAllMapperTests {
Field field = doc.getField("_all");
AllEntries allEntries = ((AllTokenFilter) field.tokenStreamValue()).allEntries();
MatcherAssert.assertThat(allEntries.fields().size(), equalTo(2));
MatcherAssert.assertThat(allEntries.fields().contains("name.last"), equalTo(true));
MatcherAssert.assertThat(allEntries.fields().contains("simple1"), equalTo(true));
assertThat(allEntries.fields().size(), equalTo(2));
assertThat(allEntries.fields().contains("name.last"), equalTo(true));
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
}
@Test public void testSimpleAllMappersWithStore() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/all/store-mapping.json");
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(mapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/all/test1.json");
Document doc = docMapper.parse(json).doc();
Field field = doc.getField("_all");
AllEntries allEntries = ((AllTokenFilter) field.tokenStreamValue()).allEntries();
assertThat(allEntries.fields().size(), equalTo(2));
assertThat(allEntries.fields().contains("name.last"), equalTo(true));
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
String text = field.stringValue();
assertThat(text, equalTo(allEntries.buildText()));
}
@Test public void testSimpleAllMappersWithReparseWithStore() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/all/store-mapping.json");
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(mapping);
String builtMapping = docMapper.buildSource();
System.out.println(builtMapping);
// reparse it
JsonDocumentMapper builtDocMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(builtMapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/all/test1.json");
Document doc = builtDocMapper.parse(json).doc();
Field field = doc.getField("_all");
AllEntries allEntries = ((AllTokenFilter) field.tokenStreamValue()).allEntries();
assertThat(allEntries.fields().size(), equalTo(2));
assertThat(allEntries.fields().contains("name.last"), equalTo(true));
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
String text = field.stringValue();
assertThat(text, equalTo(allEntries.buildText()));
}
}

View File

@@ -0,0 +1,33 @@
{
person : {
allField : {enabled : true, store : "yes"},
properties : {
name : {
type : "object",
dynamic : false,
properties : {
first : {type : "string", store : "yes", includeInAll : false},
last : {type : "string", index : "not_analyzed"}
}
},
address : {
type : "object",
includeInAll : false,
properties : {
first : {
properties : {
location : {type : "string", store : "yes", indexName : "firstLocation"}
}
},
last : {
properties : {
location : {type : "string"}
}
}
}
},
simple1 : {type : "long", includeInAll : true},
simple2 : {type : "long", includeInAll : false}
}
}
}

View File

@@ -49,7 +49,7 @@ public class SimpleAllTests {
AllEntries allEntries = new AllEntries();
allEntries.addText("field1", "something", 1.0f);
allEntries.addText("field2", "else", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -59,7 +59,7 @@
allEntries = new AllEntries();
allEntries.addText("field1", "else", 1.0f);
allEntries.addText("field2", "something", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -91,7 +91,7 @@ public class SimpleAllTests {
AllEntries allEntries = new AllEntries();
allEntries.addText("field1", "something", 1.0f);
allEntries.addText("field2", "else", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -101,7 +101,7 @@
allEntries = new AllEntries();
allEntries.addText("field1", "else", 2.0f);
allEntries.addText("field2", "something", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -134,7 +134,7 @@ public class SimpleAllTests {
AllEntries allEntries = new AllEntries();
allEntries.addText("field1", "something moo", 1.0f);
allEntries.addText("field2", "else koo", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -144,7 +144,7 @@
allEntries = new AllEntries();
allEntries.addText("field1", "else koo", 1.0f);
allEntries.addText("field2", "something moo", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -186,7 +186,7 @@ public class SimpleAllTests {
AllEntries allEntries = new AllEntries();
allEntries.addText("field1", "something moo", 1.0f);
allEntries.addText("field2", "else koo", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);
@@ -196,7 +196,7 @@
allEntries = new AllEntries();
allEntries.addText("field1", "else koo", 2.0f);
allEntries.addText("field2", "something moo", 1.0f);
allEntries.finishTexts();
allEntries.reset();
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
indexWriter.addDocument(doc);