make lucene document creation allow documents to be indexed twice
commit fe0f9ebc9d
parent c7cb353956
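Background for this change: in the Lucene version used here, the numeric field mappers attached a pre-built NumericTokenStream to each Field via setTokenStream(...). That stream is consumed during the first IndexWriter.addDocument(...) pass, so a Document built that way could not be indexed a second time (as the percolator and the new DoubleIndexingDocTest below need). This commit instead returns Fieldable implementations that build a fresh stream on every tokenStreamValue() call. A simplified, hypothetical sketch of that pattern (ReusableLongField is not part of the commit):

    import java.io.Reader;

    import org.apache.lucene.analysis.NumericTokenStream;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.document.AbstractField;
    import org.apache.lucene.document.Field;

    public class ReusableLongField extends AbstractField {

        private final long value;

        public ReusableLongField(String name, long value) {
            super(name, Field.Store.NO, Field.Index.ANALYZED_NO_NORMS, Field.TermVector.NO);
            this.value = value;
        }

        // A fresh stream per call lets the same Document be indexed twice.
        @Override public TokenStream tokenStreamValue() {
            return new NumericTokenStream().setLongValue(value);
        }

        @Override public String stringValue() {
            return null;
        }

        @Override public Reader readerValue() {
            return null;
        }
    }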

@@ -0,0 +1,121 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.benchmark.percolator;
+
+import org.elasticsearch.common.StopWatch;
+import org.elasticsearch.common.inject.Injector;
+import org.elasticsearch.common.inject.ModulesBuilder;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsModule;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexNameModule;
+import org.elasticsearch.index.analysis.AnalysisModule;
+import org.elasticsearch.index.cache.IndexCacheModule;
+import org.elasticsearch.index.engine.IndexEngineModule;
+import org.elasticsearch.index.mapper.MapperServiceModule;
+import org.elasticsearch.index.percolator.PercolatorModule;
+import org.elasticsearch.index.percolator.PercolatorService;
+import org.elasticsearch.index.query.IndexQueryParserModule;
+import org.elasticsearch.index.settings.IndexSettingsModule;
+import org.elasticsearch.index.similarity.SimilarityModule;
+import org.elasticsearch.script.ScriptModule;
+
+import java.util.concurrent.CountDownLatch;
+
+import static org.elasticsearch.index.query.xcontent.QueryBuilders.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class EmbeddedPercolatorBenchmarkTest {
+
+    private static long NUMBER_OF_ITERATIONS = 100000;
+    private static int NUMBER_OF_THREADS = 10;
+    private static int NUMBER_OF_QUERIES = 100;
+
+    public static void main(String[] args) throws Exception {
+        Settings settings = ImmutableSettings.settingsBuilder()
+                .put("index.cache.filter.type", "none")
+                .build();
+        Index index = new Index("test");
+        Injector injector = new ModulesBuilder().add(
+                new SettingsModule(settings),
+                new ScriptModule(),
+                new MapperServiceModule(),
+                new IndexSettingsModule(settings),
+                new IndexCacheModule(settings),
+                new AnalysisModule(settings),
+                new IndexEngineModule(settings),
+                new SimilarityModule(settings),
+                new IndexQueryParserModule(settings),
+                new IndexNameModule(index),
+                new PercolatorModule()
+        ).createInjector();
+
+        final PercolatorService percolatorService = injector.getInstance(PercolatorService.class);
+
+        XContentBuilder doc = XContentFactory.jsonBuilder().startObject()
+                .field("field1", 1)
+                .field("field2", "value")
+                .field("field3", "the quick brown fox jumped over the lazy dog")
+                .endObject();
+        final byte[] source = doc.copiedBytes();
+
+        PercolatorService.Response percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
+
+        for (int i = 0; i < NUMBER_OF_QUERIES; i++) {
+            percolatorService.addQuery("test" + i, termQuery("field3", "quick"));
+        }
+
+
+        System.out.println("Warming Up (1000)");
+        StopWatch stopWatch = new StopWatch().start();
+        System.out.println("Running " + 1000);
+        for (long i = 0; i < 1000; i++) {
+            percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
+        }
+        System.out.println("[Warmup] Percolated in " + stopWatch.stop().totalTime() + " TP Millis " + (NUMBER_OF_ITERATIONS / stopWatch.totalTime().millisFrac()));
+
+        System.out.println("Percolating using " + NUMBER_OF_THREADS + " threads with " + NUMBER_OF_ITERATIONS + " iterations, and " + NUMBER_OF_QUERIES + " queries");
+        final CountDownLatch latch = new CountDownLatch(NUMBER_OF_THREADS);
+        Thread[] threads = new Thread[NUMBER_OF_THREADS];
+        for (int i = 0; i < threads.length; i++) {
+            threads[i] = new Thread(new Runnable() {
+                @Override public void run() {
+                    for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
+                        PercolatorService.Response percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
+                    }
+                    latch.countDown();
+                }
+            });
+        }
+        stopWatch = new StopWatch().start();
+        for (Thread thread : threads) {
+            thread.start();
+        }
+        latch.await();
+        stopWatch.stop();
+        System.out.println("Percolated in " + stopWatch.totalTime() + " TP Millis " + ((NUMBER_OF_ITERATIONS * NUMBER_OF_THREADS) / stopWatch.totalTime().millisFrac()));
+
+    }
+}
@@ -19,6 +19,8 @@
 
 package org.elasticsearch.index.mapper;
 
+import org.elasticsearch.common.xcontent.XContentParser;
+
 /**
  * @author kimchy (shay.banon)
  */
@@ -30,6 +32,10 @@ public class SourceToParse {
 
     private final byte[] source;
 
+    private final XContentParser parser;
+
+    private boolean flyweight = false;
+
     private String type;
 
     private String id;
@@ -40,8 +46,18 @@ public class SourceToParse {
 
     private String parentId;
 
+    public SourceToParse(XContentParser parser) {
+        this.parser = parser;
+        this.source = null;
+    }
+
     public SourceToParse(byte[] source) {
         this.source = source;
+        this.parser = null;
     }
 
+    public XContentParser parser() {
+        return this.parser;
+    }
+
     public byte[] source() {
@@ -57,6 +73,15 @@
         return this;
     }
 
+    public SourceToParse flyweight(boolean flyweight) {
+        this.flyweight = flyweight;
+        return this;
+    }
+
+    public boolean flyweight() {
+        return this.flyweight;
+    }
+
     public String id() {
         return this.id;
     }
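The new parser-based constructor lets a caller hand SourceToParse an already open XContentParser, in which case XContentDocumentMapper (further down in this commit) reuses it instead of creating one, and leaves closing it to the caller. A small illustrative sketch of the two modes (hypothetical helper, not part of the commit):

    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.index.mapper.SourceToParse;

    public class SourceToParseModes {

        // Mapper creates the parser from the bytes and closes it itself.
        public static SourceToParse fromBytes(byte[] jsonBytes) {
            return new SourceToParse(jsonBytes);
        }

        // Caller owns the parser lifecycle; the mapper will not close it.
        public static SourceToParse fromParser(byte[] jsonBytes) throws Exception {
            XContentParser parser = XContentFactory.xContent(jsonBytes).createParser(jsonBytes);
            return new SourceToParse(parser);
        }
    }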
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -145,22 +146,21 @@ public class BoostFieldMapper extends NumberFieldMapper<Float> implements org.el
         super.parse(context);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
-        float value = parseFloatValue(context);
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
+        final float value = parseFloatValue(context);
         if (Float.isNaN(value)) {
             return null;
         }
         context.doc().setBoost(value);
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.floatToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setFloatValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setFloatValue(value));
-        }
-        return field;
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.floatToBytes(value) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setFloatValue(value);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     private float parseFloatValue(ParseContext context) throws IOException {
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -176,7 +177,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         String dateAsString;
         if (context.externalValueSet()) {
             dateAsString = (String) context.externalValue();
@@ -198,17 +199,16 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
             context.allEntries().addText(names.fullName(), dateAsString, boost);
         }
 
-        long value = parseStringValue(dateAsString);
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.longToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setLongValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setLongValue(value));
-        }
-        return field;
+        final long value = parseStringValue(dateAsString);
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.longToBytes(value) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setLongValue(value);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -149,7 +150,7 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         double value;
         if (context.externalValueSet()) {
             Object externalValue = context.externalValue();
@@ -181,16 +182,16 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
             }
         }
 
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.doubleToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setDoubleValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setDoubleValue(value));
-        }
-        return field;
+        final double fValue = value;
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.doubleToBytes(fValue) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setDoubleValue(fValue);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -148,7 +149,7 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         float value;
         if (context.externalValueSet()) {
             Object externalValue = context.externalValue();
@@ -180,16 +181,16 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
             }
         }
 
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.floatToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setFloatValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setFloatValue(value));
-        }
-        return field;
+        final float fValue = value;
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.floatToBytes(fValue) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setFloatValue(fValue);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -148,7 +149,7 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         int value;
         if (context.externalValueSet()) {
             Object externalValue = context.externalValue();
@@ -180,16 +181,16 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
             }
         }
 
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.intToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setIntValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setIntValue(value));
-        }
-        return field;
+        final int fValue = value;
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.intToBytes(fValue) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setIntValue(fValue);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -148,7 +149,7 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         long value;
         if (context.externalValueSet()) {
             Object externalValue = context.externalValue();
@@ -180,16 +181,16 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
             }
         }
 
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.longToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setLongValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setLongValue(value));
-        }
-        return field;
+        final long fValue = value;
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.longToBytes(value) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setLongValue(fValue);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -21,11 +21,13 @@ package org.elasticsearch.index.mapper.xcontent;
 
 import org.apache.lucene.analysis.NumericTokenStream;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.AbstractField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.StringHelper;
 import org.elasticsearch.common.thread.ThreadLocals;
 import org.elasticsearch.common.trove.TIntObjectHashMap;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -34,6 +36,7 @@ import org.elasticsearch.index.field.data.FieldDataType;
 import org.elasticsearch.index.mapper.MergeMappingException;
 
 import java.io.IOException;
+import java.io.Reader;
 import java.util.ArrayDeque;
 import java.util.Deque;
 
@@ -255,4 +258,35 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
             return this;
         }
     }
+
+    // used so we can use a numeric field in a document that is then parsed twice!
+    protected abstract static class CustomNumericField extends AbstractField {
+
+        public CustomNumericField(String name, boolean indexed, byte[] value) {
+            this.name = StringHelper.intern(name); // field names are interned
+            fieldsData = value;
+
+            isIndexed = indexed;
+            isTokenized = indexed;
+            omitTermFreqAndPositions = true;
+            omitNorms = true;
+
+            if (value != null) {
+                isStored = true;
+                isBinary = true;
+                binaryLength = value.length;
+                binaryOffset = 0;
+            }
+
+            setStoreTermVector(Field.TermVector.NO);
+        }
+
+        @Override public String stringValue() {
+            return null;
+        }
+
+        @Override public Reader readerValue() {
+            return null;
+        }
+    }
 }
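CustomNumericField carries the stored binary value (when the field is stored) but defers token stream creation to the subclass's tokenStreamValue(), which each mapper above implements by popping a stream from a per-thread cache via popCachedStream(precisionStep) (the ThreadLocals and TIntObjectHashMap imports suggest a thread-local map keyed by precision step). Roughly, that cache contract looks like the following sketch; this is hypothetical code, the real popCachedStream implementation differs:

    import java.util.ArrayDeque;
    import java.util.Deque;

    import org.apache.lucene.analysis.NumericTokenStream;

    public final class NumericStreamCache {

        private final int precisionStep;

        // One small stack of reusable streams per thread for this precision step.
        private final ThreadLocal<Deque<NumericTokenStream>> cache =
                new ThreadLocal<Deque<NumericTokenStream>>() {
                    @Override protected Deque<NumericTokenStream> initialValue() {
                        return new ArrayDeque<NumericTokenStream>();
                    }
                };

        public NumericStreamCache(int precisionStep) {
            this.precisionStep = precisionStep;
        }

        // Pop a cached stream, or create a fresh one for this precision step.
        public NumericTokenStream pop() {
            NumericTokenStream stream = cache.get().poll();
            return stream != null ? stream : new NumericTokenStream(precisionStep);
        }

        public void push(NumericTokenStream stream) {
            cache.get().add(stream);
        }
    }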
@@ -56,6 +56,8 @@ public class ParseContext {
 
     private String id;
 
+    private boolean flyweight;
+
     private DocumentMapper.ParseListener listener;
 
     private String uid;
@@ -81,7 +83,7 @@
         this.path = path;
     }
 
-    public void reset(XContentParser parser, Document document, String type, byte[] source, DocumentMapper.ParseListener listener) {
+    public void reset(XContentParser parser, Document document, String type, byte[] source, boolean flyweight, DocumentMapper.ParseListener listener) {
         this.parser = parser;
         this.document = document;
         this.analyzer = null;
@@ -89,6 +91,7 @@
         this.id = null;
         this.type = type;
         this.source = source;
+        this.flyweight = flyweight;
         this.path.reset();
         this.parsedIdState = ParsedIdState.NO;
         this.mappersAdded = false;
@@ -97,6 +100,10 @@
         this.ignoredValues.clear();
     }
 
+    public boolean flyweight() {
+        return this.flyweight;
+    }
+
     public XContentDocumentMapperParser docMapperParser() {
         return this.docMapperParser;
     }
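The flyweight flag marks a document that is parsed only to build in-memory Lucene fields and is never actually stored in an index: SourceFieldMapper (below) skips emitting the _source field for it, and XContentDocumentMapper tolerates a missing _id. This is exactly what the percolator later in this commit relies on. A minimal sketch of how a caller opts in, mirroring the percolator's call (the helper class itself is hypothetical):

    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.ParsedDocument;

    import static org.elasticsearch.index.mapper.SourceToParse.*;

    public class FlyweightParseExample {

        // Parse a transient document: no _source field, no _id required.
        public static ParsedDocument parseTransient(DocumentMapper docMapper, byte[] docBytes) {
            return docMapper.parse(source(docBytes).type("type1").flyweight(true));
        }
    }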
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -148,7 +149,7 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         short value;
         if (context.externalValueSet()) {
             Object externalValue = context.externalValue();
@@ -179,17 +180,16 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
                 }
             }
         }
 
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.shortToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setIntValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setIntValue(value));
-        }
-        return field;
+        final short fValue = value;
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.shortToBytes(fValue) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setIntValue(fValue);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -112,6 +112,9 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements or
         if (!enabled) {
             return null;
         }
+        if (context.flyweight()) {
+            return null;
+        }
         byte[] data = context.source();
         if (compress != null && compress && !LZFDecoder.isCompressed(data)) {
             if (compressThreshold == -1 || data.length > compressThreshold) {
@@ -376,18 +376,20 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
         }
         source.type(this.type);
 
-        XContentParser parser = null;
+        XContentParser parser = source.parser();
         try {
-            if (LZFDecoder.isCompressed(source.source())) {
-                BytesStreamInput siBytes = new BytesStreamInput(source.source());
-                LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
-                XContentType contentType = XContentFactory.xContentType(siLzf);
-                siLzf.resetToBufferStart();
-                parser = XContentFactory.xContent(contentType).createParser(siLzf);
-            } else {
-                parser = XContentFactory.xContent(source.source()).createParser(source.source());
+            if (parser == null) {
+                if (LZFDecoder.isCompressed(source.source())) {
+                    BytesStreamInput siBytes = new BytesStreamInput(source.source());
+                    LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
+                    XContentType contentType = XContentFactory.xContentType(siLzf);
+                    siLzf.resetToBufferStart();
+                    parser = XContentFactory.xContent(contentType).createParser(siLzf);
+                } else {
+                    parser = XContentFactory.xContent(source.source()).createParser(source.source());
+                }
             }
-            context.reset(parser, new Document(), type, source.source(), listener);
+            context.reset(parser, new Document(), type, source.source(), source.flyweight(), listener);
 
             // will result in START_OBJECT
             XContentParser.Token token = parser.nextToken();
@@ -431,12 +433,24 @@
 
         // if we did not get the id, we need to parse the uid into the document now, after it was added
        if (source.id() == null) {
-            uidFieldMapper.parse(context);
+            if (context.id() == null) {
+                if (!source.flyweight()) {
+                    throw new MapperParsingException("No id found while parsing the content source");
+                }
+            } else {
+                uidFieldMapper.parse(context);
+            }
         }
         if (context.parsedIdState() != ParseContext.ParsedIdState.PARSED) {
-            // mark it as external, so we can parse it
-            context.parsedId(ParseContext.ParsedIdState.EXTERNAL);
-            idFieldMapper.parse(context);
+            if (context.id() == null) {
+                if (!source.flyweight()) {
+                    throw new MapperParsingException("No id mapping with [_id] found in the content, and not explicitly set");
+                }
+            } else {
+                // mark it as external, so we can parse it
+                context.parsedId(ParseContext.ParsedIdState.EXTERNAL);
+                idFieldMapper.parse(context);
+            }
         }
         if (parentFieldMapper != null) {
             context.externalValue(source.parent());
@@ -449,14 +463,15 @@
         } catch (IOException e) {
             throw new MapperParsingException("Failed to parse", e);
         } finally {
-            if (parser != null) {
+            // only close the parser when it's not provided externally
+            if (source.parser() == null && parser != null) {
                 parser.close();
             }
         }
         ParsedDocument doc = new ParsedDocument(context.uid(), context.id(), context.type(), source.routing(), context.doc(), context.analyzer(),
                 context.source(), context.mappersAdded()).parent(source.parent());
         // reset the context to free up memory
-        context.reset(null, null, null, null, null);
+        context.reset(null, null, null, null, false, null);
         return doc;
     }
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.mapper.xcontent.ip;
 
 import org.apache.lucene.analysis.NumericTokenStream;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Filter;
@@ -195,7 +196,7 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
                 includeLower, includeUpper);
     }
 
-    @Override protected Field parseCreateField(ParseContext context) throws IOException {
+    @Override protected Fieldable parseCreateField(ParseContext context) throws IOException {
         String ipAsString;
         if (context.externalValueSet()) {
             ipAsString = (String) context.externalValue();
@@ -217,17 +218,16 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
             context.allEntries().addText(names.fullName(), ipAsString, boost);
         }
 
-        long value = ipToLong(ipAsString);
-        Field field = null;
-        if (stored()) {
-            field = new Field(names.indexName(), Numbers.longToBytes(value), store);
-            if (indexed()) {
-                field.setTokenStream(popCachedStream(precisionStep).setLongValue(value));
-            }
-        } else if (indexed()) {
-            field = new Field(names.indexName(), popCachedStream(precisionStep).setLongValue(value));
-        }
-        return field;
+        final long value = ipToLong(ipAsString);
+        return new CustomNumericField(names.indexName(), indexed(), stored() ? Numbers.longToBytes(value) : null) {
+            @Override public TokenStream tokenStreamValue() {
+                if (indexed()) {
+                    return popCachedStream(precisionStep).setLongValue(value);
+                } else {
+                    return null;
+                }
+            }
+        };
     }
 
     @Override public FieldDataType fieldDataType() {
@@ -0,0 +1,32 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.percolator;
+
+import org.elasticsearch.common.inject.AbstractModule;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class PercolatorModule extends AbstractModule {
+
+    @Override protected void configure() {
+        bind(PercolatorService.class).asEagerSingleton();
+    }
+}
@@ -0,0 +1,275 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.percolator;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Fieldable;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.memory.MemoryIndex;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.common.collect.ImmutableMap;
+import org.elasticsearch.common.collect.MapBuilder;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.FastByteArrayOutputStream;
+import org.elasticsearch.common.io.FastStringReader;
+import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.AbstractIndexComponent;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.query.IndexQueryParser;
+import org.elasticsearch.index.query.IndexQueryParserMissingException;
+import org.elasticsearch.index.query.IndexQueryParserService;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.settings.IndexSettings;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.index.mapper.SourceToParse.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class PercolatorService extends AbstractIndexComponent {
+
+    public static class Request {
+        private final String type;
+        private final byte[] source;
+
+        private String match;
+        private String unmatch;
+
+        public Request(String type, byte[] source) {
+            this.type = type;
+            this.source = source;
+        }
+
+        public String type() {
+            return type;
+        }
+
+        public byte[] source() {
+            return source;
+        }
+
+        public String match() {
+            return this.match;
+        }
+
+        public Request match(String match) {
+            this.match = match;
+            return this;
+        }
+
+        public String unmatch() {
+            return this.unmatch;
+        }
+
+        public Request unmatch(String unmatch) {
+            this.unmatch = unmatch;
+            return this;
+        }
+    }
+
+    public static final class Response {
+        private final List<String> matches;
+        private final boolean mappersAdded;
+
+        public Response(List<String> matches, boolean mappersAdded) {
+            this.matches = matches;
+            this.mappersAdded = mappersAdded;
+        }
+
+        public boolean mappersAdded() {
+            return this.mappersAdded;
+        }
+
+        public List<String> matches() {
+            return matches;
+        }
+    }
+
+    private final MapperService mapperService;
+
+    private final IndexQueryParserService queryParserService;
+
+    private volatile ImmutableMap<String, Query> queries = ImmutableMap.of();
+
+    @Inject public PercolatorService(Index index, @IndexSettings Settings indexSettings,
+                                     MapperService mapperService, IndexQueryParserService queryParserService) {
+        super(index, indexSettings);
+        this.mapperService = mapperService;
+        this.queryParserService = queryParserService;
+    }
+
+    public void addQuery(String name, QueryBuilder queryBuilder) {
+        addQuery(name, null, queryBuilder);
+    }
+
+    public void addQuery(String name, @Nullable String queryParserName, QueryBuilder queryBuilder) {
+        FastByteArrayOutputStream unsafeBytes = queryBuilder.buildAsUnsafeBytes();
+        addQuery(name, queryParserName, unsafeBytes.unsafeByteArray(), 0, unsafeBytes.size());
+    }
+
+    public void addQuery(String name, @Nullable String queryParserName,
+                         byte[] querySource, int querySourceOffset, int querySourceLength) throws ElasticSearchException {
+        IndexQueryParser queryParser = queryParserService.defaultIndexQueryParser();
+        if (queryParserName != null) {
+            queryParser = queryParserService.indexQueryParser(queryParserName);
+            if (queryParser == null) {
+                throw new IndexQueryParserMissingException(queryParserName);
+            }
+        }
+
+        Query query = queryParser.parse(querySource, querySourceOffset, querySourceLength).query();
+        addQuery(name, query);
+    }
+
+    public synchronized void addQuery(String name, Query query) {
+        this.queries = MapBuilder.newMapBuilder(queries).put(name, query).immutableMap();
+    }
+
+    public synchronized void removeQuery(String name) {
+        this.queries = MapBuilder.newMapBuilder(queries).remove(name).immutableMap();
+    }
+
+    public Response percolate(Request request) throws ElasticSearchException {
+        // first, parse the source doc into a MemoryIndex
+        final MemoryIndex memoryIndex = new MemoryIndex();
+        DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(request.type());
+        ParsedDocument doc = docMapper.parse(source(request.source()).type(request.type()).flyweight(true));
+
+        for (Fieldable field : doc.doc().getFields()) {
+            if (!field.isIndexed()) {
+                continue;
+            }
+            TokenStream tokenStream = field.tokenStreamValue();
+            if (tokenStream != null) {
+                memoryIndex.addField(field.name(), tokenStream, field.getBoost());
+            } else {
+                Reader reader = field.readerValue();
+                if (reader != null) {
+                    System.err.println("Can't handle reader value currently in percolator");
+                } else {
+                    String value = field.stringValue();
+                    if (value != null) {
+                        try {
+                            memoryIndex.addField(field.name(), doc.analyzer().reusableTokenStream(field.name(), new FastStringReader(value)), field.getBoost() * doc.doc().getBoost());
+                        } catch (IOException e) {
+                            throw new MapperParsingException("Failed to analyze field [" + field.name() + "]", e);
+                        }
+                    }
+                }
+            }
+        }
+
+        ExistsCollector collector = new ExistsCollector();
+        List<String> matches = new ArrayList<String>();
+        IndexSearcher searcher = memoryIndex.createSearcher();
+        for (Map.Entry<String, Query> entry : queries.entrySet()) {
+            if (request.match() != null) {
+                if (!Regex.simpleMatch(request.match(), entry.getKey())) {
+                    continue;
+                }
+            }
+            if (request.unmatch() != null) {
+                if (Regex.simpleMatch(request.unmatch(), entry.getKey())) {
+                    continue;
+                }
+            }
+
+            try {
+                searcher.search(entry.getValue(), collector);
+            } catch (IOException e) {
+                logger.warn("[" + entry.getKey() + "] failed to execute query", e);
+            }
+
+            if (collector.exists()) {
+                matches.add(entry.getKey());
+            }
+        }
+
+        return new Response(matches, doc.mappersAdded());
+    }
+
+    static class ExistsCollector extends Collector {
+
+        private boolean exists;
+
+        public boolean exists() {
+            return exists;
+        }
+
+        @Override public void setScorer(Scorer scorer) throws IOException {
+            this.exists = false;
+        }
+
+        @Override public void collect(int doc) throws IOException {
+            exists = true;
+        }
+
+        @Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
+        }
+
+        @Override public boolean acceptsDocsOutOfOrder() {
+            return true;
+        }
+    }
+
+    static class SingleScoreCollector extends Collector {
+
+        private Scorer scorer;
+
+        private float score;
+
+        public float score() {
+            return this.score;
+        }
+
+        @Override public void setScorer(Scorer scorer) throws IOException {
+            this.score = 0;
+            this.scorer = scorer;
+        }
+
+        @Override public void collect(int doc) throws IOException {
+            score = scorer.score();
+        }
+
+        @Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
+        }
+
+        @Override public boolean acceptsDocsOutOfOrder() {
+            return true;
+        }
+    }
+}
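Putting the pieces together, a caller registers named queries and then asks which of them match a document supplied as raw JSON bytes; the embedded benchmark at the top of this commit drives exactly this path. A condensed usage sketch (field and query names are illustrative):

    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.index.percolator.PercolatorService;

    import static org.elasticsearch.index.query.xcontent.QueryBuilders.*;

    public class PercolateExample {

        public static void run(PercolatorService percolatorService) throws Exception {
            // Register a named query to percolate documents against.
            percolatorService.addQuery("quick-docs", termQuery("field3", "quick"));

            byte[] source = XContentFactory.jsonBuilder().startObject()
                    .field("field3", "the quick brown fox")
                    .endObject().copiedBytes();

            // Returns the names of all registered queries that match the doc.
            PercolatorService.Response response =
                    percolatorService.percolate(new PercolatorService.Request("type1", source));
            System.out.println("matched queries: " + response.matches());
        }
    }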
@@ -47,6 +47,7 @@ import org.elasticsearch.index.engine.IndexEngineModule;
 import org.elasticsearch.index.gateway.IndexGateway;
 import org.elasticsearch.index.gateway.IndexGatewayModule;
 import org.elasticsearch.index.mapper.MapperServiceModule;
+import org.elasticsearch.index.percolator.PercolatorModule;
 import org.elasticsearch.index.query.IndexQueryParserModule;
 import org.elasticsearch.index.service.IndexService;
 import org.elasticsearch.index.service.InternalIndexService;
@@ -237,6 +238,7 @@ public class InternalIndicesService extends AbstractLifecycleComponent<IndicesSe
         modules.add(new MapperServiceModule());
         modules.add(new IndexGatewayModule(indexSettings, injector.getInstance(Gateway.class)));
         modules.add(new IndexModule());
+        modules.add(new PercolatorModule());
 
         Injector indexInjector = modules.createChildInjector(injector);
 
@@ -20,7 +20,7 @@
 package org.elasticsearch.index.mapper.xcontent.dynamictemplate.genericstore;
 
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Fieldable;
 import org.elasticsearch.index.mapper.FieldMappers;
 import org.elasticsearch.index.mapper.xcontent.MapperTests;
 import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
@@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.*;
 /**
  * @author kimchy (shay.banon)
  */
-public class GenericStoreDynamicTempalteTests {
+public class GenericStoreDynamicTemplateTests {
 
     @Test public void testSimple() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/dynamictemplate/genericstore/test-mapping.json");
@@ -41,7 +41,7 @@ public class GenericStoreDynamicTempalteTests {
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/dynamictemplate/genericstore/test-data.json");
         Document doc = docMapper.parse(json).doc();
 
-        Field f = doc.getField("name");
+        Fieldable f = doc.getFieldable("name");
         assertThat(f.name(), equalTo("name"));
         assertThat(f.stringValue(), equalTo("some name"));
         assertThat(f.isStored(), equalTo(true));
@@ -50,7 +50,7 @@ public class GenericStoreDynamicTempalteTests {
         assertThat(fieldMappers.mappers().size(), equalTo(1));
         assertThat(fieldMappers.mapper().stored(), equalTo(true));
 
-        f = doc.getField("age");
+        f = doc.getFieldable("age");
         assertThat(f.name(), equalTo("age"));
         assertThat(f.isStored(), equalTo(true));
@@ -47,8 +47,8 @@ public class GeohashMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        MatcherAssert.assertThat(doc.doc().getField("point.lat"), nullValue());
-        MatcherAssert.assertThat(doc.doc().getField("point.lon"), nullValue());
+        MatcherAssert.assertThat(doc.doc().getFieldable("point.lat"), nullValue());
+        MatcherAssert.assertThat(doc.doc().getFieldable("point.lon"), nullValue());
         MatcherAssert.assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -65,8 +65,8 @@ public class GeohashMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        MatcherAssert.assertThat(doc.doc().getField("point.lat"), nullValue());
-        MatcherAssert.assertThat(doc.doc().getField("point.lon"), nullValue());
+        MatcherAssert.assertThat(doc.doc().getFieldable("point.lat"), nullValue());
+        MatcherAssert.assertThat(doc.doc().getFieldable("point.lon"), nullValue());
         MatcherAssert.assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -83,8 +83,8 @@ public class GeohashMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        MatcherAssert.assertThat(doc.doc().getField("point.lat"), nullValue());
-        MatcherAssert.assertThat(doc.doc().getField("point.lon"), nullValue());
+        MatcherAssert.assertThat(doc.doc().getFieldable("point.lat"), nullValue());
+        MatcherAssert.assertThat(doc.doc().getFieldable("point.lon"), nullValue());
         MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
         MatcherAssert.assertThat(doc.doc().get("point"), notNullValue());
     }
@@ -24,9 +24,9 @@ import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.xcontent.MapperTests;
 import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
 import org.elasticsearch.index.search.geo.GeoHashUtils;
-import org.hamcrest.MatcherAssert;
 import org.testng.annotations.Test;
 
+import static org.hamcrest.MatcherAssert.*;
 import static org.hamcrest.Matchers.*;
 
 /**
@@ -47,9 +47,9 @@ public class LatLonAndGeohashMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        MatcherAssert.assertThat(doc.doc().getField("point.lat"), notNullValue());
-        MatcherAssert.assertThat(doc.doc().getField("point.lon"), notNullValue());
-        MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
     }
 
     @Test public void testLatLonInOneValue() throws Exception {
@@ -65,9 +65,9 @@ public class LatLonAndGeohashMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        MatcherAssert.assertThat(doc.doc().getField("point.lat"), notNullValue());
-        MatcherAssert.assertThat(doc.doc().getField("point.lon"), notNullValue());
-        MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
     }
 
     @Test public void testGeoHashValue() throws Exception {
@@ -83,8 +83,8 @@ public class LatLonAndGeohashMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        MatcherAssert.assertThat(doc.doc().getField("point.lat"), notNullValue());
-        MatcherAssert.assertThat(doc.doc().getField("point.lon"), notNullValue());
-        MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
     }
 }
@@ -48,11 +48,11 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lat").getBinaryValue(), nullValue());
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
-        assertThat(doc.doc().getField("point.lon").getBinaryValue(), nullValue());
-        assertThat(doc.doc().getField("point.geohash"), nullValue());
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat").getBinaryValue(), nullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon").getBinaryValue(), nullValue());
+        assertThat(doc.doc().getFieldable("point.geohash"), nullValue());
         assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -69,11 +69,11 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
-        assertThat(doc.doc().getField("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
-        assertThat(doc.doc().getField("point.geohash"), nullValue());
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
+        assertThat(doc.doc().getFieldable("point.geohash"), nullValue());
         assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -93,14 +93,14 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getFields("point.lat").length, equalTo(2));
-        assertThat(doc.doc().getFields("point.lon").length, equalTo(2));
-        assertThat(doc.doc().getFields("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
-        assertThat(doc.doc().getFields("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
-        assertThat(doc.doc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
-        assertThat(doc.doc().getFields("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
-        assertThat(doc.doc().getFields("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
-        assertThat(doc.doc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
+        assertThat(doc.doc().getFieldables("point.lat").length, equalTo(2));
+        assertThat(doc.doc().getFieldables("point.lon").length, equalTo(2));
+        assertThat(doc.doc().getFieldables("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
+        assertThat(doc.doc().getFieldables("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
+        assertThat(doc.doc().getFieldables("point")[0].stringValue(), equalTo("1.2,1.3"));
+        assertThat(doc.doc().getFieldables("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
+        assertThat(doc.doc().getFieldables("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
+        assertThat(doc.doc().getFieldables("point")[1].stringValue(), equalTo("1.4,1.5"));
     }
 
     @Test public void testLatLonInOneValue() throws Exception {
@@ -116,8 +116,8 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
         assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -134,10 +134,10 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
-        assertThat(doc.doc().getField("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
         assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -157,14 +157,14 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getFields("point.lat").length, equalTo(2));
-        assertThat(doc.doc().getFields("point.lon").length, equalTo(2));
-        assertThat(doc.doc().getFields("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
-        assertThat(doc.doc().getFields("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
-        assertThat(doc.doc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
-        assertThat(doc.doc().getFields("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
-        assertThat(doc.doc().getFields("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
-        assertThat(doc.doc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
+        assertThat(doc.doc().getFieldables("point.lat").length, equalTo(2));
+        assertThat(doc.doc().getFieldables("point.lon").length, equalTo(2));
+        assertThat(doc.doc().getFieldables("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
+        assertThat(doc.doc().getFieldables("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
+        assertThat(doc.doc().getFieldables("point")[0].stringValue(), equalTo("1.2,1.3"));
+        assertThat(doc.doc().getFieldables("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
+        assertThat(doc.doc().getFieldables("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
+        assertThat(doc.doc().getFieldables("point")[1].stringValue(), equalTo("1.4,1.5"));
     }
 
     @Test public void testGeoHashValue() throws Exception {
@@ -180,8 +180,8 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
         assertThat(doc.doc().get("point"), notNullValue());
     }
 
@@ -198,10 +198,10 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lat").getBinaryValue(), nullValue());
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
-        assertThat(doc.doc().getField("point.lon").getBinaryValue(), nullValue());
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat").getBinaryValue(), nullValue());
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon").getBinaryValue(), nullValue());
         assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -218,10 +218,10 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getField("point.lat"), notNullValue());
-        assertThat(doc.doc().getField("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
-        assertThat(doc.doc().getField("point.lon"), notNullValue());
-        assertThat(doc.doc().getField("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
+        assertThat(doc.doc().getFieldable("point.lat"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
+        assertThat(doc.doc().getFieldable("point.lon"), notNullValue());
+        assertThat(doc.doc().getFieldable("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
         assertThat(doc.doc().get("point"), equalTo("1.2,1.3"));
     }
 
@@ -241,13 +241,13 @@ public class LatLonMappingGeoPointTests {
                 .endObject()
                 .copiedBytes());
 
-        assertThat(doc.doc().getFields("point.lat").length, equalTo(2));
-        assertThat(doc.doc().getFields("point.lon").length, equalTo(2));
-        assertThat(doc.doc().getFields("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
-        assertThat(doc.doc().getFields("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
-        assertThat(doc.doc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
-        assertThat(doc.doc().getFields("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
-        assertThat(doc.doc().getFields("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
-        assertThat(doc.doc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
+        assertThat(doc.doc().getFieldables("point.lat").length, equalTo(2));
+        assertThat(doc.doc().getFieldables("point.lon").length, equalTo(2));
+        assertThat(doc.doc().getFieldables("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
+        assertThat(doc.doc().getFieldables("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
+        assertThat(doc.doc().getFieldables("point")[0].stringValue(), equalTo("1.2,1.3"));
+        assertThat(doc.doc().getFieldables("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
+        assertThat(doc.doc().getFieldables("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
+        assertThat(doc.doc().getFieldables("point")[1].stringValue(), equalTo("1.4,1.5"));
     }
 }
|
@@ -48,7 +48,7 @@ public class SimpleIpMappingTests {
                .endObject()
                .copiedBytes());

-        assertThat(doc.doc().getField("ip1"), notNullValue());
+        assertThat(doc.doc().getFieldable("ip1"), notNullValue());
        assertThat(doc.doc().get("ip1"), nullValue()); // it's numeric
        assertThat(doc.doc().get("ip2"), equalTo("0.1"));
        assertThat(doc.doc().get("ip3"), equalTo("127.0.0.1.2"));

@@ -0,0 +1,69 @@
package org.elasticsearch.index.mapper.xcontent.lucene;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.xcontent.MapperTests;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
import org.testng.annotations.Test;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class DoubleIndexingDocTest {

    @Test public void testDoubleIndexingSameDoc() throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(), Lucene.STANDARD_ANALYZER, IndexWriter.MaxFieldLength.UNLIMITED);

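        // the mapping declares no properties, so every field below is mapped dynamically while the document is parsed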
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").endObject()
                .endObject().endObject().string();
        XContentDocumentMapper mapper = MapperTests.newParser().parse(mapping);

        ParsedDocument doc = mapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field1", "value1")
                .field("field2", 1)
                .field("field3", 1.1)
                .field("field4", "2010-01-01")
                .startArray("field5").value(1).value(2).value(3).endArray()
                .endObject()
                .copiedBytes());

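        // index the same parsed document twice; this is the behaviour the commit enables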
        writer.addDocument(doc.doc(), doc.analyzer());
        writer.addDocument(doc.doc(), doc.analyzer());

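        // search over a near-real-time reader: every field, including the dynamically mapped ones, should match both copies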
        IndexReader reader = writer.getReader();
        IndexSearcher searcher = new IndexSearcher(reader);

        TopDocs topDocs = searcher.search(mapper.mappers().smartName("field1").mapper().fieldQuery("value1"), 10);
        assertThat(topDocs.totalHits, equalTo(2));

        topDocs = searcher.search(mapper.mappers().smartName("field2").mapper().fieldQuery("1"), 10);
        assertThat(topDocs.totalHits, equalTo(2));

        topDocs = searcher.search(mapper.mappers().smartName("field3").mapper().fieldQuery("1.1"), 10);
        assertThat(topDocs.totalHits, equalTo(2));

        topDocs = searcher.search(mapper.mappers().smartName("field4").mapper().fieldQuery("2010-01-01"), 10);
        assertThat(topDocs.totalHits, equalTo(2));

        topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().fieldQuery("1"), 10);
        assertThat(topDocs.totalHits, equalTo(2));

        topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().fieldQuery("2"), 10);
        assertThat(topDocs.totalHits, equalTo(2));

        topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().fieldQuery("3"), 10);
        assertThat(topDocs.totalHits, equalTo(2));
    }
}

@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.xcontent.multifield;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Fieldable;
import org.elasticsearch.index.mapper.xcontent.MapperTests;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapperParser;

@@ -43,28 +44,28 @@ public class MultiFieldTests {
        byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/multifield/test-data.json");
        Document doc = docMapper.parse(json).doc();

-        Field f = doc.getField("name");
+        Fieldable f = doc.getFieldable("name");
        assertThat(f.name(), equalTo("name"));
        assertThat(f.stringValue(), equalTo("some name"));
        assertThat(f.isStored(), equalTo(true));
        assertThat(f.isIndexed(), equalTo(true));

-        f = doc.getField("name.indexed");
+        f = doc.getFieldable("name.indexed");
        assertThat(f.name(), equalTo("name.indexed"));
        assertThat(f.stringValue(), equalTo("some name"));
        assertThat(f.isStored(), equalTo(false));
        assertThat(f.isIndexed(), equalTo(true));

-        f = doc.getField("name.not_indexed");
+        f = doc.getFieldable("name.not_indexed");
        assertThat(f.name(), equalTo("name.not_indexed"));
        assertThat(f.stringValue(), equalTo("some name"));
        assertThat(f.isStored(), equalTo(true));
        assertThat(f.isIndexed(), equalTo(false));

-        f = doc.getField("object1.multi1");
+        f = doc.getFieldable("object1.multi1");
        assertThat(f.name(), equalTo("object1.multi1"));

-        f = doc.getField("object1.multi1.string");
+        f = doc.getFieldable("object1.multi1.string");
        assertThat(f.name(), equalTo("object1.multi1.string"));
        assertThat(f.stringValue(), equalTo("2010-01-01"));
    }

@@ -89,19 +90,19 @@ public class MultiFieldTests {
        byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/multifield/test-data.json");
        Document doc = docMapper.parse(json).doc();

-        Field f = doc.getField("name");
+        Fieldable f = doc.getFieldable("name");
        assertThat(f.name(), equalTo("name"));
        assertThat(f.stringValue(), equalTo("some name"));
        assertThat(f.isStored(), equalTo(true));
        assertThat(f.isIndexed(), equalTo(true));

-        f = doc.getField("name.indexed");
+        f = doc.getFieldable("name.indexed");
        assertThat(f.name(), equalTo("name.indexed"));
        assertThat(f.stringValue(), equalTo("some name"));
        assertThat(f.isStored(), equalTo(false));
        assertThat(f.isIndexed(), equalTo(true));

-        f = doc.getField("name.not_indexed");
+        f = doc.getFieldable("name.not_indexed");
        assertThat(f.name(), equalTo("name.not_indexed"));
        assertThat(f.stringValue(), equalTo("some name"));
        assertThat(f.isStored(), equalTo(true));

@@ -0,0 +1,113 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.percolator;

import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNameModule;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.engine.IndexEngineModule;
import org.elasticsearch.index.mapper.MapperServiceModule;
import org.elasticsearch.index.query.IndexQueryParserModule;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.script.ScriptModule;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

import static org.elasticsearch.index.query.xcontent.QueryBuilders.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class SimplePercolatorTests {

    private PercolatorService percolatorService;

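    // builds a standalone PercolatorService through Guice, with the index filter cache disabled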
    @BeforeTest public void buildPercolatorService() {
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("index.cache.filter.type", "none")
                .build();
        Index index = new Index("test");
        Injector injector = new ModulesBuilder().add(
                new SettingsModule(settings),
                new ScriptModule(),
                new MapperServiceModule(),
                new IndexSettingsModule(settings),
                new IndexCacheModule(settings),
                new AnalysisModule(settings),
                new IndexEngineModule(settings),
                new SimilarityModule(settings),
                new IndexQueryParserModule(settings),
                new IndexNameModule(index),
                new PercolatorModule()
        ).createInjector();

        percolatorService = injector.getInstance(PercolatorService.class);
    }

    @Test public void testSimplePercolator() throws Exception {
        // introduce the doc
        XContentBuilder doc = XContentFactory.jsonBuilder().startObject()
                .field("field1", 1)
                .field("field2", "value")
                .endObject();
        byte[] source = doc.copiedBytes();

        PercolatorService.Response percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
        assertThat(percolate.matches(), hasSize(0));

        // add a query
        percolatorService.addQuery("test1", termQuery("field2", "value"));

        percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
        assertThat(percolate.matches(), hasSize(1));
        assertThat(percolate.matches(), hasItem("test1"));

        percolatorService.addQuery("test2", termQuery("field1", 1));

        percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
        assertThat(percolate.matches(), hasSize(2));
        assertThat(percolate.matches(), hasItems("test1", "test2"));

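        // match/unmatch restrict the result to query names matching the given wildcard patterns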
        percolate = percolatorService.percolate(new PercolatorService.Request("type1", source).match("*2"));
        assertThat(percolate.matches(), hasSize(1));
        assertThat(percolate.matches(), hasItems("test2"));

        percolate = percolatorService.percolate(new PercolatorService.Request("type1", source).match("*").unmatch("*1"));
        assertThat(percolate.matches(), hasSize(1));
        assertThat(percolate.matches(), hasItems("test2"));

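        // remove a query; it no longer matches on subsequent percolations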
        percolatorService.removeQuery("test2");
        percolate = percolatorService.percolate(new PercolatorService.Request("type1", source));
        assertThat(percolate.matches(), hasSize(1));
        assertThat(percolate.matches(), hasItems("test1"));
    }
}

@@ -144,6 +144,7 @@ public class UpdateNumberOfReplicasTests extends AbstractNodesTests {

        for (int i = 0; i < 10; i++) {
            CountResponse countResponse = client1.prepareCount().setQuery(matchAllQuery()).execute().actionGet();
+            assertThat(countResponse.shardFailures().toString(), countResponse.failedShards(), equalTo(0));
            assertThat(countResponse.count(), equalTo(10l));
        }
    }