Merge remote-tracking branch 'dakrone/disable-all-by-default'
This commit is contained in:
commit
2db01b6127
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexOptions;
|
|||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.lucene.all.AllEntries;
|
||||
import org.elasticsearch.common.lucene.all.AllField;
|
||||
|
@ -52,7 +53,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
public static class Defaults {
|
||||
public static final String NAME = AllFieldMapper.NAME;
|
||||
public static final String INDEX_NAME = AllFieldMapper.NAME;
|
||||
public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_ENABLED;
|
||||
public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED;
|
||||
public static final int POSITION_INCREMENT_GAP = 100;
|
||||
|
||||
public static final MappedFieldType FIELD_TYPE = new AllFieldType();
|
||||
|
@ -103,6 +104,11 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
@Override
|
||||
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
|
||||
ParserContext parserContext) throws MapperParsingException {
|
||||
if (node.isEmpty() == false &&
|
||||
parserContext.indexVersionCreated().onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) {
|
||||
throw new IllegalArgumentException("[_all] is disabled in 6.0. As a replacement, you can use an [copy_to] " +
|
||||
"on mapping fields to create your own catch all field.");
|
||||
}
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
|
||||
builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
|
||||
|
|
|
@ -682,30 +682,6 @@ public class GetActionIT extends ESIntegTestCase {
|
|||
assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
|
||||
}
|
||||
|
||||
public void testGetAllField() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addAlias(new Alias("alias"))
|
||||
.addMapping("my-type1", jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("my-type1")
|
||||
.startObject("_all")
|
||||
.field("store", true)
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject("some_field")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()));
|
||||
index("test", "my-type1", "1", "some_field", "some text");
|
||||
refresh();
|
||||
|
||||
GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("_all").get();
|
||||
assertNotNull(getResponse.getField("_all").getValue());
|
||||
assertThat(getResponse.getField("_all").getValue().toString(), equalTo("some text"));
|
||||
}
|
||||
|
||||
public void testUngeneratedFieldsThatAreNeverStored() throws IOException {
|
||||
String createIndexSource = "{\n" +
|
||||
" \"settings\": {\n" +
|
||||
|
@ -804,7 +780,7 @@ public class GetActionIT extends ESIntegTestCase {
|
|||
|
||||
public void testGeneratedStringFieldsUnstored() throws IOException {
|
||||
indexSingleDocumentWithStringFieldsGeneratedFromText(false, randomBoolean());
|
||||
String[] fieldsList = {"_all", "_field_names"};
|
||||
String[] fieldsList = {"_field_names"};
|
||||
// before refresh - document is only in translog
|
||||
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
|
||||
refresh();
|
||||
|
@ -817,7 +793,7 @@ public class GetActionIT extends ESIntegTestCase {
|
|||
|
||||
public void testGeneratedStringFieldsStored() throws IOException {
|
||||
indexSingleDocumentWithStringFieldsGeneratedFromText(true, randomBoolean());
|
||||
String[] fieldsList = {"_all"};
|
||||
String[] fieldsList = {"text1", "text2"};
|
||||
String[] alwaysNotStoredFieldsList = {"_field_names"};
|
||||
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
|
||||
assertGetFieldsNull(indexOrAlias(), "doc", "1", alwaysNotStoredFieldsList);
|
||||
|
@ -838,7 +814,16 @@ public class GetActionIT extends ESIntegTestCase {
|
|||
" \"mappings\": {\n" +
|
||||
" \"doc\": {\n" +
|
||||
" \"_source\" : {\"enabled\" : " + sourceEnabled + "}," +
|
||||
" \"_all\" : {\"enabled\" : true, \"store\":\"" + storedString + "\" }" +
|
||||
" \"properties\": {\n" +
|
||||
" \"text1\": {\n" +
|
||||
" \"type\": \"text\",\n" +
|
||||
" \"store\": \"" + storedString + "\"" +
|
||||
" },\n" +
|
||||
" \"text2\": {\n" +
|
||||
" \"type\": \"text\",\n" +
|
||||
" \"store\": \"" + storedString + "\"" +
|
||||
" }" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
|
|
|
@ -97,14 +97,6 @@ public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTes
|
|||
|
||||
protected abstract void doTestCoerce(String type) throws IOException;
|
||||
|
||||
public void testIncludeInAll() throws Exception {
|
||||
for (String type : TYPES) {
|
||||
doTestIncludeInAll(type);
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void doTestIncludeInAll(String type) throws Exception;
|
||||
|
||||
public void testNullValue() throws IOException {
|
||||
for (String type : TYPES) {
|
||||
doTestNullValue(type);
|
||||
|
|
|
@ -1,109 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
|
||||
/**
|
||||
* Tests that position_increment_gap is read from the mapper and applies as
|
||||
* expected in queries.
|
||||
*/
|
||||
public class AllFieldMapperPositionIncrementGapTests extends ESSingleNodeTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(InternalSettingsPlugin.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* The default position_increment_gap should be large enough that most
|
||||
* "sensible" queries phrase slops won't match across values.
|
||||
*/
|
||||
public void testDefault() throws IOException {
|
||||
assertGapIsOneHundred(client(), "test", "test");
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the post-2.0 default is being applied.
|
||||
*/
|
||||
public static void assertGapIsOneHundred(Client client, String indexName, String type) throws IOException {
|
||||
testGap(client, indexName, type, 100);
|
||||
|
||||
// No match across gap using default slop with default positionIncrementGap
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two")).get(), 0);
|
||||
|
||||
// Nor with small-ish values
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two").slop(5)).get(), 0);
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two").slop(50)).get(), 0);
|
||||
|
||||
// But huge-ish values still match
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two").slop(500)).get(), 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the pre-2.0 default has been applied or explicitly
|
||||
* configured.
|
||||
*/
|
||||
public static void assertGapIsZero(Client client, String indexName, String type) throws IOException {
|
||||
testGap(client, indexName, type, 0);
|
||||
/*
|
||||
* Phrases match across different values using default slop with pre-2.0 default
|
||||
* position_increment_gap.
|
||||
*/
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("string", "one two")).get(), 1);
|
||||
}
|
||||
|
||||
private static void testGap(Client client, String indexName,
|
||||
String type, int positionIncrementGap) throws IOException {
|
||||
client.prepareIndex(indexName, type, "position_gap_test")
|
||||
.setSource("string1", "one", "string2", "two three").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
|
||||
|
||||
// Baseline - phrase query finds matches in the same field value
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "two three")).get(), 1);
|
||||
|
||||
if (positionIncrementGap > 0) {
|
||||
// No match across gaps when slop < position gap
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two").slop(positionIncrementGap - 1)).get(), 0);
|
||||
}
|
||||
|
||||
// Match across gaps when slop >= position gap
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two").slop(positionIncrementGap)).get(), 1);
|
||||
assertHitCount(client.prepareSearch(indexName)
|
||||
.setQuery(new MatchPhraseQueryBuilder("_all", "one two").slop(positionIncrementGap+1)).get(), 1);
|
||||
}
|
||||
}
|
|
@ -1,490 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.lucene.all.AllTermQuery;
|
||||
import org.elasticsearch.common.lucene.all.AllTokenStream;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine.Searcher;
|
||||
import org.elasticsearch.index.mapper.AllFieldMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
|
||||
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class AllFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(InternalSettingsPlugin.class);
|
||||
}
|
||||
|
||||
public void testSimpleAllMappers() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("person", new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
String[] expected = new String[] {"banon", "last location", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(true));
|
||||
if (i == 0) {
|
||||
// The field "name.last" is boosted so we should see AllTokenStream used:
|
||||
assertThat(fields[i].tokenStream(docMapper.mappers().indexAnalyzer(), null),
|
||||
Matchers.instanceOf(AllTokenStream.class));
|
||||
} else {
|
||||
assertThat(fields[i].tokenStream(docMapper.mappers().indexAnalyzer(), null),
|
||||
Matchers.not(Matchers.instanceOf(AllTokenStream.class)));
|
||||
}
|
||||
}
|
||||
AllFieldMapper mapper = docMapper.allFieldMapper();
|
||||
assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")),
|
||||
Matchers.instanceOf(AllTermQuery.class));
|
||||
}
|
||||
|
||||
public void testAllMappersNoBoost() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
|
||||
IndexService index = createIndex("test");
|
||||
DocumentMapper docMapper = index.mapperService().documentMapperParser().parse("person",
|
||||
new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
String[] expected = new String[] {"banon", "last location", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(false));
|
||||
}
|
||||
AllFieldMapper mapper = docMapper.allFieldMapper();
|
||||
assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")),
|
||||
Matchers.instanceOf(AllTermQuery.class));
|
||||
}
|
||||
|
||||
public void testAllMappersTermQuery() throws Exception {
|
||||
String mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json");
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person",
|
||||
new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
String[] expected = new String[] {"banon", "last location", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(false));
|
||||
}
|
||||
AllFieldMapper mapper = docMapper.allFieldMapper();
|
||||
assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")),
|
||||
Matchers.instanceOf(AllTermQuery.class));
|
||||
}
|
||||
|
||||
// #6187: make sure we see AllTermQuery even when offsets are indexed in the _all field:
|
||||
public void testAllMappersWithOffsetsTermQuery() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json");
|
||||
DocumentMapper docMapper =
|
||||
createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
String[] expected = new String[] {"banon", "last location", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(false));
|
||||
}
|
||||
AllFieldMapper mapper = docMapper.allFieldMapper();
|
||||
assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")),
|
||||
Matchers.instanceOf(AllTermQuery.class));
|
||||
}
|
||||
|
||||
// #6187: if _all doesn't index positions then we never use AllTokenStream, even if some fields have boost
|
||||
public void testBoostWithOmitPositions() throws Exception {
|
||||
String mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json");
|
||||
DocumentMapper docMapper =
|
||||
createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
for (IndexableField field : fields) {
|
||||
// _all field omits positions, so we should not get AllTokenStream even though fields are boosted
|
||||
assertThat(field.tokenStream(docMapper.mappers().indexAnalyzer(), null),
|
||||
Matchers.not(Matchers.instanceOf(AllTokenStream.class)));
|
||||
}
|
||||
}
|
||||
|
||||
// #6187: if no fields were boosted, we shouldn't use AllTokenStream
|
||||
public void testNoBoost() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("person", new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
for (IndexableField field : fields) {
|
||||
// no fields have boost, so we should not see AllTokenStream:
|
||||
assertThat(field.tokenStream(docMapper.mappers().indexAnalyzer(), null),
|
||||
Matchers.not(Matchers.instanceOf(AllTokenStream.class)));
|
||||
}
|
||||
}
|
||||
|
||||
public void testSimpleAllMappersWithReparse() throws Exception {
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
|
||||
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
|
||||
String builtMapping = docMapper.mappingSource().string();
|
||||
// reparse it
|
||||
DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(3));
|
||||
String[] expected = new String[] {"banon", "last location", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(true));
|
||||
if (i == 0) {
|
||||
// The field "name.last" is boosted so we should see AllTokenStream used:
|
||||
assertThat(fields[i].tokenStream(docMapper.mappers().indexAnalyzer(), null),
|
||||
Matchers.instanceOf(AllTokenStream.class));
|
||||
} else {
|
||||
assertThat(fields[i].tokenStream(docMapper.mappers().indexAnalyzer(), null),
|
||||
Matchers.not(Matchers.instanceOf(AllTokenStream.class)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testSimpleAllMappersWithStore() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("person", new CompressedXContent(mapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(2));
|
||||
String[] expected = new String[] {"banon", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(false));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void testSimpleAllMappersWithReparseWithStore() throws Exception {
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
|
||||
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
|
||||
String builtMapping = docMapper.mappingSource().string();
|
||||
// reparse it
|
||||
DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
|
||||
Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
|
||||
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(2));
|
||||
String[] expected = new String[] {"banon", "1"};
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
assertThat(fields[i].stringValue(), equalTo(expected[i]));
|
||||
assertThat(fields[i].fieldType().omitNorms(), equalTo(false));
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Randomizes the _all mapping options (norms, store, term vectors, enabled,
 * similarity), round-trips the mapping through serialization, parses a
 * two-field document, and asserts that the indexed _all entries reflect
 * exactly the options that were chosen.
 */
public void testRandom() throws Exception {
    // Defaults assumed when an option is not randomized below.
    boolean norms = true;
    boolean stored = false;
    boolean enabled = true;
    boolean tv_stored = false;
    boolean tv_payloads = false;
    boolean tv_offsets = false;
    boolean tv_positions = false;
    String similarity = null;
    XContentBuilder mappingBuilder = jsonBuilder();
    mappingBuilder.startObject().startObject("test");
    List<Tuple<String, Boolean>> booleanOptionList = new ArrayList<>();
    // Tracks whether the _all object was emitted at all; if not, serializing
    // the mapper back out should produce no _all section (checked at the end).
    boolean allDefault = true;
    if (frequently()) {
        allDefault = false;
        mappingBuilder.startObject("_all");
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("norms", norms = randomBoolean()));
        }
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("store", stored = randomBoolean()));
        }
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("store_term_vectors", tv_stored = randomBoolean()));
        }
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("enabled", enabled = randomBoolean()));
        }
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("store_term_vector_offsets", tv_offsets = randomBoolean()));
        }
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("store_term_vector_positions", tv_positions = randomBoolean()));
        }
        if (randomBoolean()) {
            booleanOptionList.add(new Tuple<>("store_term_vector_payloads", tv_payloads = randomBoolean()));
        }
        // Emit the chosen options in random order — option order must not matter.
        Collections.shuffle(booleanOptionList, random());
        for (Tuple<String, Boolean> option : booleanOptionList) {
            mappingBuilder.field(option.v1(), option.v2().booleanValue());
        }
        // Any term-vector sub-option implies term vectors are stored.
        tv_stored |= tv_positions || tv_payloads || tv_offsets;
        if (randomBoolean()) {
            mappingBuilder.field("similarity", similarity = "BM25");
        }
        mappingBuilder.endObject();
    }

    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    String mapping = mappingBuilder.endObject().endObject().bytes().utf8ToString();
    logger.info("Mapping: {}", mapping);
    DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping));
    String builtMapping = docMapper.mappingSource().string();
    // reparse it
    DocumentMapper builtDocMapper = parser.parse("test", new CompressedXContent(builtMapping));

    byte[] json = BytesReference.toBytes(jsonBuilder().startObject()
        .field("foo", "bar")
        .field("foobar", "foobar")
        .endObject().bytes());
    Document doc = builtDocMapper.parse("test", "test", "1", new BytesArray(json)).rootDoc();
    IndexableField[] fields = doc.getFields("_all");
    if (enabled) {
        // Both document values flow into _all, carrying the randomized options.
        assertThat(fields.length, equalTo(2));
        String[] expected = new String[] {"bar", "foobar"};
        for (int i = 0; i < fields.length; i++) {
            assertThat(fields[i].fieldType().omitNorms(), equalTo(!norms));
            assertThat(fields[i].fieldType().stored(), equalTo(stored));
            assertThat(fields[i].fieldType().storeTermVectorOffsets(), equalTo(tv_offsets));
            assertThat(fields[i].fieldType().storeTermVectorPayloads(), equalTo(tv_payloads));
            assertThat(fields[i].fieldType().storeTermVectorPositions(), equalTo(tv_positions));
            assertThat(fields[i].fieldType().storeTermVectors(), equalTo(tv_stored));
            assertThat(fields[i].stringValue(), equalTo(expected[i]));
        }
    } else {
        // _all disabled: nothing is indexed into it.
        assertThat(fields.length, equalTo(0));
    }
    if (similarity == null) {
        assertThat(builtDocMapper.allFieldMapper().fieldType().similarity(), nullValue());
    } else {
        assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().fieldType().similarity().name()));
    }
    if (allDefault) {
        // A fully-default _all mapper must serialize to nothing at all.
        BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
        XContentBuilder b = new XContentBuilder(XContentType.JSON.xContent(), bytesStreamOutput);
        XContentBuilder xContentBuilder = builtDocMapper.allFieldMapper().toXContent(b, ToXContent.EMPTY_PARAMS);
        xContentBuilder.flush();
        assertThat(bytesStreamOutput.size(), equalTo(0));
    }

}
|
||||
|
||||
public void testMultiField_includeInAllSetToFalse() throws IOException {
|
||||
String mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/" +
|
||||
"multifield-mapping_include_in_all_set_to_false.json");
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("test", new CompressedXContent(mapping));
|
||||
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject()
|
||||
.field("foo")
|
||||
.startObject()
|
||||
.field("bar", "Elasticsearch rules!")
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
Document doc = docMapper.parse("test", "test", "1", builder.bytes()).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(0));
|
||||
}
|
||||
|
||||
public void testMultiField_defaults() throws IOException {
|
||||
String mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_default.json");
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("test", new CompressedXContent(mapping));
|
||||
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject()
|
||||
.field("foo")
|
||||
.startObject()
|
||||
.field("bar", "Elasticsearch rules!")
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
Document doc = docMapper.parse("test", "test", "1", builder.bytes()).rootDoc();
|
||||
IndexableField[] fields = doc.getFields("_all");
|
||||
assertThat(fields.length, equalTo(1));
|
||||
assertThat(fields[0].stringValue(), equalTo("Elasticsearch rules!"));
|
||||
}
|
||||
|
||||
public void testMisplacedTypeInRoot() throws IOException {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_type_in_root.json");
|
||||
try {
|
||||
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
|
||||
fail("Expected MapperParsingException");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
|
||||
assertThat(e.getMessage(), containsString("[type : text]"));
|
||||
}
|
||||
}
|
||||
|
||||
// related to https://github.com/elastic/elasticsearch/issues/5864
|
||||
public void testMistypedTypeInRoot() throws IOException {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json");
|
||||
try {
|
||||
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
|
||||
fail("Expected MapperParsingException");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
|
||||
assertThat(e.getMessage(), containsString("type=text"));
|
||||
}
|
||||
}
|
||||
|
||||
// issue https://github.com/elastic/elasticsearch/issues/5864
|
||||
public void testMisplacedMappingAsRoot() throws IOException {
|
||||
String mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_mapping_key_in_root.json");
|
||||
try {
|
||||
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
|
||||
fail("Expected MapperParsingException");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
|
||||
assertThat(e.getMessage(), containsString("type=text"));
|
||||
}
|
||||
}
|
||||
|
||||
// issue https://github.com/elastic/elasticsearch/issues/5864
|
||||
// test that RootObjectMapping still works
|
||||
public void testRootObjectMapperPropertiesDoNotCauseException() throws IOException {
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
String mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_template_mapping.json");
|
||||
parser.parse("test", new CompressedXContent(mapping));
|
||||
mapping =
|
||||
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_date_formats_mapping.json");
|
||||
parser.parse("test", new CompressedXContent(mapping));
|
||||
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_date_detection_mapping.json");
|
||||
parser.parse("test", new CompressedXContent(mapping));
|
||||
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_numeric_detection_mapping.json");
|
||||
parser.parse("test", new CompressedXContent(mapping));
|
||||
}
|
||||
|
||||
public void testDocValuesNotAllowed() throws IOException {
|
||||
String mapping = jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_all")
|
||||
.field("doc_values", true)
|
||||
.endObject().endObject().endObject().string();
|
||||
try {
|
||||
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
fail();
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getDetailedMessage(),
|
||||
containsString("[_all] is always tokenized and cannot have doc values"));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testAutoBoost() throws Exception {
|
||||
for (boolean boost : new boolean[] {false, true}) {
|
||||
String index = "test_" + boost;
|
||||
IndexService indexService =
|
||||
createIndex(index, client().admin().indices().prepareCreate(index)
|
||||
.addMapping("type", "foo", "type=text" + (boost ? ",boost=2" : "")));
|
||||
client().prepareIndex(index, "type").setSource("foo", "bar").get();
|
||||
client().admin().indices().prepareRefresh(index).get();
|
||||
Query query = indexService.mapperService()
|
||||
.documentMapper("type").allFieldMapper().fieldType().termQuery("bar", null);
|
||||
try (Searcher searcher = indexService.getShardOrNull(0).acquireSearcher("tests")) {
|
||||
query = searcher.searcher().rewrite(query);
|
||||
final Class<?> expected = boost ? AllTermQuery.class : TermQuery.class;
|
||||
assertThat(query, Matchers.instanceOf(expected));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testIncludeInObjectNotAllowed() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type").endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test").mapperService()
|
||||
.documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
try {
|
||||
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject().field("_all", "foo").endObject().bytes());
|
||||
fail("Expected failure to parse metadata field");
|
||||
} catch (MapperParsingException e) {
|
||||
assertTrue(e.getMessage(),
|
||||
e.getMessage().contains("Field [_all] is a metadata field and cannot be added inside a document"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testAllDefaults() {
|
||||
// We use to have a bug with the default mapping having null analyzers because
|
||||
// it was not fully constructed and was in particular lacking analyzers
|
||||
IndexService index = createIndex("index", Settings.EMPTY, "type");
|
||||
AllFieldMapper all = index.mapperService().documentMapper("type").allFieldMapper();
|
||||
assertNotNull(all.fieldType().indexAnalyzer());
|
||||
assertNotNull(all.fieldType().searchAnalyzer());
|
||||
assertNotNull(all.fieldType().searchQuoteAnalyzer());
|
||||
}
|
||||
}
|
|
@ -183,44 +183,6 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testIncludeInAll() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "date").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "2016-03-11")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(1, fields.length);
|
||||
assertEquals("2016-03-11", fields[0].stringValue());
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "date")
|
||||
.field("include_in_all", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "2016-03-11")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testChangeFormat() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "date")
|
||||
|
|
|
@ -32,13 +32,11 @@ import static org.hamcrest.Matchers.equalTo;
|
|||
public class DocumentMapperParserTests extends ESSingleNodeTestCase {
|
||||
public void testTypeLevel() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_all").field("enabled", false).endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(mapper.type(), equalTo("type"));
|
||||
assertThat(mapper.allFieldMapper().enabled(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testFieldNameWithDots() throws Exception {
|
||||
|
|
|
@ -1218,50 +1218,6 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
|
|||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
|
||||
public void testIncludeInAllPropagation() throws IOException {
|
||||
String defaultMapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.field("dynamic", "strict")
|
||||
.startObject("properties")
|
||||
.startObject("a")
|
||||
.field("type", "keyword")
|
||||
.endObject()
|
||||
.startObject("o")
|
||||
.field("include_in_all", false)
|
||||
.startObject("properties")
|
||||
.startObject("a")
|
||||
.field("type", "keyword")
|
||||
.endObject()
|
||||
.startObject("o")
|
||||
.field("include_in_all", true)
|
||||
.startObject("properties")
|
||||
.startObject("a")
|
||||
.field("type", "keyword")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
|
||||
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("a", "b")
|
||||
.startObject("o")
|
||||
.field("a", "c")
|
||||
.startObject("o")
|
||||
.field("a", "d")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().bytes());
|
||||
Set<String> values = new HashSet<>();
|
||||
for (IndexableField f : doc.rootDoc().getFields("_all")) {
|
||||
values.add(f.stringValue());
|
||||
}
|
||||
assertEquals(new HashSet<>(Arrays.asList("b", "d")), values);
|
||||
}
|
||||
|
||||
public void testDynamicDateDetectionDisabledOnNumbers() throws IOException {
|
||||
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
|
|
@ -97,7 +97,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertFieldNames(set("a", "a.keyword", "b", "b.c", "_uid", "_type", "_version", "_seq_no", "_primary_term", "_source", "_all"), doc);
|
||||
assertFieldNames(set("a", "a.keyword", "b", "b.c", "_uid", "_type", "_version", "_seq_no", "_primary_term", "_source"), doc);
|
||||
}
|
||||
|
||||
public void testExplicitEnabled() throws Exception {
|
||||
|
@ -114,7 +114,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertFieldNames(set("field", "field.keyword", "_uid", "_type", "_version", "_seq_no", "_primary_term", "_source", "_all"), doc);
|
||||
assertFieldNames(set("field", "field.keyword", "_uid", "_type", "_version", "_seq_no", "_primary_term", "_source"), doc);
|
||||
}
|
||||
|
||||
public void testDisabled() throws Exception {
|
||||
|
|
|
@ -187,44 +187,6 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testIncludeInAll() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "ip").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "::1")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(1, fields.length);
|
||||
assertEquals("::1", fields[0].stringValue());
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "ip")
|
||||
.field("include_in_all", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "::1")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testNullValue() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
|
|
|
@ -229,16 +229,9 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
|
|||
.field("enabled", false)
|
||||
.endObject().endObject().bytes());
|
||||
|
||||
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, enabledAll,
|
||||
MergeReason.MAPPING_UPDATE, random().nextBoolean());
|
||||
assertFalse(indexService.mapperService().allEnabled()); // _default_ does not count
|
||||
|
||||
indexService.mapperService().merge("some_type", enabledAll,
|
||||
MergeReason.MAPPING_UPDATE, random().nextBoolean());
|
||||
assertTrue(indexService.mapperService().allEnabled());
|
||||
|
||||
indexService.mapperService().merge("other_type", disabledAll,
|
||||
MergeReason.MAPPING_UPDATE, random().nextBoolean());
|
||||
assertTrue(indexService.mapperService().allEnabled()); // this returns true if any of the types has _all enabled
|
||||
Exception e = expectThrows(MapperParsingException.class,
|
||||
() -> indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, enabledAll,
|
||||
MergeReason.MAPPING_UPDATE, random().nextBoolean()));
|
||||
assertThat(e.getMessage(), containsString("[_all] is disabled in 6.0"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -217,45 +217,6 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
|
|||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doTestIncludeInAll(String type) throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 123)
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(1, fields.length);
|
||||
assertEquals("123", fields[0].stringValue());
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", type)
|
||||
.field("include_in_all", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 123)
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testRejectNorms() throws IOException {
|
||||
// not supported as of 5.0
|
||||
for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
|
||||
|
|
|
@ -228,56 +228,6 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
|
|||
assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date")));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doTestIncludeInAll(String type) throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", type);
|
||||
if (type.equals("date_range")) {
|
||||
mapping = mapping.field("format", DATE_FORMAT);
|
||||
}
|
||||
mapping = mapping.endObject().endObject().endObject().endObject();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
|
||||
|
||||
assertEquals(mapping.string(), mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("field")
|
||||
.field(getFromField(), getFrom(type))
|
||||
.field(getToField(), getTo(type))
|
||||
.endObject()
|
||||
.endObject().bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(1, fields.length);
|
||||
|
||||
assertThat(fields[0].stringValue(), containsString(type.equals("date_range") ? "1477872000000" : "5"));
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", type);
|
||||
if (type.equals("date_range")) {
|
||||
mapping = mapping.field("format", DATE_FORMAT);
|
||||
}
|
||||
mapping = mapping.field("include_in_all", false).endObject().endObject()
|
||||
.endObject().endObject();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping.string()));
|
||||
|
||||
assertEquals(mapping.string(), mapper.mappingSource().toString());
|
||||
|
||||
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("field")
|
||||
.field(getFromField(), getFrom(type))
|
||||
.field(getToField(), getTo(type))
|
||||
.endObject()
|
||||
.endObject().bytes());
|
||||
|
||||
fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doTestNullValue(String type) throws IOException {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
|
|
@ -250,46 +250,6 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testIncludeInAll() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "scaled_float")
|
||||
.field("scaling_factor", 10.0).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 123)
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(1, fields.length);
|
||||
assertEquals("123", fields[0].stringValue());
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "scaled_float").field("scaling_factor", 10.0)
|
||||
.field("include_in_all", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 123)
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
fields = doc.rootDoc().getFields("_all");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testNullValue() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
|
|
|
@ -19,13 +19,18 @@
|
|||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
|
@ -38,109 +43,13 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
|
|||
private static final String INDEX = "index";
|
||||
private static final String TYPE = "type";
|
||||
|
||||
public void testAllEnabled() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", "false").endObject().endObject().endObject().endObject();
|
||||
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", "true").endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
String errorMessage = "[_all] enabled is false now encountering true";
|
||||
testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
|
||||
}
|
||||
|
||||
public void testAllConflicts() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json");
|
||||
String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json");
|
||||
String[] errorMessage = {
|
||||
"[_all] has different [norms] values",
|
||||
"[_all] has different [store] values",
|
||||
"[_all] has different [store_term_vector] values",
|
||||
"[_all] has different [store_term_vector_offsets] values",
|
||||
"[_all] has different [store_term_vector_positions] values",
|
||||
"[_all] has different [store_term_vector_payloads] values",
|
||||
"[_all] has different [analyzer]",
|
||||
"[_all] has different [similarity]"};
|
||||
// fielddata and search_analyzer should not report conflict
|
||||
testConflict(mapping, mappingUpdate, errorMessage);
|
||||
}
|
||||
|
||||
public void testAllDisabled() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", true).endObject().endObject().endObject().endObject();
|
||||
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
String errorMessage = "[_all] enabled is true now encountering false";
|
||||
testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
|
||||
}
|
||||
|
||||
public void testAllWithDefault() throws Exception {
|
||||
String defaultMapping = jsonBuilder().startObject().startObject("_default_")
|
||||
.startObject("_all")
|
||||
.field("enabled", false)
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
client().admin().indices().prepareCreate("index").addMapping("_default_", defaultMapping).get();
|
||||
String docMapping = jsonBuilder().startObject()
|
||||
.startObject("doc")
|
||||
.endObject()
|
||||
.endObject().string();
|
||||
PutMappingResponse response = client().admin().indices().preparePutMapping("index").setType("doc").setSource(docMapping).get();
|
||||
assertTrue(response.isAcknowledged());
|
||||
String docMappingUpdate = jsonBuilder().startObject().startObject("doc")
|
||||
.startObject("properties")
|
||||
.startObject("text")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().string();
|
||||
response = client().admin().indices().preparePutMapping("index").setType("doc").setSource(docMappingUpdate).get();
|
||||
assertTrue(response.isAcknowledged());
|
||||
String docMappingAllExplicitEnabled = jsonBuilder().startObject()
|
||||
.startObject("doc_all_enabled")
|
||||
.startObject("_all")
|
||||
.field("enabled", true)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().string();
|
||||
response = client().admin().indices().preparePutMapping("index").setType("doc_all_enabled").setSource(docMappingAllExplicitEnabled).get();
|
||||
assertTrue(response.isAcknowledged());
|
||||
|
||||
GetMappingsResponse mapping = client().admin().indices().prepareGetMappings("index").get();
|
||||
HashMap props = (HashMap)mapping.getMappings().get("index").get("doc").getSourceAsMap().get("_all");
|
||||
assertThat((Boolean)props.get("enabled"), equalTo(false));
|
||||
props = (HashMap)mapping.getMappings().get("index").get("doc").getSourceAsMap().get("properties");
|
||||
assertNotNull(props);
|
||||
assertNotNull(props.get("text"));
|
||||
props = (HashMap)mapping.getMappings().get("index").get("doc_all_enabled").getSourceAsMap().get("_all");
|
||||
assertThat((Boolean)props.get("enabled"), equalTo(true));
|
||||
props = (HashMap)mapping.getMappings().get("index").get("_default_").getSourceAsMap().get("_all");
|
||||
assertThat((Boolean)props.get("enabled"), equalTo(false));
|
||||
|
||||
}
|
||||
|
||||
public void testDocValuesInvalidMapping() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject().endObject().endObject().string();
|
||||
try {
|
||||
prepareCreate(INDEX).setSource(mapping).get();
|
||||
fail();
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testDocValuesInvalidMappingOnUpdate() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject(TYPE).startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().endObject().string();
|
||||
prepareCreate(INDEX).addMapping(TYPE, mapping).get();
|
||||
String mappingUpdate = jsonBuilder().startObject().startObject(TYPE).startObject("_all").startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject().endObject().string();
|
||||
GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
|
||||
try {
|
||||
client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mappingUpdate).get();
|
||||
fail();
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
|
||||
}
|
||||
// make sure all nodes have same cluster state
|
||||
compareMappingOnNodes(mappingsBeforeUpdateResponse);
|
||||
}
|
||||
|
||||
protected void testConflict(String mapping, String mappingUpdate, String... errorMessages) throws InterruptedException {
|
||||
assertAcked(prepareCreate(INDEX).setSource(mapping).get());
|
||||
protected void testConflict(String mapping, String mappingUpdate, Version idxVersion, String... errorMessages) throws InterruptedException {
|
||||
assertAcked(prepareCreate(INDEX).setSource(mapping).setSettings("index.version.created", idxVersion.id));
|
||||
ensureGreen(INDEX);
|
||||
GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
|
||||
try {
|
||||
|
@ -155,6 +64,44 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
|
|||
|
||||
}
|
||||
|
||||
public void testUpdatingAllSettingsOnOlderIndex() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("mappings")
|
||||
.startObject(TYPE)
|
||||
.startObject("_all").field("enabled", "true").endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
XContentBuilder mappingUpdate = jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("_all").field("enabled", "false").endObject()
|
||||
.startObject("properties").startObject("text").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
String errorMessage = "[_all] enabled is true now encountering false";
|
||||
testConflict(mapping.string(), mappingUpdate.string(), Version.V_5_0_0, errorMessage);
|
||||
}
|
||||
|
||||
public void testUpdatingAllSettingsOnOlderIndexDisabledToEnabled() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("mappings")
|
||||
.startObject(TYPE)
|
||||
.startObject("_all").field("enabled", "false").endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
XContentBuilder mappingUpdate = jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("_all").field("enabled", "true").endObject()
|
||||
.startObject("properties").startObject("text").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
String errorMessage = "[_all] enabled is false now encountering true";
|
||||
testConflict(mapping.string(), mappingUpdate.string(), Version.V_5_0_0, errorMessage);
|
||||
}
|
||||
|
||||
private void compareMappingOnNodes(GetMappingsResponse previousMapping) {
|
||||
// make sure all nodes have same cluster state
|
||||
for (Client client : cluster().getClients()) {
|
||||
|
|
|
@ -42,38 +42,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
|
|||
return pluginList(InternalSettingsPlugin.class);
|
||||
}
|
||||
|
||||
public void testAllEnabledAfterDisabled() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject();
|
||||
XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
|
||||
}
|
||||
|
||||
public void testAllDisabledAfterEnabled() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().endObject();
|
||||
XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
|
||||
}
|
||||
|
||||
public void testAllDisabledAfterDefaultEnabled() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("some_text").field("type", "text").endObject().endObject().endObject();
|
||||
XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
|
||||
}
|
||||
|
||||
public void testAllEnabledAfterEnabled() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().endObject();
|
||||
XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
XContentBuilder expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().endObject();
|
||||
testNoConflictWhileMergingAndMappingChanged(mapping, mappingUpdate, expectedMapping);
|
||||
}
|
||||
|
||||
public void testAllDisabledAfterDisabled() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject();
|
||||
XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
|
||||
XContentBuilder expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().endObject();
|
||||
testNoConflictWhileMergingAndMappingChanged(mapping, mappingUpdate, expectedMapping);
|
||||
}
|
||||
|
||||
private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
|
||||
IndexService indexService = createIndex("test", Settings.builder().build(), "type", mapping);
|
||||
// simulate like in MetaDataMappingService#putMapping
|
||||
|
|
|
@ -117,9 +117,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
if (randomBoolean()) {
|
||||
queryStringQueryBuilder.enablePositionIncrements(randomBoolean());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
queryStringQueryBuilder.lenient(randomBoolean());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
queryStringQueryBuilder.escape(randomBoolean());
|
||||
}
|
||||
|
|
|
@ -44,6 +44,7 @@ import java.util.Locale;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hamcrest.Matchers.anyOf;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
|
@ -59,9 +60,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||
if (randomBoolean()) {
|
||||
result.analyzeWildcard(randomBoolean());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
result.lenient(randomBoolean());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
result.minimumShouldMatch(randomMinimumShouldMatch());
|
||||
}
|
||||
|
@ -214,9 +212,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||
// the remaining tests requires either a mapping that we register with types in base test setup
|
||||
if (getCurrentTypes().length > 0) {
|
||||
Query luceneQuery = queryBuilder.toQuery(shardContext);
|
||||
assertThat(luceneQuery, instanceOf(TermQuery.class));
|
||||
TermQuery termQuery = (TermQuery) luceneQuery;
|
||||
assertThat(termQuery.getTerm(), equalTo(new Term(MetaData.ALL, query)));
|
||||
assertThat(luceneQuery, instanceOf(BooleanQuery.class));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -259,7 +255,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||
if (ms.allEnabled()) {
|
||||
assertTermQuery(query, MetaData.ALL, queryBuilder.value());
|
||||
} else {
|
||||
assertThat(query.getClass(), equalTo(MatchNoDocsQuery.class));
|
||||
assertThat(query.getClass(), anyOf(equalTo(BooleanQuery.class), equalTo(MatchNoDocsQuery.class)));
|
||||
}
|
||||
} else {
|
||||
fail("Encountered lucene query type we do not have a validation implementation for in our "
|
||||
|
|
|
@ -33,6 +33,7 @@ import java.util.concurrent.ExecutionException;
|
|||
|
||||
import static org.elasticsearch.common.util.set.Sets.newHashSet;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
@ -51,7 +52,7 @@ public class AliasResolveRoutingIT extends ESIntegTestCase {
|
|||
client().prepareIndex("test-0", "type1", "2").setSource("field1", "quick brown"),
|
||||
client().prepareIndex("test-0", "type1", "3").setSource("field1", "quick"));
|
||||
refresh("test-*");
|
||||
assertHitCount(client().prepareSearch().setIndices("alias-*").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchQuery("_all", "quick")).get(), 3L);
|
||||
assertHitCount(client().prepareSearch().setIndices("alias-*").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(queryStringQuery("quick")).get(), 3L);
|
||||
}
|
||||
|
||||
public void testResolveIndexRouting() throws Exception {
|
||||
|
|
|
@ -128,7 +128,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testStructuredAnalysis() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint())
|
||||
.minDocCount(2))
|
||||
|
@ -144,7 +144,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
long[] excludeTerms = { MUSIC_CATEGORY };
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "paul"))
|
||||
.setQuery(new TermQueryBuilder("description", "paul"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint())
|
||||
.minDocCount(1).includeExclude(new IncludeExclude(null, excludeTerms)))
|
||||
|
@ -158,7 +158,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
|
||||
public void testIncludeExclude() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(new TermQueryBuilder("_all", "weller"))
|
||||
.setQuery(new TermQueryBuilder("description", "weller"))
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint())
|
||||
.includeExclude(new IncludeExclude(null, "weller")))
|
||||
.get();
|
||||
|
@ -177,7 +177,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
assertThat(terms.contains("the"), is(true));
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(new TermQueryBuilder("_all", "weller"))
|
||||
.setQuery(new TermQueryBuilder("description", "weller"))
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint())
|
||||
.includeExclude(new IncludeExclude("weller", null)))
|
||||
.get();
|
||||
|
@ -194,7 +194,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testIncludeExcludeExactValues() throws Exception {
|
||||
String []incExcTerms={"weller","nosuchterm"};
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(new TermQueryBuilder("_all", "weller"))
|
||||
.setQuery(new TermQueryBuilder("description", "weller"))
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint())
|
||||
.includeExclude(new IncludeExclude(null, incExcTerms)))
|
||||
.get();
|
||||
|
@ -207,7 +207,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
assertEquals(new HashSet<String>(Arrays.asList("jam", "council", "style", "paul", "of", "the")), terms);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(new TermQueryBuilder("_all", "weller"))
|
||||
.setQuery(new TermQueryBuilder("description", "weller"))
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint())
|
||||
.includeExclude(new IncludeExclude(incExcTerms, null)))
|
||||
.get();
|
||||
|
@ -224,7 +224,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testUnmapped() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx_unmapped")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint())
|
||||
.minDocCount(2))
|
||||
|
@ -238,7 +238,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testTextAnalysis() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint())
|
||||
.minDocCount(2))
|
||||
|
@ -252,7 +252,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testTextAnalysisGND() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()).significanceHeuristic(new GND(true))
|
||||
.minDocCount(2))
|
||||
|
@ -266,7 +266,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testTextAnalysisChiSquare() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()).significanceHeuristic(new ChiSquare(false,true))
|
||||
.minDocCount(2))
|
||||
|
@ -281,7 +281,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
SearchResponse response = client()
|
||||
.prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0)
|
||||
.setSize(60)
|
||||
.setExplain(true)
|
||||
|
@ -300,7 +300,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
// as the background source of term statistics.
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description")
|
||||
.minDocCount(2).backgroundFilter(QueryBuilders.termQuery("fact_category", 1)))
|
||||
|
@ -324,7 +324,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testFilteredAnalysis() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "weller"))
|
||||
.setQuery(new TermQueryBuilder("description", "weller"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description")
|
||||
.minDocCount(1).backgroundFilter(QueryBuilders.termsQuery("description", "paul")))
|
||||
|
@ -374,7 +374,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testPartiallyUnmapped() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx_unmapped", "test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms").field("description")
|
||||
.executionHint(randomExecutionHint())
|
||||
|
@ -389,7 +389,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testPartiallyUnmappedWithFormat() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx_unmapped", "test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(boolQuery().should(termQuery("_all", "the")).should(termQuery("_all", "terje")))
|
||||
.setQuery(boolQuery().should(termQuery("description", "the")).should(termQuery("description", "terje")))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms")
|
||||
.field("fact_category")
|
||||
|
@ -426,7 +426,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testDefaultSignificanceHeuristic() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms")
|
||||
.field("description")
|
||||
|
@ -443,7 +443,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
|
|||
public void testMutualInformation() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_AND_FETCH)
|
||||
.setQuery(new TermQueryBuilder("_all", "terje"))
|
||||
.setQuery(new TermQueryBuilder("description", "terje"))
|
||||
.setFrom(0).setSize(60).setExplain(true)
|
||||
.addAggregation(significantTerms("mySignificantTerms")
|
||||
.field("description")
|
||||
|
|
|
@ -699,7 +699,6 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testPlainHighlighter() throws Exception {
|
||||
createIndex("test");
|
||||
ensureGreen();
|
||||
|
||||
client().prepareIndex("test", "type1")
|
||||
|
@ -715,49 +714,6 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
|
||||
assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>"));
|
||||
|
||||
logger.info("--> searching on _all, highlighting on field1");
|
||||
source = searchSource()
|
||||
.query(termQuery("_all", "test"))
|
||||
.highlighter(highlight().field("field1").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>"));
|
||||
|
||||
logger.info("--> searching on _all, highlighting on field2");
|
||||
source = searchSource()
|
||||
.query(termQuery("_all", "quick"))
|
||||
.highlighter(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
|
||||
|
||||
logger.info("--> searching on _all, highlighting on field2");
|
||||
source = searchSource()
|
||||
.query(prefixQuery("_all", "qui"))
|
||||
.highlighter(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
|
||||
|
||||
logger.info("--> searching on _all with constant score, highlighting on field2");
|
||||
source = searchSource()
|
||||
.query(constantScoreQuery(prefixQuery("_all", "qui")))
|
||||
.highlighter(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
|
||||
|
||||
logger.info("--> searching on _all with constant score, highlighting on field2");
|
||||
source = searchSource()
|
||||
.query(boolQuery().should(constantScoreQuery(prefixQuery("_all", "qui"))))
|
||||
.highlighter(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
|
||||
}
|
||||
|
||||
public void testFastVectorHighlighter() throws Exception {
|
||||
|
@ -776,36 +732,6 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
|
||||
assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>"));
|
||||
|
||||
logger.info("--> searching on _all, highlighting on field1");
|
||||
source = searchSource()
|
||||
.query(termQuery("_all", "test"))
|
||||
.highlighter(highlight().field("field1", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>")
|
||||
.requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>"));
|
||||
|
||||
logger.info("--> searching on _all, highlighting on field2");
|
||||
source = searchSource()
|
||||
.query(termQuery("_all", "quick"))
|
||||
.highlighter(highlight().field("field2", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>")
|
||||
.requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
|
||||
|
||||
logger.info("--> searching on _all, highlighting on field2");
|
||||
source = searchSource()
|
||||
.query(prefixQuery("_all", "qui"))
|
||||
.highlighter(highlight().field("field2", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>")
|
||||
.requireFieldMatch(false));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setSource(source).get();
|
||||
|
||||
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
|
||||
|
||||
logger.info("--> searching with boundary characters");
|
||||
source = searchSource()
|
||||
.query(matchQuery("field2", "quick"))
|
||||
|
@ -1051,22 +977,10 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
// LUCENE 3.1 UPGRADE: Caused adding the space at the end...
|
||||
assertHighlight(searchResponse, i, "field1", 0, 1, equalTo("<em>test</em> " + hit.id()));
|
||||
}
|
||||
|
||||
logger.info("--> searching explicitly _all and highlighting on _all");
|
||||
searchResponse = client().prepareSearch()
|
||||
.setSize(COUNT)
|
||||
.setQuery(termQuery("_all", "test"))
|
||||
.highlighter(new HighlightBuilder().field("_all", 100, 0))
|
||||
.get();
|
||||
for (int i = 0; i < COUNT; i++) {
|
||||
SearchHit hit = searchResponse.getHits().getHits()[i];
|
||||
assertHighlight(searchResponse, i, "_all", 0, 1, equalTo("<em>test</em> " + hit.id()));
|
||||
}
|
||||
}
|
||||
|
||||
public XContentBuilder type1TermVectorMapping() throws IOException {
|
||||
return XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
|
||||
.startObject("properties")
|
||||
.startObject("field1").field("type", "text").field("term_vector", "with_positions_offsets").endObject()
|
||||
.startObject("field2").field("type", "text").field("term_vector", "with_positions_offsets").endObject()
|
||||
|
@ -1491,7 +1405,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
.putArray("index.analysis.filter.synonym.synonyms", "quick => fast");
|
||||
|
||||
assertAcked(prepareCreate("test").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())
|
||||
.addMapping("type2", "_all", "store=true,term_vector=with_positions_offsets",
|
||||
.addMapping("type2",
|
||||
"field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym",
|
||||
"field3", "type=text,analyzer=synonym"));
|
||||
ensureGreen();
|
||||
|
@ -2089,7 +2003,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
//lets fall back to the standard highlighter then, what people would do to highlight query matches
|
||||
logger.info("--> searching on field2, highlighting on field2, falling back to the plain highlighter");
|
||||
source = searchSource()
|
||||
.query(matchPhraseQuery("_all", "quick brown"))
|
||||
.query(matchPhraseQuery("field2", "quick brown"))
|
||||
.highlighter(highlight()
|
||||
.field("field2").preTags("<xxx>").postTags("</xxx>").highlighterType("plain").requireFieldMatch(false));
|
||||
|
||||
|
@ -2174,7 +2088,6 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
public void testMultiMatchQueryHighlight() throws IOException {
|
||||
String[] highlighterTypes = new String[] {"fvh", "plain", "postings"};
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all").field("store", true).field("index_options", "offsets").endObject()
|
||||
.startObject("properties")
|
||||
.startObject("field1")
|
||||
.field("type", "text")
|
||||
|
@ -2790,9 +2703,6 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
XContentBuilder mappings = jsonBuilder();
|
||||
mappings.startObject();
|
||||
mappings.startObject("jobs")
|
||||
.startObject("_all")
|
||||
.field("enabled", false)
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject("loc")
|
||||
.field("type", "geo_point")
|
||||
|
|
|
@ -263,9 +263,9 @@ public class MoreLikeThisIT extends ESIntegTestCase {
|
|||
assertThrows(client().prepareSearch().setQuery(
|
||||
new MoreLikeThisQueryBuilder(new String[] {"string_value", "int_value"}, null, new Item[] {new Item("test", "type", "1")}).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class);
|
||||
|
||||
// mlt query with no field -> OK
|
||||
// mlt query with no field -> No results (because _all is not enabled)
|
||||
searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"index"}).minTermFreq(1).minDocFreq(1)).execute().actionGet();
|
||||
assertHitCount(searchResponse, 2L);
|
||||
assertHitCount(searchResponse, 0L);
|
||||
|
||||
// mlt query with string fields
|
||||
searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[]{"string_value"}, new String[] {"index"}, null).minTermFreq(1).minDocFreq(1)).execute().actionGet();
|
||||
|
|
|
@ -56,11 +56,13 @@ import static org.hamcrest.Matchers.startsWith;
|
|||
|
||||
public class SimpleNestedIT extends ESIntegTestCase {
|
||||
public void testSimpleNested() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "nested1", "type=nested").addMapping("type2", "nested1", "type=nested"));
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "nested1", "type=nested")
|
||||
.addMapping("type2", "nested1", "type=nested"));
|
||||
ensureGreen();
|
||||
|
||||
// check on no data, see it works
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(termQuery("_all", "n_value1_1")).execute().actionGet();
|
||||
SearchResponse searchResponse = client().prepareSearch("test").execute().actionGet();
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
|
||||
searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).execute().actionGet();
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
|
||||
|
@ -89,9 +91,6 @@ public class SimpleNestedIT extends ESIntegTestCase {
|
|||
// check the numDocs
|
||||
assertDocumentCount("test", 3);
|
||||
|
||||
// check that _all is working on nested docs
|
||||
searchResponse = client().prepareSearch("test").setQuery(termQuery("_all", "n_value1_1")).execute().actionGet();
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
|
||||
searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).execute().actionGet();
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
|
||||
|
||||
|
|
|
@ -40,18 +40,26 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.index.query.Operator;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.QueryStringQueryBuilder;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
public class QueryStringIT extends ESIntegTestCase {
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception {
|
||||
|
|
|
@ -112,37 +112,6 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
return Math.min(2, cluster().numDataNodes() - 1);
|
||||
}
|
||||
|
||||
public void testOmitNormsOnAll() throws ExecutionException, InterruptedException, IOException {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all").field("norms", false).endObject()
|
||||
.endObject().endObject())
|
||||
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)); // only one shard otherwise IDF might be different for comparing scores
|
||||
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumps"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"),
|
||||
client().prepareIndex("test", "type1", "3").setSource("field1", "quick"));
|
||||
|
||||
assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3L);
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("_all", "quick")).setExplain(true).get();
|
||||
SearchHit[] hits = searchResponse.getHits().hits();
|
||||
assertThat(hits.length, equalTo(3));
|
||||
assertThat(hits[0].score(), allOf(equalTo(hits[1].getScore()), equalTo(hits[2].getScore())));
|
||||
cluster().wipeIndices("test");
|
||||
|
||||
createIndex("test");
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumps"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"),
|
||||
client().prepareIndex("test", "type1", "3").setSource("field1", "quick"));
|
||||
|
||||
assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3L);
|
||||
searchResponse = client().prepareSearch().setQuery(matchQuery("_all", "quick")).get();
|
||||
hits = searchResponse.getHits().hits();
|
||||
assertThat(hits.length, equalTo(3));
|
||||
assertThat(hits[0].score(), allOf(greaterThan(hits[1].getScore()), greaterThan(hits[2].getScore())));
|
||||
|
||||
}
|
||||
|
||||
// see #3952
|
||||
public void testEmptyQueryString() throws ExecutionException, InterruptedException, IOException {
|
||||
createIndex("test");
|
||||
|
@ -284,20 +253,6 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testCommonTermsQueryOnAllField() throws Exception {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addMapping("type1", "message", "type=text", "comment", "type=text,boost=5.0")
|
||||
.setSettings(SETTING_NUMBER_OF_SHARDS, 1).get();
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("message", "test message", "comment", "whatever"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("message", "hello world", "comment", "test comment"));
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(commonTermsQuery("_all", "test")).get();
|
||||
assertHitCount(searchResponse, 2L);
|
||||
assertFirstHit(searchResponse, hasId("2"));
|
||||
assertSecondHit(searchResponse, hasId("1"));
|
||||
assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore()));
|
||||
}
|
||||
|
||||
public void testCommonTermsQuery() throws Exception {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=text,analyzer=whitespace")
|
||||
|
@ -528,7 +483,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertHitCount(searchResponse, 1L);
|
||||
|
||||
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch()
|
||||
.setQuery(queryStringQuery("future:[now/D TO now+2M/d]")).get());
|
||||
.setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lenient(false)).get());
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
assertThat(e.toString(), containsString("unit [D] not supported for date math"));
|
||||
}
|
||||
|
@ -1698,23 +1653,6 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertHitCount(searchResponse, 1L);
|
||||
}
|
||||
|
||||
public void testAllFieldEmptyMapping() throws Exception {
|
||||
client().prepareIndex("myindex", "mytype").setId("1").setSource("{}").setRefreshPolicy(IMMEDIATE).get();
|
||||
SearchResponse response = client().prepareSearch("myindex").setQuery(matchQuery("_all", "foo")).get();
|
||||
assertNoFailures(response);
|
||||
}
|
||||
|
||||
public void testAllDisabledButQueried() throws Exception {
|
||||
createIndex("myindex");
|
||||
assertAcked(client().admin().indices().preparePutMapping("myindex").setType("mytype").setSource(
|
||||
jsonBuilder().startObject().startObject("mytype").startObject("_all").field("enabled", false)
|
||||
.endObject().endObject().endObject()));
|
||||
client().prepareIndex("myindex", "mytype").setId("1").setSource("bar", "foo").setRefreshPolicy(IMMEDIATE).get();
|
||||
SearchResponse response = client().prepareSearch("myindex").setQuery(matchQuery("_all", "foo")).get();
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 0);
|
||||
}
|
||||
|
||||
public void testMinScore() throws ExecutionException, InterruptedException {
|
||||
createIndex("test");
|
||||
|
||||
|
|
|
@ -21,21 +21,27 @@ package org.elasticsearch.search.query;
|
|||
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.BoolQueryBuilder;
|
||||
import org.elasticsearch.index.query.Operator;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.SimpleQueryStringFlag;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
@ -61,6 +67,11 @@ import static org.hamcrest.Matchers.equalTo;
|
|||
* Tests for the {@code simple_query_string} query
|
||||
*/
|
||||
public class SimpleQueryStringIT extends ESIntegTestCase {
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
|
||||
}
|
||||
|
||||
public void testSimpleQueryString() throws ExecutionException, InterruptedException {
|
||||
createIndex("test");
|
||||
indexRandom(true, false,
|
||||
|
@ -252,8 +263,8 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
|
|||
searchResponse = client()
|
||||
.prepareSearch()
|
||||
.setQuery(
|
||||
simpleQueryStringQuery("baz | egg*").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.WHITESPACE,
|
||||
SimpleQueryStringFlag.PREFIX)).get();
|
||||
simpleQueryStringQuery("quuz~1 + egg*").flags(SimpleQueryStringFlag.WHITESPACE, SimpleQueryStringFlag.AND,
|
||||
SimpleQueryStringFlag.FUZZY, SimpleQueryStringFlag.PREFIX)).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertFirstHit(searchResponse, hasId("4"));
|
||||
}
|
||||
|
@ -525,7 +536,10 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
|
|||
|
||||
public void testExplicitAllFieldsRequested() throws Exception {
|
||||
String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index-with-all.json");
|
||||
prepareCreate("test").setSource(indexBody).get();
|
||||
prepareCreate("test")
|
||||
.setSource(indexBody)
|
||||
// .setSettings(Settings.builder().put("index.version.created", Version.V_5_0_0.id)).get();
|
||||
.get();
|
||||
ensureGreen("test");
|
||||
|
||||
List<IndexRequestBuilder> reqs = new ArrayList<>();
|
||||
|
|
|
@ -459,7 +459,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
.put("index.analysis.filter.my_shingle.max_shingle_size", 2));
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
|
||||
.startObject("properties")
|
||||
.startObject("body").field("type", "text").field("analyzer", "body").endObject()
|
||||
.startObject("body_reverse").field("type", "text").field("analyzer", "reverse").endObject()
|
||||
|
@ -502,10 +501,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
.put("index.analysis.filter.my_shingle.max_shingle_size", 2)
|
||||
.put("index.number_of_shards", 1));
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all")
|
||||
.field("store", true)
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject("body").
|
||||
field("type", "text").
|
||||
|
@ -655,10 +650,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject("_all")
|
||||
.field("store", true)
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject("body")
|
||||
.field("type", "text")
|
||||
|
@ -726,7 +717,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject().startObject("type1")
|
||||
.startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
|
||||
.startObject("properties")
|
||||
.startObject("body").field("type", "text").field("analyzer", "body").endObject()
|
||||
.startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
|
||||
|
@ -929,10 +919,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject("_all")
|
||||
.field("store", true)
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject("body")
|
||||
.field("type", "text")
|
||||
|
|
|
@ -68,7 +68,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
|
|||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_i:d:1")).execute().actionGet().isValid(), equalTo(false));
|
||||
|
||||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("foo:1")).execute().actionGet().isValid(), equalTo(true));
|
||||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("bar:hey")).execute().actionGet().isValid(), equalTo(false));
|
||||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("bar:hey").lenient(false)).execute().actionGet().isValid(), equalTo(false));
|
||||
|
||||
assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("nonexistent:hello")).execute().actionGet().isValid(), equalTo(true));
|
||||
|
||||
|
@ -108,7 +108,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
|
|||
.execute().actionGet();
|
||||
assertThat(response.isValid(), equalTo(true));
|
||||
assertThat(response.getQueryExplanation().size(), equalTo(1));
|
||||
assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("_all:foo"));
|
||||
assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("(foo:foo | baz:foo)"));
|
||||
assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
{
|
||||
"mappings": {
|
||||
"_default_": {
|
||||
"_all": {
|
||||
"norms": false
|
||||
},
|
||||
"_meta": {
|
||||
"version": "5.0.0-beta1"
|
||||
},
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
},
|
||||
"mappings" : {
|
||||
"_default_" : {
|
||||
"_all" : {"enabled" : true, "norms" : false},
|
||||
"dynamic_templates" : [ {
|
||||
"message_field" : {
|
||||
"path_match" : "message",
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
{
|
||||
"mappings": {
|
||||
"_default_": {
|
||||
"_all": {
|
||||
"norms": false
|
||||
},
|
||||
"_meta": {
|
||||
"version": "5.0.0-beta1"
|
||||
},
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
{
|
||||
"mappings": {
|
||||
"_default_": {
|
||||
"_all": {
|
||||
"norms": false
|
||||
},
|
||||
"_meta": {
|
||||
"version": "5.0.0-beta1"
|
||||
},
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
{
|
||||
"mappings": {
|
||||
"_default_": {
|
||||
"_all": {
|
||||
"norms": false
|
||||
},
|
||||
"_meta": {
|
||||
"version": "5.0.0-beta1"
|
||||
},
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
{
|
||||
"test": {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "nested",
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"lower": {
|
||||
"analyzer": "standard",
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
{
|
||||
"test": {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "nested",
|
||||
"include_in_all": false,
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "keyword",
|
||||
"include_in_all": false,
|
||||
"fields": {
|
||||
"lower": {
|
||||
"analyzer": "standard",
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
{
|
||||
"mappings": {
|
||||
"type": {
|
||||
"_all": {
|
||||
"store": true,
|
||||
"store_term_vectors": true,
|
||||
"store_term_vector_offsets": true,
|
||||
"store_term_vector_positions": true,
|
||||
"store_term_vector_payloads": true,
|
||||
"norms": false,
|
||||
"analyzer": "standard",
|
||||
"search_analyzer": "whitespace",
|
||||
"similarity": "my_similarity"
|
||||
}
|
||||
}
|
||||
},
|
||||
"settings": {
|
||||
"similarity": {
|
||||
"my_similarity": {
|
||||
"type": "DFR",
|
||||
"basic_model": "g",
|
||||
"after_effect": "l",
|
||||
"normalization": "h2",
|
||||
"normalization.h2.c": "3.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,16 +0,0 @@
|
|||
{
|
||||
"type": {
|
||||
"_all": {
|
||||
"store": false,
|
||||
"enabled": true,
|
||||
"store_term_vectors": false,
|
||||
"store_term_vector_offsets": false,
|
||||
"store_term_vector_positions": false,
|
||||
"store_term_vector_payloads": false,
|
||||
"norms": true,
|
||||
"analyzer": "whitespace",
|
||||
"search_analyzer": "standard",
|
||||
"similarity": "BM25"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1 +0,0 @@
|
|||
{"type":{"_timestamp":{"enabled":false}}}
|
|
@ -3,6 +3,9 @@
|
|||
"index": {
|
||||
"number_of_shards": 1,
|
||||
"number_of_replicas": 0,
|
||||
"version": {
|
||||
"created": "5000099"
|
||||
},
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"my_ngrams": {
|
||||
|
|
|
@ -23,9 +23,6 @@
|
|||
},
|
||||
"mappings": {
|
||||
"doc": {
|
||||
"_all": {
|
||||
"enabled": false
|
||||
},
|
||||
"properties": {
|
||||
"f1": {"type": "text"},
|
||||
"f2": {"type": "keyword"},
|
||||
|
|
|
@ -155,23 +155,21 @@ PUT my_index <1>
|
|||
{
|
||||
"mappings": {
|
||||
"user": { <2>
|
||||
"_all": { "enabled": false }, <3>
|
||||
"properties": { <4>
|
||||
"title": { "type": "text" }, <5>
|
||||
"name": { "type": "text" }, <5>
|
||||
"age": { "type": "integer" } <5>
|
||||
"properties": { <3>
|
||||
"title": { "type": "text" }, <4>
|
||||
"name": { "type": "text" }, <4>
|
||||
"age": { "type": "integer" } <4>
|
||||
}
|
||||
},
|
||||
"blogpost": { <2>
|
||||
"_all": { "enabled": false }, <3>
|
||||
"properties": { <4>
|
||||
"title": { "type": "text" }, <5>
|
||||
"body": { "type": "text" }, <5>
|
||||
"properties": { <3>
|
||||
"title": { "type": "text" }, <4>
|
||||
"body": { "type": "text" }, <4>
|
||||
"user_id": {
|
||||
"type": "keyword" <5>
|
||||
"type": "keyword" <4>
|
||||
},
|
||||
"created": {
|
||||
"type": "date", <5>
|
||||
"type": "date", <4>
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
}
|
||||
}
|
||||
|
@ -182,9 +180,8 @@ PUT my_index <1>
|
|||
// CONSOLE
|
||||
<1> Create an index called `my_index`.
|
||||
<2> Add mapping types called `user` and `blogpost`.
|
||||
<3> Disable the `_all` <<mapping-fields,meta field>> for the `user` mapping type.
|
||||
<4> Specify fields or _properties_ in each mapping type.
|
||||
<5> Specify the data `type` and mapping for each field.
|
||||
<3> Specify fields or _properties_ in each mapping type.
|
||||
<4> Specify the data `type` and mapping for each field.
|
||||
|
||||
|
||||
--
|
||||
|
|
|
@ -14,13 +14,13 @@ PUT my_index
|
|||
{
|
||||
"mappings": {
|
||||
"_default_": { <1>
|
||||
"_all": {
|
||||
"_source": {
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"user": {}, <2>
|
||||
"blogpost": { <3>
|
||||
"_all": {
|
||||
"_source": {
|
||||
"enabled": true
|
||||
}
|
||||
}
|
||||
|
@ -28,9 +28,9 @@ PUT my_index
|
|||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
<1> The `_default_` mapping defaults the <<mapping-all-field,`_all`>> field to disabled.
|
||||
<1> The `_default_` mapping defaults the <<mapping-source-field,`_source`>> field to disabled.
|
||||
<2> The `user` type inherits the settings from `_default_`.
|
||||
<3> The `blogpost` type overrides the defaults and enables the <<mapping-all-field,`_all`>> field.
|
||||
<3> The `blogpost` type overrides the defaults and enables the <<mapping-source-field,`_source`>> field.
|
||||
|
||||
NOTE: When updating the `_default_` mapping with the
|
||||
<<indices-put-mapping,PUT mapping>> API, the new mapping is not merged with
|
||||
|
@ -53,7 +53,7 @@ PUT _template/logging
|
|||
"settings": { "number_of_shards": 1 }, <2>
|
||||
"mappings": {
|
||||
"_default_": {
|
||||
"_all": { <3>
|
||||
"_field_names": { <3>
|
||||
"enabled": false
|
||||
},
|
||||
"dynamic_templates": [
|
||||
|
@ -82,5 +82,5 @@ PUT logs-2015.10.01/event/1
|
|||
// CONSOLE
|
||||
<1> The `logging` template will match any indices beginning with `logs-`.
|
||||
<2> Matching indices will be created with a single primary shard.
|
||||
<3> The `_all` field will be disabled by default for new type mappings.
|
||||
<3> The `_field_names` field will be disabled by default for new type mappings.
|
||||
<4> String fields will be created with a `text` main field, and a `keyword` `.raw` field.
|
||||
|
|
|
@ -399,18 +399,18 @@ You can override the default mappings for all indices and all types
|
|||
by specifying a `_default_` type mapping in an index template
|
||||
which matches all indices.
|
||||
|
||||
For example, to disable the `_all` field by default for all types in all
|
||||
For example, to disable the `_field_names` field by default for all types in all
|
||||
new indices, you could create the following index template:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _template/disable_all_field
|
||||
PUT _template/disable_field_names
|
||||
{
|
||||
"order": 0,
|
||||
"index_patterns": ["*"], <1>
|
||||
"mappings": {
|
||||
"_default_": { <2>
|
||||
"_all": { <3>
|
||||
"_field_names": { <3>
|
||||
"enabled": false
|
||||
}
|
||||
}
|
||||
|
@ -421,4 +421,4 @@ PUT _template/disable_all_field
|
|||
<1> Applies the mappings to an `index` which matches the pattern `*`, in other
|
||||
words, all new indices.
|
||||
<2> Defines the `_default_` type mapping types within the index.
|
||||
<3> Disables the `_all` field by default.
|
||||
<3> Disables the `_field_names` field by default.
|
||||
|
|
|
@ -42,7 +42,7 @@ can be customised when a mapping type is created.
|
|||
|
||||
<<mapping-all-field,`_all`>>::
|
||||
|
||||
A _catch-all_ field that indexes the values of all other fields.
|
||||
A _catch-all_ field that indexes the values of all other fields. Disabled by default.
|
||||
|
||||
<<mapping-field-names-field,`_field_names`>>::
|
||||
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
[[mapping-all-field]]
|
||||
=== `_all` field
|
||||
|
||||
deprecated[6.0.0, `_all` may no longer be enabled for indices created in 6.0+, use a custom field and the mapping `copy_to` parameter]
|
||||
|
||||
The `_all` field is a special _catch-all_ field which concatenates the values
|
||||
of all of the other fields into one big string, using space as a delimiter, which is then
|
||||
<<analysis,analyzed>> and indexed, but not stored. This means that it can be
|
||||
|
@ -12,24 +14,37 @@ started with a new dataset. For instance:
|
|||
|
||||
[source,js]
|
||||
--------------------------------
|
||||
PUT my_index/user/1 <1>
|
||||
PUT /my_index
|
||||
{
|
||||
"mapping": {
|
||||
"user": {
|
||||
"_all": {
|
||||
"enabled": true <1>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /my_index/user/1 <2>
|
||||
{
|
||||
"first_name": "John",
|
||||
"last_name": "Smith",
|
||||
"date_of_birth": "1970-10-24"
|
||||
}
|
||||
|
||||
GET my_index/_search
|
||||
GET /my_index/_search
|
||||
{
|
||||
"query": {
|
||||
"match": {
|
||||
"_all": "john smith new york"
|
||||
"_all": "john smith 1970"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------
|
||||
// TEST[skip:_all is no longer allowed]
|
||||
// CONSOLE
|
||||
<1> The `_all` field will contain the terms: [ `"john"`, `"smith"`, `"1970"`, `"10"`, `"24"` ]
|
||||
<1> Enabling the `_all` field
|
||||
<2> The `_all` field will contain the terms: [ `"john"`, `"smith"`, `"1970"`, `"10"`, `"24"` ]
|
||||
|
||||
[NOTE]
|
||||
.All values treated as strings
|
||||
|
@ -56,15 +71,16 @@ and long fields (less relevant). For use cases where search relevance is
|
|||
important, it is better to query individual fields specifically.
|
||||
|
||||
The `_all` field is not free: it requires extra CPU cycles and uses more disk
|
||||
space. If not needed, it can be completely <<disabling-all-field,disabled>> or
|
||||
customised on a <<include-in-all,per-field basis>>.
|
||||
space. For this reason, it is disabled by default. If not needed, it can be
|
||||
<<enabling-all-field,enabled>> or customised on a <<include-in-all,per-field
|
||||
basis>>.
|
||||
|
||||
[[querying-all-field]]
|
||||
==== Using the `_all` field in queries
|
||||
|
||||
The <<query-dsl-query-string-query,`query_string`>> and
|
||||
<<query-dsl-simple-query-string-query,`simple_query_string`>> queries query
|
||||
the `_all` field by default, unless another field is specified:
|
||||
<<query-dsl-simple-query-string-query,`simple_query_string`>> queries query the
|
||||
`_all` field by default if it is enabled, unless another field is specified:
|
||||
|
||||
[source,js]
|
||||
--------------------------------
|
||||
|
@ -88,15 +104,13 @@ GET _search?q=john+smith+new+york
|
|||
--------------------------------
|
||||
|
||||
Other queries, such as the <<query-dsl-match-query,`match`>> and
|
||||
<<query-dsl-term-query,`term`>> queries require you to specify
|
||||
the `_all` field explicitly, as per the
|
||||
<<mapping-all-field,first example>>.
|
||||
<<query-dsl-term-query,`term`>> queries require you to specify the `_all` field
|
||||
explicitly, as per the <<mapping-all-field,first example>>.
|
||||
|
||||
[[disabling-all-field]]
|
||||
==== Disabling the `_all` field
|
||||
[[enabling-all-field]]
|
||||
==== Enabling the `_all` field
|
||||
|
||||
The `_all` field can be completely disabled per-type by setting `enabled` to
|
||||
`false`:
|
||||
The `_all` field can be enabled per-type by setting `enabled` to `true`:
|
||||
|
||||
[source,js]
|
||||
--------------------------------
|
||||
|
@ -108,23 +122,24 @@ PUT my_index
|
|||
},
|
||||
"type_2": { <2>
|
||||
"_all": {
|
||||
"enabled": false
|
||||
"enabled": true
|
||||
},
|
||||
"properties": {...}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------
|
||||
// CONSOLE
|
||||
// TEST[s/\.\.\.//]
|
||||
// TEST[skip:_all is no longer allowed]
|
||||
// CONSOLE
|
||||
|
||||
<1> The `_all` field in `type_1` is enabled.
|
||||
<2> The `_all` field in `type_2` is completely disabled.
|
||||
<1> The `_all` field in `type_1` is disabled.
|
||||
<2> The `_all` field in `type_2` is enabled.
|
||||
|
||||
If the `_all` field is disabled, then URI search requests and the
|
||||
`query_string` and `simple_query_string` queries will not be able to use it
|
||||
for queries (see <<querying-all-field>>). You can configure them to use a
|
||||
different field with the `index.query.default_field` setting:
|
||||
If the `_all` field is enabled, then URI search requests and the `query_string`
|
||||
and `simple_query_string` queries can automatically use it for queries (see
|
||||
<<querying-all-field>>). You can configure them to use a different field with
|
||||
the `index.query.default_field` setting:
|
||||
|
||||
[source,js]
|
||||
--------------------------------
|
||||
|
@ -132,9 +147,6 @@ PUT my_index
|
|||
{
|
||||
"mappings": {
|
||||
"my_type": {
|
||||
"_all": {
|
||||
"enabled": false <1>
|
||||
},
|
||||
"properties": {
|
||||
"content": {
|
||||
"type": "text"
|
||||
|
@ -143,14 +155,13 @@ PUT my_index
|
|||
}
|
||||
},
|
||||
"settings": {
|
||||
"index.query.default_field": "content" <2>
|
||||
"index.query.default_field": "content" <1>
|
||||
}
|
||||
}
|
||||
--------------------------------
|
||||
// CONSOLE
|
||||
|
||||
<1> The `_all` field is disabled for the `my_type` type.
|
||||
<2> The `query_string` query will default to querying the `content` field in this index.
|
||||
<1> The `query_string` query will default to querying the `content` field in this index.
|
||||
|
||||
[[excluding-from-all]]
|
||||
==== Excluding fields from `_all`
|
||||
|
@ -171,6 +182,7 @@ PUT myindex
|
|||
{
|
||||
"mappings": {
|
||||
"mytype": {
|
||||
"_all": {"enabled": true},
|
||||
"properties": {
|
||||
"title": { <1>
|
||||
"type": "text",
|
||||
|
@ -184,6 +196,7 @@ PUT myindex
|
|||
}
|
||||
}
|
||||
--------------------------------
|
||||
// TEST[skip:_all is no longer allowed]
|
||||
// CONSOLE
|
||||
|
||||
<1> When querying the `_all` field, words that originated in the
|
||||
|
@ -252,8 +265,8 @@ A field can only be used for <<search-request-highlighting,highlighting>> if
|
|||
the original string value is available, either from the
|
||||
<<mapping-source-field,`_source`>> field or as a stored field.
|
||||
|
||||
The `_all` field is not present in the `_source` field and it is not stored by
|
||||
default, and so cannot be highlighted. There are two options. Either
|
||||
The `_all` field is not present in the `_source` field and it is not stored or
|
||||
enabled by default, and so cannot be highlighted. There are two options. Either
|
||||
<<all-field-store,store the `_all` field>> or highlight the
|
||||
<<all-highlight-fields,original fields>>.
|
||||
|
||||
|
@ -270,6 +283,7 @@ PUT myindex
|
|||
"mappings": {
|
||||
"mytype": {
|
||||
"_all": {
|
||||
"enabled": true,
|
||||
"store": true
|
||||
}
|
||||
}
|
||||
|
@ -296,11 +310,12 @@ GET _search
|
|||
}
|
||||
}
|
||||
--------------------------------
|
||||
// TEST[skip:_all is no longer allowed]
|
||||
// CONSOLE
|
||||
|
||||
Of course, storing the `_all` field will use significantly more disk space
|
||||
and, because it is a combination of other fields, it may result in odd
|
||||
highlighting results.
|
||||
Of course, enabling and storing the `_all` field will use significantly more
|
||||
disk space and, because it is a combination of other fields, it may result in
|
||||
odd highlighting results.
|
||||
|
||||
The `_all` field also accepts the `term_vector` and `index_options`
|
||||
parameters, allowing the use of the fast vector highlighter and the postings
|
||||
|
@ -317,7 +332,7 @@ PUT myindex
|
|||
{
|
||||
"mappings": {
|
||||
"mytype": {
|
||||
"_all": {}
|
||||
"_all": {"enabled": true}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -344,6 +359,7 @@ GET _search
|
|||
}
|
||||
}
|
||||
--------------------------------
|
||||
// TEST[skip:_all is no longer allowed]
|
||||
// CONSOLE
|
||||
|
||||
<1> The query inspects the `_all` field to find matching documents.
|
||||
|
|
|
@ -1,6 +1,15 @@
|
|||
[[breaking_60_mappings_changes]]
|
||||
=== Mapping changes
|
||||
|
||||
==== The `_all` meta field is now disabled by default
|
||||
|
||||
On new mappings, the `_all` meta field that contains a copy of the text from
|
||||
each field is now disabled by default. The `query_string` and
|
||||
`simple_query_string` queries that previously used `_all` to search will now
|
||||
check if `_all` is enabled/disabled and switch to executing the query across all
|
||||
fields if `_all` is disabled. `_all` can no longer be configured for indices
|
||||
created with Elasticsearch version 6.0 or later.
|
||||
|
||||
==== Unrecognized `match_mapping_type` options not silently ignored
|
||||
|
||||
Previously Elastiscearch would silently ignore any dynamic templates that
|
||||
|
|
|
@ -87,7 +87,15 @@ due to dynamic mapping, and 'foo' does not correctly parse into a date:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET twitter/tweet/_validate/query?q=post_date:foo%5d
|
||||
GET twitter/tweet/_validate/query
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"query": "post_date:foo",
|
||||
"lenient": false
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
|
@ -102,7 +110,15 @@ about why a query failed:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET twitter/tweet/_validate/query?q=post_date:foo%5d&explain=true
|
||||
GET twitter/tweet/_validate/query?explain=true
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"query": "post_date:foo",
|
||||
"lenient": false
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
|
|
|
@ -6,8 +6,6 @@
|
|||
body:
|
||||
mappings:
|
||||
test:
|
||||
_all:
|
||||
enabled: false
|
||||
properties:
|
||||
number:
|
||||
type: integer
|
||||
|
|
|
@ -6,8 +6,6 @@
|
|||
body:
|
||||
mappings:
|
||||
test:
|
||||
_all:
|
||||
enabled: false
|
||||
properties:
|
||||
number:
|
||||
type: integer
|
||||
|
|
|
@ -6,8 +6,6 @@
|
|||
body:
|
||||
mappings:
|
||||
test:
|
||||
_all:
|
||||
enabled: false
|
||||
properties:
|
||||
field:
|
||||
type: text
|
||||
|
|
|
@ -6,8 +6,6 @@
|
|||
body:
|
||||
mappings:
|
||||
test:
|
||||
_all:
|
||||
enabled: false
|
||||
properties:
|
||||
number:
|
||||
type: integer
|
||||
|
|
Loading…
Reference in New Issue