Merge branch 'master' into mockfilesystem

Robert Muir 2015-04-17 19:15:25 -04:00
commit 43b6cd2118
6 changed files with 176 additions and 3 deletions

View File

@@ -685,6 +685,7 @@
        <exclude name="**/org/elasticsearch/cluster/routing/shard_routes.txt"/>
        <exclude name="target/**/*"/>
        <exclude name=".metadata/**/*"/>
+       <exclude name=".idea/**/*"/>
        <or>
            <containsregexp expression="\bno(n|)commit\b" casesensitive="no"/>
            <containsregexp expression="\t" casesensitive="no"/>
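For context, the \bno(n|)commit\b expression above is the source check that fails the build when a "nocommit" or "noncommit" marker is left anywhere in the tree, in any letter case. A minimal standalone Java sketch of the same pattern, assuming only the JDK regex engine (the class name and sample strings are illustrative and not part of the build):

import java.util.regex.Pattern;

public class NoCommitPatternSketch {
    public static void main(String[] args) {
        // Same expression as the <containsregexp> check, compiled case-insensitively
        // to mirror casesensitive="no".
        Pattern noCommit = Pattern.compile("\\bno(n|)commit\\b", Pattern.CASE_INSENSITIVE);

        System.out.println(noCommit.matcher("// nocommit: remove debug flag").find());  // true
        System.out.println(noCommit.matcher("// NONCOMMIT placeholder").find());        // true
        System.out.println(noCommit.matcher("a noncommittal answer").find());           // false: no word boundary after "commit"
    }
}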

View File

@@ -382,7 +382,7 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
        if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) {
            builder.field("position_offset_gap", positionOffsetGap);
        }
-        if (searchQuotedAnalyzer != null && searchAnalyzer != searchQuotedAnalyzer) {
+        if (searchQuotedAnalyzer != null && !searchQuotedAnalyzer.name().equals(searchAnalyzer.name())) {
            builder.field("search_quote_analyzer", searchQuotedAnalyzer.name());
        } else if (includeDefaults) {
            if (searchQuotedAnalyzer == null) {
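The change in this hunk replaces a reference comparison of the two analyzers with a comparison of their registered names, so a quote analyzer that is a different instance but carries the same name is no longer serialized as if it were a distinct setting. A minimal sketch of the difference, using a stand-in class rather than the real NamedAnalyzer (all names below are illustrative only):

// Hypothetical stand-in for illustration; not the real Elasticsearch NamedAnalyzer.
final class NamedThing {
    private final String name;
    NamedThing(String name) { this.name = name; }
    String name() { return name; }
}

public class AnalyzerComparisonSketch {
    public static void main(String[] args) {
        NamedThing searchAnalyzer = new NamedThing("standard");
        NamedThing searchQuotedAnalyzer = new NamedThing("standard"); // same name, different instance

        // Old check: reference inequality reports a "difference" even though the names agree.
        System.out.println(searchAnalyzer != searchQuotedAnalyzer);                        // true
        // New check: name comparison treats identically named analyzers as equal.
        System.out.println(!searchQuotedAnalyzer.name().equals(searchAnalyzer.name()));    // false
    }
}

The serialization test added to SimpleStringMappingTests further down exercises exactly this distinction: search_quote_analyzer only appears in the serialized mapping when it genuinely differs from the search analyzer.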

View File

@@ -0,0 +1,57 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bwcompat;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ElasticsearchIntegrationTest;

import java.nio.file.Paths;

import static org.hamcrest.Matchers.greaterThanOrEqualTo;

/**
 * These tests are against static indexes, built from versions of ES that cannot be upgraded without
 * a full cluster restart (ie no wire format compatibility).
 */
@LuceneTestCase.SuppressCodecs({"Lucene3x", "MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Appending", "Lucene42", "Lucene45", "Lucene46", "Lucene49"})
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0, minNumDataNodes = 0, maxNumDataNodes = 0)
public class StaticIndexBackwardCompatibilityTest extends ElasticsearchIntegrationTest {

    public void loadIndex(String index, Object... settings) throws Exception {
        logger.info("Checking static index " + index);
        Settings nodeSettings = prepareBackwardsDataDir(Paths.get(getClass().getResource(index + ".zip").toURI()), settings);
        internalCluster().startNode(nodeSettings);
        ensureGreen(index);
        assertIndexSanity(index);
    }

    private void assertIndexSanity(String index) {
        GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().get();
        assertEquals(1, getIndexResponse.indices().length);
        assertEquals(index, getIndexResponse.indices()[0]);
        ensureYellow(index);
        SearchResponse test = client().prepareSearch(index).get();
        assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1l));
    }
}
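As a usage sketch, a subclass only needs to bundle an index zip next to the test class and call loadIndex before querying; the concrete case in this commit is UpgradeReallyOldIndexTest further down, and the hypothetical variant here (index name and assertion are illustrative only) shows the minimal shape:

package org.elasticsearch.bwcompat;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.node.Node;

import static org.hamcrest.Matchers.greaterThanOrEqualTo;

// Hypothetical subclass for illustration; "index-0.20.6" is not an index shipped by this commit.
public class ExampleStaticIndexTest extends StaticIndexBackwardCompatibilityTest {

    public void testSearchOldIndex() throws Exception {
        // Expects index-0.20.6.zip on the classpath next to this test class.
        loadIndex("index-0.20.6", Node.HTTP_ENABLED, true);

        SearchResponse response = client().prepareSearch("index-0.20.6").get();
        assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
    }
}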

View File

@@ -20,6 +20,8 @@
package org.elasticsearch.index.mapper.string;

import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@@ -27,13 +29,13 @@ import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.queries.TermsFilter;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
@@ -52,6 +54,7 @@ import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
+import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@@ -216,6 +219,79 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
        assertThat(fieldType.omitNorms(), equalTo(false));
        assertParseIdemPotent(fieldType, defaultMapper);
    }

+    @Test
+    public void testSearchQuoteAnalyzerSerialization() throws Exception {
+        // Cases where search_quote_analyzer should not be added to the mapping.
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("field1")
+                        .field("type", "string")
+                        .field("position_offset_gap", 1000)
+                    .endObject()
+                    .startObject("field2")
+                        .field("type", "string")
+                        .field("position_offset_gap", 1000)
+                        .field("analyzer", "standard")
+                    .endObject()
+                    .startObject("field3")
+                        .field("type", "string")
+                        .field("position_offset_gap", 1000)
+                        .field("analyzer", "standard")
+                        .field("search_analyzer", "simple")
+                    .endObject()
+                    .startObject("field4")
+                        .field("type", "string")
+                        .field("position_offset_gap", 1000)
+                        .field("analyzer", "standard")
+                        .field("search_analyzer", "simple")
+                        .field("search_quote_analyzer", "simple")
+                    .endObject()
+                .endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper mapper = parser.parse(mapping);
+        for (String fieldName : Lists.newArrayList("field1", "field2", "field3", "field4")) {
+            Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
+            assertFalse(serializedMap.containsKey("search_quote_analyzer"));
+        }
+
+        // Cases where search_quote_analyzer should be present.
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("field1")
+                        .field("type", "string")
+                        .field("position_offset_gap", 1000)
+                        .field("search_quote_analyzer", "simple")
+                    .endObject()
+                    .startObject("field2")
+                        .field("type", "string")
+                        .field("position_offset_gap", 1000)
+                        .field("analyzer", "standard")
+                        .field("search_analyzer", "standard")
+                        .field("search_quote_analyzer", "simple")
+                    .endObject()
+                .endObject()
+                .endObject().endObject().string();
+
+        mapper = parser.parse(mapping);
+        for (String fieldName : Lists.newArrayList("field1", "field2")) {
+            Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
+            assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
+        }
+    }
+
+    private Map<String, Object> getSerializedMap(String fieldName, DocumentMapper mapper) throws Exception {
+        FieldMapper<?> fieldMapper = mapper.mappers().smartNameFieldMapper(fieldName);
+        XContentBuilder builder = JsonXContent.contentBuilder().startObject();
+        fieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
+        builder.close();
+
+        Map<String, Object> fieldMap = JsonXContent.jsonXContent.createParser(builder.bytes()).mapAndClose();
+        @SuppressWarnings("unchecked")
+        Map<String, Object> result = (Map<String, Object>) fieldMap.get(fieldName);
+        return result;
+    }
+
    @Test
    public void testTermVectors() throws Exception {

View File

@@ -0,0 +1,39 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.rest.action.admin.indices.upgrade;

import org.elasticsearch.bwcompat.StaticIndexBackwardCompatibilityTest;
import org.elasticsearch.node.Node;

public class UpgradeReallyOldIndexTest extends StaticIndexBackwardCompatibilityTest {

    public void testUpgrade_0_90_6() throws Exception {
        String indexName = "index-0.90.6";
        loadIndex(indexName, Node.HTTP_ENABLED, true);

        UpgradeTest.assertNotUpgraded(httpClient(), indexName);
        assertTrue(UpgradeTest.hasAncientSegments(httpClient(), indexName));
        UpgradeTest.runUpgrade(httpClient(), indexName, "wait_for_completion", "true", "only_ancient_segments", "true");

        assertFalse(UpgradeTest.hasAncientSegments(httpClient(), "index-0.90.6"));
        // This index has only ancient segments, so it should now be fully upgraded:
        UpgradeTest.assertUpgraded(httpClient(), indexName);
    }
}