Merge branch 'master' into hdfs2-only

Robert Muir 2015-12-22 00:40:54 -05:00
commit 010d1a89c5
170 changed files with 1083 additions and 1242 deletions

View File

@ -1,5 +1,5 @@
elasticsearch = 3.0.0-SNAPSHOT
- lucene = 5.5.0-snapshot-1719088
+ lucene = 5.5.0-snapshot-1721183
# optional dependencies
spatial4j = 0.5

View File

@ -279,6 +279,8 @@ public class Version {
public static final Version V_2_1_2 = new Version(V_2_1_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_2_0_ID = 2020099;
public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
public static final int V_2_3_0_ID = 2030099;
public static final Version V_2_3_0 = new Version(V_2_3_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
public static final int V_3_0_0_ID = 3000099;
public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final Version CURRENT = V_3_0_0;
@ -295,6 +297,8 @@ public class Version {
switch (id) {
case V_3_0_0_ID:
return V_3_0_0;
case V_2_3_0_ID:
return V_2_3_0;
case V_2_2_0_ID:
return V_2_2_0;
case V_2_1_2_ID:
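The id constants above pack the version into decimal fields; a hedged sketch of the decoding, with the scheme inferred from the constants themselves (e.g. 2030099 for 2.3.0):

// Inferred packing: major * 1000000 + minor * 10000 + revision * 100 + build,
// where build 99 denotes a release. Decoding V_2_3_0_ID:
int id = 2030099;
int major = id / 1000000;        // 2
int minor = (id / 10000) % 100;  // 3
int revision = (id / 100) % 100; // 0
int build = id % 100;            // 99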

View File

@ -62,4 +62,12 @@ public interface DocumentRequest<T> extends IndicesRequest {
* @return the Routing
*/
String routing();
/**
* Get the parent for this request
* @return the Parent
*/
String parent();
}
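With parent() on the DocumentRequest interface, transport actions can resolve routing uniformly; a minimal sketch of the call pattern the rest of this diff adopts (the names are taken from the hunks below):

// The parent id is passed alongside any explicit routing so that
// MetaData#resolveIndexRouting can fall back to it when routing is null.
String routing = clusterState.metaData()
        .resolveIndexRouting(request.parent(), request.routing(), request.index());
request.routing(routing);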

View File

@ -239,7 +239,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
}
} else {
concreteIndices.resolveIfAbsent(req);
- req.routing(clusterState.metaData().resolveIndexRouting(req.routing(), req.index()));
+ req.routing(clusterState.metaData().resolveIndexRouting(req.parent(), req.routing(), req.index()));
}
}
}

View File

@ -50,6 +50,8 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
private String id;
@Nullable
private String routing;
@Nullable
private String parent;
private boolean refresh;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
@ -94,6 +96,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
this.type = request.type();
this.id = request.id();
this.routing = request.routing();
this.parent = request.parent();
this.refresh = request.refresh();
this.version = request.version();
this.versionType = request.versionType();
@ -155,13 +158,18 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
}
/**
- * Sets the parent id of this document. Will simply set the routing to this value, as it is only
- * used for routing with delete requests.
+ * @return The parent for this request.
*/
+ @Override
+ public String parent() {
+ return parent;
+ }
+ /**
+ * Sets the parent id of this document.
+ */
public DeleteRequest parent(String parent) {
- if (routing == null) {
- routing = parent;
- }
+ this.parent = parent;
return this;
}
@ -230,6 +238,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
type = in.readString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
refresh = in.readBoolean();
version = in.readLong();
versionType = VersionType.fromValue(in.readByte());
@ -241,6 +250,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
out.writeString(type);
out.writeString(id);
out.writeOptionalString(routing());
out.writeOptionalString(parent());
out.writeBoolean(refresh);
out.writeLong(version);
out.writeByte(versionType.getValue());

View File

@ -95,7 +95,7 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
@Override
protected void resolveRequest(final MetaData metaData, String concreteIndex, DeleteRequest request) {
- request.routing(metaData.resolveIndexRouting(request.routing(), request.index()));
+ request.routing(metaData.resolveIndexRouting(request.parent(), request.routing(), request.index()));
if (metaData.hasIndex(concreteIndex)) {
// check if routing is required, if so, do a broadcast delete
MappingMetaData mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type());

View File

@ -49,6 +49,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
private String type;
private String id;
private String routing;
private String parent;
private String preference;
private String[] fields;
@ -77,6 +78,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
this.type = getRequest.type;
this.id = getRequest.id;
this.routing = getRequest.routing;
this.parent = getRequest.parent;
this.preference = getRequest.preference;
this.fields = getRequest.fields;
this.fetchSourceContext = getRequest.fetchSourceContext;
@ -153,13 +155,17 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
}
/**
- * Sets the parent id of this document. Will simply set the routing to this value, as it is only
- * used for routing with delete requests.
+ * @return The parent for this request.
*/
+ public String parent() {
+ return parent;
+ }
+ /**
+ * Sets the parent id of this document.
+ */
public GetRequest parent(String parent) {
- if (routing == null) {
- routing = parent;
- }
+ this.parent = parent;
return this;
}
@ -291,6 +297,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
type = in.readString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
preference = in.readOptionalString();
refresh = in.readBoolean();
int size = in.readInt();
@ -320,6 +327,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
out.writeString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
out.writeOptionalString(preference);
out.writeBoolean(refresh);

View File

@ -57,6 +57,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
private String type;
private String id;
private String routing;
private String parent;
private String[] fields;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
@ -124,12 +125,17 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
}
public Item parent(String parent) {
- if (routing == null) {
- this.routing = parent;
- }
+ this.parent = parent;
return this;
}
+ /**
+ * @return The parent for this request.
+ */
+ public String parent() {
+ return parent;
+ }
public Item fields(String... fields) {
this.fields = fields;
return this;
@ -181,6 +187,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
type = in.readOptionalString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
int size = in.readVInt();
if (size > 0) {
fields = new String[size];
@ -200,6 +207,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
out.writeOptionalString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
if (fields == null) {
out.writeVInt(0);
} else {
@ -229,6 +237,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
if (!id.equals(item.id)) return false;
if (!index.equals(item.index)) return false;
if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
if (parent != null ? !parent.equals(item.parent) : item.parent != null) return false;
if (type != null ? !type.equals(item.type) : item.type != null) return false;
if (versionType != item.versionType) return false;
@ -241,6 +250,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
result = 31 * result + (type != null ? type.hashCode() : 0);
result = 31 * result + id.hashCode();
result = 31 * result + (routing != null ? routing.hashCode() : 0);
result = 31 * result + (parent != null ? parent.hashCode() : 0);
result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0);
result = 31 * result + Long.hashCode(version);
result = 31 * result + versionType.hashCode();

View File

@ -82,7 +82,7 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, G
request.request().preference(Preference.PRIMARY.type());
}
// update the routing (request#index here is possibly an alias)
- request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index()));
+ request.request().routing(state.metaData().resolveIndexRouting(request.request().parent(), request.request().routing(), request.request().index()));
// Fail fast on the node that received the request.
if (request.request().routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.request().type())) {
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());

View File

@ -68,7 +68,7 @@ public class TransportMultiGetAction extends HandledTransportAction<MultiGetRequ
responses.set(i, new MultiGetItemResponse(null, new MultiGetResponse.Failure(item.index(), item.type(), item.id(), new IndexNotFoundException(item.index()))));
continue;
}
- item.routing(clusterState.metaData().resolveIndexRouting(item.routing(), item.index()));
+ item.routing(clusterState.metaData().resolveIndexRouting(item.parent(), item.routing(), item.index()));
String concreteSingleIndex = indexNameExpressionResolver.concreteSingleIndex(clusterState, item);
if (item.routing() == null && clusterState.getMetaData().routingRequired(concreteSingleIndex, item.type())) {
responses.set(i, new MultiGetItemResponse(null, new MultiGetResponse.Failure(concreteSingleIndex, item.type(), item.id(),

View File

@ -312,14 +312,10 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
}
/**
- * Sets the parent id of this document. If routing is not set, automatically set it as the
- * routing as well.
+ * Sets the parent id of this document.
*/
public IndexRequest parent(String parent) {
this.parent = parent;
- if (routing == null) {
- routing = parent;
- }
return this;
}
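Note the behavioral shift: parent(String) no longer copies the parent into routing eagerly; the fallback now happens once, at resolve time. A hedged before/after sketch (index and id values are illustrative):

IndexRequest req = new IndexRequest("index", "child", "1").parent("p1");
// before this change: req.routing() == "p1" as a side effect of parent(...)
// after this change:  req.routing() == null until process(...) calls
//                     resolveIndexRouting(parent, routing, index)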
@ -601,7 +597,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
public void process(MetaData metaData, @Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) {
// resolve the routing if needed
- routing(metaData.resolveIndexRouting(routing, index));
+ routing(metaData.resolveIndexRouting(parent, routing, index));
// resolve timestamp if provided externally
if (timestamp != null) {

View File

@ -177,7 +177,7 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
@Override
public void handleException(TransportException exp) {
- perform(exp);
+ listener.onFailure(exp);
}
});
} else {

View File

@ -65,6 +65,8 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
private String routing;
private String parent;
private VersionType versionType = VersionType.INTERNAL;
private long version = Versions.MATCH_ANY;
@ -162,6 +164,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
this.flagsEnum = other.getFlags().clone();
this.preference = other.preference();
this.routing = other.routing();
this.parent = other.parent();
if (other.selectedFields != null) {
this.selectedFields = new HashSet<>(other.selectedFields);
}
@ -181,6 +184,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
this.type = item.type();
this.selectedFields(item.fields());
this.routing(item.routing());
this.parent(item.parent());
}
public EnumSet<Flag> getFlags() {
@ -259,14 +263,16 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
return this;
}
+ @Override
+ public String parent() {
+ return parent;
+ }
/**
- * Sets the parent id of this document. Will simply set the routing to this
- * value, as it is only used for routing with delete requests.
+ * Sets the parent id of this document.
*/
public TermVectorsRequest parent(String parent) {
- if (routing == null) {
- routing = parent;
- }
+ this.parent = parent;
return this;
}
@ -506,6 +512,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
doc = in.readBytesReference();
}
routing = in.readOptionalString();
parent = in.readOptionalString();
preference = in.readOptionalString();
long flags = in.readVLong();
@ -545,6 +552,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
out.writeBytesReference(doc);
}
out.writeOptionalString(routing);
out.writeOptionalString(parent);
out.writeOptionalString(preference);
long longFlags = 0;
for (Flag flag : flagsEnum) {
@ -629,6 +637,8 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
termVectorsRequest.routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
termVectorsRequest.parent = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
termVectorsRequest.version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {

View File

@ -66,7 +66,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
for (int i = 0; i < request.requests.size(); i++) {
TermVectorsRequest termVectorsRequest = request.requests.get(i);
termVectorsRequest.startTime = System.currentTimeMillis();
- termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.routing(), termVectorsRequest.index()));
+ termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.parent(), termVectorsRequest.routing(), termVectorsRequest.index()));
if (!clusterState.metaData().hasConcreteIndex(termVectorsRequest.index())) {
responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(termVectorsRequest.index(),
termVectorsRequest.type(), termVectorsRequest.id(), new IndexNotFoundException(termVectorsRequest.index()))));

View File

@ -71,8 +71,8 @@ public class TransportTermVectorsAction extends TransportSingleShardAction<TermV
@Override
protected void resolveRequest(ClusterState state, InternalRequest request) {
- // update the routing (request#index here is possibly an alias)
- request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index()));
+ // update the routing (request#index here is possibly an alias or a parent)
+ request.request().routing(state.metaData().resolveIndexRouting(request.request().parent(), request.request().routing(), request.request().index()));
// Fail fast on the node that received the request.
if (request.request().routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.request().type())) {
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());

View File

@ -101,7 +101,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
@Override
protected boolean resolveRequest(ClusterState state, UpdateRequest request, ActionListener<UpdateResponse> listener) {
- request.routing((state.metaData().resolveIndexRouting(request.routing(), request.index())));
+ request.routing((state.metaData().resolveIndexRouting(request.parent(), request.routing(), request.index())));
// Fail fast on the node that received the request, rather than failing when translating on the index or delete request.
if (request.routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.type())) {
throw new RoutingMissingException(request.concreteIndex(), request.type(), request.id());

View File

@ -184,13 +184,10 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
}
/**
- * The parent id is used for the upsert request and also implicitely sets the routing if not already set.
+ * The parent id is used for the upsert request.
*/
public UpdateRequest parent(String parent) {
this.parent = parent;
- if (routing == null) {
- routing = parent;
- }
return this;
}

View File

@ -140,7 +140,6 @@ public class ClusterModule extends AbstractModule {
registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY);
registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY);
registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Validator.EMPTY);
- registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, Validator.EMPTY);
registerIndexDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER);
registerIndexDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, Validator.EMPTY);
registerIndexDynamicSetting(IndexMetaData.SETTING_READ_ONLY, Validator.EMPTY);
@ -182,9 +181,7 @@ public class ClusterModule extends AbstractModule {
registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, Validator.DOUBLE_GTE_2);
registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE);
registerIndexDynamicSetting(MergePolicyConfig.INDEX_COMPOUND_FORMAT, Validator.EMPTY);
- registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, Validator.INTEGER);
registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE);
- registerIndexDynamicSetting(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, Validator.EMPTY);
registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY);
registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY);
registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN);

View File

@ -440,13 +440,19 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
*/
// TODO: This can be moved to IndexNameExpressionResolver too, but this means that we will support wildcards and other expressions
// in the index,bulk,update and delete apis.
- public String resolveIndexRouting(@Nullable String routing, String aliasOrIndex) {
+ public String resolveIndexRouting(@Nullable String parent, @Nullable String routing, String aliasOrIndex) {
if (aliasOrIndex == null) {
if (routing == null) {
return parent;
}
return routing;
}
AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex);
if (result == null || result.isAlias() == false) {
if (routing == null) {
return parent;
}
return routing;
}
AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result;
@ -460,17 +466,19 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
}
AliasMetaData aliasMd = alias.getFirstAliasMetaData();
if (aliasMd.indexRouting() != null) {
if (aliasMd.indexRouting().indexOf(',') != -1) {
throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + aliasMd.getIndexRouting() + "] that resolved to several routing values, rejecting operation");
}
if (routing != null) {
if (!routing.equals(aliasMd.indexRouting())) {
throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation");
}
}
- routing = aliasMd.indexRouting();
+ // Alias routing overrides the parent routing (if any).
+ return aliasMd.indexRouting();
}
- if (routing != null) {
- if (routing.indexOf(',') != -1) {
- throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + routing + "] that resolved to several routing values, rejecting operation");
- }
+ if (routing == null) {
+ return parent;
}
return routing;
}
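A hedged summary of the precedence the reworked method implements: alias index routing (which must be a single value) wins outright, then an explicitly supplied routing, then the parent id. Illustrative calls with made-up alias and id values:

metaData.resolveIndexRouting("p1", null, "plain-index");  // -> "p1" (parent fallback)
metaData.resolveIndexRouting("p1", "r1", "plain-index");  // -> "r1" (explicit routing wins)
metaData.resolveIndexRouting("p1", null, "routed-alias"); // -> the alias routing, overriding the parent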

View File

@ -284,7 +284,8 @@ public class Lucene {
continue;
}
final Bits liveDocs = context.reader().getLiveDocs();
- for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
+ final DocIdSetIterator iterator = scorer.iterator();
+ for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
if (liveDocs == null || liveDocs.get(doc)) {
return true;
}
@ -667,19 +668,11 @@ public class Lucene {
throw new IllegalStateException(message);
}
- @Override
- public int advance(int arg0) throws IOException {
- throw new IllegalStateException(message);
- }
- @Override
- public long cost() {
- throw new IllegalStateException(message);
- }
@Override
public int docID() {
throw new IllegalStateException(message);
}
@Override
- public int nextDoc() throws IOException {
+ public DocIdSetIterator iterator() {
throw new IllegalStateException(message);
}
};
@ -757,10 +750,10 @@ public class Lucene {
if (scorer == null) {
return new Bits.MatchNoBits(maxDoc);
}
- final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
+ final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
final DocIdSetIterator iterator;
if (twoPhase == null) {
- iterator = scorer;
+ iterator = scorer.iterator();
} else {
iterator = twoPhase.approximation();
}
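The recurring migration pattern in this commit's Lucene-facing hunks: as of the Lucene 5.5 snapshot above, a Scorer no longer extends DocIdSetIterator, so callers null-check the scorer and then ask it for an iterator. A minimal sketch (the method name is illustrative; assumes the usual org.apache.lucene.search imports):

static void visitMatches(Weight weight, LeafReaderContext context) throws IOException {
    Scorer scorer = weight.scorer(context);
    if (scorer == null) {
        return; // query matches nothing in this segment
    }
    DocIdSetIterator it = scorer.iterator();
    for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
        // process doc
    }
}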

View File

@ -29,6 +29,7 @@ import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchNoDocsQuery;
@ -120,7 +121,7 @@ public final class AllTermQuery extends Query {
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
AllTermScorer scorer = scorer(context);
if (scorer != null) {
- int newDoc = scorer.advance(doc);
+ int newDoc = scorer.iterator().advance(doc);
if (newDoc == doc) {
float score = scorer.score();
float freq = scorer.freq();
@ -213,18 +214,8 @@ public final class AllTermQuery extends Query {
}
@Override
- public int nextDoc() throws IOException {
- return postings.nextDoc();
- }
- @Override
- public int advance(int target) throws IOException {
- return postings.advance(target);
- }
- @Override
- public long cost() {
- return postings.cost();
+ public DocIdSetIterator iterator() {
+ return postings;
}
}

View File

@ -28,6 +28,7 @@ import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
@ -99,11 +100,12 @@ public class FilterableTermsEnum extends TermsEnum {
}
BitSet bits = null;
if (weight != null) {
- DocIdSetIterator docs = weight.scorer(context);
- if (docs == null) {
+ Scorer scorer = weight.scorer(context);
+ if (scorer == null) {
// fully filtered, none matching, no need to iterate on this
continue;
}
+ DocIdSetIterator docs = scorer.iterator();
// we want to force apply deleted docs
final Bits liveDocs = context.reader().getLiveDocs();

View File

@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import java.io.IOException;
/**
*
*/
public class EmptyScorer extends Scorer {
private int docId = -1;
public EmptyScorer(Weight weight) {
super(weight);
}
@Override
public float score() throws IOException {
throw new UnsupportedOperationException("Should never be called");
}
@Override
public int freq() throws IOException {
throw new UnsupportedOperationException("Should never be called");
}
@Override
public int docID() {
return docId;
}
@Override
public int nextDoc() throws IOException {
assert docId != NO_MORE_DOCS;
return docId = NO_MORE_DOCS;
}
@Override
public int advance(int target) throws IOException {
return slowAdvance(target);
}
@Override
public long cost() {
return 0;
}
}

View File

@ -179,8 +179,6 @@ public class Queries {
result = calc < 0 ? result + calc : calc;
}
- return (optionalClauseCount < result ?
- optionalClauseCount : (result < 0 ? 0 : result));
+ return result < 0 ? 0 : result;
}
}
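A worked example of the simplified tail (values illustrative): for five optional clauses and a "-25%" spec, the extra clamp to optionalClauseCount is no longer needed, presumably because Lucene now copes with a minimum-should-match value larger than the clause count.

int optionalClauseCount = 5;
int result = optionalClauseCount;
int calc = -(optionalClauseCount * 25) / 100; // "-25%" spec -> -1
result = calc < 0 ? result + calc : calc;     // 5 + (-1) = 4
int min = result < 0 ? 0 : result;            // only negative results are clamped now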

View File

@ -19,6 +19,7 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
@ -27,6 +28,7 @@ import java.io.IOException;
abstract class CustomBoostFactorScorer extends Scorer {
final Scorer scorer;
final DocIdSetIterator iterator;
final float maxBoost;
final CombineFunction scoreCombiner;
@ -42,6 +44,7 @@ abstract class CustomBoostFactorScorer extends Scorer {
nextDoc = new MinScoreNextDoc();
}
this.scorer = scorer;
this.iterator = scorer.iterator();
this.maxBoost = maxBoost;
this.scoreCombiner = scoreCombiner;
this.minScore = minScore;
@ -53,13 +56,25 @@ abstract class CustomBoostFactorScorer extends Scorer {
}
@Override
- public int advance(int target) throws IOException {
- return nextDoc.advance(target);
- }
- @Override
- public int nextDoc() throws IOException {
- return nextDoc.nextDoc();
+ public DocIdSetIterator iterator() {
+ return new DocIdSetIterator() {
+ @Override
+ public int nextDoc() throws IOException {
+ return nextDoc.nextDoc();
+ }
+ @Override
+ public int advance(int target) throws IOException {
+ return nextDoc.advance(target);
+ }
+ @Override
+ public long cost() {
+ return iterator.cost();
+ }
+ @Override
+ public int docID() {
+ return iterator.docID();
+ }
+ };
}
public abstract float innerScore() throws IOException;
@ -74,11 +89,6 @@ abstract class CustomBoostFactorScorer extends Scorer {
return scorer.freq();
}
- @Override
- public long cost() {
- return scorer.cost();
- }
public interface NextDoc {
public int advance(int target) throws IOException;
@ -94,8 +104,8 @@ abstract class CustomBoostFactorScorer extends Scorer {
public int nextDoc() throws IOException {
int doc;
do {
- doc = scorer.nextDoc();
- if (doc == NO_MORE_DOCS) {
+ doc = iterator.nextDoc();
+ if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
@ -110,13 +120,13 @@ abstract class CustomBoostFactorScorer extends Scorer {
@Override
public int advance(int target) throws IOException {
- int doc = scorer.advance(target);
- if (doc == NO_MORE_DOCS) {
+ int doc = iterator.advance(target);
+ if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
if (currentScore < minScore) {
- return scorer.nextDoc();
+ return iterator.nextDoc();
}
return doc;
}
@ -126,7 +136,7 @@ abstract class CustomBoostFactorScorer extends Scorer {
@Override
public int nextDoc() throws IOException {
- return scorer.nextDoc();
+ return iterator.nextDoc();
}
@Override
@ -136,7 +146,7 @@ abstract class CustomBoostFactorScorer extends Scorer {
@Override
public int advance(int target) throws IOException {
- return scorer.advance(target);
+ return iterator.advance(target);
}
}
}

View File

@ -231,7 +231,7 @@ public class FiltersFunctionScoreQuery extends Query {
}
FiltersFunctionFactorScorer scorer = (FiltersFunctionFactorScorer)scorer(context);
- int actualDoc = scorer.advance(doc);
+ int actualDoc = scorer.iterator.advance(doc);
assert (actualDoc == doc);
double score = scorer.computeScore(doc, subQueryExpl.getValue());
Explanation factorExplanation = Explanation.match(

View File

@ -20,6 +20,7 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.ExplainableSearchScript;
@ -57,19 +58,9 @@ public class ScriptScoreFunction extends ScoreFunction {
}
@Override
- public int nextDoc() throws IOException {
+ public DocIdSetIterator iterator() {
throw new UnsupportedOperationException();
}
- @Override
- public int advance(int target) throws IOException {
- throw new UnsupportedOperationException();
- }
- @Override
- public long cost() {
- return 1;
- }
}
private final Script sScript;

View File

@ -26,6 +26,7 @@ import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.Accountable;
@ -127,12 +128,12 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
final Weight weight = searcher.createNormalizedWeight(query, false);
- final DocIdSetIterator it = weight.scorer(context);
+ Scorer s = weight.scorer(context);
final BitSet bitSet;
- if (it == null) {
+ if (s == null) {
bitSet = null;
} else {
- bitSet = BitSet.of(it, context.reader().maxDoc());
+ bitSet = BitSet.of(s.iterator(), context.reader().maxDoc());
}
Value value = new Value(bitSet, shardId);

View File

@ -1129,20 +1129,18 @@ public class InternalEngine extends Engine {
@Override
protected void handleMergeException(final Directory dir, final Throwable exc) {
logger.error("failed to merge", exc);
- if (config().getMergeSchedulerConfig().isNotifyOnMergeFailure()) {
- engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
- @Override
- public void onFailure(Throwable t) {
- logger.debug("merge failure action rejected", t);
- }
+ engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
+ @Override
+ public void onFailure(Throwable t) {
+ logger.debug("merge failure action rejected", t);
+ }
- @Override
- protected void doRun() throws Exception {
- MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir);
- failEngine("merge failed", e);
- }
- });
- }
+ @Override
+ protected void doRun() throws Exception {
+ MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir);
+ failEngine("merge failed", e);
+ }
+ });
}
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparatorSource;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
@ -139,7 +140,8 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
* Get a {@link DocIdSet} that matches the inner documents.
*/
public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException {
- return innerFilter.scorer(ctx);
+ Scorer s = innerFilter.scorer(ctx);
+ return s == null ? null : s.iterator();
}
}

View File

@ -20,8 +20,8 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.LeafReaderContext;
- import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
+ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
@ -297,12 +297,12 @@ public class DocumentMapper implements ToXContent {
// We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
// therefore is guaranteed to be a live doc.
final Weight nestedWeight = filter.createWeight(sc.searcher(), false);
- DocIdSetIterator iterator = nestedWeight.scorer(context);
- if (iterator == null) {
+ Scorer scorer = nestedWeight.scorer(context);
+ if (scorer == null) {
continue;
}
- if (iterator.advance(nestedDocId) == nestedDocId) {
+ if (scorer.iterator().advance(nestedDocId) == nestedDocId) {
if (nestedObjectMapper == null) {
nestedObjectMapper = objectMapper;
} else {

View File

@ -76,32 +76,11 @@ public class DocumentMapperParser {
return new Mapper.TypeParser.ParserContext(type, analysisService, similarityService::getSimilarity, mapperService, typeParsers::get, indexVersionCreated, parseFieldMatcher);
}
- public DocumentMapper parse(String source) throws MapperParsingException {
- return parse(null, source);
- }
- public DocumentMapper parse(@Nullable String type, String source) throws MapperParsingException {
+ public DocumentMapper parse(@Nullable String type, CompressedXContent source) throws MapperParsingException {
return parse(type, source, null);
}
- public DocumentMapper parse(@Nullable String type, String source, String defaultSource) throws MapperParsingException {
- Map<String, Object> mapping = null;
- if (source != null) {
- Tuple<String, Map<String, Object>> t = extractMapping(type, source);
- type = t.v1();
- mapping = t.v2();
- }
- if (mapping == null) {
- mapping = new HashMap<>();
- }
- return parse(type, mapping, defaultSource);
- }
- public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException {
- return parseCompressed(type, source, null);
- }
- public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException {
+ public DocumentMapper parse(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException {
Map<String, Object> mapping = null;
if (source != null) {
Map<String, Object> root = XContentHelper.convertToMap(source.compressedReference(), true).v2();
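With the String overloads and the parseCompressed names gone, all mapping sources flow through CompressedXContent; a hedged usage sketch (the mapping JSON is illustrative, and the String constructor is assumed from the 2.x API):

CompressedXContent mappingSource =
        new CompressedXContent("{\"my_type\":{\"properties\":{\"field\":{\"type\":\"string\"}}}}");
DocumentMapper mapper = documentMapperParser.parse("my_type", mappingSource);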

View File

@ -213,7 +213,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
if (DEFAULT_MAPPING.equals(type)) {
// verify we can parse it
// NOTE: never apply the default here
- DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource);
+ DocumentMapper mapper = documentParser.parse(type, mappingSource);
// still add it as a document mapper so we have it registered and, for example, persisted back into
// the cluster meta data if needed, or checked for existence
try (ReleasableLock lock = mappingWriteLock.acquire()) {
@ -392,7 +392,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
} else {
defaultMappingSource = this.defaultMappingSource;
}
- return documentParser.parseCompressed(mappingType, mappingSource, applyDefault ? defaultMappingSource : null);
+ return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null);
}
public boolean hasMapping(String mappingType) {

View File

@ -273,8 +273,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
return new MatchAllDocsQuery();
}
final String minimumShouldMatch;
- if (context.isFilter() && this.minimumShouldMatch == null) {
- //will be applied for real only if there are should clauses
+ if (context.isFilter() && this.minimumShouldMatch == null && shouldClauses.size() > 0) {
minimumShouldMatch = "1";
} else {
minimumShouldMatch = this.minimumShouldMatch;
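The added shouldClauses.size() > 0 guard matters because a BooleanQuery with minimumNumberShouldMatch > 0 but zero SHOULD clauses matches nothing; a hedged illustration of the failure mode the guard avoids:

BooleanQuery.Builder b = new BooleanQuery.Builder();
b.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.FILTER);
b.setMinimumNumberShouldMatch(1); // zero SHOULD clauses + minShouldMatch=1 -> matches no docs
Query q = b.build();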

View File

@ -144,14 +144,15 @@ public class GeoDistanceRangeQuery extends Query {
public Scorer scorer(LeafReaderContext context) throws IOException {
final DocIdSetIterator approximation;
if (boundingBoxWeight != null) {
- approximation = boundingBoxWeight.scorer(context);
+ Scorer s = boundingBoxWeight.scorer(context);
+ if (s == null) {
+ // if the approximation does not match anything, we're done
+ return null;
+ }
+ approximation = s.iterator();
} else {
approximation = DocIdSetIterator.all(context.reader().maxDoc());
}
- if (approximation == null) {
- // if the approximation does not match anything, we're done
- return null;
- }
final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues();
final TwoPhaseIterator twoPhaseIterator = new TwoPhaseIterator(approximation) {
@Override

View File

@ -188,18 +188,14 @@ public class IndexShard extends AbstractIndexShardComponent {
private final ShardEventListener shardEventListener = new ShardEventListener();
private volatile boolean flushOnClose = true;
- private volatile int flushThresholdOperations;
private volatile ByteSizeValue flushThresholdSize;
- private volatile boolean disableFlush;
/**
* Index setting to control if a flush is executed before engine is closed
* This setting is realtime updateable.
*/
public static final String INDEX_FLUSH_ON_CLOSE = "index.flush_on_close";
- public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS = "index.translog.flush_threshold_ops";
public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE = "index.translog.flush_threshold_size";
- public static final String INDEX_TRANSLOG_DISABLE_FLUSH = "index.translog.disable_flush";
/** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). */
public static final String INDEX_SHARD_INACTIVE_TIME_SETTING = "index.shard.inactive_time";
private static final String INDICES_INACTIVE_TIME_SETTING = "indices.memory.shard_inactive_time";
@ -270,9 +266,7 @@ public class IndexShard extends AbstractIndexShardComponent {
}
this.engineConfig = newEngineConfig(translogConfig, cachingPolicy);
- this.flushThresholdOperations = settings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, settings.getAsInt("index.translog.flush_threshold", Integer.MAX_VALUE));
this.flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB));
- this.disableFlush = settings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false);
this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId);
this.provider = provider;
this.searcherWrapper = indexSearcherWrapper;
@ -1022,7 +1016,7 @@ public class IndexShard extends AbstractIndexShardComponent {
* Change the indexing and translog buffer sizes. If {@code IndexWriter} is currently using more than
* the new buffering indexing size then we do a refresh to free up the heap.
*/
- public void updateBufferSize(ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
+ public void updateBufferSize(ByteSizeValue shardIndexingBufferSize) {
final EngineConfig config = engineConfig;
final ByteSizeValue preValue = config.getIndexingBufferSize();
@ -1060,8 +1054,6 @@ public class IndexShard extends AbstractIndexShardComponent {
logger.debug(message);
}
}
- engine.getTranslog().updateBuffer(shardTranslogBufferSize);
}
/**
@ -1078,7 +1070,7 @@ public class IndexShard extends AbstractIndexShardComponent {
if (engineOrNull != null && System.nanoTime() - engineOrNull.getLastWriteNanos() >= inactiveTimeNS) {
boolean wasActive = active.getAndSet(false);
if (wasActive) {
- updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
+ updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
logger.debug("marking shard as inactive (inactive_time=[{}]) indexing wise", inactiveTime);
indexEventListener.onShardInactive(this);
}
@ -1136,15 +1128,13 @@ public class IndexShard extends AbstractIndexShardComponent {
* Otherwise <code>false</code>.
*/
boolean shouldFlush() {
- if (disableFlush == false) {
- Engine engine = getEngineOrNull();
- if (engine != null) {
- try {
- Translog translog = engine.getTranslog();
- return translog.totalOperations() > flushThresholdOperations || translog.sizeInBytes() > flushThresholdSize.bytes();
- } catch (AlreadyClosedException | EngineClosedException ex) {
- // that's fine we are already close - no need to flush
- }
- }
+ Engine engine = getEngineOrNull();
+ if (engine != null) {
+ try {
+ Translog translog = engine.getTranslog();
+ return translog.sizeInBytes() > flushThresholdSize.bytes();
+ } catch (AlreadyClosedException | EngineClosedException ex) {
+ // that's fine we are already close - no need to flush
+ }
+ }
}
return false;
@ -1156,21 +1146,11 @@ public class IndexShard extends AbstractIndexShardComponent {
if (state() == IndexShardState.CLOSED) { // no need to update anything if we are closed
return;
}
- int flushThresholdOperations = settings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, this.flushThresholdOperations);
- if (flushThresholdOperations != this.flushThresholdOperations) {
- logger.info("updating flush_threshold_ops from [{}] to [{}]", this.flushThresholdOperations, flushThresholdOperations);
- this.flushThresholdOperations = flushThresholdOperations;
- }
ByteSizeValue flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, this.flushThresholdSize);
if (!flushThresholdSize.equals(this.flushThresholdSize)) {
logger.info("updating flush_threshold_size from [{}] to [{}]", this.flushThresholdSize, flushThresholdSize);
this.flushThresholdSize = flushThresholdSize;
}
- boolean disableFlush = settings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, this.disableFlush);
- if (disableFlush != this.disableFlush) {
- logger.info("updating disable_flush from [{}] to [{}]", this.disableFlush, disableFlush);
- this.disableFlush = disableFlush;
- }
final EngineConfig config = engineConfig;
final boolean flushOnClose = settings.getAsBoolean(INDEX_FLUSH_ON_CLOSE, this.flushOnClose);
@ -1179,12 +1159,6 @@ public class IndexShard extends AbstractIndexShardComponent {
this.flushOnClose = flushOnClose;
}
- TranslogWriter.Type type = TranslogWriter.Type.fromString(settings.get(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, translogConfig.getType().name()));
- if (type != translogConfig.getType()) {
- logger.info("updating type from [{}] to [{}]", translogConfig.getType(), type);
- translogConfig.setType(type);
- }
final Translog.Durabilty durabilty = getFromSettings(logger, settings, translogConfig.getDurabilty());
if (durabilty != translogConfig.getDurabilty()) {
logger.info("updating durability from [{}] to [{}]", translogConfig.getDurabilty(), durabilty);

View File

@ -55,19 +55,16 @@ public final class MergeSchedulerConfig {
public static final String MAX_THREAD_COUNT = "index.merge.scheduler.max_thread_count";
public static final String MAX_MERGE_COUNT = "index.merge.scheduler.max_merge_count";
public static final String AUTO_THROTTLE = "index.merge.scheduler.auto_throttle";
- public static final String NOTIFY_ON_MERGE_FAILURE = "index.merge.scheduler.notify_on_failure"; // why would we not wanna do this?
private volatile boolean autoThrottle;
private volatile int maxThreadCount;
private volatile int maxMergeCount;
- private final boolean notifyOnMergeFailure;
public MergeSchedulerConfig(IndexSettings indexSettings) {
final Settings settings = indexSettings.getSettings();
maxThreadCount = settings.getAsInt(MAX_THREAD_COUNT, Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(settings) / 2)));
maxMergeCount = settings.getAsInt(MAX_MERGE_COUNT, maxThreadCount + 5);
this.autoThrottle = settings.getAsBoolean(AUTO_THROTTLE, true);
- notifyOnMergeFailure = settings.getAsBoolean(NOTIFY_ON_MERGE_FAILURE, true);
}
/**
@ -114,11 +111,4 @@ public final class MergeSchedulerConfig {
public void setMaxMergeCount(int maxMergeCount) {
this.maxMergeCount = maxMergeCount;
}
- /**
- * Returns <code>true</code> iff we fail the engine on a merge failure. Default is <code>true</code>
- */
- public boolean isNotifyOnMergeFailure() {
- return notifyOnMergeFailure;
- }
}

View File

@ -1,177 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.translog;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
/**
*/
public final class BufferingTranslogWriter extends TranslogWriter {
private byte[] buffer;
private int bufferCount;
private WrapperOutputStream bufferOs = new WrapperOutputStream();
/* the total offset of this file including the bytes written to the file as well as into the buffer */
private volatile long totalOffset;
public BufferingTranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException {
super(shardId, generation, channelReference);
this.buffer = new byte[bufferSize];
this.totalOffset = writtenOffset;
}
@Override
public Translog.Location add(BytesReference data) throws IOException {
try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
final long offset = totalOffset;
if (data.length() >= buffer.length) {
flush();
// we use the channel to write, since on windows, writing to the RAF might not be reflected
// when reading through the channel
try {
data.writeTo(channel);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
writtenOffset += data.length();
totalOffset += data.length();
} else {
if (data.length() > buffer.length - bufferCount) {
flush();
}
data.writeTo(bufferOs);
totalOffset += data.length();
}
operationCounter++;
return new Translog.Location(generation, offset, data.length());
}
}
protected final void flush() throws IOException {
assert writeLock.isHeldByCurrentThread();
if (bufferCount > 0) {
ensureOpen();
// we use the channel to write, since on windows, writing to the RAF might not be reflected
// when reading through the channel
final int bufferSize = bufferCount;
try {
Channels.writeToChannel(buffer, 0, bufferSize, channel);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
writtenOffset += bufferSize;
bufferCount = 0;
}
}
@Override
protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException {
try (ReleasableLock lock = readLock.acquire()) {
if (position >= writtenOffset) {
assert targetBuffer.hasArray() : "buffer must have array";
final int sourcePosition = (int) (position - writtenOffset);
System.arraycopy(buffer, sourcePosition,
targetBuffer.array(), targetBuffer.position(), targetBuffer.limit());
targetBuffer.position(targetBuffer.limit());
return;
}
}
// we don't have to have a read lock here because we only write ahead to the file, so all writes has been complete
// for the requested location.
Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
}
@Override
public boolean syncNeeded() {
return totalOffset != lastSyncedOffset;
}
@Override
public synchronized void sync() throws IOException {
if (syncNeeded()) {
ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event
channelReference.incRef();
try {
final long offsetToSync;
final int opsCounter;
try (ReleasableLock lock = writeLock.acquire()) {
flush();
offsetToSync = totalOffset;
opsCounter = operationCounter;
}
// we can do this outside of the write lock but we have to protect from
// concurrent syncs
ensureOpen(); // just for kicks - the checkpoint happens or not either way
try {
checkpoint(offsetToSync, opsCounter, channelReference);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
lastSyncedOffset = offsetToSync;
} finally {
channelReference.decRef();
}
}
}
public void updateBufferSize(int bufferSize) {
try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
if (this.buffer.length != bufferSize) {
flush();
this.buffer = new byte[bufferSize];
}
} catch (IOException e) {
throw new TranslogException(shardId, "failed to flush", e);
}
}
class WrapperOutputStream extends OutputStream {
@Override
public void write(int b) throws IOException {
buffer[bufferCount++] = (byte) b;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
// we do safety checked when we decide to use this stream...
System.arraycopy(b, off, buffer, bufferCount, len);
bufferCount += len;
}
}
@Override
public long sizeInBytes() {
return totalOffset;
}
}

View File

@ -280,13 +280,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
throw new IllegalArgumentException("can't parse id from file: " + fileName);
}
- public void updateBuffer(ByteSizeValue bufferSize) {
- config.setBufferSize(bufferSize.bytesAsInt());
- try (ReleasableLock lock = writeLock.acquire()) {
- current.updateBufferSize(config.getBufferSize());
- }
- }
/** Returns {@code true} if this {@code Translog} is still open. */
public boolean isOpen() {
return closed.get() == false;
@ -367,7 +360,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
TranslogWriter createWriter(long fileGeneration) throws IOException {
TranslogWriter newFile;
try {
- newFile = TranslogWriter.create(config.getType(), shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), config.getBufferSize(), getChannelFactory());
+ newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), getChannelFactory(), config.getBufferSize());
} catch (IOException e) {
throw new TranslogException(shardId, "failed to create new translog file", e);
}

View File

@ -20,13 +20,13 @@
package org.elasticsearch.index.translog;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog.TranslogGeneration;
- import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.threadpool.ThreadPool;
import java.nio.file.Path;
@ -39,21 +39,19 @@ import java.nio.file.Path;
public final class TranslogConfig {
public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability";
- public static final String INDEX_TRANSLOG_FS_TYPE = "index.translog.fs.type";
- public static final String INDEX_TRANSLOG_BUFFER_SIZE = "index.translog.fs.buffer_size";
public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval";
+ public static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(8, ByteSizeUnit.KB);
private final TimeValue syncInterval;
private final BigArrays bigArrays;
private final ThreadPool threadPool;
private final boolean syncOnEachOperation;
- private volatile int bufferSize;
private volatile TranslogGeneration translogGeneration;
private volatile Translog.Durabilty durabilty = Translog.Durabilty.REQUEST;
- private volatile TranslogWriter.Type type;
private final IndexSettings indexSettings;
private final ShardId shardId;
private final Path translogPath;
+ private final ByteSizeValue bufferSize;
/**
* Creates a new TranslogConfig instance
@ -65,14 +63,17 @@ public final class TranslogConfig {
* @param threadPool a {@link ThreadPool} to schedule async sync durability
*/
public TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool) {
this(shardId, translogPath, indexSettings, durabilty, bigArrays, threadPool, DEFAULT_BUFFER_SIZE);
}
TranslogConfig(ShardId shardId, Path translogPath, IndexSettings indexSettings, Translog.Durabilty durabilty, BigArrays bigArrays, @Nullable ThreadPool threadPool, ByteSizeValue bufferSize) {
this.bufferSize = bufferSize;
this.indexSettings = indexSettings;
this.shardId = shardId;
this.translogPath = translogPath;
this.durabilty = durabilty;
this.threadPool = threadPool;
this.bigArrays = bigArrays;
- this.type = TranslogWriter.Type.fromString(indexSettings.getSettings().get(INDEX_TRANSLOG_FS_TYPE, TranslogWriter.Type.BUFFERED.name()));
- this.bufferSize = (int) indexSettings.getSettings().getAsBytesSize(INDEX_TRANSLOG_BUFFER_SIZE, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER).bytes(); // Not really interesting, updated by IndexingMemoryController...
syncInterval = indexSettings.getSettings().getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5));
if (syncInterval.millis() > 0 && threadPool != null) {
@ -84,6 +85,7 @@ public final class TranslogConfig {
}
}
/**
* Returns a {@link ThreadPool} to schedule async durability operations
*/
@ -105,20 +107,6 @@ public final class TranslogConfig {
this.durabilty = durabilty;
}
- /**
- * Returns the translog type
- */
- public TranslogWriter.Type getType() {
- return type;
- }
- /**
- * Sets the TranslogType for this Translog. The change will affect all subsequent translog files.
- */
- public void setType(TranslogWriter.Type type) {
- this.type = type;
- }
/**
* Returns <code>true</code> iff each low level operation should be fsynced
*/
@ -126,20 +114,6 @@ public final class TranslogConfig {
return syncOnEachOperation;
}
- /**
- * Retruns the current translog buffer size.
- */
- public int getBufferSize() {
- return bufferSize;
- }
- /**
- * Sets the current buffer size - for setting a live setting use {@link Translog#updateBuffer(ByteSizeValue)}
- */
- public void setBufferSize(int bufferSize) {
- this.bufferSize = bufferSize;
- }
/**
* Returns the current async fsync interval
*/
@ -192,4 +166,11 @@ public final class TranslogConfig {
public void setTranslogGeneration(TranslogGeneration translogGeneration) {
this.translogGeneration = translogGeneration;
}
/**
* The translog buffer size. Default is <tt>8kb</tt>
*/
public ByteSizeValue getBufferSize() {
return bufferSize;
}
}
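Buffer size is now fixed at construction (with a package-private constructor taking an explicit value) instead of being a live setting; a hedged sketch of the resulting API, with illustrative argument values:

// The public constructor always uses DEFAULT_BUFFER_SIZE (8kb).
TranslogConfig config = new TranslogConfig(shardId, translogPath, indexSettings,
        Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
ByteSizeValue bufferSize = config.getBufferSize(); // 8kb, constant for the config's lifetime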

View File

@ -28,11 +28,14 @@ import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.Callback;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.index.shard.ShardId;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
@ -49,30 +52,39 @@ public class TranslogWriter extends TranslogReader {
public static final int VERSION_CHECKPOINTS = 2; // since 2.0 we have checkpoints?
public static final int VERSION = VERSION_CHECKPOINTS;
protected final ShardId shardId;
protected final ReleasableLock readLock;
protected final ReleasableLock writeLock;
private final ShardId shardId;
private final ReleasableLock readLock;
private final ReleasableLock writeLock;
/* the offset in bytes that was written when the file was last synced*/
protected volatile long lastSyncedOffset;
private volatile long lastSyncedOffset;
/* the number of translog operations written to this file */
protected volatile int operationCounter;
private volatile int operationCounter;
/* the offset in bytes written to the file */
protected volatile long writtenOffset;
private volatile long writtenOffset;
/* if we hit an exception that we can't recover from we assign it to this var and ship it with every AlreadyClosedException we throw */
private volatile Throwable tragedy;
private final byte[] buffer;
private int bufferCount;
private WrapperOutputStream bufferOs = new WrapperOutputStream();
public TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference) throws IOException {
/* the total offset of this file including the bytes written to the file as well as into the buffer */
private volatile long totalOffset;
public TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, ByteSizeValue bufferSize) throws IOException {
super(generation, channelReference, channelReference.getChannel().position());
this.shardId = shardId;
ReadWriteLock rwl = new ReentrantReadWriteLock();
readLock = new ReleasableLock(rwl.readLock());
writeLock = new ReleasableLock(rwl.writeLock());
this.writtenOffset = channelReference.getChannel().position();
this.lastSyncedOffset = channelReference.getChannel().position();
this.totalOffset = writtenOffset;
this.buffer = new byte[bufferSize.bytesAsInt()];
this.lastSyncedOffset = channelReference.getChannel().position();
}
public static TranslogWriter create(Type type, ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback<ChannelReference> onClose, int bufferSize, ChannelFactory channelFactory) throws IOException {
public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback<ChannelReference> onClose, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException {
final BytesRef ref = new BytesRef(translogUUID);
final int headerLength = CodecUtil.headerLength(TRANSLOG_CODEC) + ref.length + RamUsageEstimator.NUM_BYTES_INT;
final FileChannel channel = channelFactory.open(file);
@ -85,7 +97,7 @@ public class TranslogWriter extends TranslogReader {
out.writeBytes(ref.bytes, ref.offset, ref.length);
channel.force(false);
writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE);
final TranslogWriter writer = type.create(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize);
final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize);
return writer;
} catch (Throwable throwable){
IOUtils.closeWhileHandlingException(channel);
@ -104,34 +116,7 @@ public class TranslogWriter extends TranslogReader {
return tragedy;
}
public enum Type {
SIMPLE() {
@Override
public TranslogWriter create(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException {
return new TranslogWriter(shardId, generation, channelReference);
}
},
BUFFERED() {
@Override
public TranslogWriter create(ShardId shardId, long generation, ChannelReference channelReference, int bufferSize) throws IOException {
return new BufferingTranslogWriter(shardId, generation, channelReference, bufferSize);
}
};
public abstract TranslogWriter create(ShardId shardId, long generation, ChannelReference raf, int bufferSize) throws IOException;
public static Type fromString(String type) {
if (SIMPLE.name().equalsIgnoreCase(type)) {
return SIMPLE;
} else if (BUFFERED.name().equalsIgnoreCase(type)) {
return BUFFERED;
}
throw new IllegalArgumentException("No translog fs type [" + type + "]");
}
}
protected final void closeWithTragicEvent(Throwable throwable) throws IOException {
private void closeWithTragicEvent(Throwable throwable) throws IOException {
try (ReleasableLock lock = writeLock.acquire()) {
if (tragedy == null) {
tragedy = throwable;
@ -146,38 +131,60 @@ public class TranslogWriter extends TranslogReader {
* add the given bytes to the translog and return the location they were written at
*/
public Translog.Location add(BytesReference data) throws IOException {
final long position;
try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
position = writtenOffset;
try {
data.writeTo(channel);
} catch (Throwable e) {
closeWithTragicEvent(e);
throw e;
final long offset = totalOffset;
if (data.length() >= buffer.length) {
flush();
// we use the channel to write, since on windows, writing to the RAF might not be reflected
// when reading through the channel
try {
data.writeTo(channel);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
writtenOffset += data.length();
totalOffset += data.length();
} else {
if (data.length() > buffer.length - bufferCount) {
flush();
}
data.writeTo(bufferOs);
totalOffset += data.length();
}
writtenOffset = writtenOffset + data.length();
operationCounter++;
operationCounter++;
return new Translog.Location(generation, offset, data.length());
}
return new Translog.Location(generation, position, data.length());
}
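The rewritten add() above folds the old BufferingTranslogWriter behavior into TranslogWriter itself: entries that fit are copied into the fixed buffer, oversized entries force a flush and are written through to the channel, and totalOffset tracks the file bytes plus whatever is still buffered. A self-contained sketch of that decision, using plain java.io as a stand-in for the FileChannel (a sketch of the technique, not the actual class):

import java.io.ByteArrayOutputStream;
import java.io.IOException;

class BufferedAppendSketch {
    private final byte[] buffer = new byte[8 * 1024];
    private int bufferCount;
    private long writtenOffset;  // bytes already written to the "file"
    private long totalOffset;    // writtenOffset plus bytes still sitting in the buffer
    private final ByteArrayOutputStream file = new ByteArrayOutputStream(); // FileChannel stand-in

    long add(byte[] data) throws IOException {
        final long offset = totalOffset;
        if (data.length >= buffer.length) {
            flush();                          // too large to buffer: write through
            file.write(data);
            writtenOffset += data.length;
        } else {
            if (data.length > buffer.length - bufferCount) {
                flush();                      // not enough room left: drain the buffer first
            }
            System.arraycopy(data, 0, buffer, bufferCount, data.length);
            bufferCount += data.length;
        }
        totalOffset += data.length;
        return offset;                        // where this entry starts, like Translog.Location
    }

    private void flush() throws IOException {
        if (bufferCount > 0) {
            file.write(buffer, 0, bufferCount);
            writtenOffset += bufferCount;
            bufferCount = 0;
        }
    }
}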
/**
* change the size of the internal buffer if relevant
*/
public void updateBufferSize(int bufferSize) throws TranslogException {
}
/**
* write all buffered ops to disk and fsync file
*/
public synchronized void sync() throws IOException { // synchronized to ensure only one sync happens at a time
// check if we really need to sync here...
public synchronized void sync() throws IOException {
if (syncNeeded()) {
try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
checkpoint(writtenOffset, operationCounter, channelReference);
lastSyncedOffset = writtenOffset;
ensureOpen(); // this call gives a better exception than the incRef if we are closed by a tragic event
channelReference.incRef();
try {
final long offsetToSync;
final int opsCounter;
try (ReleasableLock lock = writeLock.acquire()) {
flush();
offsetToSync = totalOffset;
opsCounter = operationCounter;
}
// we can do this outside of the write lock but we have to protect from
// concurrent syncs
ensureOpen(); // just for kicks - the checkpoint happens or not either way
try {
checkpoint(offsetToSync, opsCounter, channelReference);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
lastSyncedOffset = offsetToSync;
} finally {
channelReference.decRef();
}
}
}
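The new sync() holds the write lock only while draining the buffer and capturing a consistent offset/operation count; the fsync and checkpoint then run outside the write lock, and the method-level synchronized keeps concurrent syncs from interleaving. A hedged standalone sketch of that locking pattern (flushBuffer and fsyncAndCheckpoint are hypothetical stand-ins for the buffer drain and for channel.force plus checkpoint):

import java.io.IOException;
import java.util.concurrent.locks.ReentrantReadWriteLock;

class SyncSketch {
    private final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
    private volatile long totalOffset;
    private volatile long lastSyncedOffset;

    boolean syncNeeded() { return totalOffset != lastSyncedOffset; }

    private void flushBuffer() throws IOException { /* drain the in-memory buffer */ }
    private void fsyncAndCheckpoint(long offset) throws IOException { /* force + checkpoint */ }

    synchronized void sync() throws IOException {    // at most one sync runs at a time
        if (syncNeeded()) {
            final long offsetToSync;
            rwl.writeLock().lock();                  // writers are excluded only while draining
            try {
                flushBuffer();
                offsetToSync = totalOffset;
            } finally {
                rwl.writeLock().unlock();
            }
            fsyncAndCheckpoint(offsetToSync);        // the slow fsync runs outside the write lock
            lastSyncedOffset = offsetToSync;
        }
    }
}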
@ -185,9 +192,7 @@ public class TranslogWriter extends TranslogReader {
/**
* returns true if there are operations that have not been synced yet
*/
public boolean syncNeeded() {
return writtenOffset != lastSyncedOffset; // by default nothing is buffered
}
public boolean syncNeeded() { return totalOffset != lastSyncedOffset; }
@Override
public int totalOperations() {
@ -196,14 +201,29 @@ public class TranslogWriter extends TranslogReader {
@Override
public long sizeInBytes() {
return writtenOffset;
return totalOffset;
}
/**
* Flushes any buffered operations to the channel.
*/
protected void flush() throws IOException {
private void flush() throws IOException {
assert writeLock.isHeldByCurrentThread();
if (bufferCount > 0) {
ensureOpen();
// we use the channel to write, since on windows, writing to the RAF might not be reflected
// when reading through the channel
final int bufferSize = bufferCount;
try {
Channels.writeToChannel(buffer, 0, bufferSize, channel);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
writtenOffset += bufferSize;
bufferCount = 0;
}
}
/**
@ -292,13 +312,23 @@ public class TranslogWriter extends TranslogReader {
}
@Override
protected void readBytes(ByteBuffer buffer, long position) throws IOException {
protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException {
try (ReleasableLock lock = readLock.acquire()) {
Channels.readFromFileChannelWithEofException(channel, position, buffer);
if (position >= writtenOffset) {
assert targetBuffer.hasArray() : "buffer must have array";
final int sourcePosition = (int) (position - writtenOffset);
System.arraycopy(buffer, sourcePosition,
targetBuffer.array(), targetBuffer.position(), targetBuffer.limit());
targetBuffer.position(targetBuffer.limit());
return;
}
}
// we don't need a read lock here because we only ever append to the file, so all writes have been
// completed for the requested location.
Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
}
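readBytes() now dispatches on writtenOffset: a position at or past it is still sitting in the in-memory buffer, so it is copied out under the read lock; anything below it is already on disk and, since the file is append-only, can be read from the channel without locking. A minimal sketch of that dispatch (readFromFile is a hypothetical stand-in for Channels.readFromFileChannelWithEofException, and the read lock around the buffered branch is elided):

import java.nio.ByteBuffer;

class ReadBackSketch {
    private final byte[] buffer = new byte[8 * 1024];
    private volatile long writtenOffset; // everything below this offset is on disk

    void read(ByteBuffer target, long position) {
        if (position >= writtenOffset) {
            // still buffered: copy straight out of the in-memory buffer
            int sourcePosition = (int) (position - writtenOffset);
            target.put(buffer, sourcePosition, target.remaining());
        } else {
            // fully written: bytes below writtenOffset never change, so no lock is needed
            readFromFile(target, position);
        }
    }

    private void readFromFile(ByteBuffer target, long position) { /* hypothetical channel read */ }
}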
protected synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException {
private synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException {
channelReference.getChannel().force(false);
writeCheckpoint(lastSyncPosition, operationCounter, channelReference.getPath().getParent(), channelReference.getGeneration(), StandardOpenOption.WRITE);
}
@ -324,4 +354,19 @@ public class TranslogWriter extends TranslogReader {
throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed", tragedy);
}
}
class WrapperOutputStream extends OutputStream {
@Override
public void write(int b) throws IOException {
buffer[bufferCount++] = (byte) b;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
// we do the safety checks before we decide to use this stream...
System.arraycopy(b, off, buffer, bufferCount, len);
bufferCount += len;
}
}
}

View File

@ -58,15 +58,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
/** Sets a ceiling on the per-shard index buffer size (default: 512 MB). */
public static final String MAX_SHARD_INDEX_BUFFER_SIZE_SETTING = "indices.memory.max_shard_index_buffer_size";
/** How much heap (% or bytes) we will share across all actively indexing shards for the translog buffer (default: 1%). */
public static final String TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.translog_buffer_size";
/** Only applies when <code>indices.memory.translog_buffer_size</code> is a %, to set a floor on the actual size in bytes (default: 256 KB). */
public static final String MIN_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.min_translog_buffer_size";
/** Only applies when <code>indices.memory.translog_buffer_size</code> is a %, to set a ceiling on the actual size in bytes (default: not set). */
public static final String MAX_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.max_translog_buffer_size";
/** Sets a floor on the per-shard translog buffer size (default: 2 KB). */
public static final String MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.min_shard_translog_buffer_size";
@ -88,11 +79,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
private final ByteSizeValue indexingBuffer;
private final ByteSizeValue minShardIndexBufferSize;
private final ByteSizeValue maxShardIndexBufferSize;
private final ByteSizeValue translogBuffer;
private final ByteSizeValue minShardTranslogBufferSize;
private final ByteSizeValue maxShardTranslogBufferSize;
private final TimeValue interval;
private volatile ScheduledFuture scheduler;
@ -135,27 +121,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
// LUCENE MONITOR: Based on this thread, currently (based on Mike), having a large buffer does not make a lot of sense: https://issues.apache.org/jira/browse/LUCENE-2324?focusedCommentId=13005155&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-13005155
this.maxShardIndexBufferSize = this.settings.getAsBytesSize(MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, new ByteSizeValue(512, ByteSizeUnit.MB));
ByteSizeValue translogBuffer;
String translogBufferSetting = this.settings.get(TRANSLOG_BUFFER_SIZE_SETTING, "1%");
if (translogBufferSetting.endsWith("%")) {
double percent = Double.parseDouble(translogBufferSetting.substring(0, translogBufferSetting.length() - 1));
translogBuffer = new ByteSizeValue((long) (((double) jvmMemoryInBytes) * (percent / 100)));
ByteSizeValue minTranslogBuffer = this.settings.getAsBytesSize(MIN_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(256, ByteSizeUnit.KB));
ByteSizeValue maxTranslogBuffer = this.settings.getAsBytesSize(MAX_TRANSLOG_BUFFER_SIZE_SETTING, null);
if (translogBuffer.bytes() < minTranslogBuffer.bytes()) {
translogBuffer = minTranslogBuffer;
}
if (maxTranslogBuffer != null && translogBuffer.bytes() > maxTranslogBuffer.bytes()) {
translogBuffer = maxTranslogBuffer;
}
} else {
translogBuffer = ByteSizeValue.parseBytesSizeValue(translogBufferSetting, TRANSLOG_BUFFER_SIZE_SETTING);
}
this.translogBuffer = translogBuffer;
this.minShardTranslogBufferSize = this.settings.getAsBytesSize(MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(2, ByteSizeUnit.KB));
this.maxShardTranslogBufferSize = this.settings.getAsBytesSize(MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(64, ByteSizeUnit.KB));
// we need to have this relatively small to move a shard from inactive to active fast (enough)
this.interval = this.settings.getAsTime(SHARD_INACTIVE_INTERVAL_TIME_SETTING, TimeValue.timeValueSeconds(30));
@ -192,14 +157,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
return indexingBuffer;
}
/**
* returns the current budget for the total amount of translog buffers of
* active shards on this node
*/
public ByteSizeValue translogBufferSize() {
return translogBuffer;
}
protected List<IndexShard> availableShards() {
List<IndexShard> availableShards = new ArrayList<>();
@ -220,9 +177,9 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
}
/** set new indexing and translog buffers on this shard. this may cause the shard to refresh to free up heap. */
protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize) {
try {
shard.updateBufferSize(shardIndexingBufferSize, shardTranslogBufferSize);
shard.updateBufferSize(shardIndexingBufferSize);
} catch (EngineClosedException | FlushNotAllowedEngineException e) {
// ignore
} catch (Exception e) {
@ -262,18 +219,10 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
shardIndexingBufferSize = maxShardIndexBufferSize;
}
ByteSizeValue shardTranslogBufferSize = new ByteSizeValue(translogBuffer.bytes() / activeShardCount);
if (shardTranslogBufferSize.bytes() < minShardTranslogBufferSize.bytes()) {
shardTranslogBufferSize = minShardTranslogBufferSize;
}
if (shardTranslogBufferSize.bytes() > maxShardTranslogBufferSize.bytes()) {
shardTranslogBufferSize = maxShardTranslogBufferSize;
}
logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);
logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize);
for (IndexShard shard : activeShards) {
updateShardBuffers(shard, shardIndexingBufferSize, shardTranslogBufferSize);
updateShardBuffers(shard, shardIndexingBufferSize);
}
}
}
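With the translog buffer settings removed, the controller's only remaining budget math is splitting the shared indexing buffer evenly across active shards and clamping to the per-shard floor/ceiling. A hedged standalone sketch of that calculation (the byte figures in main are illustrative, not the controller's actual defaults):

class ShardBudgetSketch {
    static long perShardBudget(long totalBytes, int activeShards, long minBytes, long maxBytes) {
        long perShard = totalBytes / activeShards;                // even split across active shards
        return Math.min(Math.max(perShard, minBytes), maxBytes);  // clamp to the floor and ceiling
    }

    public static void main(String[] args) {
        // a 48MB budget over 100 active shards falls below a 4MB floor and is clamped up to it
        System.out.println(perShardBudget(48L << 20, 100, 4L << 20, 512L << 20)); // 4194304
    }
}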

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.bucket;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
@ -277,17 +278,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
}
@Override
public int nextDoc() throws IOException {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}
@Override
public int advance(int target) throws IOException {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}
@Override
public long cost() {
public DocIdSetIterator iterator() {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}

View File

@ -134,10 +134,11 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
protected void doPostCollection() throws IOException {
IndexReader indexReader = context().searchContext().searcher().getIndexReader();
for (LeafReaderContext ctx : indexReader.leaves()) {
DocIdSetIterator childDocsIter = childFilter.scorer(ctx);
if (childDocsIter == null) {
Scorer childDocsScorer = childFilter.scorer(ctx);
if (childDocsScorer == null) {
continue;
}
DocIdSetIterator childDocsIter = childDocsScorer.iterator();
final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx);
final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
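This hunk and the ones that follow apply the same migration to the new Lucene 5.5 snapshot API: iteration moves off Scorer (nextDoc/advance/cost) onto a DocIdSetIterator obtained from Scorer.iterator(), so the null check stays on the Scorer returned by Weight.scorer. The recurring guard, factored out as a sketch:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

class ScorerIterationSketch {
    // Weight.scorer(ctx) may return null when the query matches nothing in this segment
    static DocIdSetIterator childIterator(Weight weight, LeafReaderContext ctx) throws IOException {
        Scorer scorer = weight.scorer(ctx);
        return scorer == null ? null : scorer.iterator();
    }
}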

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
@ -69,7 +70,12 @@ public class NestedAggregator extends SingleBucketAggregator {
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
final Weight weight = searcher.createNormalizedWeight(childFilter, false);
childDocs = weight.scorer(ctx);
Scorer childDocsScorer = weight.scorer(ctx);
if (childDocsScorer == null) {
childDocs = null;
} else {
childDocs = childDocsScorer.iterator();
}
return new LeafBucketCollectorBase(sub, null) {
@Override

View File

@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper;
@ -314,11 +315,12 @@ public class FetchPhase implements SearchPhase {
continue;
}
final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false);
DocIdSetIterator childIter = childWeight.scorer(subReaderContext);
if (childIter == null) {
Scorer childScorer = childWeight.scorer(subReaderContext);
if (childScorer == null) {
current = nestedParentObjectMapper;
continue;
}
DocIdSetIterator childIter = childScorer.iterator();
BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext);

View File

@ -221,10 +221,11 @@ public final class InnerHitsContext {
return null;
}
final DocIdSetIterator childrenIterator = childWeight.scorer(context);
if (childrenIterator == null) {
final Scorer childrenScorer = childWeight.scorer(context);
if (childrenScorer == null) {
return null;
}
DocIdSetIterator childrenIterator = childrenScorer.iterator();
final DocIdSetIterator it = new DocIdSetIterator() {
int doc = -1;

View File

@ -90,9 +90,9 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
if (scorer == null) {
continue;
}
final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
if (twoPhase == null) {
if (scorer.advance(hitContext.docId()) == hitContext.docId()) {
if (scorer.iterator().advance(hitContext.docId()) == hitContext.docId()) {
matchedQueries.add(name);
}
} else {

View File

@ -49,26 +49,6 @@ final class ProfileScorer extends Scorer {
return scorer.docID();
}
@Override
public int advance(int target) throws IOException {
profile.startTime(ProfileBreakdown.TimingType.ADVANCE);
try {
return scorer.advance(target);
} finally {
profile.stopAndRecordTime();
}
}
@Override
public int nextDoc() throws IOException {
profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC);
try {
return scorer.nextDoc();
} finally {
profile.stopAndRecordTime();
}
}
@Override
public float score() throws IOException {
profile.startTime(ProfileBreakdown.TimingType.SCORE);
@ -84,11 +64,6 @@ final class ProfileScorer extends Scorer {
return scorer.freq();
}
@Override
public long cost() {
return scorer.cost();
}
@Override
public Weight getWeight() {
return profileWeight;
@ -100,8 +75,45 @@ final class ProfileScorer extends Scorer {
}
@Override
public TwoPhaseIterator asTwoPhaseIterator() {
final TwoPhaseIterator in = scorer.asTwoPhaseIterator();
public DocIdSetIterator iterator() {
final DocIdSetIterator in = scorer.iterator();
return new DocIdSetIterator() {
@Override
public int advance(int target) throws IOException {
profile.startTime(ProfileBreakdown.TimingType.ADVANCE);
try {
return in.advance(target);
} finally {
profile.stopAndRecordTime();
}
}
@Override
public int nextDoc() throws IOException {
profile.startTime(ProfileBreakdown.TimingType.NEXT_DOC);
try {
return in.nextDoc();
} finally {
profile.stopAndRecordTime();
}
}
@Override
public int docID() {
return in.docID();
}
@Override
public long cost() {
return in.cost();
}
};
}
@Override
public TwoPhaseIterator twoPhaseIterator() {
final TwoPhaseIterator in = scorer.twoPhaseIterator();
if (in == null) {
return null;
}

View File

@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1719088.jar}" {
grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1721183.jar}" {
// needed to allow MMapDirectory's "unmap hack"
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";

View File

@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1719088.jar}" {
grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1721183.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};

View File

@ -255,7 +255,7 @@ public class TermVectorsUnitTests extends ESTestCase {
assertThat(request.positions(), equalTo(req2.positions()));
assertThat(request.termStatistics(), equalTo(req2.termStatistics()));
assertThat(request.preference(), equalTo(pref));
assertThat(request.routing(), equalTo(parent));
assertThat(request.routing(), equalTo(null));
}
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class MetaDataTests extends ESTestCase {
@ -41,4 +42,72 @@ public class MetaDataTests extends ESTestCase {
}
}
public void testResolveIndexRouting() {
IndexMetaData.Builder builder = IndexMetaData.builder("index")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetaData.builder("alias0").build())
.putAlias(AliasMetaData.builder("alias1").routing("1").build())
.putAlias(AliasMetaData.builder("alias2").routing("1,2").build());
MetaData metaData = MetaData.builder().put(builder).build();
// no alias, no index
assertEquals(metaData.resolveIndexRouting(null, null, null), null);
assertEquals(metaData.resolveIndexRouting(null, "0", null), "0");
assertEquals(metaData.resolveIndexRouting("32", "0", null), "0");
assertEquals(metaData.resolveIndexRouting("32", null, null), "32");
// index, no alias
assertEquals(metaData.resolveIndexRouting("32", "0", "index"), "0");
assertEquals(metaData.resolveIndexRouting("32", null, "index"), "32");
assertEquals(metaData.resolveIndexRouting(null, null, "index"), null);
assertEquals(metaData.resolveIndexRouting(null, "0", "index"), "0");
// alias with no index routing
assertEquals(metaData.resolveIndexRouting(null, null, "alias0"), null);
assertEquals(metaData.resolveIndexRouting(null, "0", "alias0"), "0");
assertEquals(metaData.resolveIndexRouting("32", null, "alias0"), "32");
assertEquals(metaData.resolveIndexRouting("32", "0", "alias0"), "0");
// alias with index routing.
assertEquals(metaData.resolveIndexRouting(null, null, "alias1"), "1");
assertEquals(metaData.resolveIndexRouting("32", null, "alias1"), "1");
assertEquals(metaData.resolveIndexRouting("32", "1", "alias1"), "1");
try {
metaData.resolveIndexRouting(null, "0", "alias1");
fail("should fail");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation"));
}
try {
metaData.resolveIndexRouting("32", "0", "alias1");
fail("should fail");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation"));
}
// alias with invalid index routing.
try {
metaData.resolveIndexRouting(null, null, "alias2");
fail("should fail");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation"));
}
try {
metaData.resolveIndexRouting(null, "1", "alias2");
fail("should fail");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation"));
}
try {
metaData.resolveIndexRouting("32", null, "alias2");
fail("should fail");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation"));
}
}
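Taken together, the assertions above pin down a precedence order for resolveIndexRouting(parent, routing, index). A hedged sketch of that order (not MetaData's actual implementation; in particular the real code also rejects multi-valued alias routing such as "1,2" outright, regardless of the other arguments):

class RoutingPrecedenceSketch {
    // parent is only a fallback when no routing was supplied; explicit routing wins over parent;
    // alias index routing wins over both, and a conflicting explicit routing is rejected
    static String resolve(String aliasRouting, String parent, String routing) {
        if (aliasRouting != null) {
            if (routing != null && routing.equals(aliasRouting) == false) {
                throw new IllegalArgumentException("alias routing conflicts with provided routing");
            }
            return aliasRouting;
        }
        return routing != null ? routing : parent;
    }
}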
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.codecs;
import org.apache.lucene.codecs.Codec;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -46,7 +47,7 @@ public class CodecTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
try {
parser.parse(mapping);
parser.parse("type", new CompressedXContent(mapping));
if (v.onOrAfter(Version.V_2_0_0_beta1)) {
fail("Elasticsearch 2.0 should not support custom postings formats");
}
@ -69,7 +70,7 @@ public class CodecTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test-" + i++, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
try {
parser.parse(mapping);
parser.parse("type", new CompressedXContent(mapping));
if (v.onOrAfter(Version.V_2_0_0_beta1)) {
fail("Elasticsearch 2.0 should not support custom postings formats");
}
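The recurring change in these tests (and the mapper tests below) is the parser entry point: the type name is now passed explicitly and the mapping JSON is wrapped in a CompressedXContent instead of being handed over as a raw string. The new call shape, factored out as a sketch:

import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;

class ParseSignatureSketch {
    // before this change: parser.parse(mapping)
    static DocumentMapper parse(DocumentMapperParser parser, String type, String mapping) throws Exception {
        return parser.parse(type, new CompressedXContent(mapping));
    }
}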

View File

@ -874,7 +874,7 @@ public class GetActionIT extends ESIntegTestCase {
public void testUngeneratedFieldsThatAreNeverStored() throws IOException {
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.disable_flush\": true,\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
@ -913,7 +913,7 @@ public class GetActionIT extends ESIntegTestCase {
public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException {
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.disable_flush\": true,\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
@ -983,7 +983,7 @@ public class GetActionIT extends ESIntegTestCase {
String storedString = stored ? "yes" : "no";
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.disable_flush\": true,\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\",\n" +
" \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
" },\n" +
@ -1008,7 +1008,7 @@ public class GetActionIT extends ESIntegTestCase {
public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException {
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.disable_flush\": true,\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
@ -1074,7 +1074,7 @@ public class GetActionIT extends ESIntegTestCase {
String storedString = stored ? "yes" : "no";
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.disable_flush\": true,\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\",\n" +
" \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
" },\n" +
@ -1126,7 +1126,7 @@ public class GetActionIT extends ESIntegTestCase {
String storedString = stored ? "yes" : "no";
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.disable_flush\": true,\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\",\n" +
" \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
" },\n" +

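Since index.translog.disable_flush no longer exists, these tests now keep flushes from triggering by raising the size threshold out of reach instead. The Settings form of the same idiom, as used in IndexWithShadowReplicasIT below:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.shard.IndexShard;

class FlushSettingSketch {
    // there is no disable_flush switch any more; size-triggered flushes are simply
    // pushed out of reach with an enormous threshold
    static Settings noSizeTriggeredFlush() {
        return Settings.builder()
                .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB))
                .build();
    }
}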
View File

@ -34,6 +34,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShadowIndexShard;
import org.elasticsearch.index.translog.TranslogStats;
@ -179,7 +181,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
Settings idxSettings = Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2)
.put(IndexShard.INDEX_TRANSLOG_DISABLE_FLUSH, true)
.put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB))
.put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString())
.put(IndexMetaData.SETTING_SHADOW_REPLICAS, true)
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)

View File

@ -23,6 +23,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -154,7 +155,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").field("analyzer", analyzerName).endObject().endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper fieldMapper = docMapper.mappers().getMapper("field");
assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class));

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.fielddata;
import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -50,7 +51,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
.endObject()
.endObject().endObject().string();
final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping));
ObjectArrayList<byte[]> bytesList1 = new ObjectArrayList<>(2);

View File

@ -32,6 +32,7 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.English;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
@ -74,7 +75,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
.startObject("float").field("type", "float").endObject()
.startObject("double").field("type", "double").endObject()
.endObject().endObject().endObject().string();
final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
Random random = getRandom();
int atLeast = scaledRandomIntBetween(200, 1500);
for (int i = 0; i < atLeast; i++) {
@ -142,7 +143,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
.startObject("long").field("type", "long").endObject()
.endObject().endObject().endObject().string();
final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
Random random = getRandom();
int atLeast = scaledRandomIntBetween(200, 1500);
final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 10);
@ -219,7 +220,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
.startObject("double").field("type", "double").endObject()
.endObject().endObject().endObject().string();
final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
Random random = getRandom();
int atLeast = scaledRandomIntBetween(200, 1500);
final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 10);
@ -397,7 +398,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
.startObject("geopoint").field("type", "geo_point").startObject("fielddata").field("format", "doc_values").endObject().endObject()
.endObject().endObject().endObject().string();
final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
final DocumentMapper mapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
Random random = getRandom();
int atLeast = scaledRandomIntBetween(200, 1500);

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -31,7 +32,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.field("enabled", false).endObject().endObject().string();
DocumentMapper mapper = mapperParser.parse(mapping);
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
BytesReference bytes = XContentFactory.jsonBuilder()
.startObject().startObject("foo")
@ -48,7 +49,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
.startObject("foo").field("enabled", false).endObject()
.startObject("bar").field("type", "integer").endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = mapperParser.parse(mapping);
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
BytesReference bytes = XContentFactory.jsonBuilder()
.startObject()

View File

@ -22,6 +22,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -52,7 +53,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject()
@ -72,7 +73,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject()
@ -93,7 +94,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
defaultMapper.parse("test", "type", "1", jsonBuilder()
@ -128,7 +129,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject().startObject("obj1")
@ -151,7 +152,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
defaultMapper.parse("test", "type", "1", jsonBuilder()
@ -214,7 +215,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
// foo is already defined in the mappings
assertNull(update);
@ -227,7 +228,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
@ -247,7 +248,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
@ -268,7 +269,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
@ -289,7 +290,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
@ -309,7 +310,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject());
@ -329,7 +330,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("foo").field("type", "object").endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
@ -349,7 +350,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo")

View File

@ -26,6 +26,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.lucene.all.AllField;
@ -69,7 +70,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testSimpleAllMappers() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -88,7 +89,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testAllMappersNoBoost() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
IndexService index = createIndex("test");
DocumentMapper docMapper = index.mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = index.mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -102,7 +103,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testAllMappersTermQuery() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -120,7 +121,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
// #6187: make sure we see AllTermQuery even when offsets are indexed in the _all field:
public void testAllMappersWithOffsetsTermQuery() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -139,7 +140,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
// #6187: if _all doesn't index positions then we never use AllTokenStream, even if some fields have boost
public void testBoostWithOmitPositions() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -150,7 +151,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
// #6187: if no fields were boosted, we shouldn't use AllTokenStream
public void testNoBoost() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -161,10 +162,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testSimpleAllMappersWithReparse() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
String builtMapping = docMapper.mappingSource().string();
// reparse it
DocumentMapper builtDocMapper = parser.parse(builtMapping);
DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
@ -179,7 +180,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testSimpleAllMappersWithStore() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -196,10 +197,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testSimpleAllMappersWithReparseWithStore() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
String builtMapping = docMapper.mappingSource().string();
// reparse it
DocumentMapper builtDocMapper = parser.parse(builtMapping);
DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();
@ -265,10 +266,10 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8();
logger.info(mapping);
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping));
String builtMapping = docMapper.mappingSource().string();
// reparse it
DocumentMapper builtDocMapper = parser.parse(builtMapping);
DocumentMapper builtDocMapper = parser.parse("test", new CompressedXContent(builtMapping));
byte[] json = jsonBuilder().startObject()
.field("foo", "bar")
@ -312,7 +313,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testMultiField_includeInAllSetToFalse() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_include_in_all_set_to_false.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject()
@ -330,7 +331,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testMultiField_defaults() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/multifield-mapping_default.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject()
@ -350,7 +351,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testMisplacedTypeInRoot() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_type_in_root.json");
try {
createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
@ -362,7 +363,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testMistypedTypeInRoot() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json");
try {
createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
@ -374,7 +375,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testMisplacedMappingAsRoot() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_mapping_key_in_root.json");
try {
createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
@ -387,17 +388,17 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testRootObjectMapperPropertiesDoNotCauseException() throws IOException {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_template_mapping.json");
parser.parse("test", mapping);
parser.parse("test", new CompressedXContent(mapping));
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_dynamic_date_formats_mapping.json");
parser.parse("test", mapping);
parser.parse("test", new CompressedXContent(mapping));
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_date_detection_mapping.json");
parser.parse("test", mapping);
parser.parse("test", new CompressedXContent(mapping));
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/type_numeric_detection_mapping.json");
parser.parse("test", mapping);
parser.parse("test", new CompressedXContent(mapping));
}
// issue https://github.com/elasticsearch/elasticsearch/issues/5864
public void testMetadataMappersStillWorking() {
public void testMetadataMappersStillWorking() throws MapperParsingException, IOException {
String mapping = "{";
Map<String, String> rootTypes = new HashMap<>();
//just pick some example from DocumentMapperParser.rootTypeParsers
@ -410,7 +411,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
mapping += "\"" + key + "\"" + ":" + rootTypes.get(key) + ",\n";
}
mapping += "\"properties\":{}}";
createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
}
public void testDocValuesNotAllowed() throws IOException {
@ -419,7 +420,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
.field("doc_values", true)
.endObject().endObject().endObject().string();
try {
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
fail();
} catch (MapperParsingException e) {
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
@ -433,7 +434,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().string();
Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
try {
createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse(mapping);
createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
fail();
} catch (MapperParsingException e) {
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
@ -458,7 +459,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testIncludeInObjectBackcompat() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type").endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().field("_all", "foo").endObject().bytes());
@ -470,7 +471,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testIncludeInObjectNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()


@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.binary;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -50,7 +51,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class));
@ -67,7 +68,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
// case 1: a simple binary value
final byte[] binaryValue1 = new byte[100];


@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.boost;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -39,7 +40,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
.startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.startObject("s_field").field("value", "s_value").field("boost", 2.0f).endObject()


@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.boost;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -44,7 +45,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
.startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject()
.string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
BytesReference json = XContentFactory.jsonBuilder().startObject()
.startObject("str_field").field("boost", 2.0).field("value", "some name").endObject()
.startObject("int_field").field("boost", 3.0).field("value", 10).endObject()
@ -94,7 +95,7 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
.startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject()
.string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
try {
docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject()
.startObject("str_field").field("foo", "bar")


@ -47,7 +47,7 @@ public class CamelCaseFieldNameTests extends ESSingleNodeTestCase {
assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));
documentMapper = index.mapperService().documentMapperParser().parse(documentMapper.mappingSource().string());
documentMapper = index.mapperService().documentMapperParser().parse("type", documentMapper.mappingSource());
assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));


@ -29,6 +29,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -60,7 +61,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@ -93,7 +94,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@ -128,7 +129,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@ -153,7 +154,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
MappedFieldType completionFieldType = fieldMapper.fieldType();
ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@ -172,7 +173,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
MappedFieldType completionFieldType = fieldMapper.fieldType();
ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@ -191,7 +192,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
MappedFieldType completionFieldType = fieldMapper.fieldType();
ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@ -213,7 +214,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
MappedFieldType completionFieldType = fieldMapper.fieldType();
ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@ -235,7 +236,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
MappedFieldType completionFieldType = fieldMapper.fieldType();
ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@ -267,7 +268,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
MappedFieldType completionFieldType = fieldMapper.fieldType();
ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
@ -299,7 +300,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
try {
defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
.startObject()
@ -325,7 +326,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
charsRefBuilder.append("sugg");
charsRefBuilder.setCharAt(2, '\u001F');
@ -378,7 +379,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co"));
@ -392,7 +393,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
@ -409,7 +410,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType()
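Beyond the signature change, these hunks show that suggester queries are now built directly from the parsed completion field type. A short sketch, assuming a mapping string that defines a completion field named "completion" as in the tests above; the fuzzy and regexp variants seen in the surrounding hunks follow the same pattern:

    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(mapping));
    CompletionFieldMapper completionMapper =
            (CompletionFieldMapper) mapper.mappers().getMapper("completion");
    // prefix queries come straight off the field type
    Query prefixQuery = completionMapper.fieldType().prefixQuery(new BytesRef("co"));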


@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.compound;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -35,7 +36,7 @@ public class CompoundTypesTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()


@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.copyto;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -149,7 +150,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
@ -175,7 +176,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
@ -211,7 +212,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
@ -240,7 +241,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
@ -274,7 +275,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
@ -308,7 +309,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapperBefore = parser.parse(mappingBefore);
DocumentMapper docMapperBefore = parser.parse("type1", new CompressedXContent(mappingBefore));
List<String> fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
@ -317,7 +318,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertThat(fields.get(1), equalTo("bar"));
DocumentMapper docMapperAfter = parser.parse(mappingAfter);
DocumentMapper docMapperAfter = parser.parse("type1", new CompressedXContent(mappingAfter));
docMapperBefore.merge(docMapperAfter.mapping(), true, false);
@ -372,7 +373,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
mapping = mapping.endObject();
DocumentMapper mapper = parser.parse(mapping.string());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
XContentBuilder jsonDoc = XContentFactory.jsonBuilder()
.startObject()
@ -452,7 +453,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")


@ -28,6 +28,7 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -57,7 +58,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "boolean").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -86,7 +87,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "boolean").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().getMapper("field");
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
mapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
@ -102,7 +103,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
defaultMapper = parser.parse(mapping);
defaultMapper = parser.parse("type", new CompressedXContent(mapping));
mapper = defaultMapper.mappers().getMapper("field");
builder = XContentFactory.jsonBuilder().startObject();
mapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
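The serialization checks in this file round-trip a parsed field mapper back through toXContent and assert on the produced JSON. A compact sketch of that round trip, assuming a mapper with a "field" property as above:

    FieldMapper mapper = defaultMapper.mappers().getMapper("field");
    XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
    mapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();
    // the serialized form can then be matched against the expected mapping JSON
    String serialized = builder.string();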


@ -24,6 +24,7 @@ import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
@ -50,7 +51,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse(stage1Mapping);
DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
String stage2Mapping = XContentFactory.jsonBuilder().startObject()
.startObject("person")
@ -61,7 +62,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
stage1.merge(stage2.mapping(), true, false);
// Just simulated so merge hasn't happened yet
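The comment above is the key to the merge API used here: the first boolean is a simulate flag, so a merge can be validated before it is applied. A sketch of the two-phase pattern, assuming merge(mapping, simulate, updateAllTypes) as used in this hunk:

    DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
    DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
    // simulate = true: validate the merge without mutating stage1
    stage1.merge(stage2.mapping(), true, false);
    // simulate = false: actually apply the merged mapping
    stage1.merge(stage2.mapping(), false, false);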


@ -30,6 +30,7 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.LocaleUtils;
@ -336,7 +337,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
// Unless the global ignore_malformed option is set to true
Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping);
defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field3", "a")


@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.externalvalues;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -55,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
indexService.analysisService(), indexService.similarityService(), mapperRegistry);
DocumentMapper documentMapper = parser.parse(
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject(ExternalMetadataMapper.CONTENT_TYPE)
.endObject()
@ -63,7 +64,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.startObject("field").field("type", "external").endObject()
.endObject()
.endObject().endObject().string()
);
));
ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -102,7 +103,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
indexService.analysisService(), indexService.similarityService(), mapperRegistry);
DocumentMapper documentMapper = parser.parse(
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field")
.field("type", ExternalMapperPlugin.EXTERNAL)
@ -121,7 +122,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject().endObject()
.string());
.string()));
ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -161,7 +162,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
indexService.analysisService(), indexService.similarityService(), mapperRegistry);
DocumentMapper documentMapper = parser.parse(
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field")
.field("type", ExternalMapperPlugin.EXTERNAL)
@ -183,7 +184,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject().endObject()
.string());
.string()));
ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()


@ -57,7 +57,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -87,7 +87,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -107,7 +107,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -127,7 +127,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -147,7 +147,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -171,7 +171,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject();
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -220,7 +220,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string();
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -283,7 +283,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string();
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -323,7 +323,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -350,7 +350,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -386,7 +386,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -410,7 +410,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -436,7 +436,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -472,7 +472,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -497,7 +497,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -521,7 +521,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -547,7 +547,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -590,7 +590,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(validateMapping);
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
@ -601,7 +601,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate_lat", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(validateMapping);
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
@ -612,7 +612,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate_lon", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(validateMapping);
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
@ -624,7 +624,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("normalize", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(normalizeMapping);
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
@ -635,7 +635,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("normalize_lat", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(normalizeMapping);
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
@ -646,7 +646,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("normalize_lon", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(normalizeMapping);
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
@ -667,44 +667,44 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate", false).endObject().endObject()
.endObject().endObject().string();
parser.parse(mapping);
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate_lat", false).endObject().endObject()
.endObject().endObject().string();
parser.parse(mapping);
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate_lon", false).endObject().endObject()
.endObject().endObject().string();
parser.parse(mapping);
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
// normalize
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("normalize", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(mapping);
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("normalize_lat", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(mapping);
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("normalize_lon", true).endObject().endObject()
.endObject().endObject().string();
parser.parse(mapping);
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
}
public void testGeoPointMapperMerge() throws Exception {
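The assertions above capture the backwards-compatibility rewriting for legacy geo_point options: the deprecated validate* settings surface as ignore_malformed and the normalize* settings as coerce. A condensed sketch of that pattern, under the same assumptions as the surrounding tests:

    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point")
                .field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate", false)
            .endObject().endObject()
            .endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    // "validate": false is rewritten rather than rejected
    assertThat(mapper.mapping().toString(), containsString("\"ignore_malformed\":true"));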


@ -46,7 +46,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -70,7 +70,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -87,7 +87,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse(mapping);
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -108,7 +108,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -123,7 +123,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse(mapping);
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -141,7 +141,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -164,7 +164,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -192,7 +192,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -216,7 +216,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -242,7 +242,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -266,7 +266,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -289,7 +289,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -311,7 +311,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -334,7 +334,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -356,7 +356,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));


@ -23,6 +23,7 @@ import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -48,7 +49,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -72,7 +73,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -96,7 +97,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -117,7 +118,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class));
BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper;
@ -131,7 +132,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class));
BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper;
@ -145,7 +146,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()


@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.id;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -43,7 +44,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
public void testId() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -69,7 +70,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
.startObject("_id").field("index", "not_analyzed").endObject()
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -85,7 +86,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
.startObject("_id").field("path", "my_path").endObject()
.endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
// serialize the id mapping
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
@ -103,7 +104,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
public void testIncludeInObjectBackcompat() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
.startObject()
@ -117,7 +118,7 @@ public class IdMappingTests extends ESSingleNodeTestCase {
public void testIncludeInObjectNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.index;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -40,7 +41,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
IndexFieldMapper indexMapper = docMapper.indexMapper();
assertThat(indexMapper.enabled(), equalTo(true));
@ -58,7 +59,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class);
assertThat(indexMapper.enabled(), equalTo(false));
@ -75,7 +76,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
public void testDefaultDisabledIndexMapper() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class);
assertThat(indexMapper.enabled(), equalTo(false));
@ -94,13 +95,13 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
.startObject("_index").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser();
DocumentMapper mapperEnabled = parser.parse(mappingWithIndexEnabled);
DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(mappingWithIndexEnabled));
String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled);
DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(mappingWithIndexDisabled));
mapperEnabled.merge(mapperDisabled.mapping(), false, false);
assertThat(mapperEnabled.indexMapper().enabled(), is(false));
@ -111,12 +112,12 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
.startObject("_index").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser();
DocumentMapper enabledMapper = parser.parse(enabledMapping);
DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping));
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping));
enabledMapper.merge(disabledMapper.mapping(), false, false);
assertThat(enabledMapper.indexMapper().enabled(), is(false));
@ -129,7 +130,7 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
.field("store", "yes").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class);
assertThat(indexMapper.enabled(), equalTo(true));
assertThat(indexMapper.fieldType().stored(), equalTo(true));

View File

@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -81,7 +82,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
.startObject("_field_names").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertFalse(fieldNamesMapper.fieldType().hasDocValues());
assertEquals(IndexOptions.DOCS, fieldNamesMapper.fieldType().indexOptions());
@ -92,7 +93,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
public void testInjectIntoDocDuringParsing() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -110,7 +111,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_field_names").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertTrue(fieldNamesMapper.fieldType().isEnabled());
@ -127,7 +128,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_field_names").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertFalse(fieldNamesMapper.fieldType().isEnabled());
@ -143,7 +144,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
public void testPre13Disabled() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_2_4.id).build();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertFalse(fieldNamesMapper.fieldType().isEnabled());
}
@ -155,7 +156,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertFalse(fieldNamesMapper.fieldType().isEnabled());
@ -174,7 +175,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertTrue(fieldNamesMapper.fieldType().stored());
}
@ -188,12 +189,12 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper mapperEnabled = parser.parse(enabledMapping);
DocumentMapper mapperDisabled = parser.parse(disabledMapping);
DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping));
DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(disabledMapping));
mapperEnabled.merge(mapperDisabled.mapping(), false, false);
assertFalse(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
mapperEnabled = parser.parse(enabledMapping);
mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping));
mapperDisabled.merge(mapperEnabled.mapping(), false, false);
assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
}
@ -282,7 +283,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService,
indexService.analysisService(), indexService.similarityService(), mapperRegistry);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}"));
IndexableField[] fields = parsedDocument.rootDoc().getFields(FieldNamesFieldMapper.NAME);
boolean found = false;

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.internal;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -31,7 +32,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase {
public void testDocValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class);
assertTrue(typeMapper.fieldType().hasDocValues());
}
@ -41,7 +42,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id).build();
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class);
assertFalse(typeMapper.fieldType().hasDocValues());
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.ip;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -42,7 +43,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("ip").field("type", "ip").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -82,7 +83,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase {
.field("ignore_malformed", false).endObject().startObject("field3").field("type", "ip").endObject().endObject().endObject()
.endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1",
XContentFactory.jsonBuilder().startObject().field("field1", "").field("field2", "10.20.30.40").endObject().bytes());
@ -104,7 +105,7 @@ public class SimpleIpMappingTests extends ESSingleNodeTestCase {
// Unless the global ignore_malformed option is set to true
Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping);
defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes());
assertThat(doc.rootDoc().getField("field3"), nullValue());
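
These ip tests lean on the index-level override: a malformed value in a strict field normally fails the whole document, but when index.mapping.ignore_malformed is set at index creation the bad value is silently dropped instead. A sketch of that override path, reusing only calls visible in the hunk above (field3 sets no per-field ignore_malformed, so the index default applies):

Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
DocumentMapper mapper = createIndex("test2", indexSettings)
        .mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1",
        XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes());
assertThat(doc.rootDoc().getField("field3"), nullValue()); // malformed value dropped, not indexed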

View File

@ -29,6 +29,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
@ -60,7 +61,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()

View File

@ -51,13 +51,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
.startObject("name").field("type", "string").endObject()
.endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse(stage1Mapping);
DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("name").field("type", "string").endObject()
.startObject("age").field("type", "integer").endObject()
.startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
stage1.merge(stage2.mapping(), true, false);
// since we are simulating, we should not have the age mapping
@ -73,11 +73,11 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
public void testMergeObjectDynamic() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string();
DocumentMapper mapper = parser.parse(objectMapping);
DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping));
assertNull(mapper.root().dynamic());
String withDynamicMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject().string();
DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping));
assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
mapper.merge(withDynamicMapper.mapping(), false, false);
@ -89,11 +89,11 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("obj").field("type", "object").endObject()
.endObject().endObject().endObject().string();
DocumentMapper objectMapper = parser.parse(objectMapping);
DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping));
String nestedMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("obj").field("type", "nested").endObject()
.endObject().endObject().endObject().string();
DocumentMapper nestedMapper = parser.parse(nestedMapping);
DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping));
try {
objectMapper.merge(nestedMapper.mapping(), true, false);
@ -119,8 +119,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject()
.endObject().endObject().string();
DocumentMapper existing = parser.parse(mapping1);
DocumentMapper changed = parser.parse(mapping2);
DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1));
DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
existing.merge(changed.mapping(), false, false);
@ -137,8 +137,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject()
.endObject().endObject().string();
DocumentMapper existing = parser.parse(mapping1);
DocumentMapper changed = parser.parse(mapping2);
DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1));
DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
existing.merge(changed.mapping(), false, false);
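
All of these merge tests drive the three-argument DocumentMapper.merge(Mapping, boolean simulate, boolean updateAllTypes); the parameter names are inferred, but the signature matches the calls in these hunks. With simulate set, conflicts are checked and nothing is applied, which is why the comment above notes that a simulated merge leaves the age mapping absent. A condensed sketch of the two modes (mapping strings elided):

DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
stage1.merge(stage2.mapping(), true, false);  // simulate: validate the merge, stage1 is left unchanged
stage1.merge(stage2.mapping(), false, false); // apply: stage2's new fields are merged into stage1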

View File

@ -26,6 +26,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -77,7 +78,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
}
private void testMultiField(String mapping) throws Exception {
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
@ -161,7 +162,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
String builtMapping = builderDocMapper.mappingSource().string();
// System.out.println(builtMapping);
// reparse it
DocumentMapper docMapper = mapperParser.parse(builtMapping);
DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
@ -189,7 +190,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testConvertMultiFieldNoDefaultField() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
@ -261,7 +262,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
boolean indexCreatedBefore22 = version.before(Version.V_2_2_0);
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json");
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.mappers().getMapper("a"), notNullValue());
assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class));
@ -377,7 +378,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testConvertMultiFieldCompletion() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-completion.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.mappers().getMapper("a"), notNullValue());
assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class));
@ -457,7 +458,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
}
builder = builder.endObject().endObject().endObject().endObject().endObject();
String mapping = builder.string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
Arrays.sort(multiFieldNames);
Map<String, Object> sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true).v2();
@ -498,8 +499,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
// Check the mapping remains identical when deserialized/re-serialized
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(builder.string());
DocumentMapper docMapper2 = parser.parse(docMapper.mappingSource().string());
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(builder.string()));
DocumentMapper docMapper2 = parser.parse("type", docMapper.mappingSource());
assertThat(docMapper.mappingSource(), equalTo(docMapper2.mappingSource()));
}
@ -509,7 +510,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().string();
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
try {
parser.parse(mapping);
parser.parse("type", new CompressedXContent(mapping));
fail("expected mapping parse failure");
} catch (MapperParsingException e) {
assertTrue(e.getMessage().contains("cannot be used in multi field"));
@ -522,7 +523,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().string();
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
try {
parser.parse(mapping);
parser.parse("type", new CompressedXContent(mapping));
fail("expected mapping parse failure");
} catch (MapperParsingException e) {
assertTrue(e.getMessage().contains("cannot be used in multi field"));
@ -548,7 +549,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
MapperService mapperService = createIndex("test").mapperService();
try {
mapperService.documentMapperParser().parse(mapping.string());
mapperService.documentMapperParser().parse("my_type", new CompressedXContent(mapping.string()));
fail("this should throw an exception because one field contains a dot");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), equalTo("Field name [raw.foo] which is a multi field of [city] cannot contain '.'"));

View File

@ -43,7 +43,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json");
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue());
@ -56,7 +56,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
assertThat(f, nullValue());
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
DocumentMapper docMapper2 = parser.parse(mapping);
DocumentMapper docMapper2 = parser.parse("person", new CompressedXContent(mapping));
docMapper.merge(docMapper2.mapping(), true, false);
@ -77,7 +77,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
assertThat(f, notNullValue());
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
DocumentMapper docMapper3 = parser.parse(mapping);
DocumentMapper docMapper3 = parser.parse("person", new CompressedXContent(mapping));
docMapper.merge(docMapper3.mapping(), true, false);
@ -92,7 +92,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue());
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json");
DocumentMapper docMapper4 = parser.parse(mapping);
DocumentMapper docMapper4 = parser.parse("person", new CompressedXContent(mapping));
docMapper.merge(docMapper4.mapping(), true, false);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.nested;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -36,7 +37,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.startObject("nested1").field("type", "nested").endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -62,7 +63,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.startObject("nested1").field("type", "nested").endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -111,7 +112,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -162,7 +163,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -213,7 +214,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -264,7 +265,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -315,7 +316,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");

View File

@ -1,5 +1,7 @@
package org.elasticsearch.index.mapper.null_value;
import org.elasticsearch.common.compress.CompressedXContent;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
@ -49,7 +51,7 @@ public class NullValueTests extends ESSingleNodeTestCase {
.endObject().string();
try {
indexService.mapperService().documentMapperParser().parse(mapping);
indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
fail("Test should have failed because [null_value] was null.");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), equalTo("Property [null_value] cannot be null."));

View File

@ -26,6 +26,7 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -113,7 +114,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -147,7 +148,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
// Unless the global ignore_malformed option is set to true
Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse(mapping);
defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field3", "a")
@ -184,7 +185,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
// Test numbers passed as strings
String invalidJsonNumberAsString = "1";
@ -284,7 +285,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -320,7 +321,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -352,7 +353,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.field("date_detection", true)
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -402,7 +403,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -472,7 +473,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -531,7 +532,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().endObject().string();
try {
parser.parse(mappingWithTV);
parser.parse("type", new CompressedXContent(mappingWithTV));
fail();
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]"));
@ -541,7 +542,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
.build();
parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
parser.parse(mappingWithTV); // no exception
parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception
}
public void testAnalyzerBackCompat() throws Exception {
@ -560,7 +561,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().endObject().string();
try {
parser.parse(mappingWithTV);
parser.parse("type", new CompressedXContent(mappingWithTV));
fail();
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]"));
@ -570,6 +571,6 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
.build();
parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
parser.parse(mappingWithTV); // no exception
parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception
}
}
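
The numeric backcompat cases above encode a version gate: a mapping that puts term_vector or analyzer on a number field throws MapperParsingException on a current index, but parses cleanly when IndexMetaData marks the index as created on 2.1. A sketch of the gate, using only the settings and calls visible in the hunks:

Settings oldIndexSettings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
        .build();
DocumentMapperParser parser = createIndex("index2-" + type, oldIndexSettings)
        .mapperService().documentMapperParser();
parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception on the legacy index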

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -37,7 +38,7 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("obj1").field("type", "object").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -34,7 +35,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" +
" \"object\": {\n" +
@ -59,7 +60,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startArray("properties").endArray()
.endObject().endObject().string();
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
}
public void testEmptyFieldsArrayMultiFields() throws Exception {
@ -77,7 +78,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.string();
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
}
public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception {
@ -98,7 +99,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.endObject()
.string();
try {
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
fail("Expected MapperParsingException");
} catch(MapperParsingException e) {
assertThat(e.getMessage(), containsString("expected map for property [fields]"));
@ -117,7 +118,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.string();
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
}
public void testFieldsWithFilledArrayShouldThrowException() throws Exception {
@ -134,7 +135,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.endObject()
.string();
try {
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Expected map for property [fields]"));
@ -160,6 +161,6 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.string();
createIndex("test").mapperService().documentMapperParser().parse(mapping);
createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
}
}

View File

@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.parent;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -34,7 +35,7 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
public void testParentSetInDocNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
@ -50,7 +51,7 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
.startObject("_parent").field("type", "p_type").endObject()
.endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
.startObject()
@ -67,7 +68,7 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "p_type").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
.startObject()

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.path;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -34,7 +35,7 @@ import static org.hamcrest.Matchers.nullValue;
public class PathMapperTests extends ESSingleNodeTestCase {
public void testPathMapping() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/path/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
// test full name
assertThat(docMapper.mappers().getMapper("first1"), nullValue());

View File

@ -25,6 +25,7 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -49,7 +50,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
public void testRoutingMapper() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
.startObject()
@ -69,7 +70,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.routingFieldMapper().fieldType().stored(), equalTo(false));
assertEquals(IndexOptions.NONE, docMapper.routingFieldMapper().fieldType().indexOptions());
}
@ -79,7 +80,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
.startObject("_routing").field("store", "no").field("index", "no").endObject()
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(enabledMapping);
DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping));
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
enabledMapper.routingFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
@ -102,7 +103,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
.startObject("_routing").field("path", "custom_routing").endObject()
.endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_routing", "routing_value").endObject();
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -115,7 +116,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
public void testIncludeInObjectBackcompat() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject();
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -129,7 +130,7 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
public void testIncludeInObjectNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.simple;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -64,10 +65,10 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
public void testParseToJsonAndParse() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
String builtMapping = docMapper.mappingSource().string();
// reparse it
DocumentMapper builtDocMapper = parser.parse(builtMapping);
DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
Document doc = builtDocMapper.parse("test", "person", "1", json).rootDoc();
assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
@ -76,7 +77,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
public void testSimpleParser() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
@ -88,7 +89,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
public void testSimpleParserNoTypeNoId() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
@ -98,12 +99,12 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
public void testAttributes() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
String builtMapping = docMapper.mappingSource().string();
DocumentMapper builtDocMapper = parser.parse(builtMapping);
DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping));
assertThat((String) builtDocMapper.meta().get("param1"), equalTo("value1"));
}
@ -132,7 +133,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
.startObject("foo.bar").field("type", "string").endObject()
.endObject().endObject().string();
try {
mapperParser.parse(mapping);
mapperParser.parse("type", new CompressedXContent(mapping));
fail("Mapping parse should have failed");
} catch (MapperParsingException e) {
assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));

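Failure-path tests change the same way: only the parse call is rewritten, and the expected exception stays untouched. Condensed from the dotted-field-name test above:

// Sketch: error paths keep their assertions; only the parse call changes.
try {
    mapperParser.parse("type", new CompressedXContent(mapping));
    fail("Mapping parse should have failed");
} catch (MapperParsingException e) {
    assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));
}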

@ -50,14 +50,14 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper documentMapper = parser.parse(mapping);
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.endObject().bytes());
assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
documentMapper = parser.parse(mapping);
documentMapper = parser.parse("type", new CompressedXContent(mapping));
doc = documentMapper.parse("test", "type", "1", XContentFactory.smileBuilder().startObject()
.field("field", "value")
.endObject().bytes());
@ -74,7 +74,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
.build();
DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
parser.parse(mapping); // no exception
parser.parse("type", new CompressedXContent(mapping)); // no exception
}
public void testIncludes() throws Exception {
@ -82,7 +82,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
.startObject("_source").field("includes", new String[]{"path1*"}).endObject()
.endObject().endObject().string();
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.startObject("path1").field("field1", "value1").endObject()
@ -103,7 +103,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
.startObject("_source").field("excludes", new String[]{"path1*"}).endObject()
.endObject().endObject().string();
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.startObject("path1").field("field1", "value1").endObject()
@ -137,7 +137,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
// all is well
}
try {
mapper = parser.parse(null, "{}", defaultMapping);
mapper = parser.parse(null, new CompressedXContent("{}"), defaultMapping);
assertThat(mapper.type(), equalTo("my_type"));
assertThat(mapper.sourceMapper().enabled(), equalTo(false));
fail();
@ -156,7 +156,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
.startObject("_source").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", mapping, defaultMapping);
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", new CompressedXContent(mapping), defaultMapping);
assertThat(mapper.type(), equalTo("my_type"));
assertThat(mapper.sourceMapper().enabled(), equalTo(true));
}
@ -193,13 +193,13 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
}
void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper = parser.parse(docMapper.mappingSource().string());
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
docMapper = parser.parse("type", docMapper.mappingSource());
if (conflicts.length == 0) {
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
} else {
try {
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
for (String conflict : conflicts) {
@ -264,27 +264,27 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
public void testComplete() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
assertTrue(parser.parse(mapping).sourceMapper().isComplete());
assertTrue(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").field("enabled", false).endObject()
.endObject().endObject().string();
assertFalse(parser.parse(mapping).sourceMapper().isComplete());
assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("includes", "foo.*").endObject()
.endObject().endObject().string();
assertFalse(parser.parse(mapping).sourceMapper().isComplete());
assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("excludes", "foo.*").endObject()
.endObject().endObject().string();
assertFalse(parser.parse(mapping).sourceMapper().isComplete());
assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
}
public void testSourceObjectContainsExtraTokens() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
documentMapper.parse("test", "type", "1", new BytesArray("{}}")); // extra end object (invalid JSON)

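Two consequences of the new signature are visible above. First, DocumentMapper.mappingSource() already returns a CompressedXContent, so round-trip reparsing drops the .string() detour. Second, the overload taking a default mapping wraps its source the same way. Condensed from assertConflicts and the default-mapping test:

// Round trip: mappingSource() can be fed straight back to the parser.
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
docMapper = parser.parse("type", docMapper.mappingSource());

// Default-mapping variant, wrapped the same way (behavior unchanged by this diff):
mapper = parser.parse(null, new CompressedXContent("{}"), defaultMapping);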

@ -76,7 +76,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -122,7 +122,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception {
String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(emptyMap())).endObject().string();
mapper = parser.parse(mapping);
mapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "2345")
@ -136,7 +136,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -154,7 +154,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -177,7 +177,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject()
.endObject().endObject().string();
defaultMapper = parser.parse(mapping);
defaultMapper = parser.parse("type", new CompressedXContent(mapping));
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -200,7 +200,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject()
.endObject().endObject().string();
defaultMapper = parser.parse(mapping);
defaultMapper = parser.parse("type", new CompressedXContent(mapping));
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -242,7 +242,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) {
Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer"));
@ -266,7 +266,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
mapper = parser.parse(mapping);
mapper = parser.parse("type", new CompressedXContent(mapping));
for (String fieldName : Arrays.asList("field1", "field2")) {
Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
@ -318,7 +318,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -389,7 +389,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -443,7 +443,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -492,7 +492,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
.endObject().endObject().endObject().endObject().string();
defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false);
defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), false, false);
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -507,7 +507,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
.endObject().endObject().endObject().endObject().string();
try {
defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("different [omit_norms]"));
@ -531,7 +531,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.field("analyzer", "standard")
.endObject().endObject().endObject().endObject().string();
try {
parser.parse(mapping);
parser.parse("type", new CompressedXContent(mapping));
fail("Mapping definition should fail with the position_offset_gap setting");
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]");
@ -554,8 +554,8 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.field("type", "string")
.field("position_offset_gap", 10)
.endObject().endObject().endObject().endObject().string();
parser.parse(mapping);
parser.parse("type", new CompressedXContent(mapping));
assertThat(parser.parse(mapping).mapping().toString(), containsString("\"position_increment_gap\":10"));
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"position_increment_gap\":10"));
}
}

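Merging follows the same pattern: the updated source is parsed under an explicit type before its mapping() is merged, and the boolean flags are untouched by this diff. Condensed from the norms-update test above:

// Sketch: merge an updated mapping parsed under the same type.
defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), false, false);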

@ -75,7 +75,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
public void testSimpleDisabled() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -90,7 +90,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", "yes").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -108,7 +108,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
for (String mapping : Arrays.asList(
XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(),
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) {
DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled));
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions()));
@ -130,7 +130,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.field("doc_values", true)
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(true));
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false));
assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions());
@ -144,12 +144,12 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.startObject("_timestamp").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper enabledMapper = parser.parse(enabledMapping);
DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping));
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping));
enabledMapper.merge(disabledMapper.mapping(), false, false);
@ -161,7 +161,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("store", "yes").field("index", "no").endObject()
.endObject().endObject().string();
DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(enabledMapping);
DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping));
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
enabledMapper.timestampFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
@ -192,7 +192,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -218,7 +218,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -246,7 +246,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.field("foo", "bar")
.endObject();
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -271,7 +271,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.field("foo", "bar")
.endObject();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -298,7 +298,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -326,7 +326,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -349,7 +349,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject();
try {
createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
@ -371,7 +371,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -394,7 +394,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject();
try {
createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
@ -412,7 +412,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject();
try {
createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
@ -432,7 +432,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -506,14 +506,14 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
.endObject().endObject().string();
docMapper.merge(parser.parse(mapping).mapping(), false, false);
docMapper.merge(parser.parse("type", new CompressedXContent(mapping)).mapping(), false, false);
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER));
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array"));
}
@ -526,8 +526,8 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
docMapper = parser.parse(docMapper.mappingSource().string());
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
docMapper = parser.parse("type", docMapper.mappingSource());
assertThat(docMapper.mappingSource().string(), equalTo(mapping));
}
@ -546,9 +546,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
boolean tokenized = docMapper.timestampFieldMapper().fieldType().tokenized();
docMapper = parser.parse(docMapper.mappingSource().string());
docMapper = parser.parse("type", docMapper.mappingSource());
assertThat(tokenized, equalTo(docMapper.timestampFieldMapper().fieldType().tokenized()));
}
@ -674,13 +674,13 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
}
void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper = parser.parse(docMapper.mappingSource().string());
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
docMapper = parser.parse("type", docMapper.mappingSource());
if (conflict == null) {
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
} else {
try {
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString(conflict));
@ -735,9 +735,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
void assertDocValuesSerialization(String mapping) throws Exception {
DocumentMapperParser parser = createIndex("test_doc_values", BWC_SETTINGS).mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues();
docMapper = parser.parse(docMapper.mappingSource().string());
docMapper = parser.parse("type", docMapper.mappingSource());
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues));
assertAcked(client().admin().indices().prepareDelete("test_doc_values"));
}
@ -746,7 +746,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject();
@ -762,7 +762,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject()
.endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject();
@ -779,7 +779,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
@ -794,7 +794,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject();
@ -815,7 +815,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
//
// test with older versions
Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build();
DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
@ -827,7 +827,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
//
// test with 2.x
DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse(mapping);
DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData();
// this works with 2.x

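Where a test builds its mapping as an XContentBuilder rather than a String, the builder is first rendered with string() and then wrapped, as in the TimestampMappingTests hunks above (the _timestamp body here is illustrative):

// Sketch: XContentBuilder-based mappings are rendered, then wrapped.
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("_timestamp").field("enabled", true).endObject()
        .endObject().endObject();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
        .parse("type", new CompressedXContent(mapping.string()));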

@ -50,7 +50,7 @@ import static org.hamcrest.Matchers.notNullValue;
public class TTLMappingTests extends ESSingleNodeTestCase {
public void testSimpleDisabled() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -65,7 +65,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl").field("enabled", "yes").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -80,7 +80,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testDefaultValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled));
assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored()));
assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions()));
@ -93,7 +93,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(true));
assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(true)); // store was never serialized, so it was always lost
@ -112,8 +112,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl);
DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl);
DocumentMapper mapperWithoutTtl = parser.parse("type", new CompressedXContent(mappingWithoutTtl));
DocumentMapper mapperWithTtl = parser.parse("type", new CompressedXContent(mappingWithTtl));
mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false);
@ -136,8 +136,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(updatedMapping);
DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl));
DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(updatedMapping));
initialMapper.merge(updatedMapper.mapping(), true, false);
@ -148,8 +148,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
String mappingWithTtl = getMappingWithTtlEnabled().string();
String mappingWithTtlDisabled = getMappingWithTtlDisabled().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled);
DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl));
DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(mappingWithTtlDisabled));
try {
initialMapper.merge(updatedMapper.mapping(), true, false);
@ -278,7 +278,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.startObject("_ttl").field("enabled", true).endObject()
.endObject().endObject().string();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_ttl", "2d").endObject();
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -294,7 +294,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()

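Every touched test class also gains the import the new signature requires, visible in the hunks above and below:

import org.elasticsearch.common.compress.CompressedXContent;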

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.typelevels;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -33,7 +34,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testNoLevel() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -51,7 +52,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testTypeLevel() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().startObject("type")
@ -69,7 +70,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsValue() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -89,7 +90,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsValue() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().startObject("type")
@ -109,7 +110,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsObject() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -129,7 +130,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsObject() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().startObject("type")
@ -149,7 +150,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().startObject("type")
@ -169,7 +170,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().startObject("type")
@ -189,7 +190,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -210,7 +211,7 @@ public class ParseDocumentTypeLevelsTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception {
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(defaultMapping);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject().startObject("type")


@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.typelevels;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
@ -34,11 +35,7 @@ public class ParseMappingTypeLevelTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper mapper = parser.parse("type", mapping);
assertThat(mapper.type(), equalTo("type"));
assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));
mapper = parser.parse(mapping);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertThat(mapper.type(), equalTo("type"));
assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));
}
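The final hunk shows the payoff of the change: the old test parsed the same mapping twice, once with an explicit type and once through the now-removed single-argument form that inferred the type from the mapping's root key; the duplicate block is dropped and only the explicit form survives:

// Sketch of the surviving assertions.
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertThat(mapper.type(), equalTo("type"));
assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));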

Some files were not shown because too many files have changed in this diff.