Merge branch 'master' into feature/aggs-refactoring

# Conflicts:
#	core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java
Colin Goodheart-Smithe 2016-02-15 10:37:16 +00:00
commit 1a46628daa
261 changed files with 3630 additions and 2251 deletions


@@ -27,11 +27,14 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.PendingClusterTask;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.List;
/**
*/
public class TransportPendingClusterTasksAction extends TransportMasterNodeReadAction<PendingClusterTasksRequest, PendingClusterTasksResponse> {
@@ -63,6 +66,9 @@ public class TransportPendingClusterTasksAction extends TransportMasterNodeReadA
@Override
protected void masterOperation(PendingClusterTasksRequest request, ClusterState state, ActionListener<PendingClusterTasksResponse> listener) {
listener.onResponse(new PendingClusterTasksResponse(clusterService.pendingTasks()));
logger.trace("fetching pending tasks from cluster service");
final List<PendingClusterTask> pendingTasks = clusterService.pendingTasks();
logger.trace("done fetching pending tasks from cluster service");
listener.onResponse(new PendingClusterTasksResponse(pendingTasks));
}
}


@@ -272,7 +272,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
list = new ArrayList<>();
requestsByShard.put(shardIt.shardId(), list);
}
list.add(new BulkItemRequest(i, new DeleteRequest(deleteRequest)));
list.add(new BulkItemRequest(i, deleteRequest));
}
} else {
ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId();


@@ -19,7 +19,6 @@
package org.elasticsearch.action.delete;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocumentRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
@@ -80,28 +79,6 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
this.id = id;
}
/**
* Copy constructor that creates a new delete request that is a copy of the one provided as an argument.
*/
public DeleteRequest(DeleteRequest request) {
this(request, request);
}
/**
* Copy constructor that creates a new delete request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
public DeleteRequest(DeleteRequest request, ActionRequest originalRequest) {
super(request);
this.type = request.type();
this.id = request.id();
this.routing = request.routing();
this.parent = request.parent();
this.refresh = request.refresh();
this.version = request.version();
this.versionType = request.versionType();
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();


@@ -67,26 +67,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
type = "_all";
}
/**
* Copy constructor that creates a new get request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
public GetRequest(GetRequest getRequest) {
this.index = getRequest.index;
this.type = getRequest.type;
this.id = getRequest.id;
this.routing = getRequest.routing;
this.parent = getRequest.parent;
this.preference = getRequest.preference;
this.fields = getRequest.fields;
this.fetchSourceContext = getRequest.fetchSourceContext;
this.refresh = getRequest.refresh;
this.realtime = getRequest.realtime;
this.version = getRequest.version;
this.versionType = getRequest.versionType;
this.ignoreErrorsOnGeneratedFields = getRequest.ignoreErrorsOnGeneratedFields;
}
/**
* Constructs a new get request against the specified index. The {@link #type(String)} and {@link #id(String)}
* must be set.


@@ -159,26 +159,6 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
public IndexRequest() {
}
/**
* Copy constructor that creates a new index request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
public IndexRequest(IndexRequest indexRequest) {
super(indexRequest);
this.type = indexRequest.type;
this.id = indexRequest.id;
this.routing = indexRequest.routing;
this.parent = indexRequest.parent;
this.timestamp = indexRequest.timestamp;
this.ttl = indexRequest.ttl;
this.source = indexRequest.source;
this.opType = indexRequest.opType;
this.refresh = indexRequest.refresh;
this.version = indexRequest.version;
this.versionType = indexRequest.versionType;
this.contentType = indexRequest.contentType;
}
/**
* Constructs a new index request against the specific index. The {@link #type(String)}
* {@link #source(byte[])} must be set.
@@ -646,6 +626,12 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
if (defaultTimestamp.equals(TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP)) {
timestamp = Long.toString(System.currentTimeMillis());
} else {
// if we are here, the defaultTimestamp is not
// TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP but
// this can only happen if defaultTimestamp was
// assigned again because mappingMd and
// mappingMd#timestamp() are not null
assert mappingMd != null;
timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter(), getVersion(metaData, concreteIndex));
}
}


@@ -103,7 +103,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio
void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener<BulkResponse> listener) {
BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original);
executionService.executeBulkRequest(() -> bulkRequestModifier, (indexRequest, throwable) -> {
logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable);
logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", throwable, indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id());
bulkRequestModifier.markCurrentItemAsFailed(throwable);
}, (throwable) -> {
if (throwable != null) {
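
The reordering above works around a varargs overload pitfall: judging by the two call shapes, the logger exposes both debug(msg, params...) and debug(msg, cause, params...), so a Throwable passed in last position binds to the params-only overload and its stack trace is formatted away instead of logged. A minimal self-contained sketch of the same pitfall (toy logger, not the ES class):

class SketchLogger {
    void debug(String msg, Object... params) {
        System.out.println("DEBUG " + msg + " " + java.util.Arrays.toString(params));
    }

    void debug(String msg, Throwable cause, Object... params) {
        debug(msg, params);
        cause.printStackTrace(System.out); // only this overload emits the stack trace
    }
}

public class VarargsPitfallDemo {
    public static void main(String[] args) {
        SketchLogger logger = new SketchLogger();
        Throwable t = new RuntimeException("pipeline failure");
        // old call shape: resolves to debug(String, Object...), the throwable is swallowed
        logger.debug("failed to execute pipeline [{}]", "my-pipeline", t);
        // fixed call shape: resolves to debug(String, Throwable, Object...)
        logger.debug("failed to execute pipeline [{}]", t, "my-pipeline");
    }
}
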


@@ -21,7 +21,6 @@ package org.elasticsearch.action.percolate;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.TransportGetAction;
import org.elasticsearch.action.support.ActionFilters;
@@ -74,9 +73,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
protected void doExecute(Task task, final PercolateRequest request, final ActionListener<PercolateResponse> listener) {
request.startTime = System.currentTimeMillis();
if (request.getRequest() != null) {
//create a new get request to make sure it has the same headers and context as the original percolate request
GetRequest getRequest = new GetRequest(request.getRequest());
getAction.execute(getRequest, new ActionListener<GetResponse>() {
getAction.execute(request.getRequest(), new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getResponse) {
if (!getResponse.isExists()) {


@@ -76,23 +76,6 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
public SearchRequest() {
}
/**
* Copy constructor that creates a new search request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
public SearchRequest(SearchRequest searchRequest) {
this.searchType = searchRequest.searchType;
this.indices = searchRequest.indices;
this.routing = searchRequest.routing;
this.preference = searchRequest.preference;
this.template = searchRequest.template;
this.source = searchRequest.source;
this.requestCache = searchRequest.requestCache;
this.scroll = searchRequest.scroll;
this.types = searchRequest.types;
this.indicesOptions = searchRequest.indicesOptions;
}
/**
* Constructs a new search request against the indices. No indices provided here means that search
* will run against all indices.


@@ -59,8 +59,7 @@ public class TransportMultiSearchAction extends HandledTransportAction<MultiSear
final AtomicInteger counter = new AtomicInteger(responses.length());
for (int i = 0; i < responses.length(); i++) {
final int index = i;
SearchRequest searchRequest = new SearchRequest(request.requests().get(i));
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
searchAction.execute(request.requests().get(i), new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse searchResponse) {
responses.set(index, new MultiSearchResponse.Item(searchResponse, null));


@@ -90,7 +90,6 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
logger.debug("failed to optimize search type, continue as normal", e);
}
}
if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) {
dfsQueryThenFetchAction.execute(searchRequest, listener);
} else if (searchRequest.searchType() == SearchType.QUERY_THEN_FETCH) {


@@ -19,7 +19,6 @@
package org.elasticsearch.action.support.replication;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.index.shard.ShardId;
/**
@@ -38,13 +37,4 @@ public class BasicReplicationRequest extends ReplicationRequest<BasicReplication
public BasicReplicationRequest(ShardId shardId) {
super(shardId);
}
/**
* Copy constructor that creates a new request that is a copy of the one
* provided as an argument.
*/
protected BasicReplicationRequest(BasicReplicationRequest request) {
super(request);
}
}


@@ -70,16 +70,6 @@ public abstract class ReplicationRequest<Request extends ReplicationRequest<Requ
this.shardId = shardId;
}
/**
* Copy constructor that creates a new request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
protected ReplicationRequest(Request request) {
this.timeout = request.timeout();
this.index = request.index();
this.consistencyLevel = request.consistencyLevel();
}
/**
* A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
*/


@@ -26,7 +26,6 @@ import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.delete.TransportDeleteAction;
import org.elasticsearch.action.index.IndexRequest;
@@ -169,7 +168,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
switch (result.operation()) {
case UPSERT:
IndexRequest upsertRequest = new IndexRequest((IndexRequest)result.action());
IndexRequest upsertRequest = result.action();
// we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request
final BytesReference upsertSourceBytes = upsertRequest.source();
indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
@@ -206,7 +205,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
});
break;
case INDEX:
IndexRequest indexRequest = new IndexRequest((IndexRequest)result.action());
IndexRequest indexRequest = result.action();
// we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request
final BytesReference indexSourceBytes = indexRequest.source();
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
@@ -236,8 +235,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
});
break;
case DELETE:
DeleteRequest deleteRequest = new DeleteRequest(result.action(), request);
deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
deleteAction.execute(result.action(), new ActionListener<DeleteResponse>() {
@Override
public void onResponse(DeleteResponse response) {
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false);


@@ -27,6 +27,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.InvalidAliasNameException;
@@ -143,7 +145,9 @@ public class AliasValidator extends AbstractComponent {
private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
try {
queryShardContext.reset(parser);
queryShardContext.parseContext().parseInnerQueryBuilder().toFilter(queryShardContext);
QueryParseContext queryParseContext = queryShardContext.parseContext();
QueryBuilder<?> queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext);
queryBuilder.toFilter(queryShardContext);
} finally {
queryShardContext.reset(null);
parser.close();


@@ -420,7 +420,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
* Creates a new QueryShardContext. The context has no types set yet; if types are required, set them via {@link QueryShardContext#setTypes(String...)}
*/
public QueryShardContext newQueryShardContext() {
return new QueryShardContext(indexSettings, nodeServicesProvider.getClient(), indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry());
return new QueryShardContext(indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry());
}
ThreadPool getThreadPool() {


@@ -25,7 +25,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import java.io.Closeable;
import java.util.HashMap;
@@ -78,7 +78,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
* and 100 afterwards so we override the positionIncrementGap if it
* doesn't match here.
*/
int overridePositionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP;
int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
if (analyzerFactory instanceof CustomAnalyzerProvider) {
((CustomAnalyzerProvider) analyzerFactory).build(this);
/*


@@ -22,7 +22,7 @@ package org.elasticsearch.index.analysis;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import java.util.ArrayList;
import java.util.List;
@@ -74,7 +74,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
tokenFilters.add(tokenFilter);
}
int positionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP;
int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
if (analyzerSettings.getAsMap().containsKey("position_offset_gap")){
if (indexSettings.getIndexVersionCreated().before(Version.V_2_0_0)){


@@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.shard.ShardId;
@@ -95,6 +96,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
static {
Map<String, IndexFieldData.Builder> buildersByTypeBuilder = new HashMap<>();
buildersByTypeBuilder.put("string", new PagedBytesIndexFieldData.Builder());
buildersByTypeBuilder.put(TextFieldMapper.CONTENT_TYPE, new PagedBytesIndexFieldData.Builder());
buildersByTypeBuilder.put(KeywordFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("float", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("double", MISSING_DOC_VALUES_BUILDER);
@@ -129,6 +131,9 @@
.put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple("string", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple(TextFieldMapper.CONTENT_TYPE, PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
.put(Tuple.tuple(TextFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)


@@ -23,6 +23,8 @@ import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
@@ -415,6 +417,24 @@ public final class OrdinalsBuilder implements Closeable {
}
}
/**
* A {@link TermsEnum} that iterates only the highest-resolution geo prefix coded terms.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapGeoPointTerms(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// accept only the max resolution terms
// todo is this necessary?
return GeoEncodingUtils.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ?
AcceptStatus.YES : AcceptStatus.END;
}
};
}
/**
* Returns the maximum document ID this builder can associate with an ordinal
*/


@@ -20,6 +20,7 @@
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
@@ -47,8 +48,10 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
}
protected static class GeoPointTermsEnum extends BaseGeoPointTermsEnum {
private final GeoPointField.TermEncoding termEncoding;
protected GeoPointTermsEnum(BytesRefIterator termsEnum, GeoPointField.TermEncoding termEncoding) {
super(termsEnum);
this.termEncoding = termEncoding;
}
public Long next() throws IOException {
@@ -56,7 +59,13 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
if (term == null) {
return null;
}
return NumericUtils.prefixCodedToLong(term);
if (termEncoding == GeoPointField.TermEncoding.PREFIX) {
return GeoEncodingUtils.prefixCodedToGeoCoded(term);
} else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) {
return NumericUtils.prefixCodedToLong(term);
}
throw new IllegalArgumentException("GeoPoint.TermEncoding should be one of: " + GeoPointField.TermEncoding.PREFIX
+ " or " + GeoPointField.TermEncoding.NUMERIC + " found: " + termEncoding);
}
}


@@ -23,10 +23,10 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.BigArrays;
@@ -92,9 +92,18 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointField.TermEncoding termEncoding = indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_3_0) ?
GeoPointField.TermEncoding.PREFIX : GeoPointField.TermEncoding.NUMERIC;
final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(OrdinalsBuilder.wrapNumeric64Bit(terms.iterator())), termEncoding);
final TermsEnum termsEnum;
final GeoPointField.TermEncoding termEncoding;
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_3_0)) {
termEncoding = GeoPointField.TermEncoding.PREFIX;
termsEnum = OrdinalsBuilder.wrapGeoPointTerms(terms.iterator());
} else {
termEncoding = GeoPointField.TermEncoding.NUMERIC;
termsEnum = OrdinalsBuilder.wrapNumeric64Bit(terms.iterator());
}
final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(termsEnum), termEncoding);
Long hashedPoint;
long numTerms = 0;
while ((hashedPoint = iter.next()) != null) {


@@ -35,9 +35,13 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper.TextFieldType;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
@@ -452,6 +456,16 @@ class DocumentParser implements Closeable {
if (builder == null) {
builder = new StringFieldMapper.Builder(currentFieldName);
}
} else if (fieldType instanceof TextFieldType) {
builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = new TextFieldMapper.Builder(currentFieldName);
}
} else if (fieldType instanceof KeywordFieldType) {
builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = new KeywordFieldMapper.Builder(currentFieldName);
}
} else if (fieldType instanceof DateFieldType) {
builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {


@@ -185,6 +185,11 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return builder;
}
public T searchQuoteAnalyzer(NamedAnalyzer searchQuoteAnalyzer) {
this.fieldType.setSearchQuoteAnalyzer(searchQuoteAnalyzer);
return builder;
}
public T includeInAll(Boolean includeInAll) {
this.includeInAll = includeInAll;
return builder;
@@ -293,7 +298,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
try {
parseCreateField(context, fields);
for (Field field : fields) {
if (!customBoost()) {
if (!customBoost()
// don't set boosts, e.g. on doc-values-only fields
&& field.fieldType().indexOptions() != IndexOptions.NONE) {
field.setBoost(fieldType().boost());
}
context.doc().add(field);
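
The added indexOptions() check is needed because Lucene (5.x) rejects index-time boosts on fields that are not indexed, such as doc-values-only fields: Field.setBoost throws rather than silently ignoring the boost. A minimal sketch of the failure the guard avoids (plain Lucene, assuming 5.x behavior):

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;

public class BoostGuardDemo {
    public static void main(String[] args) {
        FieldType storedOnly = new FieldType();
        storedOnly.setStored(true); // stored but not indexed: indexOptions() == NONE
        Field field = new Field("f", "value", storedOnly);
        try {
            field.setBoost(2.0f); // IllegalArgumentException: boosts need an indexed field
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}
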


@@ -92,6 +92,14 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
return super.indexOptions(indexOptions);
}
@Override
protected void setupFieldType(BuilderContext context) {
if (!omitNormsSet && fieldType.boost() != 1.0f) {
fieldType.setOmitNorms(false);
}
super.setupFieldType(context);
}
@Override
public KeywordFieldMapper build(BuilderContext context) {
setupFieldType(context);


@@ -31,7 +31,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@@ -63,13 +62,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
// NOTE, when adding defaults here, make sure you add them in the builder
public static final String NULL_VALUE = null;
/**
* Post 2.0 default for position_increment_gap. Set to 100 so that
* phrase queries of reasonably high slop will not match across field
* values.
*/
public static final int POSITION_INCREMENT_GAP = 100;
public static final int IGNORE_ABOVE = -1;
}
@@ -102,11 +94,6 @@
return this;
}
public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) {
this.fieldType.setSearchQuoteAnalyzer(analyzer);
return builder;
}
public Builder ignoreAbove(int ignoreAbove) {
this.ignoreAbove = ignoreAbove;
return this;
@@ -167,6 +154,9 @@
throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
}
}
builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
parseTextField(builder, fieldName, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@@ -178,30 +168,12 @@
}
builder.nullValue(propNode.toString());
iterator.remove();
} else if (propName.equals("search_quote_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + fieldName + "]");
}
builder.searchQuotedAnalyzer(analyzer);
iterator.remove();
} else if (propName.equals("position_increment_gap")) {
int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
if (newPositionIncrementGap < 0) {
throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
}
builder.positionIncrementGap(newPositionIncrementGap);
// we need to update to actual analyzers if they are not set in this case...
// so we can inject the position increment gap...
if (builder.fieldType().indexAnalyzer() == null) {
builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
}
if (builder.fieldType().searchAnalyzer() == null) {
builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
}
if (builder.fieldType().searchQuoteAnalyzer() == null) {
builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
}
iterator.remove();
} else if (propName.equals("ignore_above")) {
builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));


@@ -0,0 +1,267 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
/** A {@link FieldMapper} for full-text fields. */
public class TextFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "text";
private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new TextFieldType();
static {
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.freeze();
}
/**
* The default position_increment_gap is set to 100 so that phrase
* queries of reasonably high slop will not match across field values.
*/
public static final int POSITION_INCREMENT_GAP = 100;
}
public static class Builder extends FieldMapper.Builder<Builder, TextFieldMapper> {
private int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER;
public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
builder = this;
}
public Builder positionIncrementGap(int positionIncrementGap) {
if (positionIncrementGap < 0) {
throw new MapperParsingException("[positions_increment_gap] must be positive, got " + positionIncrementGap);
}
this.positionIncrementGap = positionIncrementGap;
return this;
}
@Override
public Builder docValues(boolean docValues) {
if (docValues) {
throw new IllegalArgumentException("[text] fields do not support doc values");
}
return super.docValues(docValues);
}
@Override
public TextFieldMapper build(BuilderContext context) {
if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap));
fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap));
fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap));
}
setupFieldType(context);
TextFieldMapper fieldMapper = new TextFieldMapper(
name, fieldType, defaultFieldType, positionIncrementGap,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
return fieldMapper.includeInAll(includeInAll);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName);
builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
parseTextField(builder, fieldName, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("position_increment_gap")) {
int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
builder.positionIncrementGap(newPositionIncrementGap);
iterator.remove();
} else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
iterator.remove();
}
}
return builder;
}
}
public static final class TextFieldType extends MappedFieldType {
public TextFieldType() {}
protected TextFieldType(TextFieldType ref) {
super(ref);
}
public TextFieldType clone() {
return new TextFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public Query nullValueQuery() {
if (nullValue() == null) {
return null;
}
return termQuery(nullValue(), null);
}
}
private Boolean includeInAll;
private int positionIncrementGap;
protected TextFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
int positionIncrementGap,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
assert fieldType.tokenized();
assert fieldType.hasDocValues() == false;
this.positionIncrementGap = positionIncrementGap;
}
@Override
protected TextFieldMapper clone() {
return (TextFieldMapper) super.clone();
}
@Override
public TextFieldMapper includeInAll(Boolean includeInAll) {
if (includeInAll != null) {
TextFieldMapper clone = clone();
clone.includeInAll = includeInAll;
return clone;
} else {
return this;
}
}
@Override
public TextFieldMapper includeInAllIfNotSet(Boolean includeInAll) {
if (includeInAll != null && this.includeInAll == null) {
TextFieldMapper clone = clone();
clone.includeInAll = includeInAll;
return clone;
} else {
return this;
}
}
@Override
public TextFieldMapper unsetIncludeInAll() {
if (includeInAll != null) {
TextFieldMapper clone = clone();
clone.includeInAll = null;
return clone;
} else {
return this;
}
}
public int getPositionIncrementGap() {
return this.positionIncrementGap;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
final String value;
if (context.externalValueSet()) {
value = context.externalValue().toString();
} else {
value = context.parser().textOrNull();
}
if (value == null) {
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().name(), value, fieldType().boost());
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().name(), value, fieldType());
fields.add(field);
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
this.includeInAll = ((TextFieldMapper) mergeWith).includeInAll;
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
doXContentAnalyzers(builder, includeDefaults);
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
} else if (includeDefaults) {
builder.field("include_in_all", true);
}
if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
builder.field("position_increment_gap", positionIncrementGap);
}
}
}
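
The Defaults.POSITION_INCREMENT_GAP of 100 in the new mapper exists so that phrase queries with modest slop cannot match across adjacent values of a multi-valued field; the Builder applies it by wrapping the analyzers in NamedAnalyzer with that gap. A self-contained sketch of the effect in plain Lucene (5.x APIs, separate from the mapper code above):

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.RAMDirectory;

public class PositionGapDemo {
    public static void main(String[] args) throws Exception {
        final Analyzer base = new WhitespaceAnalyzer();
        // put consecutive values of the same field 100 positions apart
        Analyzer gapped = new DelegatingAnalyzerWrapper(Analyzer.PER_FIELD_REUSE_STRATEGY) {
            @Override
            protected Analyzer getWrappedAnalyzer(String fieldName) {
                return base;
            }

            @Override
            public int getPositionIncrementGap(String fieldName) {
                return 100;
            }
        };
        RAMDirectory dir = new RAMDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(gapped))) {
            Document doc = new Document();
            doc.add(new TextField("body", "quick brown", Store.NO));
            doc.add(new TextField("body", "fox jumps", Store.NO)); // starts 100 positions later
            writer.addDocument(doc);
        }
        try (IndexReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // slop 2 is far below the gap, so "brown fox" does not match across the two values
            System.out.println(searcher.count(new PhraseQuery(2, "body", "brown", "fox"))); // 0
        }
    }
}
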


@@ -100,8 +100,9 @@ public class TypeParsers {
}
private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
NamedAnalyzer indexAnalyzer = null;
NamedAnalyzer searchAnalyzer = null;
NamedAnalyzer searchQuoteAnalyzer = null;
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@@ -136,18 +137,41 @@
}
searchAnalyzer = analyzer;
iterator.remove();
} else if (propName.equals("search_quote_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
searchQuoteAnalyzer = analyzer;
iterator.remove();
}
}
if (indexAnalyzer == null) {
if (searchAnalyzer != null) {
throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
}
} else if (searchAnalyzer == null) {
if (indexAnalyzer == null && searchAnalyzer != null) {
throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
}
if (searchAnalyzer == null && searchQuoteAnalyzer != null) {
throw new MapperParsingException("analyzer and search_analyzer on field [" + name + "] must be set when search_quote_analyzer is set");
}
if (searchAnalyzer == null) {
searchAnalyzer = indexAnalyzer;
}
builder.indexAnalyzer(indexAnalyzer);
builder.searchAnalyzer(searchAnalyzer);
if (searchQuoteAnalyzer == null) {
searchQuoteAnalyzer = searchAnalyzer;
}
if (indexAnalyzer != null) {
builder.indexAnalyzer(indexAnalyzer);
}
if (searchAnalyzer != null) {
builder.searchAnalyzer(searchAnalyzer);
}
if (searchQuoteAnalyzer != null) {
builder.searchQuoteAnalyzer(searchQuoteAnalyzer);
}
}
/**
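
The rewritten analyzer handling above boils down to a small cascade: search_analyzer falls back to analyzer, search_quote_analyzer falls back to search_analyzer, and the two MapperParsingExceptions forbid setting a more specific analyzer without the more general one. A toy sketch of the same resolution logic (plain strings instead of NamedAnalyzer, names hypothetical):

public class AnalyzerCascadeSketch {
    static String[] resolve(String analyzer, String search, String searchQuote) {
        if (analyzer == null && search != null) {
            throw new IllegalArgumentException("analyzer must be set when search_analyzer is set");
        }
        if (search == null && searchQuote != null) {
            throw new IllegalArgumentException("analyzer and search_analyzer must be set when search_quote_analyzer is set");
        }
        if (search == null) {
            search = analyzer; // search defaults to the index analyzer
        }
        if (searchQuote == null) {
            searchQuote = search; // quoted phrases default to the search analyzer
        }
        return new String[] { analyzer, search, searchQuote };
    }

    public static void main(String[] args) {
        // only "analyzer" given: all three resolve to it
        System.out.println(java.util.Arrays.toString(resolve("standard", null, null)));
    }
}
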


@@ -258,4 +258,23 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>>
}
return queries;
}
@Override
public final QueryBuilder<?> rewrite(QueryRewriteContext queryShardContext) throws IOException {
QueryBuilder rewritten = doRewrite(queryShardContext);
if (rewritten == this) {
return rewritten;
}
if (queryName() != null && rewritten.queryName() == null) { // we inherit the name
rewritten.queryName(queryName());
}
if (boost() != DEFAULT_BOOST && rewritten.boost() == DEFAULT_BOOST) {
rewritten.boost(boost());
}
return rewritten;
}
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryShardContext) throws IOException {
return this;
}
}


@@ -33,6 +33,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
@@ -272,6 +273,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
if (booleanQuery.clauses().isEmpty()) {
return new MatchAllDocsQuery();
}
final String minimumShouldMatch;
if (context.isFilter() && this.minimumShouldMatch == null && shouldClauses.size() > 0) {
minimumShouldMatch = "1";
@@ -346,4 +348,40 @@
out.writeBoolean(disableCoord);
out.writeOptionalString(minimumShouldMatch);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
BoolQueryBuilder newBuilder = new BoolQueryBuilder();
boolean changed = false;
final int clauses = mustClauses.size() + mustNotClauses.size() + filterClauses.size() + shouldClauses.size();
if (clauses == 0) {
return new MatchAllQueryBuilder().boost(boost()).queryName(queryName());
}
changed |= rewriteClauses(queryRewriteContext, mustClauses, newBuilder::must);
changed |= rewriteClauses(queryRewriteContext, mustNotClauses, newBuilder::mustNot);
changed |= rewriteClauses(queryRewriteContext, filterClauses, newBuilder::filter);
changed |= rewriteClauses(queryRewriteContext, shouldClauses, newBuilder::should);
if (changed) {
newBuilder.adjustPureNegative = adjustPureNegative;
newBuilder.disableCoord = disableCoord;
newBuilder.minimumShouldMatch = minimumShouldMatch;
newBuilder.boost(boost());
newBuilder.queryName(queryName());
return newBuilder;
}
return this;
}
private static boolean rewriteClauses(QueryRewriteContext queryRewriteContext, List<QueryBuilder<?>> builders, Consumer<QueryBuilder<?>> consumer) throws IOException {
boolean changed = false;
for (QueryBuilder builder : builders) {
QueryBuilder result = builder.rewrite(queryRewriteContext);
if (result != builder) {
changed = true;
}
consumer.accept(result);
}
return changed;
}
}
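
The changed flag above is what keeps rewriting cheap and terminating: a replacement BoolQueryBuilder is only allocated when at least one clause actually produced a new object, otherwise the original is returned by identity. A generic sketch of that pattern (toy types, Java 8):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.UnaryOperator;

public class ChangedFlagSketch {
    // rewrite every child into sink; report whether any child changed identity
    static <T> boolean rewriteAll(List<T> children, UnaryOperator<T> rewriter, Consumer<T> sink) {
        boolean changed = false;
        for (T child : children) {
            T result = rewriter.apply(child);
            changed |= result != child; // identity check, as in rewriteClauses above
            sink.accept(result);
        }
        return changed;
    }

    public static void main(String[] args) {
        List<String> out = new ArrayList<>();
        // a "rewrite" that trims, returning the same instance when nothing changes
        boolean changed = rewriteAll(Arrays.asList("done", "  trim  "),
                s -> s.trim().equals(s) ? s : s.trim(), out::add);
        System.out.println(changed + " " + out); // true [done, trim]
    }
}
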


@@ -158,4 +158,16 @@ public class BoostingQueryBuilder extends AbstractQueryBuilder<BoostingQueryBuil
out.writeQuery(negativeQuery);
out.writeFloat(negativeBoost);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder positiveQuery = this.positiveQuery.rewrite(queryRewriteContext);
QueryBuilder negativeQuery = this.negativeQuery.rewrite(queryRewriteContext);
if (positiveQuery != this.positiveQuery || negativeQuery != this.negativeQuery) {
BoostingQueryBuilder newQueryBuilder = new BoostingQueryBuilder(positiveQuery, negativeQuery);
newQueryBuilder.negativeBoost = negativeBoost;
return newQueryBuilder;
}
return this;
}
}


@@ -104,4 +104,13 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScor
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeQuery(filterBuilder);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder rewrite = filterBuilder.rewrite(queryRewriteContext);
if (rewrite != filterBuilder) {
return new ConstantScoreQueryBuilder(rewrite);
}
return this;
}
}


@@ -19,12 +19,11 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
@@ -32,80 +31,50 @@ import java.io.IOException;
* A {@link QueryBuilder} that is a stand in replacement for an empty query clause in the DSL.
* The current DSL allows parsing inner queries / filters like "{ }"; in order to have a
* valid non-null representation of these clauses, which actually do nothing, we can use this class.
*
* This builder has no corresponding parser and it is not registered under the query name. It is
* intended to be used internally as a stand-in for nested queries that are left empty and should
* be ignored upstream.
*/
public class EmptyQueryBuilder extends ToXContentToBytes implements QueryBuilder<EmptyQueryBuilder> {
public final class EmptyQueryBuilder extends AbstractQueryBuilder<EmptyQueryBuilder> {
public static final String NAME = "empty_query";
/** the one and only empty query builder */
public static final EmptyQueryBuilder PROTOTYPE = new EmptyQueryBuilder();
// prevent instances other than prototype
private EmptyQueryBuilder() {
super(XContentType.JSON);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return null;
}
@Override
public String getName() {
return getWriteableName();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
}
@Override
public Query toQuery(QueryShardContext context) throws IOException {
// empty
return null;
protected void doWriteTo(StreamOutput out) throws IOException {
}
@Override
protected EmptyQueryBuilder doReadFrom(StreamInput in) throws IOException {
return new EmptyQueryBuilder();
}
@Override
public Query toFilter(QueryShardContext context) throws IOException {
// empty
return null;
protected int doHashCode() {
return 31;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public EmptyQueryBuilder readFrom(StreamInput in) throws IOException {
return EmptyQueryBuilder.PROTOTYPE;
}
@Override
public EmptyQueryBuilder queryName(String queryName) {
//no-op
return this;
}
@Override
public String queryName() {
return null;
}
@Override
public float boost() {
return -1;
}
@Override
public EmptyQueryBuilder boost(float boost) {
//no-op
return this;
protected boolean doEquals(EmptyQueryBuilder other) {
return true;
}
}


@@ -43,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Objects;
@@ -62,7 +61,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
private final String fieldName;
private ShapeBuilder shape;
private final ShapeBuilder shape;
private SpatialStrategy strategy;
@@ -236,13 +235,12 @@
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
ShapeBuilder shapeToQuery = shape;
if (shapeToQuery == null) {
GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
shapeToQuery = fetch(context.getClient(), getRequest, indexedShapePath);
protected Query doToQuery(QueryShardContext context) {
if (shape == null) {
throw new UnsupportedOperationException("query must be rewritten first");
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
final ShapeBuilder shapeToQuery = shape;
final MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "Failed to find geo_shape field [" + fieldName + "]");
}
@@ -252,7 +250,7 @@
throw new QueryShardException(context, "Field [" + fieldName + "] is not a geo_shape");
}
GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
final GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy();
if (this.strategy != null) {
@@ -449,4 +447,14 @@
public String getWriteableName() {
return NAME;
}
@Override
protected QueryBuilder<GeoShapeQueryBuilder> doRewrite(QueryRewriteContext queryShardContext) throws IOException {
if (this.shape == null) {
GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
ShapeBuilder shape = fetch(queryShardContext.getClient(), getRequest, indexedShapePath);
return new GeoShapeQueryBuilder(this.fieldName, shape).relation(relation).strategy(strategy);
}
return this;
}
}


@@ -26,7 +26,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
@@ -397,4 +396,18 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
out.writeBoolean(false);
}
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder rewrite = query.rewrite(queryRewriteContext);
if (rewrite != query) {
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(type, rewrite);
hasChildQueryBuilder.minChildren = minChildren;
hasChildQueryBuilder.maxChildren = maxChildren;
hasChildQueryBuilder.scoreMode = scoreMode;
hasChildQueryBuilder.queryInnerHits = queryInnerHits;
return hasChildQueryBuilder;
}
return this;
}
}


@@ -22,7 +22,6 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
@@ -256,4 +255,16 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
protected int doHashCode() {
return Objects.hash(query, type, score, innerHit);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryShardContext) throws IOException {
QueryBuilder rewrite = query.rewrite(queryShardContext);
if (rewrite != query) {
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(type, rewrite);
hasParentQueryBuilder.score = score;
hasParentQueryBuilder.innerHit = innerHit;
return hasParentQueryBuilder;
}
return this;
}
}


@@ -140,4 +140,14 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder<IndicesQueryBuilde
Arrays.equals(indices, other.indices) && // otherwise we are comparing pointers
Objects.equals(noMatchQuery, other.noMatchQuery);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryShardContext) throws IOException {
QueryBuilder<?> newInnerQuery = innerQuery.rewrite(queryShardContext);
QueryBuilder<?> newNoMatchQuery = noMatchQuery.rewrite(queryShardContext);
if (newInnerQuery != innerQuery || newNoMatchQuery != noMatchQuery) {
// use the rewritten queries, otherwise the rewrite is lost
return new IndicesQueryBuilder(newInnerQuery, indices).noMatchQuery(newNoMatchQuery);
}
return this;
}
}


@@ -1050,4 +1050,10 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
Objects.equals(include, other.include) &&
Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
// TODO this needs heavy cleanups before we can rewrite it
return this;
}
}


@@ -225,4 +225,12 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder rewrite = query.rewrite(queryRewriteContext);
if (rewrite != query) {
return new NestedQueryBuilder(path, rewrite).scoreMode(scoreMode);
}
return this;
}
}


@@ -72,4 +72,28 @@ public interface QueryBuilder<QB extends QueryBuilder<QB>> extends NamedWriteabl
* Returns the name that identifies uniquely the query
*/
String getName();
/**
* Rewrites this query builder into its primitive form. By default this method returns the builder itself. If the builder
* did not change, the identity reference must be returned; otherwise the builder will be rewritten infinitely.
*/
default QueryBuilder<?> rewrite(QueryRewriteContext queryShardContext) throws IOException {
return this;
}
/**
* Rewrites the given query into its primitive form. Queries that for instance fetch resources from remote hosts or
* can simplify / optimize themselves should do their heavy lifting during {@link #rewrite(QueryRewriteContext)}. This method
* rewrites the query until it doesn't change anymore.
* @throws IOException if an {@link IOException} occurs
*/
static QueryBuilder<?> rewriteQuery(QueryBuilder<?> original, QueryRewriteContext context) throws IOException {
QueryBuilder builder = original;
for (QueryBuilder rewrittenBuilder = builder.rewrite(context); rewrittenBuilder != builder;
rewrittenBuilder = builder.rewrite(context)) {
builder = rewrittenBuilder;
}
return builder;
}
}
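
rewriteQuery above loops until the reference stops changing, which is why the javadoc insists that an unchanged builder return this. A self-contained toy illustrating the contract (toy types, not the ES API), modeled on how GeoShapeQueryBuilder resolves its indexed shape during rewrite rather than in doToQuery:

interface Rewriteable {
    Rewriteable rewrite();

    // the same fix-point loop as QueryBuilder.rewriteQuery above
    static Rewriteable rewriteToFixpoint(Rewriteable original) {
        Rewriteable builder = original;
        for (Rewriteable rewritten = builder.rewrite(); rewritten != builder; rewritten = builder.rewrite()) {
            builder = rewritten;
        }
        return builder;
    }
}

final class UnresolvedShapeQuery implements Rewriteable {
    @Override
    public Rewriteable rewrite() {
        // heavy lifting happens here, e.g. fetching an indexed shape from another index
        return new ResolvedShapeQuery("fetched-shape");
    }
}

final class ResolvedShapeQuery implements Rewriteable {
    final String shape;

    ResolvedShapeQuery(String shape) {
        this.shape = shape;
    }

    @Override
    public Rewriteable rewrite() {
        return this; // unchanged: returning the identity terminates the loop
    }
}

class RewriteDemo {
    public static void main(String[] args) {
        Rewriteable result = Rewriteable.rewriteToFixpoint(new UnresolvedShapeQuery());
        System.out.println(result.getClass().getSimpleName()); // ResolvedShapeQuery
    }
}
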


@@ -106,7 +106,7 @@ public class QueryParseContext {
token = parser.nextToken();
if (token == XContentParser.Token.END_OBJECT) {
// empty query
return EmptyQueryBuilder.PROTOTYPE;
return new EmptyQueryBuilder();
}
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object");


@@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;
/**
* Context object used to rewrite {@link QueryBuilder} instances into a simplified version.
*/
public class QueryRewriteContext {
protected final ScriptService scriptService;
protected final IndexSettings indexSettings;
protected final IndicesQueriesRegistry indicesQueriesRegistry;
protected final QueryParseContext parseContext;
public QueryRewriteContext(IndexSettings indexSettings, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry) {
this.scriptService = scriptService;
this.indexSettings = indexSettings;
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.parseContext = new QueryParseContext(indicesQueriesRegistry);
}
/**
* Returns a client to fetch resources from local or remote nodes.
*/
public final Client getClient() {
return scriptService.getClient();
}
/**
* Returns the index settings for this context. This might return null if the
* context has no index scope.
*/
public final IndexSettings getIndexSettings() {
return indexSettings;
}
/**
* Returns a script service to fetch scripts.
*/
public final ScriptService getScriptService() {
return scriptService;
}
/**
* Returns a new {@link QueryParseContext} to parse template or wrapped queries.
*/
public QueryParseContext newParseContext() {
QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry);
queryParseContext.parseFieldMatcher(parseContext.parseFieldMatcher());
return queryParseContext;
}
}


@@ -26,7 +26,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
@ -46,15 +45,13 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext;
@ -63,7 +60,6 @@ import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -72,15 +68,13 @@ import static java.util.Collections.unmodifiableMap;
/**
 * Context object used to create Lucene queries at the shard level.
*/
public class QueryShardContext {
public class QueryShardContext extends QueryRewriteContext {
private final MapperService mapperService;
private final ScriptService scriptService;
private final SimilarityService similarityService;
private final BitsetFilterCache bitsetFilterCache;
private final IndexFieldDataService indexFieldDataService;
private final IndexSettings indexSettings;
private final Client client;
private String[] types = Strings.EMPTY_ARRAY;
public void setTypes(String... types) {
@ -93,35 +87,31 @@ public class QueryShardContext {
private final Map<String, Query> namedQueries = new HashMap<>();
private final MapperQueryParser queryParser = new MapperQueryParser(this);
private final IndicesQueriesRegistry indicesQueriesRegistry;
private boolean allowUnmappedFields;
private boolean mapUnmappedFieldAsString;
private NestedScope nestedScope;
private QueryParseContext parseContext;
boolean isFilter; // pkg private for testing
public QueryShardContext(IndexSettings indexSettings, Client client, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
final IndicesQueriesRegistry indicesQueriesRegistry) {
super(indexSettings, scriptService, indicesQueriesRegistry);
this.indexSettings = indexSettings;
this.scriptService = scriptService;
this.client = client;
this.similarityService = similarityService;
this.mapperService = mapperService;
this.bitsetFilterCache = bitsetFilterCache;
this.indexFieldDataService = indexFieldDataService;
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.parseContext = new QueryParseContext(indicesQueriesRegistry);
}
public QueryShardContext(QueryShardContext source) {
this(source.indexSettings, source.client, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry);
this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry);
this.types = source.getTypes();
}
public QueryShardContext clone() {
return new QueryShardContext(indexSettings, client, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
return new QueryShardContext(indexSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
}
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
@ -146,10 +136,6 @@ public class QueryShardContext {
this.parseContext.reset(jp);
}
public Index index() {
return this.mapperService.getIndexSettings().getIndex();
}
public InnerHitsSubSearchContext getInnerHitsContext(XContentParser parser) throws IOException {
return InnerHitsQueryParserHelper.parse(parser);
}
@ -158,10 +144,6 @@ public class QueryShardContext {
return mapperService.analysisService();
}
public ScriptService getScriptService() {
return scriptService;
}
public MapperService getMapperService() {
return mapperService;
}
@ -210,10 +192,6 @@ public class QueryShardContext {
return unmodifiableMap(new HashMap<>(namedQueries));
}
public void combineNamedQueries(QueryShardContext context) {
namedQueries.putAll(context.namedQueries);
}
/**
* Return whether we are currently parsing a filter or a query.
*/
@ -277,7 +255,7 @@ public class QueryShardContext {
if (fieldMapping != null || allowUnmappedFields) {
return fieldMapping;
} else if (mapUnmappedFieldAsString) {
StringFieldMapper.Builder builder = new StringFieldMapper.Builder(name);
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
return builder.build(new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(1))).fieldType();
} else {
throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name);
@ -340,18 +318,6 @@ public class QueryShardContext {
return false;
}
/*
* Executes the given template, and returns the response.
*/
public BytesReference executeQueryTemplate(Template template) {
ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
return (BytesReference) executable.run();
}
public Client getClient() {
return client;
}
public ParsedQuery parse(BytesReference source) {
XContentParser parser = null;
try {
@ -384,7 +350,7 @@ public class QueryShardContext {
reset(parser);
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query filter = parseContext().parseInnerQueryBuilder().toFilter(this);
Query filter = QueryBuilder.rewriteQuery(parseContext().parseInnerQueryBuilder(), this).toFilter(this);
if (filter == null) {
return null;
}
@ -425,12 +391,16 @@ public class QueryShardContext {
}
}
private static Query toQuery(QueryBuilder<?> queryBuilder, QueryShardContext context) throws IOException {
Query query = queryBuilder.toQuery(context);
private static Query toQuery(final QueryBuilder<?> queryBuilder, final QueryShardContext context) throws IOException {
final Query query = QueryBuilder.rewriteQuery(queryBuilder, context).toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
return Queries.newMatchNoDocsQuery();
}
return query;
}
public final Index index() {
return indexSettings.getIndex();
}
}

View File

@ -25,11 +25,13 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
@ -100,14 +102,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
BytesReference querySource = context.executeQueryTemplate(template);
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryShardContext contextCopy = new QueryShardContext(context);
contextCopy.reset(qSourceParser);
QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder();
context.combineNamedQueries(contextCopy);
return result.toQuery(context);
}
throw new UnsupportedOperationException("this query must be rewritten first");
}
@Override
@ -130,4 +125,22 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
protected boolean doEquals(TemplateQueryBuilder other) {
return Objects.equals(template, other.template);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
ExecutableScript executable = queryRewriteContext.getScriptService().executable(template,
ScriptContext.Standard.SEARCH, Collections.emptyMap());
BytesReference querySource = (BytesReference) executable.run();
final QueryParseContext queryParseContext = queryRewriteContext.newParseContext();
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
queryParseContext.reset(qSourceParser);
final QueryBuilder<?> queryBuilder = queryParseContext.parseInnerQueryBuilder();
if (boost() != DEFAULT_BOOST || queryName() != null) {
final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
boolQueryBuilder.must(queryBuilder);
return boolQueryBuilder;
}
return queryBuilder;
}
}
}
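Since doToQuery now refuses an unrewritten template, callers are expected to rewrite first. A hedged usage sketch (the Template constructor shape and script body are assumptions, not taken from this change):

// Hypothetical usage: the rewrite executes the template script and parses its output.
TemplateQueryBuilder templateQuery = new TemplateQueryBuilder(new Template("{\"match_all\": {}}"));
QueryBuilder<?> concrete = templateQuery.rewrite(rewriteContext); // no longer a TemplateQueryBuilder
Query lucene = concrete.toQuery(shardContext);                    // safe after the rewrite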

View File

@ -226,22 +226,13 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
List<Object> terms;
TermsLookup termsLookup = null;
if (this.termsLookup != null) {
termsLookup = new TermsLookup(this.termsLookup);
if (termsLookup.index() == null) {
termsLookup.index(context.index().getName());
}
Client client = context.getClient();
terms = fetch(termsLookup, client);
} else {
terms = values;
if (termsLookup != null) {
throw new UnsupportedOperationException("query must be rewritten first");
}
if (terms == null || terms.isEmpty()) {
if (values == null || values.isEmpty()) {
return Queries.newMatchNoDocsQuery();
}
return handleTermsQuery(terms, fieldName, context);
return handleTermsQuery(values, fieldName, context);
}
private List<Object> fetch(TermsLookup termsLookup, Client client) {
@ -323,4 +314,22 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
Objects.equals(values, other.values) &&
Objects.equals(termsLookup, other.termsLookup);
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
if (this.termsLookup != null) {
TermsLookup termsLookup = new TermsLookup(this.termsLookup);
if (termsLookup.index() == null) { // TODO this should go away?
if (queryRewriteContext.getIndexSettings() != null) {
termsLookup.index(queryRewriteContext.getIndexSettings().getIndex().getName());
} else {
return this; // can't rewrite until we have index scope on the shard
}
}
List<Object> values = fetch(termsLookup, queryRewriteContext.getClient());
return new TermsQueryBuilder(this.fieldName, values);
}
return this;
}
}
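A usage sketch of the lookup path (the index, type, id, and path values are illustrative, and the two-argument builder and four-argument TermsLookup constructors are assumptions):

// Hypothetical lookup: terms are fetched via the context's client during rewrite.
TermsQueryBuilder lookupQuery = new TermsQueryBuilder("user", new TermsLookup("users", "user", "2", "followers"));
QueryBuilder<?> rewritten = lookupQuery.rewrite(rewriteContext); // becomes a plain values-based terms query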

View File

@ -105,14 +105,7 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder<WrapperQueryBuilde
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) {
final QueryShardContext contextCopy = new QueryShardContext(context);
contextCopy.reset(qSourceParser);
contextCopy.parseFieldMatcher(context.parseFieldMatcher());
QueryBuilder<?> result = contextCopy.parseContext().parseInnerQueryBuilder();
context.combineNamedQueries(contextCopy);
return result.toQuery(context);
}
throw new UnsupportedOperationException("this query must be rewritten first");
}
@Override
@ -134,4 +127,22 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder<WrapperQueryBuilde
protected boolean doEquals(WrapperQueryBuilder other) {
return Arrays.equals(source, other.source); // otherwise we compare pointers
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext context) throws IOException {
try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) {
QueryParseContext parseContext = context.newParseContext();
parseContext.reset(qSourceParser);
final QueryBuilder<?> queryBuilder = parseContext.parseInnerQueryBuilder();
if (boost() != DEFAULT_BOOST || queryName() != null) {
final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
boolQueryBuilder.must(queryBuilder);
return boolQueryBuilder;
}
return queryBuilder;
}
}
}
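As with the template query, the wrapper now unwraps during rewrite rather than in doToQuery. A minimal sketch (the JSON body is illustrative):

// Hypothetical usage: the embedded JSON is parsed into a concrete builder at rewrite time.
WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{\"term\": {\"user\": \"kimchy\"}}");
QueryBuilder<?> unwrapped = wrapper.rewrite(rewriteContext); // e.g. a term query builder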

View File

@ -34,6 +34,7 @@ import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder;
@ -197,22 +198,22 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
if (query != null) {
builder.field("query");
builder.field(FunctionScoreQueryParser.QUERY_FIELD.getPreferredName());
query.toXContent(builder, params);
}
builder.startArray("functions");
builder.startArray(FunctionScoreQueryParser.FUNCTIONS_FIELD.getPreferredName());
for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) {
filterFunctionBuilder.toXContent(builder, params);
}
builder.endArray();
builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT));
builder.field(FunctionScoreQueryParser.SCORE_MODE_FIELD.getPreferredName(), scoreMode.name().toLowerCase(Locale.ROOT));
if (boostMode != null) {
builder.field("boost_mode", boostMode.name().toLowerCase(Locale.ROOT));
builder.field(FunctionScoreQueryParser.BOOST_MODE_FIELD.getPreferredName(), boostMode.name().toLowerCase(Locale.ROOT));
}
builder.field("max_boost", maxBoost);
builder.field(FunctionScoreQueryParser.MAX_BOOST_FIELD.getPreferredName(), maxBoost);
if (minScore != null) {
builder.field("min_score", minScore);
builder.field(FunctionScoreQueryParser.MIN_SCORE_FIELD.getPreferredName(), minScore);
}
printBoostAndQueryName(builder);
builder.endObject();
@ -358,7 +359,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("filter");
builder.field(FunctionScoreQueryParser.FILTER_FIELD.getPreferredName());
filter.toXContent(builder, params);
scoreFunction.toXContent(builder, params);
builder.endObject();
@ -393,5 +394,33 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
public FilterFunctionBuilder readFrom(StreamInput in) throws IOException {
return new FilterFunctionBuilder(in.readQuery(), in.readScoreFunction());
}
public FilterFunctionBuilder rewrite(QueryRewriteContext context) throws IOException {
QueryBuilder<?> rewrite = filter.rewrite(context);
if (rewrite != filter) {
return new FilterFunctionBuilder(rewrite, scoreFunction);
}
return this;
}
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder<?> queryBuilder = this.query.rewrite(queryRewriteContext);
FilterFunctionBuilder[] rewrittenBuilders = new FilterFunctionBuilder[this.filterFunctionBuilders.length];
boolean rewritten = false;
for (int i = 0; i < rewrittenBuilders.length; i++) {
FilterFunctionBuilder rewrite = filterFunctionBuilders[i].rewrite(queryRewriteContext);
rewritten |= rewrite != filterFunctionBuilders[i];
rewrittenBuilders[i] = rewrite;
}
if (queryBuilder != query || rewritten) {
FunctionScoreQueryBuilder newQueryBuilder = new FunctionScoreQueryBuilder(queryBuilder, rewrittenBuilders);
newQueryBuilder.scoreMode = scoreMode;
newQueryBuilder.minScore = minScore;
newQueryBuilder.maxBoost = maxBoost;
return newQueryBuilder;
}
return this;
}
}
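A hedged sketch showing that the rewrite recurses into both the base query and each filter function (the builders and JSON bodies are illustrative):

// Hypothetical usage: both wrapped queries are unwrapped by a single rewrite pass.
FunctionScoreQueryBuilder functionScoreQuery = new FunctionScoreQueryBuilder(
        new WrapperQueryBuilder("{\"match_all\": {}}"),
        new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
                new FunctionScoreQueryBuilder.FilterFunctionBuilder(
                        new WrapperQueryBuilder("{\"term\": {\"user\": \"kimchy\"}}"),
                        new RandomScoreFunctionBuilder())
        });
QueryBuilder<?> rewritten = functionScoreQuery.rewrite(rewriteContext);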

View File

@ -19,10 +19,6 @@
package org.elasticsearch.index.query.functionscore;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
@ -39,6 +35,10 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.functionscore.weight.WeightBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Parser for function_score query
*/
@ -50,6 +50,13 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "you can either define [functions] array or a single function, not both. ";
public static final ParseField WEIGHT_FIELD = new ParseField("weight");
public static final ParseField QUERY_FIELD = new ParseField("query");
public static final ParseField FILTER_FIELD = new ParseField("filter");
public static final ParseField FUNCTIONS_FIELD = new ParseField("functions");
public static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
public static final ParseField BOOST_MODE_FIELD = new ParseField("boost_mode");
public static final ParseField MAX_BOOST_FIELD = new ParseField("max_boost");
public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score");
private final ScoreFunctionParserMapper functionParserMapper;
@ -86,48 +93,69 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if ("query".equals(currentFieldName)) {
query = parseContext.parseInnerQueryBuilder();
} else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text());
} else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) {
combineFunction = CombineFunction.fromString(parser.text());
} else if ("max_boost".equals(currentFieldName) || "maxBoost".equals(currentFieldName)) {
maxBoost = parser.floatValue();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("min_score".equals(currentFieldName) || "minScore".equals(currentFieldName)) {
minScore = parser.floatValue();
} else if ("functions".equals(currentFieldName)) {
if (singleFunctionFound) {
String errorString = "already found [" + singleFunctionName + "], now encountering [functions].";
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
}
functionArrayFound = true;
currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctionBuilders);
} else {
if (singleFunctionFound) {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName);
}
if (functionArrayFound) {
String errorString = "already found [functions] array, now encountering [" + currentFieldName + "].";
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
}
singleFunctionFound = true;
singleFunctionName = currentFieldName;
ScoreFunctionBuilder<?> scoreFunction;
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
scoreFunction = new WeightBuilder().setWeight(parser.floatValue());
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (query != null) {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. [query] is already defined.", FunctionScoreQueryBuilder.NAME);
}
query = parseContext.parseInnerQueryBuilder();
} else {
// we try to parse a score function. If there is no score
// function for the current field name,
// functionParserMapper.get() will throw an Exception.
scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser);
if (singleFunctionFound) {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName);
}
if (functionArrayFound) {
String errorString = "already found [functions] array, now encountering [" + currentFieldName + "].";
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
}
singleFunctionFound = true;
singleFunctionName = currentFieldName;
// we try to parse a score function. If there is no score function for the current field name,
// functionParserMapper.get() may throw an Exception.
ScoreFunctionBuilder<?> scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser);
filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction));
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FUNCTIONS_FIELD)) {
if (singleFunctionFound) {
String errorString = "already found [" + singleFunctionName + "], now encountering [functions].";
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
}
functionArrayFound = true;
currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctionBuilders);
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. array [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName);
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_MODE_FIELD)) {
combineFunction = CombineFunction.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_BOOST_FIELD)) {
maxBoost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) {
minScore = parser.floatValue();
} else {
if (singleFunctionFound) {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName);
}
if (functionArrayFound) {
String errorString = "already found [functions] array, now encountering [" + currentFieldName + "].";
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
}
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(new WeightBuilder().setWeight(parser.floatValue())));
singleFunctionFound = true;
singleFunctionName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. field [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName);
}
}
filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction));
}
}
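To make the token-type dispatch above concrete, a hedged example of a request body this parser accepts (field values are illustrative):

// Illustrative request body: one branch per token type in the loop above.
String functionScoreJson = "{\n"
        + "  \"function_score\": {\n"
        + "    \"query\": { \"match_all\": {} },\n"       // START_OBJECT -> QUERY_FIELD
        + "    \"functions\": [ { \"weight\": 2.0 } ],\n" // START_ARRAY  -> FUNCTIONS_FIELD
        + "    \"score_mode\": \"sum\",\n"                // value token  -> SCORE_MODE_FIELD
        + "    \"max_boost\": 10\n"                       // value token  -> MAX_BOOST_FIELD
        + "  }\n"
        + "}";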
@ -167,21 +195,23 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
functionWeight = parser.floatValue();
} else {
if ("filter".equals(currentFieldName)) {
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
filter = parseContext.parseInnerQueryBuilder();
} else {
if (scoreFunction != null) {
throw new ParsingException(parser.getTokenLocation(), "failed to parse function_score functions. already found [{}], now encountering [{}].", scoreFunction.getName(), currentFieldName);
}
// do not need to check null here,
// functionParserMapper throws exception if parser
// non-existent
// do not need to check null here, functionParserMapper does it already
ScoreFunctionParser functionParser = functionParserMapper.get(parser.getTokenLocation(), currentFieldName);
scoreFunction = functionParser.fromXContent(parseContext, parser);
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
functionWeight = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. field [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName);
}
}
}
if (functionWeight != null) {

View File

@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
@ -91,7 +92,8 @@ public class NestedInnerQueryParseSupport {
if (path != null) {
setPathLevel();
try {
innerFilter = parseContext.parseInnerQueryBuilder().toFilter(this.shardContext);
innerFilter = QueryBuilder.rewriteQuery(parseContext.parseInnerQueryBuilder(),
this.shardContext).toFilter(this.shardContext);
} finally {
resetPathLevel();
}
@ -147,7 +149,8 @@ public class NestedInnerQueryParseSupport {
try {
XContentParser innerParser = XContentHelper.createParser(source);
parseContext.parser(innerParser);
innerFilter = parseContext.parseInnerQueryBuilder().toFilter(this.shardContext);
innerFilter = QueryBuilder.rewriteQuery(parseContext.parseInnerQueryBuilder(),
this.shardContext).toFilter(this.shardContext);
filterParsed = true;
return innerFilter;
} finally {

View File

@ -248,7 +248,7 @@ public class IndexShard extends AbstractIndexShardComponent {
this.engineConfig = newEngineConfig(translogConfig, cachingPolicy);
this.suspendableRefContainer = new SuspendableRefContainer();
this.searcherWrapper = indexSearcherWrapper;
QueryShardContext queryShardContext = new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry());
QueryShardContext queryShardContext = new QueryShardContext(idxSettings, indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry());
this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryShardContext);
}

View File

@ -46,6 +46,7 @@ import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.search.dfs.AggregatedDfs;
@ -160,7 +161,8 @@ public class TermVectorsService {
private static boolean isValidField(MappedFieldType fieldType) {
// must be a string
if (fieldType instanceof StringFieldMapper.StringFieldType == false
&& fieldType instanceof KeywordFieldMapper.KeywordFieldType == false) {
&& fieldType instanceof KeywordFieldMapper.KeywordFieldType == false
&& fieldType instanceof TextFieldMapper.TextFieldType == false) {
return false;
}
// and must be indexed

View File

@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.ShortFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
@ -97,6 +98,7 @@ public class IndicesModule extends AbstractModule {
registerMapper(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser());
registerMapper(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser());
registerMapper(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
registerMapper(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
registerMapper(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
registerMapper(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
registerMapper(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser());

View File

@ -115,7 +115,6 @@ public class RestNodesAction extends AbstractCatAction {
table.startHeaders();
table.addCell("id", "default:false;alias:id,nodeId;desc:unique node id");
table.addCell("pid", "default:false;alias:p;desc:process id");
table.addCell("host", "alias:h;desc:host name");
table.addCell("ip", "alias:i;desc:ip address");
table.addCell("port", "default:false;alias:po;desc:bound transport port");
@ -242,7 +241,6 @@ public class RestNodesAction extends AbstractCatAction {
table.addCell(fullId ? node.id() : Strings.substring(node.getId(), 0, 4));
table.addCell(info == null ? null : info.getProcess().getId());
table.addCell(node.getHostName());
table.addCell(node.getHostAddress());
if (node.address() instanceof InetSocketTransportAddress) {
table.addCell(((InetSocketTransportAddress) node.address()).address().getPort());

View File

@ -489,6 +489,10 @@ public class ScriptService extends AbstractComponent implements Closeable {
return scriptMetrics.stats();
}
public Client getClient() {
return client;
}
/**
* A small listener for the script cache that calls each
* {@code ScriptEngineService}'s {@code scriptRemoved} method when the

View File

@ -543,7 +543,6 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher,
defaultSearchTimeout, fetchPhase);
SearchContext.setCurrent(context);
try {
if (request.scroll() != null) {
context.scrollContext(new ScrollContext());

View File

@ -81,7 +81,6 @@ public class SearchServiceTransportAction extends AbstractComponent {
super(settings);
this.transportService = transportService;
this.searchService = searchService;
transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ScrollFreeContextRequest::new, ThreadPool.Names.SAME, new FreeContextTransportHandler<>());
transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, SearchFreeContextRequest::new, ThreadPool.Names.SAME, new FreeContextTransportHandler<SearchFreeContextRequest>());
transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, ClearScrollContextsRequest::new, ThreadPool.Names.SAME, new ClearScrollContextsTransportHandler());

View File

@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
@ -1408,4 +1409,5 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
&& Objects.equals(version, other.version)
&& Objects.equals(profile, other.profile);
}
}

View File

@ -355,7 +355,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
targetOptionsBuilder.options(highlighterBuilder.options);
}
if (highlighterBuilder.highlightQuery != null) {
targetOptionsBuilder.highlightQuery(highlighterBuilder.highlightQuery.toQuery(context));
targetOptionsBuilder.highlightQuery(QueryBuilder.rewriteQuery(highlighterBuilder.highlightQuery, context).toQuery(context));
}
}

View File

@ -149,7 +149,7 @@ public class QueryRescorerBuilder extends RescoreBuilder<QueryRescorerBuilder> {
public QueryRescoreContext build(QueryShardContext context) throws IOException {
org.elasticsearch.search.rescore.QueryRescorer rescorer = new org.elasticsearch.search.rescore.QueryRescorer();
QueryRescoreContext queryRescoreContext = new QueryRescoreContext(rescorer);
queryRescoreContext.setQuery(this.queryBuilder.toQuery(context));
queryRescoreContext.setQuery(QueryBuilder.rewriteQuery(this.queryBuilder, context).toQuery(context));
queryRescoreContext.setQueryWeight(this.queryWeight);
queryRescoreContext.setRescoreQueryWeight(this.rescoreQueryWeight);
queryRescoreContext.setScoreMode(this.scoreMode);
@ -239,4 +239,4 @@ public class QueryRescorerBuilder extends RescoreBuilder<QueryRescorerBuilder> {
this.scoreMode = scoreMode;
}
}
}
}

View File

@ -25,11 +25,8 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.transport.TransportAddress;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public abstract class TransportMessage<TM extends TransportMessage<TM>> implements Streamable {
public abstract class TransportMessage implements Streamable {
private TransportAddress remoteAddress;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.tasks.Task;
/**
*/
public abstract class TransportRequest extends TransportMessage<TransportRequest> {
public abstract class TransportRequest extends TransportMessage {
public static class Empty extends TransportRequest {
public static final Empty INSTANCE = new Empty();
@ -32,7 +32,6 @@ public abstract class TransportRequest extends TransportMessage<TransportRequest
public TransportRequest() {
}
/**
* Returns the task object that should be used to keep track of the processing of the request.
*
@ -48,5 +47,4 @@ public abstract class TransportRequest extends TransportMessage<TransportRequest
public String getDescription() {
return "";
}
}

View File

@ -21,7 +21,7 @@ package org.elasticsearch.transport;
/**
*/
public abstract class TransportResponse extends TransportMessage<TransportResponse> {
public abstract class TransportResponse extends TransportMessage {
public static class Empty extends TransportResponse {
public static final Empty INSTANCE = new Empty();

View File

@ -247,13 +247,13 @@ public class CreateIndexIT extends ESIntegTestCase {
CreateIndexRequestBuilder b = prepareCreate("test");
b.addMapping("type1", jsonBuilder().startObject().startObject("properties")
.startObject("text")
.field("type", "string")
.field("type", "text")
.field("analyzer", "standard")
.field("search_analyzer", "whitespace")
.endObject().endObject().endObject());
b.addMapping("type2", jsonBuilder().humanReadable(true).startObject().startObject("properties")
.startObject("text")
.field("type", "string")
.field("type", "text")
.endObject().endObject().endObject());
try {
b.get();

View File

@ -55,7 +55,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
.field("store", true)
.endObject()
.startObject("bar")
.field("type", "string")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()

View File

@ -61,7 +61,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.field("analyzer", "tv_test")
.endObject()

View File

@ -66,7 +66,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
@ -92,7 +92,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("existingfield")
.field("type", "string")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
@ -119,7 +119,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("existingfield")
.field("type", "string")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
@ -150,11 +150,11 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.addAlias(new Alias("alias"))
.addMapping("type1",
"field0", "type=integer,", // no tvs
"field1", "type=string,index=no", // no tvs
"field2", "type=string,index=no,store=true", // no tvs
"field3", "type=string,index=no,term_vector=yes", // no tvs
"field1", "type=text,index=false", // no tvs
"field2", "type=text,index=false,store=true", // no tvs
"field3", "type=text,index=false,term_vector=yes", // no tvs
"field4", "type=keyword", // yes tvs
"field5", "type=string,index=analyzed")); // yes tvs
"field5", "type=text,index=true")); // yes tvs
ensureYellow();
@ -190,7 +190,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.field("analyzer", "tv_test")
.endObject()
@ -278,7 +278,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("type", "text")
.field("term_vector", optionString)
.field("analyzer", "tv_test")
.endObject()
@ -428,7 +428,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
String queryString = createString(tokens, payloads, encoding, delimiter.charAt(0));
//create the mapping
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field").field("type", "string").field("term_vector", "with_positions_offsets_payloads")
.startObject("field").field("type", "text").field("term_vector", "with_positions_offsets_payloads")
.field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
settingsBuilder()
@ -586,7 +586,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder source = jsonBuilder().startObject();
for (String field : fieldNames) {
mapping.startObject(field)
.field("type", "string")
.field("type", "text")
.field("term_vector", randomBoolean() ? "with_positions_offsets_payloads" : "no")
.field("analyzer", "tv_test")
.endObject();
@ -672,9 +672,9 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
// setup indices
String[] indexNames = new String[] {"with_tv", "without_tv"};
assertAcked(prepareCreate(indexNames[0])
.addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets,analyzer=keyword"));
.addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets,analyzer=keyword"));
assertAcked(prepareCreate(indexNames[1])
.addMapping("type1", "field1", "type=string,term_vector=no,analyzer=keyword"));
.addMapping("type1", "field1", "type=text,term_vector=no,analyzer=keyword"));
ensureGreen();
// index documents with and without term vectors
@ -763,7 +763,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
XContentBuilder source = jsonBuilder().startObject();
for (int i = 0; i < numFields; i++) {
mapping.startObject("field" + i)
.field("type", "string")
.field("type", "text")
.field("term_vector", randomBoolean() ? "yes" : "no")
.endObject();
source.field("field" + i, "some text here");
@ -790,7 +790,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.put("index.analysis.analyzer", "standard");
assertAcked(prepareCreate("test")
.setSettings(settings)
.addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets"));
.addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets"));
ensureGreen();
// index documents existing document
@ -848,7 +848,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.put("index.analysis.analyzer", "standard");
assertAcked(prepareCreate("test")
.setSettings(settings)
.addMapping("type1", "field1", "type=string"));
.addMapping("type1", "field1", "type=text"));
ensureGreen();
// request tvs from artificial document
@ -882,7 +882,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
withTermVectors.add(fieldName);
}
mapping.startObject(fieldName)
.field("type", "string")
.field("type", "text")
.field("term_vector", withTermVectors.contains(fieldName) ? "yes" : "no")
.endObject();
source.field(fieldName, "some text here");
@ -1090,7 +1090,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.put("index.analysis.analyzer", "keyword");
assertAcked(prepareCreate("test")
.setSettings(settings)
.addMapping("type1", "tags", "type=string"));
.addMapping("type1", "tags", "type=text"));
ensureYellow();
int numTerms = scaledRandomIntBetween(10, 50);
@ -1128,7 +1128,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.put("index.analysis.analyzer", "keyword");
assertAcked(prepareCreate("test")
.setSettings(settings)
.addMapping("type1", "tags", "type=string"));
.addMapping("type1", "tags", "type=text"));
ensureYellow();
logger.info("Indexing one document with tags of increasing frequencies ...");
@ -1169,7 +1169,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.put("index.number_of_shards", 1); // no dfs
assertAcked(prepareCreate("test")
.setSettings(settings)
.addMapping("type1", "tags", "type=string"));
.addMapping("type1", "tags", "type=text"));
ensureYellow();
int numDocs = scaledRandomIntBetween(10, 50); // as many terms as there are docs

View File

@ -136,7 +136,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testFilteringAliases() throws Exception {
logger.info("--> creating index [test]");
assertAcked(prepareCreate("test").addMapping("type", "user", "type=string"));
assertAcked(prepareCreate("test").addMapping("type", "user", "type=text"));
ensureGreen();
@ -163,7 +163,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testSearchingFilteringAliasesSingleIndex() throws Exception {
logger.info("--> creating index [test]");
assertAcked(prepareCreate("test").addMapping("type1", "id", "type=string", "name", "type=string"));
assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text"));
ensureGreen();
@ -243,9 +243,9 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testSearchingFilteringAliasesTwoIndices() throws Exception {
logger.info("--> creating index [test1]");
assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=string"));
assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=text"));
logger.info("--> creating index [test2]");
assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=string"));
assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=text"));
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
@ -310,7 +310,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertAcked(client().admin().indices().preparePutMapping("test1", "test2", "test3")
.setType("type1")
.setSource("name", "type=string"));
.setSource("name", "type=text"));
ensureGreen();
@ -370,8 +370,8 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testDeletingByQueryFilteringAliases() throws Exception {
logger.info("--> creating index [test1] and [test2");
assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=string"));
assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=string"));
assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=text"));
assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=text"));
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
@ -407,8 +407,8 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testDeleteAliases() throws Exception {
logger.info("--> creating index [test1] and [test2]");
assertAcked(prepareCreate("test1").addMapping("type", "name", "type=string"));
assertAcked(prepareCreate("test2").addMapping("type", "name", "type=string"));
assertAcked(prepareCreate("test1").addMapping("type", "name", "type=text"));
assertAcked(prepareCreate("test2").addMapping("type", "name", "type=text"));
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
@ -486,7 +486,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testSameAlias() throws Exception {
logger.info("--> creating index [test]");
assertAcked(prepareCreate("test").addMapping("type", "name", "type=string"));
assertAcked(prepareCreate("test").addMapping("type", "name", "type=text"));
ensureGreen();
logger.info("--> creating alias1 ");
@ -547,7 +547,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
createIndex("bazbar");
assertAcked(client().admin().indices().preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar")
.setType("type").setSource("field", "type=string"));
.setType("type").setSource("field", "type=text"));
ensureGreen();
logger.info("--> creating aliases [alias1, alias2]");
@ -956,7 +956,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testCreateIndexWithAliases() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type", "field", "type=string")
.addMapping("type", "field", "type=text")
.addAlias(new Alias("alias1"))
.addAlias(new Alias("alias2").filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("field"))))
.addAlias(new Alias("alias3").indexRouting("index").searchRouting("search")));
@ -978,7 +978,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
public void testCreateIndexWithAliasesSource() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type", "field", "type=string")
.addMapping("type", "field", "type=text")
.setAliases("{\n" +
" \"alias1\" : {},\n" +
" \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +

View File

@ -54,7 +54,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
for (int i = 0; i < fields.length; i++) {
fields[i++] = "field_" + fieldId++;
String analyzer = randomAnalyzer();
fields[i] = "type=string,analyzer=" + analyzer;
fields[i] = "type=text,analyzer=" + analyzer;
}
assertAcked(prepareCreate("test")
.addMapping("type", (Object[])fields)

View File

@ -561,7 +561,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
public void testAnalyze() {
createIndexWithAlias();
assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=string,analyzer=keyword"));
assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=text,analyzer=keyword"));
ensureYellow("test");
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("this is a test").setIndex(indexOrAlias()).setField("field").get();
assertThat(analyzeResponse.getTokens().size(), equalTo(1));
@ -586,7 +586,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
public void testGetTermVector() throws IOException {
createIndexWithAlias();
assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("field", "type=string,term_vector=with_positions_offsets_payloads").get());
assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("field", "type=text,term_vector=with_positions_offsets_payloads").get());
ensureYellow("test");
client().prepareIndex(indexOrAlias(), "type1", "1")

View File

@ -504,6 +504,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
assertThat(processedLatch.await(1, TimeUnit.SECONDS), equalTo(true));
}
@TestLogging("_root:debug,action.admin.cluster.tasks:trace")
public void testPendingUpdateTask() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "local")

View File

@ -90,7 +90,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field1").field("type", "text").field("store", true).endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.get();
@ -99,7 +99,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
.setTemplate("test*")
.setOrder(1)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field2").field("type", "string").field("store", "no").endObject()
.startObject("field2").field("type", "text").field("store", "no").endObject()
.endObject().endObject().endObject())
.get();
@ -132,7 +132,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
int counter = 0;
int numberOfFields = 0;
while (true) {
mapping.startObject(Strings.randomBase64UUID()).field("type", "string").endObject();
mapping.startObject(Strings.randomBase64UUID()).field("type", "text").endObject();
counter += 10; // each field is about 10 bytes, assuming compression in place
numberOfFields++;
if (counter > estimatedBytesSize) {

View File

@ -209,7 +209,7 @@ public class ExplainActionIT extends ESIntegTestCase {
public void testExplainWithFilteredAlias() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("test", "field2", "type=string")
.addMapping("test", "field2", "type=text")
.addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))));
ensureGreen("test");
@ -225,7 +225,7 @@ public class ExplainActionIT extends ESIntegTestCase {
public void testExplainWithFilteredAliasFetchSource() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test")
.addMapping("test", "field2", "type=string")
.addMapping("test", "field2", "type=text")
.addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))));
ensureGreen("test");

View File

@ -47,7 +47,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
public void testRandom() throws Exception {
assertAcked(prepareCreate("test").addMapping(
"test", "string", "type=string", "date", "type=date", "double", "type=double", "double", "type=double",
"test", "string", "type=text", "date", "type=date", "double", "type=double", "double", "type=double",
"float", "type=float", "long", "type=long", "integer", "type=integer", "short", "type=short", "byte", "type=byte"
));
ensureGreen("test");
@ -185,7 +185,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
"test", "value", "type=long"
));
assertAcked(prepareCreate("test2").addMapping(
"test", "value", "type=string"
"test", "value", "type=text"
));
ensureGreen("test1", "test2");

View File

@ -72,7 +72,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
}
public void testString() {
createIndex("test", Settings.EMPTY, "test", "field", "type=string");
createIndex("test", Settings.EMPTY, "test", "field", "type=text");
for (int value = 0; value <= 10; value++) {
client().prepareIndex("test", "test").setSource("field", String.format(Locale.ENGLISH, "%03d", value)).get();
}
@ -185,11 +185,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
}
public void testInvalidField() {
createIndex("test1", Settings.EMPTY, "test", "field1", "type=string");
createIndex("test1", Settings.EMPTY, "test", "field1", "type=text");
client().prepareIndex("test1", "test").setSource("field1", "a").get();
client().prepareIndex("test1", "test").setSource("field1", "b").get();
createIndex("test2", Settings.EMPTY, "test", "field2", "type=string");
createIndex("test2", Settings.EMPTY, "test", "field2", "type=text");
client().prepareIndex("test2", "test").setSource("field2", "a").get();
client().prepareIndex("test2", "test").setSource("field2", "b").get();
client().admin().indices().prepareRefresh().get();

View File

@ -69,7 +69,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
internalCluster().startNode();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("appAccountIds").field("type", "string").endObject().endObject()
.startObject("properties").startObject("appAccountIds").field("type", "text").endObject().endObject()
.endObject().endObject().string();
assertAcked(prepareCreate("test").addMapping("type1", mapping));
@ -109,7 +109,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
internalCluster().startNode();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("field").field("type", "string").endObject().startObject("num").field("type", "integer").endObject().endObject()
.startObject("properties").startObject("field").field("type", "text").endObject().startObject("num").field("type", "integer").endObject().endObject()
.endObject().endObject().string();
// note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test.
int numberOfShards = numberOfShards();
@ -301,7 +301,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field1").field("type", "text").field("store", true).endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

View File

@ -253,12 +253,12 @@ public class GetActionIT extends ESIntegTestCase {
public void testGetDocWithMultivaluedFields() throws Exception {
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field").field("type", "string").field("store", true).endObject()
.startObject("field").field("type", "text").field("store", true).endObject()
.endObject()
.endObject().endObject().string();
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2")
.startObject("properties")
.startObject("field").field("type", "string").field("store", true).endObject()
.startObject("field").field("type", "text").field("store", true).endObject()
.endObject()
.endObject().endObject().string();
assertAcked(prepareCreate("test")
@ -718,7 +718,7 @@ public class GetActionIT extends ESIntegTestCase {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.addMapping("my-type1", jsonBuilder().startObject().startObject("my-type1").startObject("properties")
.startObject("field1").startObject("properties")
.startObject("field2").field("type", "string").endObject()
.startObject("field2").field("type", "text").endObject()
.endObject().endObject()
.endObject().endObject().endObject())
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
@ -846,7 +846,7 @@ public class GetActionIT extends ESIntegTestCase {
.endObject()
.startObject("properties")
.startObject("some_field")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject()


@ -188,7 +188,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureGreen(IDX);
// So basically, the primary should fail and the replica will need to
@ -265,7 +265,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureYellow(IDX);
client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
@ -323,7 +323,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureYellow(IDX);
client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
@ -383,7 +383,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureYellow(IDX);
// Node1 has the primary, now node2 has the replica
String node2 = internalCluster().startNode(nodeSettings);
@ -458,7 +458,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureYellow(IDX);
// Node1 has the primary, now node2 has the replica
String node2 = internalCluster().startNode(nodeSettings);
@ -551,7 +551,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureGreen(IDX);
client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
@ -590,7 +590,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
ensureGreen(IDX);
int docCount = randomIntBetween(10, 100);
@ -797,8 +797,8 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX2).setSettings(idx2Settings).addMapping("doc", "foo", "type=string").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
prepareCreate(IDX2).setSettings(idx2Settings).addMapping("doc", "foo", "type=text").get();
ensureGreen(IDX, IDX2);
int docCount = randomIntBetween(10, 100);
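
Every shadow-replica test in this file repeats the same settings block and only the mapping shorthand changes. A condensed, deliberately incomplete sketch of that setup, showing only the settings visible in the hunks above (index name and shard count are assumptions; a real shadow-replica index needs more configuration):

    Settings idxSettings = Settings.settingsBuilder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) // replicas read the primary's files
            .build();
    prepareCreate("idx").setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
    ensureGreen("idx");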


@ -86,7 +86,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").field("analyzer", analyzerName).endObject().endObject()
.startObject("properties").startObject("field").field("type", "text").field("analyzer", analyzerName).endObject().endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));


@ -46,7 +46,7 @@ public class FieldDataFilterIntegrationIT extends ESIntegTestCase {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("name")
.field("type", "string")
.field("type", "text")
.startObject("fielddata")
.startObject("filter")
.startObject("regex")
@ -56,7 +56,7 @@ public class FieldDataFilterIntegrationIT extends ESIntegTestCase {
.endObject()
.endObject()
.startObject("not_filtered")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject();


@ -33,7 +33,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("name")
.field("type", "string")
.field("type", "text")
.startObject("fielddata").field("loading", "eager").endObject()
.endObject()
.endObject().endObject().endObject()));
@ -50,7 +50,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("name")
.field("type", "string")
.field("type", "text")
.startObject("fielddata").field("loading", "eager_global_ordinals").endObject()
.endObject()
.endObject().endObject().endObject()));


@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@ -55,7 +55,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "true")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.startObject("field1").field("type", "text").endObject()
.endObject()
.endObject().endObject().string();
@ -75,7 +75,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.startObject("field1").field("type", "text").endObject()
.endObject()
.endObject().endObject().string();
@ -96,7 +96,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.startObject("field1").field("type", "text").endObject()
.endObject()
.endObject().endObject().string();
@ -130,7 +130,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.field("dynamic", "false")
.startObject("properties")
.startObject("obj1").startObject("properties")
.startObject("field1").field("type", "string").endObject()
.startObject("field1").field("type", "text").endObject()
.endObject().endObject()
.endObject()
.endObject().endObject().string();
@ -153,7 +153,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.field("dynamic", "strict")
.startObject("properties")
.startObject("obj1").startObject("properties")
.startObject("field1").field("type", "string").endObject()
.startObject("field1").field("type", "text").endObject()
.endObject().endObject()
.endObject()
.endObject().endObject().string();
@ -218,7 +218,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.startObject("properties").startObject("foo").field("type", "text").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@ -251,7 +251,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
// Make sure that mapping updates are incremental; this is important for performance, as otherwise
// every new field introduction runs in linear time with the total number of fields
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.startObject("properties").startObject("foo").field("type", "text").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@ -374,7 +374,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
public void testReuseExistingMappings() throws IOException, Exception {
IndexService indexService = createIndex("test", Settings.EMPTY, "type",
"my_field1", "type=string,store=true",
"my_field1", "type=text,store=true",
"my_field2", "type=integer,precision_step=10",
"my_field3", "type=long,doc_values=false",
"my_field4", "type=float,index_options=freqs",
@ -423,9 +423,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
}
assertNotNull(myField1Mapper);
// same type
assertTrue(myField1Mapper instanceof StringFieldMapper);
assertTrue(myField1Mapper instanceof TextFieldMapper);
// and same option
assertTrue(((StringFieldMapper) myField1Mapper).fieldType().stored());
assertTrue(((TextFieldMapper) myField1Mapper).fieldType().stored());
// Even if dynamic mappings would map a numeric field as a long, here it should map it as an integer
// since we already have a mapping of type integer
@ -470,7 +470,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("template1")
.field("match_mapping_type", "string")
.startObject("mapping")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("raw")
.field("type", "keyword")
@ -486,7 +486,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.startObject("type2")
.startObject("properties")
.startObject("field")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject();
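
The two hunks above show only fragments of the dynamic-template mapping. Reconstructed in full as a sketch (the type and template names are taken from the hunk, the surrounding structure is an assumption), the intent is to map every dynamically introduced string value to a text field with a keyword sub-field:

    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
            .startArray("dynamic_templates")
                .startObject().startObject("template1")
                    .field("match_mapping_type", "string") // JSON strings still *match* as "string"...
                    .startObject("mapping")
                        .field("type", "text")             // ...but are now *mapped* as "text"
                        .startObject("fields")
                            .startObject("raw").field("type", "keyword").endObject()
                        .endObject()
                    .endObject()
                .endObject().endObject()
            .endArray()
            .endObject().endObject();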


@ -60,7 +60,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
.admin()
.indices()
.prepareCreate(index)
.addMapping(type, field, "type=string")
.addMapping(type, field, "type=text")
.execute()
.actionGet();
}
@ -76,7 +76,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
.admin()
.indices()
.prepareCreate(index)
.addMapping(type, field, "type=string")
.addMapping(type, field, "type=text")
.execute()
.actionGet();
}


@ -362,7 +362,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
assertThat(e.getMessage(), containsString("[type : string]"));
assertThat(e.getMessage(), containsString("[type : text]"));
}
}
@ -374,7 +374,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
assertThat(e.getMessage(), containsString("type=string"));
assertThat(e.getMessage(), containsString("type=text"));
}
}
@ -386,7 +386,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
fail("Expected MapperParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
assertThat(e.getMessage(), containsString("type=string"));
assertThat(e.getMessage(), containsString("type=text"));
}
}
@ -451,7 +451,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
public void testAutoBoost() throws Exception {
for (boolean boost : new boolean[] {false, true}) {
String index = "test_" + boost;
IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=string" + (boost ? ",boost=2" : "")));
IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=text" + (boost ? ",boost=2" : "")));
client().prepareIndex(index, "type").setSource("foo", "bar").get();
client().admin().indices().prepareRefresh(index).get();
Query query = indexService.mapperService().documentMapper("type").allFieldMapper().fieldType().termQuery("bar", null);


@ -72,7 +72,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
public void testDynamicObjectCopyTo() throws Exception {
String mapping = jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("foo")
.field("type", "string")
.field("type", "text")
.field("copy_to", "root.top.child")
.endObject()
.endObject().endObject().endObject().string();
@ -102,7 +102,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
.startObject().startObject("template_all")
.field("match", "*")
.field("match_mapping_type", "string")
.startObject("mapping").field("type", "string").field("copy_to", "{name}_raw").endObject()
.startObject("mapping").field("type", "text").field("copy_to", "{name}_raw").endObject()
.endObject().endObject()
.endArray();


@ -37,7 +37,7 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Arrays;
@ -58,16 +58,16 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
public void testCopyToFieldsParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.array("copy_to", "another_field", "cyclic_test")
.endObject()
.startObject("another_field")
.field("type", "string")
.field("type", "text")
.endObject()
.startObject("cyclic_test")
.field("type", "string")
.field("type", "text")
.array("copy_to", "copy_test")
.endObject()
@ -84,7 +84,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
FieldMapper fieldMapper = docMapper.mappers().getMapper("copy_test");
// Check json serialization
StringFieldMapper stringFieldMapper = (StringFieldMapper) fieldMapper;
TextFieldMapper stringFieldMapper = (TextFieldMapper) fieldMapper;
XContentBuilder builder = jsonBuilder().startObject();
stringFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
builder.close();
@ -93,7 +93,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
serializedMap = parser.map();
}
Map<String, Object> copyTestMap = (Map<String, Object>) serializedMap.get("copy_test");
assertThat(copyTestMap.get("type").toString(), is("string"));
assertThat(copyTestMap.get("type").toString(), is("text"));
List<String> copyToList = (List<String>) copyTestMap.get("copy_to");
assertThat(copyToList.size(), equalTo(2));
assertThat(copyToList.get(0).toString(), equalTo("another_field"));
@ -138,7 +138,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
@ -173,7 +173,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
@ -201,7 +201,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.field("copy_to", "very.far.inner.field")
.endObject()
.startObject("very")
@ -238,7 +238,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.field("dynamic", "strict")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
@ -262,7 +262,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
String mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.field("copy_to", "very.far.field")
.endObject()
.startObject("very")
@ -296,7 +296,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
String mappingBefore = jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.array("copy_to", "foo", "bar")
.endObject()
@ -305,7 +305,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
String mappingAfter = jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.array("copy_to", "baz", "bar")
.endObject()
@ -438,7 +438,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endArray()
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
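
The copy_to hunks above exercise corner cases (cyclic chains, dotted destinations, dynamic: strict targets), but the feature itself is simple: values of one field are additionally indexed under the destination fields. A minimal sketch with hypothetical field names, using the same parser calls as the tests:

    String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("first_name")
                .field("type", "text")
                .field("copy_to", "full_name") // values are also indexed under "full_name"
            .endObject()
            .startObject("full_name")
                .field("type", "text")
            .endObject()
            .endObject().endObject().endObject().string();
    DocumentMapper docMapper = parser.parse("type1", new CompressedXContent(mapping));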


@ -134,8 +134,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
.field("type", "boolean")
.startObject("fields")
.startObject("as_string")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject().endObject()
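
This hunk is the keyword half of the migration: a sub-field declared as type string with index not_analyzed collapses into the single keyword type, with no separate index directive. As a sketch in builder form (same sub-field name as above):

    XContentBuilder fields = XContentFactory.jsonBuilder().startObject().startObject("fields")
            .startObject("as_string")
                .field("type", "keyword") // was: "type": "string" plus "index": "not_analyzed"
            .endObject()
            .endObject().endObject();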


@ -69,13 +69,13 @@ public class MultiFieldCopyToMapperTests extends ESTestCase {
.startObject("type")
.startObject("properties")
.startObject("a")
.field("type", "string")
.field("type", "text")
.endObject()
.startObject("b")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("c")
.field("type", "string")
.field("type", "text")
.field("copy_to", "a")
.endObject()
.endObject()


@ -0,0 +1,389 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class TextFieldMapperTests extends ESSingleNodeTestCase {
IndexService indexService;
DocumentMapperParser parser;
@Before
public void before() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
}
public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "text").endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals("1234", fields[0].stringValue());
IndexableFieldType fieldType = fields[0].fieldType();
assertThat(fieldType.omitNorms(), equalTo(false));
assertTrue(fieldType.tokenized());
assertFalse(fieldType.stored());
assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
assertThat(fieldType.storeTermVectors(), equalTo(false));
assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
assertEquals(DocValuesType.NONE, fieldType.docValuesType());
}
public void testEnableStore() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "text").field("store", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertTrue(fields[0].fieldType().stored());
}
public void testDisableIndex() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "text").field("index", false).endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}
public void testDisableNorms() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.startObject("norms")
.field("enabled", false)
.endObject()
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertTrue(fields[0].fieldType().omitNorms());
}
public void testIndexOptions() throws IOException {
Map<String, IndexOptions> supportedOptions = new HashMap<>();
supportedOptions.put("docs", IndexOptions.DOCS);
supportedOptions.put("freqs", IndexOptions.DOCS_AND_FREQS);
supportedOptions.put("positions", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
supportedOptions.put("offsets", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties");
for (String option : supportedOptions.keySet()) {
mappingBuilder.startObject(option).field("type", "text").field("index_options", option).endObject();
}
String mapping = mappingBuilder.endObject().endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
XContentBuilder jsonDoc = XContentFactory.jsonBuilder().startObject();
for (String option : supportedOptions.keySet()) {
jsonDoc.field(option, "1234");
}
ParsedDocument doc = mapper.parse("test", "type", "1", jsonDoc.endObject().bytes());
for (Map.Entry<String, IndexOptions> entry : supportedOptions.entrySet()) {
String field = entry.getKey();
IndexOptions options = entry.getValue();
IndexableField[] fields = doc.rootDoc().getFields(field);
assertEquals(1, fields.length);
assertEquals(options, fields[0].fieldType().indexOptions());
}
}
public void testDefaultPositionIncrementGap() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "text").endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = indexService.mapperService().merge("type",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", new String[] {"a", "b"})
.endObject()
.bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals("a", fields[0].stringValue());
assertEquals("b", fields[1].stringValue());
IndexShard shard = indexService.getShard(0);
shard.index(new Engine.Index(new Term("_uid", "1"), doc));
shard.refresh("test");
try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
TermsEnum terms = leaf.terms("field").iterator();
assertTrue(terms.seekExact(new BytesRef("b")));
PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
assertEquals(0, postings.nextDoc());
assertEquals(TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + 1, postings.nextPosition());
}
}
public void testPositionIncrementGap() throws IOException {
final int positionIncrementGap = randomIntBetween(1, 1000);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("position_increment_gap", positionIncrementGap)
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = indexService.mapperService().merge("type",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", new String[] {"a", "b"})
.endObject()
.bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals("a", fields[0].stringValue());
assertEquals("b", fields[1].stringValue());
IndexShard shard = indexService.getShard(0);
shard.index(new Engine.Index(new Term("_uid", "1"), doc));
shard.refresh("test");
try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
TermsEnum terms = leaf.terms("field").iterator();
assertTrue(terms.seekExact(new BytesRef("b")));
PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
assertEquals(0, postings.nextDoc());
assertEquals(positionIncrementGap + 1, postings.nextPosition());
}
}
public void testSearchAnalyzerSerialization() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "text")
.field("analyzer", "standard")
.field("search_analyzer", "keyword")
.endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
// special case: default index analyzer
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "text")
.field("analyzer", "default")
.field("search_analyzer", "keyword")
.endObject()
.endObject().endObject().endObject().string();
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
}
public void testSearchQuoteAnalyzerSerialization() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "text")
.field("analyzer", "standard")
.field("search_analyzer", "standard")
.field("search_quote_analyzer", "keyword")
.endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
// special case: default index/search analyzer
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "text")
.field("analyzer", "default")
.field("search_analyzer", "default")
.field("search_quote_analyzer", "keyword")
.endObject()
.endObject().endObject().endObject().string();
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
}
public void testTermVectors() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field1")
.field("type", "text")
.field("term_vector", "no")
.endObject()
.startObject("field2")
.field("type", "text")
.field("term_vector", "yes")
.endObject()
.startObject("field3")
.field("type", "text")
.field("term_vector", "with_offsets")
.endObject()
.startObject("field4")
.field("type", "text")
.field("term_vector", "with_positions")
.endObject()
.startObject("field5")
.field("type", "text")
.field("term_vector", "with_positions_offsets")
.endObject()
.startObject("field6")
.field("type", "text")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field1", "1234")
.field("field2", "1234")
.field("field3", "1234")
.field("field4", "1234")
.field("field5", "1234")
.field("field6", "1234")
.endObject()
.bytes());
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
}
}
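
The two position tests above rest on simple arithmetic: array values of a text field are indexed as one token stream, and position_increment_gap extra positions are inserted between consecutive values. A worked sketch of the asserted numbers (the concrete default of 100 is an assumption; the tests only reference TextFieldMapper.Defaults.POSITION_INCREMENT_GAP):

    int gap = 100;             // assumed value of TextFieldMapper.Defaults.POSITION_INCREMENT_GAP
    int positionOfA = 0;       // first token of the first value "a"
    int positionOfB = gap + 1; // one token step plus the gap between values
    // hence assertEquals(POSITION_INCREMENT_GAP + 1, postings.nextPosition()) above;
    // a phrase query with slop smaller than the gap cannot match across array values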


@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
public class TextFieldTypeTests extends FieldTypeTestCase {
@Override
protected MappedFieldType createDefaultFieldType() {
return new TextFieldMapper.TextFieldType();
}
}


@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
@ -81,7 +82,7 @@ public class ExternalMapper extends FieldMapper {
public Builder(String name, String generatedValue, String mapperName) {
super(name, new ExternalFieldType(), new ExternalFieldType());
this.builder = this;
this.stringBuilder = new StringFieldMapper.Builder(name).store(false);
this.stringBuilder = new TextFieldMapper.Builder(name).store(false);
this.generatedValue = generatedValue;
this.mapperName = mapperName;
}
@ -224,7 +225,7 @@ public class ExternalMapper extends FieldMapper {
BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType);
GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType);
GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType);
StringFieldMapper stringMapperUpdate = (StringFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
TextFieldMapper stringMapperUpdate = (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
if (update == this
&& multiFieldsUpdate == multiFields
&& binMapperUpdate == binMapper


@ -88,7 +88,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
.field("type", ExternalMapperPlugin.EXTERNAL_UPPER)
.startObject("fields")
.startObject("g")
.field("type", "string")
.field("type", "text")
.field("store", true)
.startObject("fields")
.startObject("raw")


@ -31,7 +31,7 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -106,7 +106,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test", settings);
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
mapperParsers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
mapperParsers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
@ -119,7 +119,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.field("type", ExternalMapperPlugin.EXTERNAL)
.startObject("fields")
.startObject("field")
.field("type", "string")
.field("type", "text")
.field("store", true)
.startObject("fields")
.startObject("raw")
@ -165,7 +165,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
mapperParsers.put(ExternalMapperPlugin.EXTERNAL_BIS, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "bar"));
mapperParsers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
@ -177,18 +177,18 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.field("type", ExternalMapperPlugin.EXTERNAL)
.startObject("fields")
.startObject("field")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("generated")
.field("type", ExternalMapperPlugin.EXTERNAL_BIS)
.endObject()
.startObject("raw")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.startObject("raw")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject()
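
Besides the mapping JSON, these tests wire the parsers explicitly: each field type contributes a Mapper.TypeParser registered in a MapperRegistry under its CONTENT_TYPE key, which is how the external mapper plugin coexists with the built-in types. Condensed from the hunks above:

    Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
    mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());       // "text"
    mapperParsers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser()); // "keyword"
    MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());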


@ -30,7 +30,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -49,12 +48,12 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
public void test1Merge() throws Exception {
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("name").field("type", "string").endObject()
.startObject("name").field("type", "text").endObject()
.endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("name").field("type", "string").endObject()
.startObject("name").field("type", "text").endObject()
.startObject("age").field("type", "integer").endObject()
.startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject()
.endObject().endObject().endObject().string();
@ -112,10 +111,10 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
public void testMergeSearchAnalyzer() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
.endObject().endObject().string();
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject()
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject()
.endObject().endObject().string();
DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1));
@ -130,10 +129,10 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
public void testChangeSearchAnalyzerToDefault() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
.endObject().endObject().string();
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject()
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").endObject().endObject()
.endObject().endObject().string();
DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false);
@ -142,7 +141,6 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
assertThat(((StringFieldMapper) (merged.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
}
public void testConcurrentMergeTest() throws Throwable {
@ -219,7 +217,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
.startObject("type")
.startObject("properties")
.startObject("foo")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject().bytes());
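
The search-analyzer hunks above encode a subtle merge rule: an update that sets search_analyzer replaces the old one, while an update that omits it falls back to the field's index analyzer. Sketched with the same calls the test uses (mapping1 sets search_analyzer whitespace, mapping2 omits it):

    DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1),
            MapperService.MergeReason.MAPPING_UPDATE, false); // search analyzer: "whitespace"
    DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2),
            MapperService.MergeReason.MAPPING_UPDATE, false); // omits search_analyzer
    // merged.mappers().getMapper("field").fieldType().searchAnalyzer() now resolves to "standard"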


@ -35,7 +35,9 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -94,25 +96,25 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
assertThat(f.stringValue(), equalTo("2010-01-01"));
assertThat(docMapper.mappers().getMapper("name"), notNullValue());
assertThat(docMapper.mappers().getMapper("name"), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().getMapper("name"), instanceOf(TextFieldMapper.class));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name").fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().getMapper("name").fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(TextFieldMapper.class));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.indexed").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(TextFieldMapper.class));
assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name.not_indexed").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().tokenized(), equalTo(true));
assertThat(docMapper.mappers().getMapper("name.test1"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.test1"), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().getMapper("name.test1"), instanceOf(TextFieldMapper.class));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.test1").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name.test1").fieldType().stored(), equalTo(true));
assertThat(docMapper.mappers().getMapper("name.test1").fieldType().tokenized(), equalTo(true));
@ -129,7 +131,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
assertThat(docMapper.mappers().getMapper("object1.multi1"), notNullValue());
assertThat(docMapper.mappers().getMapper("object1.multi1"), instanceOf(DateFieldMapper.class));
assertThat(docMapper.mappers().getMapper("object1.multi1.string"), notNullValue());
assertThat(docMapper.mappers().getMapper("object1.multi1.string"), instanceOf(StringFieldMapper.class));
assertThat(docMapper.mappers().getMapper("object1.multi1.string"), instanceOf(KeywordFieldMapper.class));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("object1.multi1.string").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("object1.multi1.string").fieldType().tokenized(), equalTo(false));
}
@ -139,8 +141,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add(
new StringFieldMapper.Builder("name").store(true)
.addMultiField(new StringFieldMapper.Builder("indexed").index(true).tokenized(true))
.addMultiField(new StringFieldMapper.Builder("not_indexed").index(false).store(true))
.addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true))
.addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true))
), indexService.mapperService()).build(indexService.mapperService());
String builtMapping = builderDocMapper.mappingSource().string();
@ -181,9 +183,9 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
}
XContentBuilder builder = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("my_field").field("type", "string").startObject("fields");
.startObject("my_field").field("type", "text").startObject("fields");
for (String multiFieldName : multiFieldNames) {
builder = builder.startObject(multiFieldName).field("type", "string").endObject();
builder = builder.startObject(multiFieldName).field("type", "text").endObject();
}
builder = builder.endObject().endObject().endObject().endObject().endObject();
String mapping = builder.string();
@ -218,8 +220,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
// Generate a mapping with the a random subset of possible fielddata settings
XContentBuilder builder = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("my_field").field("type", "string").startObject("fields").startObject(MY_MULTI_FIELD)
.field("type", "string").startObject("fielddata");
.startObject("my_field").field("type", "text").startObject("fields").startObject(MY_MULTI_FIELD)
.field("type", "text").startObject("fielddata");
String[] keys = possibleSettings.keySet().toArray(new String[]{});
Collections.shuffle(Arrays.asList(keys), random());
for(int i = randomIntBetween(0, possibleSettings.size()-1); i >= 0; --i)
@ -235,7 +237,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testObjectFieldNotAllowed() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field")
.field("type", "string").startObject("fields").startObject("multi").field("type", "object").endObject().endObject()
.field("type", "text").startObject("fields").startObject("multi").field("type", "object").endObject().endObject()
.endObject().endObject().endObject().endObject().string();
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
try {
@ -248,7 +250,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testNestedFieldNotAllowed() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field")
.field("type", "string").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject()
.field("type", "text").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject()
.endObject().endObject().endObject().endObject().string();
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
try {
@ -265,10 +267,10 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
.startObject("my_type")
.startObject("properties")
.startObject("city")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("raw.foo")
.field("type", "string")
.field("type", "text")
.field("index", "not_analyzed")
.endObject()
.endObject()
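
Multi-fields can also be assembled programmatically, as the builder hunk in this file shows. A sketch of that API with the names from the test; note the test itself still builds the parent field with StringFieldMapper.Builder, whereas this sketch uses TextFieldMapper.Builder throughout for uniformity:

    DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add(
            new TextFieldMapper.Builder("name").store(true)
                .addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true))
                .addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true))
            ), indexService.mapperService()).build(indexService.mapperService());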


@ -234,7 +234,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("title")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("not_analyzed")
.field("type", "keyword")
@ -249,10 +249,10 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
.startObject("properties")
.startObject("title")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("uncased")
.field("type", "string")
.field("type", "text")
.field("analyzer", "whitespace")
.endObject()
.endObject()


@ -320,7 +320,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
public void testNestedArrayStrict() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("nested1").field("type", "nested").field("dynamic", "strict").startObject("properties")
.startObject("field1").field("type", "string")
.startObject("field1").field("type", "text")
.endObject().endObject()
.endObject().endObject().endObject().string();


@ -69,8 +69,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.startObject("tweet")
.startObject("properties")
.startObject("name")
.field("type", "string")
.field("index", "analyzed")
.field("type", "text")
.startArray("fields")
.endArray()
.endObject()
@ -87,8 +86,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.startObject("tweet")
.startObject("properties")
.startObject("name")
.field("type", "string")
.field("index", "analyzed")
.field("type", "text")
.startArray("fields")
.startObject().field("test", "string").endObject()
.startObject().field("test2", "string").endObject()
@ -148,8 +146,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.startObject("tweet")
.startObject("properties")
.startObject("name")
.field("type", "string")
.field("index", "analyzed")
.field("type", "text")
.startObject("fields")
.startObject("raw")
.field("type", "keyword")


@ -28,7 +28,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.index.mapper.Uid;
@ -48,7 +48,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test");
DocumentMapper docMapper = new DocumentMapper.Builder(
new RootObjectMapper.Builder("person")
.add(new ObjectMapper.Builder("name").add(new StringFieldMapper.Builder("first").store(true).index(false))),
.add(new ObjectMapper.Builder("name").add(new TextFieldMapper.Builder("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService());
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
@ -108,7 +108,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test");
DocumentMapper docMapper = new DocumentMapper.Builder(
new RootObjectMapper.Builder("person")
.add(new ObjectMapper.Builder("name").add(new StringFieldMapper.Builder("first").store(true).index(false))),
.add(new ObjectMapper.Builder("name").add(new TextFieldMapper.Builder("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService());
BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
@ -124,7 +124,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test");
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo.bar").field("type", "string").endObject()
.startObject("foo.bar").field("type", "text").endObject()
.endObject().endObject().string();
try {
mapperParser.parse("type", new CompressedXContent(mapping));

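The same rename shows up programmatically in SimpleMapperTests: StringFieldMapper.Builder gives way to TextFieldMapper.Builder with the identical chained options. A sketch of the updated construction, copied in shape from the hunks above (indexService comes from the test's own createIndex call):

import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;

// A stored but unindexed text field, built through the new TextFieldMapper.Builder.
DocumentMapper docMapper = new DocumentMapper.Builder(
        new RootObjectMapper.Builder("person")
                .add(new ObjectMapper.Builder("name")
                        .add(new TextFieldMapper.Builder("first").store(true).index(false))),
        indexService.mapperService()).build(indexService.mapperService());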

@ -259,12 +259,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
// Cases where search_quote_analyzer should be present.
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field1")
.field("type", "string")
.field("position_increment_gap", 1000)
.field("search_quote_analyzer", "simple")
.endObject()
.startObject("field2")
.startObject("field")
.field("type", "string")
.field("position_increment_gap", 1000)
.field("analyzer", "standard")
@ -275,10 +270,8 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
mapper = parser.parse("type", new CompressedXContent(mapping));
for (String fieldName : Arrays.asList("field1", "field2")) {
Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
}
Map<String, Object> serializedMap = getSerializedMap("field", mapper);
assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
}
public void testSearchAnalyzerSerialization() throws IOException {

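The SimpleStringMappingTests consolidation above replaces the field1/field2 loop with a single "field" whose search_quote_analyzer ("simple") differs from its analyzer ("standard"), which is exactly the case where the quote analyzer must survive serialization. A hedged reconstruction of the consolidated mapping follows; note the hunk is cut off before the search_quote_analyzer line, so that line is an assumption inferred from the assertion:

import org.elasticsearch.common.xcontent.XContentFactory;

// One field carrying both an "analyzer" and a distinct "search_quote_analyzer",
// so the quote analyzer is expected to be serialized back out.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties")
            .startObject("field")
                .field("type", "string")
                .field("position_increment_gap", 1000)
                .field("analyzer", "standard")
                .field("search_quote_analyzer", "simple")   // assumed from the assertion below
            .endObject()
        .endObject()
    .endObject().endObject().string();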

@ -84,14 +84,14 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testThatEnablingTTLFieldOnMergeWorks() throws Exception {
String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject().string();
String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl")
.field("enabled", "yes")
.endObject()
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
@ -107,14 +107,14 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.startObject("_ttl")
.field("enabled", "yes")
.endObject()
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject().string();
String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl")
.field("default", "1w")
.endObject()
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
@ -185,18 +185,18 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
}
public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d");
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
}
public void testIncludeInObjectNotAllowed() throws Exception {
@ -230,7 +230,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
mapping.field("default", defaultValue);
}
return mapping.endObject()
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject();
}
@ -242,14 +242,14 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
mapping.field("default", defaultValue);
}
return mapping.endObject()
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject();
}
private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithOnlyTtlDefaultSet(String defaultValue) throws IOException {
return XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl").field("default", defaultValue).endObject()
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject();
}
}

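One detail in the TTLMappingTests assertions above is easy to misread: a _ttl default of "6m" serializes as 360000 because the default is stored in milliseconds (6 × 60 × 1000). A minimal sketch, assuming TimeValue's standard parse helper:

import org.elasticsearch.common.unit.TimeValue;

// "6m" is six minutes; millis() yields the 360000 that appears in the expected mapping JSON.
TimeValue ttlDefault = TimeValue.parseTimeValue("6m", null, "ttl.default");
assert ttlDefault.millis() == 360000L;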

@ -40,7 +40,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
public void testAllEnabled() throws Exception {
XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", "false").endObject().endObject().endObject().endObject();
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", "true").endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", "true").endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
String errorMessage = "[_all] enabled is false now encountering true";
testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
}
@ -63,7 +63,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
public void testAllDisabled() throws Exception {
XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", true).endObject().endObject().endObject().endObject();
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
String errorMessage = "[_all] enabled is true now encountering false";
testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
}
@ -84,7 +84,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
String docMappingUpdate = jsonBuilder().startObject().startObject("doc")
.startObject("properties")
.startObject("text")
.field("type", "string")
.field("type", "text")
.endObject()
.endObject()
.endObject()
@ -125,7 +125,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
}
public void testDocValuesInvalidMappingOnUpdate() throws Exception {
String mapping = jsonBuilder().startObject().startObject(TYPE).startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject().string();
String mapping = jsonBuilder().startObject().startObject(TYPE).startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().string();
prepareCreate(INDEX).addMapping(TYPE, mapping).get();
String mappingUpdate = jsonBuilder().startObject().startObject(TYPE).startObject("_all").startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject().endObject().string();
GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
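The UpdateMappingOnClusterIT hunks encode a rule rather than a rename: _all.enabled cannot be flipped in either direction by a mapping update, and the merge fails with the quoted message. A sketch of the false-to-true case, mirroring the builder calls above (TYPE and testConflict are the test's own members):

import org.elasticsearch.common.xcontent.XContentBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

// _all starts disabled; the update tries to enable it and must be rejected.
XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE)
        .startObject("_all").field("enabled", "false").endObject()
        .endObject().endObject().endObject();
XContentBuilder mappingUpdate = jsonBuilder().startObject()
        .startObject("_all").field("enabled", "true").endObject()
        .startObject("properties").startObject("text").field("type", "text").endObject().endObject()
        .endObject();
testConflict(mapping.string(), mappingUpdate.string(), "[_all] enabled is false now encountering true");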

Some files were not shown because too many files have changed in this diff.