Core: Added the `index.query.parse.allow_unmapped_fields` setting, which makes it possible to fail queries that refer to unmapped fields.
The percolator and filters in aliases enforce strict query parsing by default. Closes #7335
This commit is contained in:
parent fee832df66
commit 52f1ab6e16
@@ -9,7 +9,7 @@ include::getting-started.asciidoc[]
 
 include::setup.asciidoc[]
 
-include::migration/migrate_1_0.asciidoc[]
+include::migration/index.asciidoc[]
 
 include::api-conventions.asciidoc[]
@@ -72,12 +72,34 @@ It is an error to index to an alias which points to more than one index.
 Aliases with filters provide an easy way to create different "views" of
 the same index. The filter can be defined using Query DSL and is applied
 to all Search, Count, Delete By Query and More Like This operations with
-this alias. Here is an example:
+this alias.
+
+coming[1.4.0,Fields referred to in alias filters must exist in the mappings of the index/indices pointed to by the alias]
+
+To create a filtered alias, first we need to ensure that the fields already
+exist in the mapping:
+
+[source,js]
+--------------------------------------------------
+curl -XPUT 'http://localhost:9200/test1' -d '{
+    "mappings": {
+        "type1": {
+            "properties": {
+                "user" : {
+                    "type": "string",
+                    "index": "not_analyzed"
+                }
+            }
+        }
+    }
+}'
+--------------------------------------------------
+
+Now we can create an alias that uses a filter on field `user`:
 
 [source,js]
 --------------------------------------------------
-curl -XPOST 'http://localhost:9200/_aliases' -d '
-{
+curl -XPOST 'http://localhost:9200/_aliases' -d '{
     "actions" : [
         {
             "add" : {
@@ -173,14 +195,34 @@ You can also use the plural `_aliases`.
 [float]
 ==== Examples:
 
-Adding time based alias:
+Adding time based alias::
++
+--
 [source,js]
 --------------------------------------------------
 curl -XPUT 'localhost:9200/logs_201305/_alias/2013'
 --------------------------------------------------
+--
 
-Adding user alias:
+Adding a user alias::
++
+--
+First create the index and add a mapping for the `user_id` field:
+
+[source,js]
+--------------------------------------------------
+curl -XPUT 'localhost:9200/users' -d '{
+    "mappings" : {
+        "user" : {
+            "properties" : {
+                "user_id" : {"type" : "integer"}
+            }
+        }
+    }
+}'
+--------------------------------------------------
+
+Then add the alias for a specific user:
+
+[source,js]
+--------------------------------------------------
@@ -194,6 +236,8 @@ curl -XPUT 'localhost:9200/users/_alias/user_12' -d '{
 }'
 --------------------------------------------------
+
+--
 
 [float]
 [[alias-index-creation]]
 === Aliases during index creation
@@ -205,6 +249,13 @@ Aliases can also be specified during <<create-index-aliases,index creation>>:
 [source,js]
 --------------------------------------------------
 curl -XPUT localhost:9200/logs_20142801 -d '{
+    "mappings" : {
+        "type" : {
+            "properties" : {
+                "year" : {"type" : "integer"}
+            }
+        }
+    },
     "aliases" : {
         "current_day" : {},
         "2014" : {
@@ -63,3 +63,19 @@ root and inner object types:
 }
 }
 --------------------------------------------------
+
+[float]
+=== Unmapped fields in queries
+
+coming[1.4.0]
+
+Queries and filters can refer to fields which don't exist in a mapping, except
+when registering a new <<search-percolate,percolator query>> or when creating
+a <<filtered,filtered alias>>. In these two cases, any fields referred to in
+the query or filter must already exist in the mapping, otherwise there is a
+chance that the wrong field type will be used.
+
+This requirement can be disabled by setting
+`index.query.parse.allow_unmapped_fields` to `true`, in which case you run the
+risk that your query or filter might not work correctly.
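For reference, a minimal sketch of relaxing this check on a single index (the index name `test` is illustrative; the setting is read from the index settings, so it can be supplied at index-creation time):

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/test' -d '{
    "settings" : {
        "index.query.parse.allow_unmapped_fields" : true
    }
}'
--------------------------------------------------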
@@ -0,0 +1,25 @@
+[[breaking-changes]]
+= Breaking changes
+
+[partintro]
+--
+This section discusses the changes that you need to be aware of when migrating
+your application from one version of Elasticsearch to another.
+
+As a general rule:
+
+* Migration between major versions -- e.g. `1.x` to `2.x` --
+  requires a <<restart-upgrade,full cluster restart>>.
+
+* Migration between minor versions -- e.g. `1.x` to `1.y` -- can be
+  performed by <<rolling-upgrades,upgrading one node at a time>>.
+
+See <<setup-upgrade>> for more info.
+--
+
+include::migrate_1_x.asciidoc[]
+
+include::migrate_1_0.asciidoc[]
@@ -1,13 +1,10 @@
-[[breaking-changes]]
-= Breaking changes in 1.0
+[[breaking-changes-1.0]]
+== Breaking changes in 1.0
 
-[partintro]
---
 This section discusses the changes that you need to be aware of when migrating
 your application to Elasticsearch 1.0.
---
 
-== System and settings
+=== System and settings
 
 * Elasticsearch now runs in the foreground by default. There is no more `-f`
   flag on the command line. Instead, to run elasticsearch as a daemon, use
@@ -41,7 +38,7 @@ your application to Elasticsearch 1.0.
 cluster.routing.allocation.enable: all|primaries|new_primaries|none
 ---------------
 
-== Stats and Info APIs
+=== Stats and Info APIs
 
 The <<cluster-state,`cluster_state`>>, <<cluster-nodes-info,`nodes_info`>>,
 <<cluster-nodes-stats,`nodes_stats`>> and <<indices-stats,`indices_stats`>>
@@ -80,7 +77,7 @@ GET /_nodes/stats/transport,http
 See the links above for full details.
 
 
-== Indices APIs
+=== Indices APIs
 
 The `mapping`, `alias`, `settings`, and `warmer` index APIs are all similar
 but there are subtle differences in the order of the URL and the response
@@ -150,7 +147,7 @@ mapping`>>, <<indices-get-field-mapping,`get-field-mapping`>>,
 <<indices-update-settings,`update-settings`>>, <<indices-get-settings,`get-settings`>>,
 <<indices-warmers,`warmers`>>, and <<indices-aliases,`aliases`>> for more details.
 
-== Index request
+=== Index request
 
 Previously a document could be indexed as itself, or wrapped in an outer
 object which specified the `type` name:
@@ -170,7 +167,7 @@ name as the `type`. We no longer accept the outer `type` wrapper, but this
 behaviour can be reenabled on an index-by-index basis with the setting:
 `index.mapping.allow_type_wrapper`.
 
-== Search requests
+=== Search requests
 
 While the `search` API takes a top-level `query` parameter, the
 <<search-count,`count`>>, <<docs-delete-by-query,`delete-by-query`>> and
@@ -218,7 +215,7 @@ GET /_search
 }
 ---------------
 
-== Multi-fields
+=== Multi-fields
 
 Multi-fields are dead! Long live multi-fields! Well, the field type
 `multi_field` has been removed. Instead, any of the core field types
@@ -254,14 +251,14 @@ Also, instead of having to use the arcane `path` and `index_name` parameters
 in order to index multiple fields into a single ``custom +_all+ field'', you
 can now use the <<copy-to,`copy_to` parameter>>.
 
-== Stopwords
+=== Stopwords
 
 Previously, the <<analysis-standard-analyzer,`standard`>> and
 <<analysis-pattern-analyzer,`pattern`>> analyzers used the list of English stopwords
 by default, which caused some hard to debug indexing issues. Now they are set to
 use the empty stopwords list (ie `_none_`) instead.
 
-== Dates without years
+=== Dates without years
 
 When dates are specified without a year, for example: `Dec 15 10:00:00` they
 are treated as dates in 2000 during indexing and range searches... except for
@@ -269,7 +266,7 @@ the upper included bound `lte` where they were treated as dates in 1970! Now,
 all https://github.com/elasticsearch/elasticsearch/issues/4451[dates without years]
 use `1970` as the default.
 
-== Parameters
+=== Parameters
 
 * Geo queries used to use `miles` as the default unit. And we
   http://en.wikipedia.org/wiki/Mars_Climate_Orbiter[all know what
@@ -300,7 +297,7 @@ DELETE /*
 Setting `action.destructive_requires_name` to `true` provides further safety
 by disabling wildcard expansion on destructive actions.
 
-== Return values
+=== Return values
 
 * The `ok` return value has been removed from all response bodies as it added
   no useful information.
@@ -345,7 +342,7 @@ in the query string.
 * The <<indices-analyze,`analyze`>> API no longer supports the text response
   format, but does support JSON and YAML.
 
-== Deprecations
+=== Deprecations
 
 * The `text` query has been removed. Use the
   <<query-dsl-match-query,`match`>> query instead.
@@ -363,7 +360,7 @@ in the query string.
 * The `custom_score` and `custom_boost_score` is no longer supported. You can
   use <<query-dsl-function-score-query,`function_score`>> instead.
 
-== Percolator
+=== Percolator
 
 The percolator has been redesigned and because of this the dedicated `_percolator` index is no longer used by the percolator,
 but instead the percolator works with a dedicated `.percolator` type. Read the http://www.elasticsearch.org/blog/percolator-redesign-blog-post/[redesigned percolator]
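For example, a query is registered under the dedicated type like this (the index name and query are illustrative; the percolate docs further down in this commit show the same call):

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/my-index/.percolator/1' -d '{
    "query" : {
        "match" : {
            "message" : "bonsai tree"
        }
    }
}'
--------------------------------------------------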
@@ -0,0 +1,38 @@
+[[breaking-changes-1.x]]
+== Breaking changes in 1.x
+
+This section discusses the changes that you need to be aware of when migrating
+your application from Elasticsearch 1.x to Elasticsearch 1.y.
+
+[float]
+=== Facets
+
+Facets are deprecated and will be removed in a future release. You are
+encouraged to migrate to <<search-aggregations, aggregations>> instead.
+
+[[breaking-changes-1.4]]
+=== 1.4
+
+==== Percolator
+
+In indices created with version `1.4.0` or later, percolation queries can only
+refer to fields that already exist in the mappings of that index. There are
+two ways to make sure that a field mapping exists:
+
+* Add or update a mapping via the <<indices-create-index,create index>> or
+  <<indices-put-mapping,put mapping>> APIs.
+* Percolate a document before registering a query. Percolating a document can
+  add field mappings dynamically, in the same way as happens when indexing a
+  document.
+
+==== Aliases
+
+<<indices-aliases,Aliases>> can include <<query-dsl-filters,filters>> which
+are automatically applied to any search performed via the alias.
+<<filtered,Filtered aliases>> created with version `1.4.0` or later can only
+refer to field names which exist in the mappings of the index (or indices)
+pointed to by the alias.
+
+To make sure that the fields exist, add or update a mapping via the
+<<indices-create-index,create index>> or <<indices-put-mapping,put mapping>> APIs.
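A minimal sketch of the first option (index, type and field names are illustrative): create the field mapping before registering the percolation query or the filtered alias.

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/my-index/_mapping/my-type' -d '{
    "my-type" : {
        "properties" : {
            "message" : {"type" : "string"}
        }
    }
}'
--------------------------------------------------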
@@ -15,10 +15,43 @@ in a request to the percolate api.
 The percolator and most of its features work in realtime, so once a percolate query is indexed it can immediately be used
 in the percolate api.
 
+[IMPORTANT]
+=====================================
+
+Fields referred to in a percolator query must *already* exist in the mapping
+associated with the index used for percolation.
+coming[1.4.0,Applies to indices created in 1.4.0 or later]
+There are two ways to make sure that a field mapping exists:
+
+* Add or update a mapping via the <<indices-create-index,create index>> or
+  <<indices-put-mapping,put mapping>> APIs.
+* Percolate a document before registering a query. Percolating a document can
+  add field mappings dynamically, in the same way as happens when indexing a
+  document.
+
+=====================================
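A sketch of the second option (index, type and field names are illustrative): percolating a document dynamically maps any new fields it contains, after which queries may refer to them.

[source,js]
--------------------------------------------------
curl -XGET 'localhost:9200/my-index/my-type/_percolate' -d '{
    "doc" : {
        "message" : "percolating this document maps the message field"
    }
}'
--------------------------------------------------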
 
 [float]
 === Sample usage
 
-Adding a query to the percolator:
+Create an index with a mapping for the field `message`:
+
+[source,js]
+--------------------------------------------------
+curl -XPUT 'localhost:9200/my-index' -d '{
+    "mappings": {
+        "my-type": {
+            "properties": {
+                "message": {
+                    "type": "string"
+                }
+            }
+        }
+    }
+}'
+--------------------------------------------------
+
+Register a query in the percolator:
 
 [source,js]
 --------------------------------------------------
@@ -31,7 +64,7 @@ curl -XPUT 'localhost:9200/my-index/.percolator/1' -d '{
 }'
 --------------------------------------------------
 
-Matching documents to the added queries:
+Match a document to the registered percolator queries:
 
 [source,js]
 --------------------------------------------------
@@ -54,7 +87,7 @@ The above request will yield the following response:
     "failed" : 0
   },
   "total" : 1,
-  "matches" : [
+  "matches" : [ <1>
     {
       "_index" : "my-index",
       "_id" : "1"
@@ -63,7 +96,7 @@ The above request will yield the following response:
 }
 --------------------------------------------------
 
-The percolate api returns matches that refer to percolate queries that have matched with the document defined in the percolate api.
+<1> The percolate query with id `1` matches our document.
 
 [float]
 === Indexing percolator queries
@@ -15,7 +15,8 @@ Elasticsearch can usually be upgraded using a rolling upgrade process, resulting
 |1.x |1.x |Rolling Upgrade
 |=======================================================================
 
-Before upgrading from 0.90.x or any earlier version to 1.x or later, it is a good idea to consult the <<breaking-changes,breaking changes>> docs.
+TIP: Before upgrading Elasticsearch, it is a good idea to consult the
+<<breaking-changes,breaking changes>> docs.
 
 [float]
 [[backup]]
@@ -55,6 +55,12 @@
   - do:
       indices.create:
         index: test
+        body:
+          mappings:
+            type_1:
+              properties:
+                foo:
+                  type: string
 
   - do:
       indices.put_alias:
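For illustration only, the REST equivalent of the `indices.create` call above (a sketch, not part of the test suite):

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/test' -d '{
    "mappings" : {
        "type_1" : {
            "properties" : {
                "foo" : {"type" : "string"}
            }
        }
    }
}'
--------------------------------------------------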
@@ -56,6 +56,11 @@
       indices.create:
         index: test_index
         body:
+          mappings:
+            type_1:
+              properties:
+                field:
+                  type: string
           aliases:
             test_alias: {}
             test_blias:
@@ -4,6 +4,12 @@
   - do:
       indices.create:
         index: test_index
+        body:
+          mappings:
+            type_1:
+              properties:
+                foo:
+                  type: string
 
   - do:
       index:
@@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.query.IndexQueryParserService;
+import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.indices.InvalidAliasNameException;
 
 import java.io.IOException;
@@ -143,9 +144,13 @@ public class AliasValidator extends AbstractComponent {
     }
 
     private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws IOException {
+        QueryParseContext context = indexQueryParserService.getParseContext();
         try {
-            indexQueryParserService.parseInnerFilter(parser);
+            context.reset(parser);
+            context.setAllowUnmappedFields(false);
+            context.parseInnerFilter();
         } finally {
+            context.reset(null);
             parser.close();
         }
     }
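The client-visible effect, sketched after the integration test added later in this commit (`testAddAliasWithFilterNoMapping`); the index, alias and field names here are illustrative:

    // Adding a filtered alias whose filter names an unmapped field now fails
    // while the alias is validated, before it is ever stored.
    try {
        client().admin().indices().prepareAliases()
                .addAlias("test", "alias1", FilterBuilders.termFilter("unmapped_field", "value"))
                .get();
    } catch (ElasticsearchIllegalArgumentException e) {
        // root cause: the QueryParsingException thrown by strict field resolution
    }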
@@ -46,6 +46,7 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.percolator.stats.ShardPercolateService;
 import org.elasticsearch.index.query.IndexQueryParserService;
+import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.index.shard.AbstractIndexShardComponent;
 import org.elasticsearch.index.shard.ShardId;
@@ -53,6 +54,7 @@ import org.elasticsearch.index.shard.service.IndexShard;
 import org.elasticsearch.indices.IndicesLifecycle;
 import org.elasticsearch.percolator.PercolatorService;
 
+import java.io.IOException;
 import java.util.Map;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -66,6 +68,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
 
+    public final static String ALLOW_UNMAPPED_FIELDS = "index.percolator.allow_unmapped_fields";
+
     // This is a shard level service, but these below are index level service:
     private final IndexQueryParserService queryParserService;
     private final MapperService mapperService;
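This constant names an index setting, so lenient percolation can be kept per index. A sketch, assuming the setting is supplied at index-creation time (the index name is illustrative):

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/my-index' -d '{
    "settings" : {
        "index.percolator.allow_unmapped_fields" : true
    }
}'
--------------------------------------------------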
@@ -81,6 +85,7 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
     private final RealTimePercolatorOperationListener realTimePercolatorOperationListener = new RealTimePercolatorOperationListener();
     private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener();
     private final AtomicBoolean realTimePercolatorEnabled = new AtomicBoolean(false);
+    private final boolean allowUnmappedFields;
 
     private CloseableThreadLocal<QueryParseContext> cache = new CloseableThreadLocal<QueryParseContext>() {
         @Override
@@ -101,6 +106,7 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
         this.indexCache = indexCache;
         this.indexFieldDataService = indexFieldDataService;
         this.shardPercolateService = shardPercolateService;
+        this.allowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED_FIELDS, false);
 
         indicesLifecycle.addListener(shardLifecycleListener);
         mapperService.addTypeListener(percolateTypeListener);
@@ -151,64 +157,61 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
     Query parsePercolatorDocument(String id, BytesReference source) {
         String type = null;
         BytesReference querySource = null;
-
-        XContentParser parser = null;
-        try {
-            parser = XContentHelper.createParser(source);
+        try (XContentParser sourceParser = XContentHelper.createParser(source)) {
             String currentFieldName = null;
-            XContentParser.Token token = parser.nextToken(); // move the START_OBJECT
+            XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT
             if (token != XContentParser.Token.START_OBJECT) {
                 throw new ElasticsearchException("failed to parse query [" + id + "], not starting with OBJECT");
             }
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) {
                 if (token == XContentParser.Token.FIELD_NAME) {
-                    currentFieldName = parser.currentName();
+                    currentFieldName = sourceParser.currentName();
                 } else if (token == XContentParser.Token.START_OBJECT) {
                     if ("query".equals(currentFieldName)) {
                         if (type != null) {
-                            return parseQuery(type, null, parser);
+                            return parseQuery(type, sourceParser);
                         } else {
-                            XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
-                            builder.copyCurrentStructure(parser);
+                            XContentBuilder builder = XContentFactory.contentBuilder(sourceParser.contentType());
+                            builder.copyCurrentStructure(sourceParser);
                             querySource = builder.bytes();
                             builder.close();
                         }
                     } else {
-                        parser.skipChildren();
+                        sourceParser.skipChildren();
                     }
                 } else if (token == XContentParser.Token.START_ARRAY) {
-                    parser.skipChildren();
+                    sourceParser.skipChildren();
                 } else if (token.isValue()) {
                     if ("type".equals(currentFieldName)) {
-                        type = parser.text();
+                        type = sourceParser.text();
                     }
                 }
             }
-            return parseQuery(type, querySource, null);
+            try (XContentParser queryParser = XContentHelper.createParser(querySource)) {
+                return parseQuery(type, queryParser);
+            }
         } catch (Exception e) {
             throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e);
-        } finally {
-            if (parser != null) {
-                parser.close();
-            }
         }
     }
 
-    private Query parseQuery(String type, BytesReference querySource, XContentParser parser) {
+    private Query parseQuery(String type, XContentParser parser) {
         String[] previousTypes = null;
         if (type != null) {
             QueryParseContext.setTypesWithPrevious(new String[]{type});
         }
+        QueryParseContext context = cache.get();
         try {
-            if (parser != null) {
-                return queryParserService.parse(cache.get(), parser).query();
-            } else {
-                return queryParserService.parse(cache.get(), querySource).query();
-            }
+            context.reset(parser);
+            context.setAllowUnmappedFields(allowUnmappedFields);
+            return queryParserService.parseInnerQuery(context);
+        } catch (IOException e) {
+            throw new QueryParsingException(queryParserService.index(), "Failed to parse", e);
         } finally {
             if (type != null) {
                 QueryParseContext.setTypes(previousTypes);
             }
+            context.reset(null);
         }
     }
@@ -24,6 +24,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.CloseableThreadLocal;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.bytes.BytesReference;
|
@ -66,6 +67,11 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
public static final String FILTER_PREFIX = "index.queryparser.filter";
|
||||
}
|
||||
|
||||
public static final String DEFAULT_FIELD = "index.query.default_field";
|
||||
public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
|
||||
public static final String PARSE_STRICT = "index.query.parse.strict";
|
||||
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
|
||||
|
||||
private CloseableThreadLocal<QueryParseContext> cache = new CloseableThreadLocal<QueryParseContext>() {
|
||||
@Override
|
||||
protected QueryParseContext initialValue() {
|
||||
|
@@ -96,6 +102,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
     private String defaultField;
     private boolean queryStringLenient;
     private final boolean strict;
+    private final boolean defaultAllowUnmappedFields;
 
     @Inject
     public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings,
@@ -116,9 +123,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
         this.indexEngine = indexEngine;
         this.fixedBitSetFilterCache = fixedBitSetFilterCache;
 
-        this.defaultField = indexSettings.get("index.query.default_field", AllFieldMapper.NAME);
-        this.queryStringLenient = indexSettings.getAsBoolean("index.query_string.lenient", false);
-        this.strict = indexSettings.getAsBoolean("index.query.parse.strict", false);
+        this.defaultField = indexSettings.get(DEFAULT_FIELD, AllFieldMapper.NAME);
+        this.queryStringLenient = indexSettings.getAsBoolean(QUERY_STRING_LENIENT, false);
+        this.strict = indexSettings.getAsBoolean(PARSE_STRICT, false);
+        this.defaultAllowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED, true);
 
         List<QueryParser> queryParsers = newArrayList();
         if (namedQueryParsers != null) {
@@ -301,6 +309,33 @@ public class IndexQueryParserService extends AbstractIndexComponent {
         }
     }
 
+    @Nullable
+    public Query parseInnerQuery(QueryParseContext parseContext) throws IOException {
+        if (strict) {
+            parseContext.parseFlags(EnumSet.of(ParseField.Flag.STRICT));
+        }
+        Query query = parseContext.parseInnerQuery();
+        if (query == null) {
+            query = Queries.newMatchNoDocsQuery();
+        }
+        return query;
+    }
+
+    public QueryParseContext getParseContext() {
+        return cache.get();
+    }
+
+    public boolean defaultAllowUnmappedFields() {
+        return defaultAllowUnmappedFields;
+    }
+
+    /**
+     * @return The lowest node version in the cluster when the index was created or <code>null</code> if that was unknown
+     */
+    public Version getIndexCreatedVersion() {
+        return Version.indexCreated(indexSettings);
+    }
+
     /**
      * Selectively parses a query from a top level query or query_binary json field from the specified source.
      */
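A sketch of how callers are expected to use the new entry points (it mirrors the pattern in AliasValidator and PercolatorQueriesRegistry above; `parser` stands for any positioned XContentParser):

    QueryParseContext context = indexQueryParserService.getParseContext();
    try {
        context.reset(parser);                 // binds the parser and resets allowUnmappedFields to the index default
        context.setAllowUnmappedFields(false); // opt in to strict field resolution
        // parseInnerQuery never returns null: an empty query becomes a match-no-docs query
        Query query = indexQueryParserService.parseInnerQuery(context);
    } finally {
        context.reset(null);                   // always clear the thread-local context
    }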
@@ -26,6 +26,7 @@ import org.apache.lucene.queryparser.classic.QueryParserSettings;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.similarities.Similarity;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.lucene.search.NoCacheFilter;
@@ -78,7 +79,7 @@ public class QueryParseContext {
 
     private boolean propagateNoCache = false;
 
-    IndexQueryParserService indexQueryParser;
+    final IndexQueryParserService indexQueryParser;
 
     private final Map<String, Filter> namedFilters = Maps.newHashMap();
 
@@ -90,6 +91,8 @@ public class QueryParseContext {
 
     private final boolean disableFilterCaching;
 
+    private boolean allowUnmappedFields;
+
     public QueryParseContext(Index index, IndexQueryParserService indexQueryParser) {
         this(index, indexQueryParser, false);
     }
@@ -110,6 +113,7 @@ public class QueryParseContext {
     }
 
     public void reset(XContentParser jp) {
+        allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields();
         this.parseFlags = ParseField.EMPTY_FLAGS;
         this.lookup = null;
         this.parser = jp;
@@ -316,17 +320,34 @@ public class QueryParseContext {
     }
 
     public MapperService.SmartNameFieldMappers smartFieldMappers(String name) {
-        return indexQueryParser.mapperService.smartName(name, getTypes());
+        return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartName(name, getTypes()));
     }
 
     public FieldMapper smartNameFieldMapper(String name) {
-        return indexQueryParser.mapperService.smartNameFieldMapper(name, getTypes());
+        return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldMapper(name, getTypes()));
     }
 
     public MapperService.SmartNameObjectMapper smartObjectMapper(String name) {
         return indexQueryParser.mapperService.smartNameObjectMapper(name, getTypes());
     }
 
+    public void setAllowUnmappedFields(boolean allowUnmappedFields) {
+        this.allowUnmappedFields = allowUnmappedFields;
+    }
+
+    private <T> T failIfFieldMappingNotFound(String name, T fieldMapping) {
+        if (allowUnmappedFields) {
+            return fieldMapping;
+        } else {
+            Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
+            if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0)) {
+                throw new QueryParsingException(index, "Strict field resolution and no field mapping can be found for the field with name [" + name + "]");
+            } else {
+                return fieldMapping;
+            }
+        }
+    }
+
     /**
      * Returns the narrowed down explicit types, or, if not set, all types.
      */
@@ -37,6 +37,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.FilterBuilder;
 import org.elasticsearch.index.query.FilterBuilders;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.indices.IndexMissingException;
 import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesMissingException;
 import org.elasticsearch.search.SearchHit;
@@ -124,7 +125,7 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
     @Test
     public void testFilteringAliases() throws Exception {
         logger.info("--> creating index [test]");
-        createIndex("test");
+        assertAcked(prepareCreate("test").addMapping("type", "user", "type=string"));
 
         ensureGreen();
 
@@ -234,11 +235,9 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
     @Test
     public void testSearchingFilteringAliasesTwoIndices() throws Exception {
         logger.info("--> creating index [test1]");
-        createIndex("test1");
-
+        assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=string"));
         logger.info("--> creating index [test2]");
-        createIndex("test2");
-
+        assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=string"));
         ensureGreen();
 
         logger.info("--> adding filtering aliases to index [test1]");
@@ -302,6 +301,14 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
         logger.info("--> creating indices");
         createIndex("test1", "test2", "test3");
 
+        client().admin().indices().preparePutMapping("test1", "test2", "test3")
+                .setType("type1")
+                .setSource("name", "type=string")
+                .get();
+        waitForConcreteMappingsOnAll("test1", "type1", "name");
+        waitForConcreteMappingsOnAll("test2", "type1", "name");
+        waitForConcreteMappingsOnAll("test3", "type1", "name");
+
         ensureGreen();
 
         logger.info("--> adding aliases to indices");
@@ -361,8 +368,8 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
     @Test
     public void testDeletingByQueryFilteringAliases() throws Exception {
         logger.info("--> creating index [test1] and [test2");
-        createIndex("test1", "test2");
-
+        assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=string"));
+        assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=string"));
         ensureGreen();
 
         logger.info("--> adding filtering aliases to index [test1]");
@@ -421,8 +428,8 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
     @Test
     public void testDeleteAliases() throws Exception {
         logger.info("--> creating index [test1] and [test2]");
-        createIndex("test1", "test2");
-
+        assertAcked(prepareCreate("test1").addMapping("type", "name", "type=string"));
+        assertAcked(prepareCreate("test2").addMapping("type", "name", "type=string"));
         ensureGreen();
 
         logger.info("--> adding filtering aliases to index [test1]");
@@ -505,10 +512,8 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testSameAlias() throws Exception {
-
         logger.info("--> creating index [test]");
-        createIndex("test");
-
+        assertAcked(prepareCreate("test").addMapping("type", "name", "type=string"));
         ensureGreen();
 
         logger.info("--> creating alias1 ");
@@ -566,6 +571,14 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
         createIndex("foobarbaz");
         createIndex("bazbar");
 
+        client().admin().indices().preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar")
+                .setType("type").setSource("field", "type=string").get();
+        waitForConcreteMappingsOnAll("foobar", "type", "field");
+        waitForConcreteMappingsOnAll("test", "type", "field");
+        waitForConcreteMappingsOnAll("test123", "type", "field");
+        waitForConcreteMappingsOnAll("foobarbaz", "type", "field");
+        waitForConcreteMappingsOnAll("bazbar", "type", "field");
+
         ensureGreen();
 
         logger.info("--> creating aliases [alias1, alias2]");
@@ -841,7 +854,9 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testCreateIndexWithAliases() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias1"))
+        assertAcked(prepareCreate("test")
+                .addMapping("type", "field", "type=string")
+                .addAlias(new Alias("alias1"))
                 .addAlias(new Alias("alias2").filter(FilterBuilders.missingFilter("field")))
                 .addAlias(new Alias("alias3").indexRouting("index").searchRouting("search")));
 
@@ -853,7 +868,7 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
         assertAcked(prepareCreate("test").setSource("{\n" +
                 "    \"aliases\" : {\n" +
                 "        \"alias1\" : {},\n" +
-                "        \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +
+                "        \"alias2\" : {\"filter\" : {\"match_all\": {}}},\n" +
                 "        \"alias3\" : { \"index_routing\" : \"index\", \"search_routing\" : \"search\"}\n" +
                 "    }\n" +
                 "}"));
@@ -863,7 +878,9 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testCreateIndexWithAliasesSource() throws Exception {
-        assertAcked(prepareCreate("test").setAliases("{\n" +
+        assertAcked(prepareCreate("test")
+                .addMapping("type", "field", "type=string")
+                .setAliases("{\n" +
                 "    \"alias1\" : {},\n" +
                 "    \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +
                 "    \"alias3\" : { \"index_routing\" : \"index\", \"search_routing\" : \"search\"}\n" +
@@ -895,6 +912,33 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
         }
     }
 
+    @Test
+    public void testAddAliasWithFilterNoMapping() throws Exception {
+        assertAcked(prepareCreate("test"));
+
+        try {
+            client().admin().indices().prepareAliases()
+                    .addAlias("test", "a", FilterBuilders.termFilter("field1", "term"))
+                    .get();
+            fail();
+        } catch (ElasticsearchIllegalArgumentException e) {
+            assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
+        }
+
+        try {
+            client().admin().indices().prepareAliases()
+                    .addAlias("test", "a", FilterBuilders.rangeFilter("field2").from(0).to(1))
+                    .get();
+            fail();
+        } catch (ElasticsearchIllegalArgumentException e) {
+            assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
+        }
+
+        client().admin().indices().prepareAliases()
+                .addAlias("test", "a", FilterBuilders.matchAllFilter()) // <-- no fail, b/c no field mentioned
+                .get();
+    }
+
     private void checkAliases() {
         GetAliasesResponse getAliasesResponse = admin().indices().prepareGetAliases("alias1").get();
         assertThat(getAliasesResponse.getAliases().get("test").size(), equalTo(1));
@@ -209,7 +209,9 @@ public class ExplainActionTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testExplainWithFilteredAlias() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias1").filter(FilterBuilders.termFilter("field2", "value2"))));
+        assertAcked(prepareCreate("test")
+                .addMapping("test", "field2", "type=string")
+                .addAlias(new Alias("alias1").filter(FilterBuilders.termFilter("field2", "value2"))));
         ensureGreen("test");
 
         client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
@@ -225,6 +227,7 @@ public class ExplainActionTests extends ElasticsearchIntegrationTest {
     @Test
     public void testExplainWithFilteredAliasFetchSource() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("test")
+                .addMapping("test", "field2", "type=string")
                 .addAlias(new Alias("alias1").filter(FilterBuilders.termFilter("field2", "value2"))));
         ensureGreen("test");
 
@@ -337,6 +337,7 @@ public class SimpleIndexTemplateTests extends ElasticsearchIntegrationTest {
 
         client().admin().indices().preparePutTemplate("template_with_aliases")
                 .setTemplate("te*")
+                .addMapping("type1", "{\"type1\" : {\"properties\" : {\"value\" : {\"type\" : \"string\"}}}}")
                 .addAlias(new Alias("simple_alias"))
                 .addAlias(new Alias("templated_alias-{index}"))
                 .addAlias(new Alias("filtered_alias").filter("{\"type\":{\"value\":\"type2\"}}"))
@@ -440,6 +440,9 @@ public class SimpleNestedTests extends ElasticsearchIntegrationTest {
         assertAcked(prepareCreate("test")
                 .setSettings(settingsBuilder().put(indexSettings()).put("index.referesh_interval", -1).build())
                 .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
+                        .startObject("field1")
+                        .field("type", "string")
+                        .endObject()
                         .startObject("nested1")
                         .field("type", "nested")
                         .endObject()
@@ -293,7 +293,7 @@ public class ConcurrentPercolatorTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testConcurrentAddingAndRemovingWhilePercolating() throws Exception {
-        createIndex("index");
+        assertAcked(prepareCreate("index").addMapping("type", "field1", "type=string"));
         ensureGreen();
         final int numIndexThreads = scaledRandomIntBetween(1, 3);
         final int numberPercolateOperation = scaledRandomIntBetween(10, 100);
@@ -46,7 +46,7 @@ public class MultiPercolatorTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testBasics() throws Exception {
-        client().admin().indices().prepareCreate("test").execute().actionGet();
+        assertAcked(prepareCreate("test").addMapping("type", "field1", "type=string"));
         ensureGreen();
 
         logger.info("--> register a queries");
@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.percolator;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.percolate.PercolateResponse;
+import org.elasticsearch.action.percolate.PercolateSourceBuilder;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.index.percolator.PercolatorException;
+import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.test.ElasticsearchIntegrationTest;
+import org.junit.Test;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount;
+import static org.hamcrest.Matchers.instanceOf;
+
+/**
+ */
+public class PercolatorBackwardsCompatibilityTests extends ElasticsearchIntegrationTest {
+
+    @Test
+    public void testPercolatorUpgrading() throws Exception {
+        // Simulates an index created on a node before 1.4.0, where field resolution isn't strict.
+        assertAcked(prepareCreate("test")
+                .setSettings(ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_3_0).put(indexSettings())));
+        ensureGreen();
+        int numDocs = randomIntBetween(100, 150);
+        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
+        for (int i = 0; i < numDocs; i++) {
+            docs[i] = client().prepareIndex("test", PercolatorService.TYPE_NAME)
+                    .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "value")).endObject());
+        }
+        indexRandom(true, docs);
+        PercolateResponse response = client().preparePercolate().setIndices("test").setDocumentType("type")
+                .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("field1", "value"))
+                .get();
+        assertMatchCount(response, (long) numDocs);
+
+        // Indices created before the upgrade still allow queries that refer to fields not present in the mapping
+        client().prepareIndex("test", PercolatorService.TYPE_NAME)
+                .setSource(jsonBuilder().startObject().field("query", termQuery("field2", "value")).endObject()).get();
+
+        // However on new indices, field resolution is strict: no queries with unmapped fields are allowed
+        createIndex("test2");
+        try {
+            client().prepareIndex("test2", PercolatorService.TYPE_NAME)
+                    .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "value")).endObject()).get();
+            fail();
+        } catch (PercolatorException e) {
+            e.printStackTrace();
+            assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
+        }
+    }
+
+}
@@ -35,6 +35,7 @@ import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.arrayWithSize;
@@ -48,7 +49,7 @@ public class PercolatorFacetsAndAggregationsTests extends ElasticsearchIntegrationTest {
     @Test
     // Just test the integration with facets and aggregations, not the facet and aggregation functionality!
     public void testFacetsAndAggregations() throws Exception {
-        client().admin().indices().prepareCreate("test").execute().actionGet();
+        assertAcked(prepareCreate("test").addMapping("type", "field1", "type=string", "field2", "type=string"));
         ensureGreen();
 
         int numQueries = scaledRandomIntBetween(250, 500);
@@ -39,9 +39,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.engine.DocumentMissingException;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.index.percolator.PercolatorException;
 import org.elasticsearch.index.query.FilterBuilders;
 import org.elasticsearch.index.query.MatchQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.index.query.functionscore.factor.FactorBuilder;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.highlight.HighlightBuilder;
@@ -74,7 +76,8 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
         ensureGreen();
 
         logger.info("--> Add dummy doc");
-        client().prepareIndex("test", "type", "1").setSource("field", "value").execute().actionGet();
+        client().prepareIndex("test", "type", "1").setSource("field1", "value").execute().actionGet();
+        waitForConcreteMappingsOnAll("test", "type", "field1");
 
         logger.info("--> register a queries");
         client().prepareIndex("test", PercolatorService.TYPE_NAME, "1")
@@ -150,6 +153,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
     @Test
     public void testSimple2() throws Exception {
+        assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long"));
         ensureGreen();
 
         // introduce the doc
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startObject("doc")
@@ -162,6 +166,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
                 .execute().actionGet();
         assertMatchCount(response, 0l);
         assertThat(response.getMatches(), emptyArray());
+        waitForConcreteMappingsOnAll("test", "type1", "field1", "field2");
 
         // add first query...
         client().prepareIndex("test", PercolatorService.TYPE_NAME, "test1")
@@ -199,10 +204,10 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
 
-        // add a range query (cached)
+        // add a query
-        client().prepareIndex("test1", PercolatorService.TYPE_NAME)
+        client().prepareIndex("test", PercolatorService.TYPE_NAME, "test3")
                 .setSource(
                         XContentFactory.jsonBuilder().startObject().field("query",
-                                constantScoreQuery(FilterBuilders.rangeFilter("field2").from(1).to(5).includeLower(true).setExecution("fielddata"))
+                                constantScoreQuery(FilterBuilders.rangeFilter("field1").from(1).to(5).includeLower(true).setExecution("fielddata"))
                         ).endObject()
                 )
                 .execute().actionGet();
@@ -210,9 +215,9 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
         response = client().preparePercolate()
                 .setIndices("test").setDocumentType("type1")
                 .setSource(doc).execute().actionGet();
-        assertMatchCount(response, 1l);
-        assertThat(response.getMatches(), arrayWithSize(1));
-        assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1"));
+        assertMatchCount(response, 2l);
+        assertThat(response.getMatches(), arrayWithSize(2));
+        assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("test1", "test3"));
     }
 
     @Test
@@ -284,9 +289,10 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
         createIndex("test");
         ensureGreen();
 
-        client().prepareIndex("test", "test", "1").setSource("field1", "value1").execute().actionGet();
+        client().prepareIndex("my-queries-index", "test", "1").setSource("field1", "value1").execute().actionGet();
+        waitForConcreteMappingsOnAll("my-queries-index", "test", "field1");
         logger.info("--> register a query");
-        client().prepareIndex("my-queries-index", PercolatorService.TYPE_NAME, "kuku")
+        client().prepareIndex("my-queries-index", PercolatorService.TYPE_NAME, "kuku1")
                 .setSource(jsonBuilder().startObject()
                         .field("color", "blue")
                         .field("query", termQuery("field1", "value1"))
@@ -298,9 +304,10 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
         createIndex("test");
         ensureGreen();
 
-        client().prepareIndex("test", "test", "1").setSource("field1", "value1").execute().actionGet();
+        client().prepareIndex("my-queries-index", "test", "1").setSource("field1", "value1").execute().actionGet();
+        waitForConcreteMappingsOnAll("my-queries-index", "test", "field1");
         logger.info("--> register a query");
-        client().prepareIndex("my-queries-index", PercolatorService.TYPE_NAME, "kuku")
+        client().prepareIndex("my-queries-index", PercolatorService.TYPE_NAME, "kuku2")
                 .setSource(jsonBuilder().startObject()
                         .field("color", "blue")
                         .field("query", termQuery("field1", "value1"))
@@ -324,7 +331,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
                 .endObject()
                 .endObject().endObject();
 
-        createIndex("test");
+        assertAcked(prepareCreate("test").setSettings(builder).addMapping("doc", mapping));
         ensureGreen();
 
         logger.info("--> register a query");
@@ -350,7 +357,9 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void createIndexAndThenRegisterPercolator() throws Exception {
-        createIndex("test");
+        prepareCreate("test")
+                .addMapping("type1", "field1", "type=string")
+                .get();
         ensureGreen();
 
         logger.info("--> register a query");
@@ -399,7 +408,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void multiplePercolators() throws Exception {
-        createIndex("test");
+        assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string"));
         ensureGreen();
 
         logger.info("--> register a query 1");
@@ -440,7 +449,10 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void dynamicAddingRemovingQueries() throws Exception {
-        createIndex("test");
+        assertAcked(
+                prepareCreate("test")
+                        .addMapping("type1", "field1", "type=string")
+        );
         ensureGreen();
 
         logger.info("--> register a query 1");
@@ -514,6 +526,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
     public void percolateWithSizeField() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("_size").field("enabled", true).field("stored", "yes").endObject()
+                .startObject("properties").startObject("field1").field("type", "string").endObject().endObject()
                 .endObject().endObject().string();
 
         assertAcked(prepareCreate("test").addMapping("type1", mapping));
@@ -911,7 +924,8 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
         ensureGreen();
 
         logger.info("--> Add dummy doc");
-        client().prepareIndex("test", "type", "1").setSource("field", "value").execute().actionGet();
+        client().prepareIndex("test", "type", "1").setSource("field1", "value").execute().actionGet();
+        waitForConcreteMappingsOnAll("test", "type", "field1");
 
         logger.info("--> register a queries");
         client().prepareIndex("test", PercolatorService.TYPE_NAME, "1")
@@ -1314,64 +1328,34 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void testPercolatorWithHighlighting() throws Exception {
-        Client client = client();
-        createIndex("test");
-        ensureGreen();
-
+        StringBuilder fieldMapping = new StringBuilder("type=string")
+                .append(",store=").append(randomBoolean());
         if (randomBoolean()) {
             // FVH HL
-            client.admin().indices().preparePutMapping("test").setType("type")
-                    .setSource(
-                            jsonBuilder().startObject().startObject("type")
-                                    .startObject("properties")
-                                    .startObject("field1").field("type", "string").field("store", randomBoolean())
-                                    .field("term_vector", "with_positions_offsets").endObject()
-                                    .endObject()
-                                    .endObject().endObject()
-                    ).get();
+            fieldMapping.append(",term_vector=with_positions_offsets");
         } else if (randomBoolean()) {
-            // plain hl with stored fields
-            client.admin().indices().preparePutMapping("test").setType("type")
-                    .setSource(
-                            jsonBuilder().startObject().startObject("type")
-                                    .startObject("properties")
-                                    .startObject("field1").field("type", "string").field("store", true).endObject()
-                                    .endObject()
-                                    .endObject().endObject()
-                    ).get();
-        } else if (randomBoolean()) {
             // positions hl
-            client.admin().indices().preparePutMapping("test").setType("type")
-                    .setSource(
-                            jsonBuilder().startObject().startObject("type")
-                                    .startObject("properties")
-                                    .startObject("field1").field("type", "string")
-                                    .field("index_options", "offsets")
-                                    .endObject()
-                                    .endObject()
-                                    .endObject().endObject()
-                    ).get();
+            fieldMapping.append(",index_options=offsets");
         }
+        assertAcked(prepareCreate("test").addMapping("type", "field1", fieldMapping.toString()));
 
         logger.info("--> register a queries");
-        client.prepareIndex("test", PercolatorService.TYPE_NAME, "1")
+        client().prepareIndex("test", PercolatorService.TYPE_NAME, "1")
                 .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "brown fox")).endObject())
                 .execute().actionGet();
-        client.prepareIndex("test", PercolatorService.TYPE_NAME, "2")
+        client().prepareIndex("test", PercolatorService.TYPE_NAME, "2")
                 .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "lazy dog")).endObject())
                 .execute().actionGet();
-        client.prepareIndex("test", PercolatorService.TYPE_NAME, "3")
+        client().prepareIndex("test", PercolatorService.TYPE_NAME, "3")
                 .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "jumps")).endObject())
                 .execute().actionGet();
-        client.prepareIndex("test", PercolatorService.TYPE_NAME, "4")
+        client().prepareIndex("test", PercolatorService.TYPE_NAME, "4")
                 .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "dog")).endObject())
                 .execute().actionGet();
-        client.prepareIndex("test", PercolatorService.TYPE_NAME, "5")
+        client().prepareIndex("test", PercolatorService.TYPE_NAME, "5")
                 .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "fox")).endObject())
                 .execute().actionGet();
 
         logger.info("--> Percolate doc with field1=The quick brown fox jumps over the lazy dog");
-        PercolateResponse response = client.preparePercolate()
+        PercolateResponse response = client().preparePercolate()
                 .setIndices("test").setDocumentType("type")
                 .setSize(5)
                 .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))
@ -1396,10 +1380,10 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));

// Anything with a percolate query isn't realtime
client.admin().indices().prepareRefresh("test").execute().actionGet();
client().admin().indices().prepareRefresh("test").execute().actionGet();
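// i.e. percolation runs against the searchable view of the registered queries, so the queries
// indexed above only take part once this refresh makes them visible.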

logger.info("--> Query percolate doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
response = client().preparePercolate()
        .setIndices("test").setDocumentType("type")
        .setSize(5)
        .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))

@@ -1425,7 +1409,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));

logger.info("--> Query percolate with score for doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
response = client().preparePercolate()
        .setIndices("test").setDocumentType("type")
        .setSize(5)
        .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))

@@ -1457,7 +1441,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));

logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
response = client().preparePercolate()
        .setIndices("test").setDocumentType("type")
        .setSize(5)
        .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))

@@ -1489,7 +1473,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));

logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
response = client().preparePercolate()
        .setIndices("test").setDocumentType("type")
        .setSize(5)
        .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))

@@ -1521,12 +1505,12 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em> over the lazy dog"));

// Highlighting an existing doc
client.prepareIndex("test", "type", "1")
client().prepareIndex("test", "type", "1")
        .setSource(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())
        .get();

logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
response = client().preparePercolate()
        .setIndices("test").setDocumentType("type")
        .setSize(5)
        .setGetRequest(Requests.getRequest("test").type("type").id("1"))
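// Here the percolator is pointed at an already-indexed document via setGetRequest (index/type/id),
// rather than being handed the document source inline with setPercolateDoc as in the requests above.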

@@ -1630,7 +1614,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {

@Test
public void percolateNonMatchingConstantScoreQuery() throws Exception {
    assertAcked(client().admin().indices().prepareCreate("test"));
    assertAcked(prepareCreate("test").addMapping("doc", "message", "type=string"));
    ensureGreen();

    logger.info("--> register a query");
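// The bare prepareCreate("test") gives way to one that maps `message` as a string; presumably the
// query registered below refers to that field and would otherwise be rejected as unmapped.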

@@ -1710,12 +1694,14 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
                .endObject().endObject()));
ensureGreen("idx");

try {
    client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1")
            .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryString("color:red")).endObject())
            .get();
    client().prepareIndex("idx", PercolatorService.TYPE_NAME, "2")
            .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryString("color:blue")).endObject())
            .get();
    fail();
} catch (PercolatorException e) {
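    // expected: `color` is not mapped at this point, so registering these queries fails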
|
||||
|
||||
}
|
||||
|
||||
PercolateResponse percolateResponse = client().preparePercolate().setDocumentType("type")
|
||||
.setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(jsonBuilder().startObject().startObject("custom").field("color", "blue").endObject().endObject()))
|
||||
|

@@ -1727,6 +1713,9 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {

// The previous percolate request introduced the custom.color field, so now we register the queries again
// and the field name `color` will be resolved to the `custom.color` field in the mapping via smart field-name resolution.
client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1")
        .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryString("color:red")).endObject())
        .get();
client().prepareIndex("idx", PercolatorService.TYPE_NAME, "2")
        .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryString("color:blue")).field("type", "type").endObject())
        .get();

@@ -1792,6 +1781,30 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1"));
}

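// New test pinning down the stricter behaviour: registering a percolator query that refers to an
// unmapped field now fails with a PercolatorException whose root cause is a QueryParsingException,
// for term and range queries alike.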
@Test
public void testAddQueryWithNoMapping() throws Exception {
    client().admin().indices().prepareCreate("test").get();
    ensureGreen();

    try {
        client().prepareIndex("test", PercolatorService.TYPE_NAME)
                .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "value")).endObject())
                .get();
        fail();
    } catch (PercolatorException e) {
        assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
    }

    try {
        client().prepareIndex("test", PercolatorService.TYPE_NAME)
                .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(0).to(1)).endObject())
                .get();
        fail();
    } catch (PercolatorException e) {
        assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
    }
}

void initNestedIndexAndPercolation() throws IOException {
    XContentBuilder mapping = XContentFactory.jsonBuilder();
    mapping.startObject().startObject("properties").startObject("companyname").field("type", "string").endObject()

@@ -1833,8 +1846,10 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
String mapping = "{\n" +
|
||||
" \"doc\": {\n" +
|
||||
" \"properties\": {\n" +
|
||||
" \"name\": {\"type\":\"string\"},\n" +
|
||||
" \"persons\": {\n" +
|
||||
" \"type\": \"nested\"\n" +
|
||||
" \"type\": \"nested\"\n," +
|
||||
" \"properties\" : {\"foo\" : {\"type\" : \"string\"}}" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
|
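// The nested `persons` object now declares an explicit `foo` string property; presumably so that
// queries against `persons.foo` in this test still parse once unmapped fields are rejected.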

@@ -72,7 +72,7 @@ public class RecoveryPercolatorTests extends ElasticsearchIntegrationTest {
@Slow
public void testRestartNodePercolator1() throws Exception {
    internalCluster().startNode();
    createIndex("test");
    assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string"));
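// As elsewhere in this commit, the bare createIndex("test") is replaced by prepareCreate with an
// explicit field1 mapping, so the "kuku" query registered below refers to an already-mapped field.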
logger.info("--> register a query");
|
||||
client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku")
|
||||
|

@@ -112,7 +112,7 @@ public class RecoveryPercolatorTests extends ElasticsearchIntegrationTest {
@Slow
public void testRestartNodePercolator2() throws Exception {
    internalCluster().startNode();
    createIndex("test");
    assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string"));

    logger.info("--> register a query");
    client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku")

@@ -71,6 +71,7 @@ public class TTLPercolatorTests extends ElasticsearchIntegrationTest {
String typeMapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
        .startObject("_ttl").field("enabled", true).endObject()
        .startObject("_timestamp").field("enabled", true).endObject()
        .startObject("properties").startObject("field1").field("type", "string").endObject().endObject()
        .endObject().endObject().string();
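// The one added line in this hunk appears to be the `properties` block mapping field1, so that even
// the TTL test declares the field its percolator queries refer to.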

client.admin().indices().prepareCreate("test")