Merge branch 'master' into ccr

* master:
  [DOCS] Fixes typos in security settings
  Fix GeoShapeQueryBuilder serialization after backport
  [DOCS] Splits auditing.asciidoc into smaller files
  Reintroduce mandatory http pipelining support (#30820)
  Painless: Types Section Clean Up (#30283)
  Add support for indexed shape routing in geo_shape query (#30760)
  [test] java tests for archive packaging (#30734)
  Revert "Make http pipelining support mandatory (#30695)" (#30813)
  [DOCS] Fix more edit URLs in Stack Overview (#30704)
  Use correct cluster state version for node fault detection (#30810)
  Change serialization version of doc-value fields.
  [DOCS] Fixes broken link for native realm
  [DOCS] Clarified audit.index.client.hosts (#30797)
  [TEST] Don't expect acks when isolating nodes
  Add a `format` option to `docvalue_fields`. (#29639)
  Fixes UpdateSettingsRequestStreamableTests mutate bug
  Mutes {p0=snapshot.get_repository/10_basic/*} YAML test
  Revert "Mutes MachineLearningTests.testNoAttributes_givenSameAndMlEnabled"
  Only allow x-pack metadata if all nodes are ready (#30743)
  Mutes MachineLearningTests.testNoAttributes_givenSameAndMlEnabled
  Use original settings on full-cluster restart (#30780)
  Only ack cluster state updates successfully applied on all nodes (#30672)
  Expose Lucene's FeatureField. (#30618)
  Fix a grammatical error in the 'search types' documentation.
  Remove http pipelining from integration test case (#30788)
Commit 3fb2c45fa7 by Nhat Nguyen, 2018-05-23 23:05:14 -04:00
174 changed files with 5454 additions and 1000 deletions


@@ -512,7 +512,9 @@ into it
vagrant ssh debian-9
--------------------------------------------
Now inside the VM, start the packaging tests from the terminal. There are two packaging
test projects. The old ones are written with https://github.com/sstephenson/bats[bats]
and only run on linux. To run them do
--------------------------------------------
cd $PACKAGING_ARCHIVES
@@ -524,18 +526,36 @@ sudo bats $BATS_TESTS/*.bats
sudo bats $BATS_TESTS/20_tar_package.bats $BATS_TESTS/25_tar_plugins.bats
--------------------------------------------
The new packaging tests are written in Java and run on both linux and windows. On
linux (again, inside the VM)
--------------------------------------------
# run the full suite
sudo bash $PACKAGING_TESTS/run-tests.sh

# run specific test cases
sudo bash $PACKAGING_TESTS/run-tests.sh \
  org.elasticsearch.packaging.test.DefaultZipTests \
  org.elasticsearch.packaging.test.OssZipTests
--------------------------------------------
or on Windows, from a terminal running as Administrator
--------------------------------------------
# run the full suite
powershell -File $Env:PACKAGING_TESTS/run-tests.ps1

# run specific test cases
powershell -File $Env:PACKAGING_TESTS/run-tests.ps1 `
  org.elasticsearch.packaging.test.DefaultZipTests `
  org.elasticsearch.packaging.test.OssZipTests
--------------------------------------------
Note that on Windows boxes when running from inside the GUI, you may have to log out and
back in to the `vagrant` user (password `vagrant`) for the environment variables that
locate the packaging tests and distributions to take effect, due to how vagrant provisions
Windows machines.

When you've made changes you want to test, keep the VM up and reload the tests and
distributions inside by running (on the host)

Vagrantfile

@@ -237,6 +237,7 @@ def linux_common(config,
config.vm.provision 'markerfile', type: 'shell', inline: <<-SHELL
touch /etc/is_vagrant_vm
touch /is_vagrant_vm # for consistency between linux and windows
SHELL
# This prevents leftovers from previous tests using the


@@ -52,6 +52,8 @@ class VagrantTestPlugin implements Plugin<Project> {
static final List<String> DISTRIBUTIONS = unmodifiableList([
'archives:tar',
'archives:oss-tar',
'archives:zip',
'archives:oss-zip',
'packages:rpm',
'packages:oss-rpm',
'packages:deb',
@@ -242,13 +244,27 @@ class VagrantTestPlugin implements Plugin<Project> {
Task createLinuxRunnerScript = project.tasks.create('createLinuxRunnerScript', FileContentsTask) {
  dependsOn copyPackagingTests
  file "${testsDir}/run-tests.sh"
  contents """\
if [ "\$#" -eq 0 ]; then
  test_args=( "${-> project.extensions.esvagrant.testClass}" )
else
  test_args=( "\$@" )
fi
java -cp "\$PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner "\${test_args[@]}"
"""
}
Task createWindowsRunnerScript = project.tasks.create('createWindowsRunnerScript', FileContentsTask) {
  dependsOn copyPackagingTests
  file "${testsDir}/run-tests.ps1"
  // the use of $args rather than param() here is deliberate because the syntax for array (multivalued) parameters is likely
  // a little trappy for those unfamiliar with powershell
  contents """\
if (\$args.Count -eq 0) {
  \$testArgs = @("${-> project.extensions.esvagrant.testClass}")
} else {
  \$testArgs = \$args
}
java -cp "\$Env:PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner @testArgs
exit \$LASTEXITCODE
"""
}
@@ -525,9 +541,10 @@ class VagrantTestPlugin implements Plugin<Project> {
if (LINUX_BOXES.contains(box)) {
  javaPackagingTest.command = 'ssh'
  javaPackagingTest.args = ['--command', 'sudo bash "$PACKAGING_TESTS/run-tests.sh"']
} else {
  javaPackagingTest.command = 'winrm'
  // winrm commands run as administrator
  javaPackagingTest.args = ['--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"']
}


@@ -1,172 +1,456 @@
[[painless-casting]]
=== Casting

A cast converts the value of an original type to the equivalent value of a
target type. An implicit cast infers the target type and automatically occurs
during certain <<painless-operators, operations>>. An explicit cast specifies
the target type and forcefully occurs as its own operation. Use the *cast
operator* to specify an explicit cast.

*Errors*

* If during a cast there exists no equivalent value for the target type.
* If an implicit cast is given, but an explicit cast is required.

*Grammar*
[source,ANTLR4]
----
cast: '(' TYPE ')' expression
----

*Examples*

* Valid casts.
+
[source,Painless]
----
<1> int i = (int)5L;
<2> Map m = new HashMap();
<3> HashMap hm = (HashMap)m;
----
+
<1> declare `int i`;
explicit cast `long 5` to `int 5` -> `int 5`;
assign `int 5` to `i`
<2> declare `Map m`;
allocate `HashMap` instance -> `HashMap reference`;
implicit cast `HashMap reference` to `Map reference` -> `Map reference`;
assign `Map reference` to `m`
<3> declare `HashMap hm`;
access `m` -> `Map reference`;
explicit cast `Map reference` to `HashMap reference` -> `HashMap reference`;
assign `HashMap reference` to `hm`

[[numeric-type-casting]]
==== Numeric Type Casting

A <<primitive-types, numeric type>> cast converts the value of an original
numeric type to the equivalent value of a target numeric type. A cast between
two numeric type values results in data loss when the value of the original
numeric type is larger than the target numeric type can accommodate. A cast
between an integer type value and a floating point type value can result in
precision loss.

The allowed casts for values of each numeric type are shown as a row in the
following table:

|====
|        | byte     | short    | char     | int      | long     | float    | double
| byte   |          | implicit | implicit | implicit | implicit | implicit | implicit
| short  | explicit |          | explicit | implicit | implicit | implicit | implicit
| char   | explicit | explicit |          | implicit | implicit | implicit | implicit
| int    | explicit | explicit | explicit |          | implicit | implicit | implicit
| long   | explicit | explicit | explicit | explicit |          | implicit | implicit
| float  | explicit | explicit | explicit | explicit | explicit |          | implicit
| double | explicit | explicit | explicit | explicit | explicit | explicit |
|====

*Examples*

* Valid numeric type casts.
+
[source,Painless]
----
<1> int a = 1;
<2> long b = a;
<3> short c = (short)b;
<4> double e = (double)a;
----
+
<1> declare `int a`;
assign `int 1` to `a`
<2> declare `long b`;
access `a` -> `int 1`;
implicit cast `int 1` to `long 1` -> `long 1`;
assign `long 1` to `b`
<3> declare `short c`;
access `b` -> `long 1`;
explicit cast `long 1` to `short 1` -> `short 1`;
assign `short 1` value to `c`
<4> declare `double e`;
access `a` -> `int 1`;
explicit cast `int 1` to `double 1.0`;
assign `double 1.0` to `e`;
(note the explicit cast is extraneous since an implicit cast is valid)
+
* Invalid numeric type casts resulting in errors.
+
[source,Painless]
----
<1> int a = 1.0; // error
<2> int b = 2;
<3> byte c = b; // error
----
+
<1> declare `int a`;
*error* -> cannot implicit cast `double 1.0` to `int 1`;
(note an explicit cast is valid)
<2> declare `int b`;
assign `int 2` to `b`
<3> declare `byte c`;
access `b` -> `int 2`;
*error* -> cannot implicit cast `int 2` to `byte 2`;
(note an explicit cast is valid)

[[reference-type-casting]]
==== Reference Type Casting

A <<reference-types, reference type>> cast converts the value of an original
reference type to the equivalent value of a target reference type. An implicit
cast between two reference type values is allowed when the original reference
type is a descendant of the target type. An explicit cast between two reference
type values is allowed when the original type is a descendant of the target type
or the target type is a descendant of the original type.

*Examples*

* Valid reference type casts.
+
[source,Painless]
----
<1> List x;
<2> ArrayList y = new ArrayList();
<3> x = y;
<4> y = (ArrayList)x;
<5> x = (List)y;
----
+
<1> declare `List x`;
assign default value `null` to `x`
<2> declare `ArrayList y`;
allocate `ArrayList` instance -> `ArrayList reference`;
assign `ArrayList reference` to `y`;
<3> access `y` -> `ArrayList reference`;
implicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `x`;
(note `ArrayList` is a descendant of `List`)
<4> access `x` -> `List reference`;
explicit cast `List reference` to `ArrayList reference`
-> `ArrayList reference`;
assign `ArrayList reference` to `y`;
<5> access `y` -> `ArrayList reference`;
explicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `x`;
(note the explicit cast is extraneous, and an implicit cast is valid)
+
* Invalid reference type casts resulting in errors.
+
[source,Painless]
----
<1> List x = new ArrayList();
<2> ArrayList y = x; // error
<3> Map m = (Map)x; // error
----
+
<1> declare `List x`;
allocate `ArrayList` instance -> `ArrayList reference`;
implicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `x`
<2> declare `ArrayList y`;
access `x` -> `List reference`;
*error* -> cannot implicit cast `List reference` to `ArrayList reference`;
(note an explicit cast is valid since `ArrayList` is a descendant of `List`)
<3> declare `Map m`;
access `x` -> `List reference`;
*error* -> cannot explicit cast `List reference` to `Map reference`;
(note no cast would be valid since neither `List` nor `Map` is a descendant
of the other)
[[dynamic-type-casting]]
==== Dynamic Type Casting
A <<dynamic-types, dynamic (`def`) type>> cast converts the value of an original
`def` type to the equivalent value of any target type or converts the value of
any original type to the equivalent value of a target `def` type.
An implicit cast from any original type value to a `def` type value is always
allowed. An explicit cast from any original type value to a `def` type value is
always allowed but never necessary.
An implicit or explicit cast from an original `def` type value to
any target type value is allowed if and only if the cast is normally allowed
based on the current type value the `def` type value represents.
*Examples*
* Valid dynamic type casts with any original type to a target `def` type.
+
[source,Painless]
----
<1> def d0 = 3;
<2> d0 = new ArrayList();
<3> Object o = new HashMap();
<4> def d1 = o;
<5> int i = d1.size();
----
+
<1> declare `def d0`;
implicit cast `int 3` to `def`;
assign `int 3` to `d0`
<2> allocate `ArrayList` instance -> `ArrayList reference`;
implicit cast `ArrayList reference` to `def` -> `def`;
assign `def` to `d0`
<3> declare `Object o`;
allocate `HashMap` instance -> `HashMap reference`;
implicit cast `HashMap reference` to `Object reference`
-> `Object reference`;
assign `Object reference` to `o`
<4> declare `def d1`;
access `o` -> `Object reference`;
implicit cast `Object reference` to `def` -> `def`;
assign `def` to `d1`
<5> declare `int i`;
access `d1` -> `def`;
implicit cast `def` to `HashMap reference` -> `HashMap reference`;
call `size` on `HashMap reference` -> `int 0`;
assign `int 0` to `i`;
(note `def` was implicit cast to `HashMap reference` since `HashMap` is the
child-most descendant type value that the `def` type value
represents)
+
* Valid dynamic type casts with an original `def` type to any target type.
+
[source,Painless]
----
<1> def d = 1.0;
<2> int i = (int)d;
<3> d = 1;
<4> float f = d;
<5> d = new ArrayList();
<6> List l = d;
----
+
<1> declare `def d`;
implicit cast `double 1.0` to `def` -> `def`;
assign `def` to `d`
<2> declare `int i`;
access `d` -> `def`;
implicit cast `def` to `double 1.0` -> `double 1.0`;
explicit cast `double 1.0` to `int 1` -> `int 1`;
assign `int 1` to `i`;
(note the explicit cast is necessary since a `double` value cannot be
converted to an `int` value implicitly)
<3> assign `int 1` to `d`;
(note the switch in the type `d` represents from `double` to `int`)
<4> declare `float f`;
access `d` -> `def`;
implicit cast `def` to `int 1` -> `int 1`;
implicit cast `int 1` to `float 1.0` -> `float 1.0`;
assign `float 1.0` to `f`
<5> allocate `ArrayList` instance -> `ArrayList reference`;
assign `ArrayList reference` to `d`;
(note the switch in the type `d` represents from `int` to `ArrayList`)
<6> declare `List l`;
access `d` -> `def`;
implicit cast `def` to `ArrayList reference` -> `ArrayList reference`;
implicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `l`
+
* Invalid dynamic type casts resulting in errors.
+
[source,Painless]
----
<1> def d = 1;
<2> short s = d; // error
<3> d = new HashMap();
<4> List l = d; // error
----
<1> declare `def d`;
implicit cast `int 1` to `def` -> `def`;
assign `def` to `d`
<2> declare `short s`;
access `d` -> `def`;
implicit cast `def` to `int 1` -> `int 1`;
*error* -> cannot implicit cast `int 1` to `short 1`;
(note an explicit cast is valid)
<3> allocate `HashMap` instance -> `HashMap reference`;
implicit cast `HashMap reference` to `def` -> `def`;
assign `def` to `d`
<4> declare `List l`;
access `d` -> `def`;
implicit cast `def` to `HashMap reference`;
*error* -> cannot implicit cast `HashMap reference` to `List reference`;
(note no cast would be valid since neither `HashMap` nor `List` is a
descendant of the other)
[[string-character-casting]]
==== String to Character Casting
Use the *cast operator* to convert a <<string-type, `String` type>> value into a
<<primitive-types, `char` type>> value.
*Errors*
* If the `String` type value isn't one character in length.
* If the `String` type value is `null`.
*Examples*
* Casting string literals into `char` type values.
+
[source,Painless]
----
<1> char c = (char)"C"
<2> c = (char)'c'
----
+
<1> declare `char c`;
explicit cast `String "C"` to `char C` -> `char C`;
assign `char C` to `c`
<2> explicit cast `String 'c'` to `char c` -> `char c`;
assign `char c` to `c`
+
* Casting a `String` reference into a `char` value.
+
[source,Painless]
----
<1> String s = "s";
<2> char c = (char)s;
----
<1> declare `String s`;
assign `String "s"` to `s`;
<2> declare `char c`
access `s` -> `String "s"`;
explicit cast `String "s"` to `char s` -> `char s`;
assign `char s` to `c`
[[boxing-unboxing]]
==== Boxing and Unboxing

Boxing is a special type of cast used to convert a primitive type to its
corresponding reference type. Unboxing is the reverse used to convert a
reference type to its corresponding primitive type.

Implicit boxing/unboxing occurs during the following operations:

* Conversions between a `def` type and a primitive type will be implicitly
boxed/unboxed as necessary, though this is referred to as an implicit cast
throughout the documentation.
* Method/function call arguments will be implicitly boxed/unboxed as necessary.
* A primitive type value will be implicitly boxed when a reference type method
call is invoked on it.

Explicit boxing/unboxing is not allowed. Use the reference type API to
explicitly convert a primitive type value to its respective reference type
value and vice versa.

*Errors*

* If an explicit cast is made to box/unbox a primitive type.

*Examples*

* Uses of implicit boxing/unboxing.
+
[source,Painless]
----
<1> List l = new ArrayList();
<2> l.add(1);
<3> Integer I = Integer.valueOf(0);
<4> int i = l.get(I);
----
+
<1> declare `List l`;
allocate `ArrayList` instance -> `ArrayList reference`;
assign `ArrayList reference` to `l`;
<2> access `l` -> `List reference`;
implicit cast `int 1` to `def` -> `def`;
call `add` on `List reference` with arguments (`def`);
(note internally `int 1` is boxed to `Integer 1` to store as a `def` type
value)
<3> declare `Integer I`;
call `valueOf` on `Integer` with arguments of (`int 0`) -> `Integer 0`;
assign `Integer 0` to `I`;
<4> declare `int i`;
access `I` -> `Integer 0`;
unbox `Integer 0` -> `int 0`;
access `l` -> `List reference`;
call `get` on `List reference` with arguments (`int 0`) -> `def`;
implicit cast `def` to `int 1` -> `int 1`;
assign `int 1` to `i`;
(note internally `int 1` is unboxed from `Integer 1` when loaded from a
`def` type value)
+
* Uses of invalid boxing/unboxing resulting in errors.
+
[source,Painless]
----
<1> Integer x = 1; // error
<2> Integer y = (Integer)1; // error
<3> int a = Integer.valueOf(1); // error
<4> int b = (int)Integer.valueOf(1); // error
----
+
<1> declare `Integer x`;
*error* -> cannot implicit box `int 1` to `Integer 1` during assignment
<2> declare `Integer y`;
*error* -> cannot explicit box `int 1` to `Integer 1` during assignment
<3> declare `int a`;
call `valueOf` on `Integer` with arguments of (`int 1`) -> `Integer 1`;
*error* -> cannot implicit unbox `Integer 1` to `int 1` during assignment
<4> declare `int b`;
call `valueOf` on `Integer` with arguments of (`int 1`) -> `Integer 1`;
*error* -> cannot explicit unbox `Integer 1` to `int 1` during assignment
[[promotion]]
==== Promotion

Promotion is when a single value is implicitly cast to a certain type or
multiple values are implicitly cast to the same type as required for evaluation
by certain operations. Each operation that requires promotion has a promotion
table that shows all required implicit casts based on the type(s) of value(s). A
value can be promoted to a `def` type at compile-time; however, the promoted
type value is derived from what the `def` type value represents at run-time.

*Errors*

* If a specific operation cannot find an allowed promotion type for the type(s)
of value(s) given.

*Examples*

* Uses of promotion.
+
[source,Painless]
----
<1> double d = 2 + 2.0;
<2> def x = 1;
<3> float f = x + 2.0F;
----
<1> declare `double d`;
promote `int 2` and `double 2.0 @0` -> `double 2.0 @0`;
implicit cast `int 2` to `double 2.0 @1` -> `double 2.0 @1`;
add `double 2.0 @1` and `double 2.0 @0` -> `double 4.0`;
assign `double 4.0` to `d`
<2> declare `def x`;
implicit cast `int 1` to `def` -> `def`;
assign `def` to `x`;
<3> declare `float f`;
access `x` -> `def`;
implicit cast `def` to `int 1` -> `int 1`;
promote `int 1` and `float 2.0` -> `float 2.0`;
implicit cast `int 1` to `float 1.0` -> `float 1.0`;
add `float 1.0` and `float 2.0` -> `float 3.0`;
assign `float 3.0` to `f`;
(note this example illustrates promotion done at run-time as promotion
done at compile-time would have resolved to a `def` type value)


@@ -1,12 +1,12 @@
[[painless-comments]]
=== Comments

Use a comment to annotate or explain code within a script. Use the `//` token
anywhere on a line to specify a single-line comment. All characters from the
`//` token to the end of the line are ignored. Use an opening `/*` token and a
closing `*/` token to specify a multi-line comment. Multi-line comments can
start anywhere on a line, and all characters in between the `/*` token and `*/`
token are ignored. Comments can be included anywhere within a script.
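
An illustrative sketch of both comment forms (hypothetical script, standard Painless syntax):

[source,Painless]
----
// a single-line comment that runs to the end of the line
int count = 0; /* a multi-line comment
   that is ignored until the closing token */
return count;
----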

*Grammar*
[source,ANTLR4]


@@ -1,10 +1,10 @@
[[painless-identifiers]]
=== Identifiers

Use an identifier as a named token to specify a
<<painless-variables, variable>>, <<painless-types, type>>,
<<dot-operator, field>>, <<dot-operator, method>>, or function.
<<painless-keywords, Keywords>> cannot be used as identifiers.
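
A quick sketch of valid identifiers (hypothetical names, standard Painless syntax):

[source,Painless]
----
int count = 1;        // 'count' is an identifier for a variable
String _name = "a";   // a leading underscore is allowed
def value2 = count;   // digits may follow the first character
----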

*Grammar*
[source,ANTLR4]


@@ -1,9 +1,9 @@
[[painless-keywords]]
=== Keywords

Keywords are reserved tokens for built-in language features and cannot be used
as <<painless-identifiers, identifiers>> within a script. The following are
keywords:

[cols="^1,^1,^1,^1,^1"]
|====


@@ -6,7 +6,7 @@ Painless syntax is similar to Java syntax along with some additional
features such as dynamic typing, Map and List accessor shortcuts, and array
initializers. As a direct comparison to Java, there are some important
differences, especially related to the casting model. For more detailed
conceptual information about the basic constructs that Painless and Java share,
refer to the corresponding topics in the
https://docs.oracle.com/javase/specs/jls/se8/html/index.html[Java Language
Specification].


@@ -1,18 +1,19 @@
[[painless-literals]]
=== Literals

Use a literal to specify a value directly in an
<<painless-operators, operation>>.

[[integers]]
==== Integers

Use an integer literal to specify an integer type value in decimal, octal, or
hex notation of a <<primitive-types, primitive type>> `int`, `long`, `float`,
or `double`. Use the following single letter designations to specify the
primitive type: `l` or `L` for `long`, `f` or `F` for `float`, and `d` or `D`
for `double`. If not specified, the type defaults to `int`. Use `0` as a prefix
to specify an integer literal as octal, and use `0x` or `0X` as a prefix to
specify an integer literal as hex.
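
For instance, a few integer literal forms (an illustrative sketch, assuming the standard Java-style notation described above):

[source,Painless]
----
0     // `int` in decimal notation (the default type)
1234L // `long`
90f   // `float`
-022  // `int` in octal notation
0xF2A // `int` in hex notation
----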

*Grammar*
[source,ANTLR4]
@@ -46,11 +47,10 @@ HEX: '-'? '0' [xX] [0-9a-fA-F]+ [lL]?;
[[floats]]
==== Floats

Use a floating point literal to specify a floating point type value of a
<<primitive-types, primitive type>> `float` or `double`. Use the following
single letter designations to specify the primitive type: `f` or `F` for `float`
and `d` or `D` for `double`. If not specified, the type defaults to `double`.
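
For instance, a few floating point literal forms (an illustrative sketch):

[source,Painless]
----
0.0      // `double` (the default type)
1E6      // `double` in exponent notation
89.9F    // `float`
-126.34d // `double`
----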

*Grammar*
[source,ANTLR4]
@@ -81,7 +81,7 @@ EXPONENT: ( [eE] [+\-]? [0-9]+ );
[[strings]]
==== Strings

Use a string literal to specify a <<string-type, `String` type>> value with
either single-quotes or double-quotes. Use a `\"` token to include a
double-quote as part of a double-quoted string literal. Use a `\'` token to
include a single-quote as part of a single-quoted string literal. Use a `\\`
@@ -117,26 +117,6 @@ STRING: ( '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' )
[[characters]]
==== Characters

A character literal cannot be specified directly. Instead, use the
<<string-character-casting, cast operator>> to convert a `String` type value
into a `char` type value.


@@ -240,6 +240,7 @@ operator. See Function Calls [MARK] for more information.
The brackets operator `[]` is used to create and access arrays, lists, and maps.
The braces operator `{}` is used to initialize arrays.

[[array-initialization]]
===== Creating and Initializing Arrays

You create and initialize arrays using the brackets `[]` and braces `{}`
@@ -248,9 +249,49 @@ initialize each dimension with are specified as a comma-separated list enclosed
in braces. For example, `new int[] {1, 2, 3}` creates a one dimensional `int`
array with a size of 3 and the values 1, 2, and 3.

To allocate an array, you use the `new` keyword followed by the type and a
set of brackets for each dimension. You can explicitly define the size of each
dimension by specifying an expression within the brackets, or initialize each
dimension with the desired number of values. The allocated size of each
dimension is its permanent size.

To initialize an array, specify the values you want to initialize
each dimension with as a comma-separated list of expressions enclosed in braces.
For example, `new int[] {1, 2, 3}` creates a one-dimensional `int` array with a
size of 3 and the values 1, 2, and 3.

When you initialize an array, the order of the expressions is maintained. Each
expression used as part of the initialization is converted to the
array's type. An error occurs if the types do not match.
*Grammar:*
[source,ANTLR4]
----
declare_array: TYPE ('[' ']')+;
array_initialization: 'new' TYPE '[' ']' '{' expression (',' expression) '}'
| 'new' TYPE '[' ']' '{' '}';
----
*Examples:*
[source,Java]
----
int[] x = new int[5]; // Declare int array x and assign it a newly
// allocated int array with a size of 5
def[][] y = new def[5][5]; // Declare the 2-dimensional def array y and
// assign it a newly allocated 2-dimensional
// array where both dimensions have a size of 5
int[] x = new int[] {1, 2, 3}; // Declare int array x and set it to an int
// array with values 1, 2, 3 and a size of 3
int i = 1;
long l = 2L;
float f = 3.0F;
double d = 4.0;
String s = "5";
def[] da = new def[] {i, l, f*d, s}; // Declare def array da and set it to
// a def array with a size of 4 and the
// values i, l, f*d, and s
----
[[array-access]]
===== Accessing Array Elements

Elements in an array are stored and accessed using the brackets `[]` operator.
@@ -298,6 +339,7 @@ return d[z]; // Access the 1st element of array d using the
NOTE: The use of the `def` type in the second example means that the types
cannot be resolved until runtime.
[[array-length]]
===== Array Length

Arrays contain a special member known as 'length' that is a read-only value that contains the size of the array. This member can be accessed from an array using the dot operator.
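
A minimal sketch of reading `length` with the dot operator (hypothetical values):

[source,Painless]
----
int[] a = new int[] {2, 4, 6};
int size = a.length; // 3; 'length' is read-only
----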
@@ -727,6 +769,7 @@ def e; // declares the def variable e
e = new HashMap(m); // sets e to a newly allocated HashMap using the constructor with a single argument m
----

[[new-array]]
==== New Array

An array type instance can be allocated using the new operator. The format starts with the new operator followed by the type followed by a series of opening and closing brackets each containing an expression for the size of the dimension.
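
For instance, a short sketch of that format (hypothetical sizes):

[source,Painless]
----
int[] x = new int[5];    // one dimension with a size of 5
def y = new def[2][3];   // two dimensions with sizes 2 and 3
----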


@@ -1,269 +1,466 @@
[[painless-types]]
=== Types

A type is a classification of data used to define the properties of a value.
These properties specify what data a value represents and the rules for how a
value is evaluated during an <<painless-operators, operation>>. Each type
belongs to one of the following categories: <<primitive-types, primitive>>,
<<reference-types, reference>>, or <<dynamic-types, dynamic>>.

[[primitive-types]]
==== Primitive Types

A primitive type represents basic data built natively into the JVM and is
allocated to non-heap memory. Declare a primitive type
<<painless-variables, variable>>, and assign it a primitive type value for
evaluation during later operations. The default value for a newly-declared
primitive type variable is listed as part of the definitions below. A primitive
type value is copied during an assignment or as an argument for a
method/function call.

A primitive type has a corresponding reference type (also known as a boxed
type). Use the <<field-access, field access operator>> or
<<method-access, method call operator>> on a primitive type value to force
evaluation as its corresponding reference type value.

The following primitive types are available:
[horizontal]
`byte`::
8-bit, signed, two's complement integer
* range: [`-128`, `127`]
* default value: `0`
* reference type: `Byte`

`short`::
16-bit, signed, two's complement integer
* range: [`-32768`, `32767`]
* default value: `0`
* reference type: `Short`

`char`::
16-bit, unsigned, Unicode character
* range: [`0`, `65535`]
* default value: `0` or `\u0000`
* reference type: `Character`

`int`::
32-bit, signed, two's complement integer
* range: [`-2^31`, `2^31-1`]
* default value: `0`
* reference type: `Integer`

`long`::
64-bit, signed, two's complement integer
* range: [`-2^63`, `2^63-1`]
* default value: `0`
* reference type: `Long`

`float`::
32-bit, signed, single-precision, IEEE 754 floating point number
* default value: `0.0`
* reference type: `Float`

`double`::
64-bit, signed, double-precision, IEEE 754 floating point number
* default value: `0.0`
* reference type: `Double`

`boolean`::
logical quantity with two possible values of `true` and `false`
* default value: `false`
* reference type: `Boolean`

*Examples*

* Primitive types used in declaration, declaration and assignment.
+
[source,Painless]
----
<1> int i = 1;
<2> double d;
<3> boolean b = true;
----
+
<1> declare `int i`;
assign `int 1` to `i`
<2> declare `double d`;
assign default `double 0.0` to `d`
<3> declare `boolean b`;
assign `boolean true` to `b`
+
* Method call on a primitive type using the corresponding reference type.
+
[source,Painless]
----
<1> int i = 1;
<2> i.toString();
----
+
<1> declare `int i`;
assign `int 1` to `i`
<2> access `i` -> `int 1`;
box `int 1` -> `Integer 1 reference`;
call `toString` on `Integer 1 reference` -> `String '1'`

[[reference-types]]
==== Reference Types

A reference type is a named construct (object), potentially representing
multiple pieces of data (member fields) and logic to manipulate that data
(member methods), defined as part of the application programming interface
(API) for scripts.

A reference type instance is a single set of data for one reference type
object allocated to the heap. Use the
<<constructor-call, new instance operator>> to allocate a reference type
instance. Use a reference type instance to load from, store to, and manipulate
complex data.

A reference type value refers to a reference type instance, and multiple
reference type values may refer to the same reference type instance. A change to
a reference type instance will affect all reference type values referring to
that specific instance.

Declare a reference type <<painless-variables, variable>>, and assign it a
reference type value for evaluation during later operations. The default value
for a newly-declared reference type variable is `null`. A reference type value
is shallow-copied during an assignment or as an argument for a method/function
call. Assign `null` to a reference type variable to indicate the reference type
value refers to no reference type instance. The JVM will garbage collect a
reference type instance when it is no longer referred to by any reference type
values. Pass `null` as an argument to a method/function call to indicate the
argument refers to no reference type instance.

A reference type object defines zero-to-many of each of the following:

static member field::
A static member field is a named and typed piece of data. Each reference type
*object* contains one set of data representative of its static member fields.
Use the <<field-access, field access operator>> in correspondence with the
reference type object name to access a static member field for loading and
storing to a specific reference type *object*. No reference type instance
allocation is necessary to use a static member field.
non-static member field::
A non-static member field is a named and typed piece of data. Each reference
type *instance* contains one set of data representative of its reference type
object's non-static member fields. Use the
<<field-access, field access operator>> for loading and storing to a non-static
member field of a specific reference type *instance*. An allocated reference
type instance is required to use a non-static member field.
static member method::
A static member method is a function called on a reference type *object*. Use
the <<method-access, method call operator>> in correspondence with the reference
type object name to call a static member method. No reference type instance
allocation is necessary to use a static member method.
non-static member method::
A non-static member method is a function called on a reference type *instance*.
A non-static member method called on a reference type instance can load from and
store to non-static member fields of that specific reference type instance. Use
the <<method-access, method call operator>> in correspondence with a specific
reference type instance to call a non-static member method. An allocated
reference type instance is required to use a non-static member method.
constructor::
A constructor is a special type of function used to allocate a reference type
*instance* defined by a specific reference type *object*. Use the
<<constructor-call, new instance operator>> to allocate a reference type
instance.
A reference type object follows a basic inheritance model. Consider types A and
B. Type A is considered to be a parent of B, and B a child of A, if B inherits
(is able to access as its own) all of A's non-static members. Type B is
considered a descendant of A if there exists a recursive parent-child
relationship from B to A with none to many types in between. In this case, B
inherits all of A's non-static members along with all of the non-static members
of the types in between. Type B is also considered to be a type A in both
relationships.

*Examples*

* Reference types evaluated in several different operations.
+
[source,Painless]
----
<1> List l = new ArrayList();
<2> l.add(1);
<3> int i = l.get(0) + 2;
----
+
<1> declare `List l`;
allocate `ArrayList` instance -> `ArrayList reference`;
implicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `l`
<2> access `l` -> `List reference`;
implicit cast `int 1` to `def` -> `def`
call `add` on `List reference` with arguments (`def`)
<3> declare `int i`;
access `l` -> `List reference`;
call `get` on `List reference` with arguments (`int 0`) -> `def`;
implicit cast `def` to `int 1` -> `int 1`;
add `int 1` and `int 2` -> `int 3`;
assign `int 3` to `i`
+
* Sharing a reference type instance.
+
[source,Painless]
----
<1> List l0 = new ArrayList();
<2> List l1 = l0;
<3> l0.add(1);
<4> l1.add(2);
<5> int i = l1.get(0) + l0.get(1);
----
+
<1> declare `List l0`;
allocate `ArrayList` instance -> `ArrayList reference`;
implicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `l0`
<2> declare `List l1`;
access `l0` -> `List reference`;
assign `List reference` to `l1`
(note `l0` and `l1` refer to the same instance known as a shallow-copy)
<3> access `l0` -> `List reference`;
implicit cast `int 1` to `def` -> `def`
call `add` on `List reference` with arguments (`def`)
<4> access `l1` -> `List reference`;
implicit cast `int 2` to `def` -> `def`
call `add` on `List reference` with arguments (`def`)
<5> declare `int i`;
access `l0` -> `List reference`;
call `get` on `List reference` with arguments (`int 0`) -> `def @0`;
implicit cast `def @0` to `int 1` -> `int 1`;
access `l1` -> `List reference`;
call `get` on `List reference` with arguments (`int 1`) -> `def @1`;
implicit cast `def @1` to `int 2` -> `int 2`;
add `int 1` and `int 2` -> `int 3`;
assign `int 3` to `i`;
+
* Using the static members of a reference type.
+
[source,Painless]
----
<1> int i = Integer.MAX_VALUE;
<2> long l = Long.parseLong("123");
----
+
<1> declare `int i`;
access `MAX_VALUE` on `Integer` -> `int 2147483647`;
assign `int 2147483647` to `i`
<2> declare `long l`;
call `parseLong` on `Long` with arguments (`long 123`) -> `long 123`;
assign `long 123` to `l`

[[dynamic-types]]
==== Dynamic Types

A dynamic type value can represent the value of any primitive type or
reference type using a single type name `def`. A `def` type value mimics
the behavior of whatever value it represents at run-time and will always
represent the child-most descendant type value of any type value when evaluated
during operations.
Declare a `def` type <<painless-variables, variable>>, and assign it
any type of value for evaluation during later operations. The default value
for a newly-declared `def` type variable is `null`. A `def` type variable or
method/function parameter can change the type it represents during the
compilation and evaluation of a script.
Using the `def` type can have a slight impact on performance. Use only primitive
types and reference types directly when performance is critical.
*Errors*
* If a `def` type value represents an inappropriate type for evaluation of an
operation at run-time.
*Examples*
* General uses of the `def` type.
+
[source,Painless]
---- ----
<1> def dp = 1;
<2> def dr = new ArrayList();
<3> dr = dp;
----
+
<1> declare `def dp`;
implicit cast `int 1` to `def` -> `def`;
assign `def` to `dp`
<2> declare `def dr`;
allocate `ArrayList` instance -> `ArrayList reference`;
implicit cast `ArrayList reference` to `def` -> `def`;
assign `def` to `dr`
<3> access `dp` -> `def`;
assign `def` to `dr`;
(note the switch in the type `dr` represents from `ArrayList` to `int`)
+
* A `def` type value representing the child-most descendant of a value.
+
[source,Painless]
----
<1> Object l = new ArrayList();
<2> def d = l;
<3> d.ensureCapacity(10);
----
+
<1> declare `Object l`;
allocate `ArrayList` instance -> `ArrayList reference`;
implicit cast `ArrayList reference` to `Object reference`
-> `Object reference`;
assign `Object reference` to `l`
<2> declare `def d`;
access `l` -> `Object reference`;
implicit cast `Object reference` to `def` -> `def`;
assign `def` to `d`;
<3> access `d` -> `def`;
implicit cast `def` to `ArrayList reference` -> `ArrayList reference`;
call `ensureCapacity` on `ArrayList reference` with arguments (`int 10`);
(note `def` was implicit cast to `ArrayList reference`
since `ArrayList` is the child-most descendant type value that the
`def` type value represents)
[[string-type]] [[string-type]]
==== String Type ==== String Type
A `String` is a specialized reference type that is immutable and does not have The `String` type is a specialized reference type that does not require
to be explicitly allocated. You can directly assign to a `String` without first explicit allocation. Use a <<strings, string literal>> to directly evaluate a
allocating it with the `new` keyword. (Strings can be allocated with the `new` `String` type value. While not required, the
keyword, but it's not required.) <<constructor-call, new instance operator>> can allocate `String` type
instances.
When assigning a value to a `String`, you must enclose the text in single or *Examples*
double quotes. Strings are allocated according to the standard Java Memory Model.
The default value for a `String` is `null`.
*Examples:* * General use of the `String` type.
[source,Java] +
[source,Painless]
---- ----
String r = "some text"; // Declare String r and set it to the <1> String r = "some text";
// String "some text" <2> String s = 'some text';
String s = 'some text'; // Declare String s and set it to the <3> String t = new String("some text");
// String 'some text' <4> String u;
String t = new String("some text"); // Declare String t and set it to the
// String "some text"
String u; // Declare String u and set it to the
// default value null
---- ----
+
<1> declare `String r`;
assign `String "some text"` to `r`
<2> declare `String s`;
assign `String 'some text'` to `s`
<3> declare `String t`;
allocate `String` instance with arguments (`String "some text"`)
-> `String "some text"`;
assign `String "some text"` to `t`
<4> declare `String u`;
assign default `null` to `u`
[[void-type]] [[void-type]]
==== void Type ==== void Type
The `void` type represents the concept of no type. In Painless, `void` declares The `void` type represents the concept of a lack of type. Use the `void` type to
that a function has no return value. indicate a function returns no value.
*Examples*
* Use of the `void` type in a function.
+
[source,Painless]
----
void addToList(List l, def d) {
l.add(d);
}
----
[[array-type]] [[array-type]]
==== Array Type ==== Array Type
Arrays contain a series of elements of the same type that can be allocated An array type is a specialized reference type where an array type instance
simultaneously. Painless supports both single and multi-dimensional arrays for represents a series of values allocated to the heap. All values in an array
all types except void (including `def`). type instance are of the same type. Each value is assigned an index from within
the range `[0, length)` where length is the total number of values allocated for
the array type instance.
You declare an array by specifying a type followed by a series of empty brackets, Use the <<new-array, new array operator>> or the
where each set of brackets represents a dimension. Declared arrays have a default <<array-initialization, array initialization operator>> to allocate an array
value of `null` and are themselves a reference type. type instance. Declare an array type <<painless-variables, variable>>, and
assign it an array type value for evaluation during later operations. The
default value for a newly-declared array type variable is `null`. An array type
value is shallow-copied during an assignment or as an argument for a
method/function call. Assign `null` to an array type variable to indicate the
array type value refers to no array type instance. The JVM will garbage collect
an array type instance when it is no longer referred to by any array type
values. Pass `null` as an argument to a method/function call to indicate the
argument refers to no array type instance.
To allocate an array, you use the `new` keyword followed by the type and a Use the <<array-length, array length operator>> to retrieve the length of an
set of brackets for each dimension. You can explicitly define the size of each dimension by specifying an expression within the brackets, or initialize each array type value as an int type value. Use the
dimension with the desired number of values. The allocated size of each <<array-access, array access operator>> to load from and store to individual
dimension is its permanent size. values within an array type value.
To initialize an array, specify the values you want to initialize When an array type instance is allocated with multiple dimensions using the
each dimension with as a comma-separated list of expressions enclosed in braces. range `[2, d]` where `d >= 2`, each dimension in the range `[1, d-1]` is also
For example, `new int[] {1, 2, 3}` creates a one-dimensional `int` array with a an array type. The array type of each dimension, `n`, is an array type with the
size of 3 and the values 1, 2, and 3. number of dimensions equal to `d-n`. For example, consider `int[][][]` with 3
dimensions. The 3rd dimension, `d-3`, is the primitive type `int`. The 2nd
dimension, `d-2`, is the array type `int[]`. And the 1st dimension, `d-1` is
the array type `int[][]`.
When you initialize an array, the order of the expressions is maintained. Each expression used as part of the initialization is converted to the *Examples*
array's type. An error occurs if the types do not match.
*Grammar:* * General use of single-dimensional arrays.
[source,ANTLR4] +
[source,Painless]
---- ----
declare_array: TYPE ('[' ']')+; <1> int[] x;
<2> float[] y = new float[10];
array_initialization: 'new' TYPE '[' ']' '{' expression (',' expression) '}' <3> def z = new float[5];
| 'new' TYPE '[' ']' '{' '}'; <4> y[9] = 1.0F;
<5> z[0] = y[9];
---- ----
+
*Examples:* <1> declare `int[] x`;
[source,Java] assign default `null` to `x`
<2> declare `float[] y`;
allocate `1-d float array` instance with `length [10]`
-> `1-d float array reference`;
assign `1-d float array reference` to `y`
<3> declare `def z`;
allocate `1-d float array` instance with `length [5]`
-> `1-d float array reference`;
implicit cast `1-d float array reference` to `def` -> `def`;
assign `def` to `z`
<4> access `y` -> `1-d float array reference`;
assign `float 1.0` to `index [9]` of `1-d float array reference`
<5> access `y` -> `1-d float array reference @0`;
access `index [9]` of `1-d float array reference @0` -> `float 1.0`;
access `z` -> `def`;
implicit cast `def` to `1-d float array reference @1`
-> `1-d float array reference @1`;
assign `float 1.0` to `index [0]` of `1-d float array reference @1`
+
* Use of a multi-dimensional array.
+
[source,Painless]
---- ----
int[] x = new int[5]; // Declare int array x and assign it a newly <1> int[][][] ia3 = new int[2][3][4];
// allocated int array with a size of 5 <2> ia3[1][2][3] = 99;
def[][] y = new def[5][5]; // Declare the 2-dimensional def array y and <3> int i = ia3[1][2][3];
// assign it a newly allocated 2-dimensional
// array where both dimensions have a size of 5
int[] x = new int[] {1, 2, 3}; // Declare int array x and set it to an int
// array with values 1, 2, 3 and a size of 3
int i = 1;
long l = 2L;
float f = 3.0F;
double d = 4.0;
String s = "5";
def[] da = new def[] {i, l, f*d, s}; // Declare def array da and set it to
// a def array with a size of 4 and the
// values i, l, f*d, and s
---- ----
+
<1> declare `int[][][] ia3`;
allocate `3-d int array` instance with length `[2, 3, 4]`
-> `3-d int array reference`;
assign `3-d int array reference` to `ia3`
<2> access `ia3` -> `3-d int array reference`;
assign `int 99` to `index [1, 2, 3]` of `3-d int array reference`
<3> declare `int i`;
access `ia3` -> `3-d int array reference`;
access `index [1, 2, 3]` of `3-d int array reference` -> `int 99`;
assign `int 99` to `i`

View File

@ -1,29 +1,31 @@
[[painless-variables]] [[painless-variables]]
=== Variables === Variables
<<declaration, Declare>> variables to <<assignment, assign>> values for A variable loads and stores a value for evaluation during
<<painless-operators, use>> in expressions. Specify variables as a <<painless-operators, operations>>.
<<primitive-types, primitive type>>, <<reference-types, reference type>>, or
<<dynamic-types, dynamic type>>. Variable operations follow the structure of a
standard JVM in relation to instruction execution and memory usage.
[[declaration]] [[declaration]]
==== Declaration ==== Declaration
Declare variables before use with the format of <<painless-types, type>> Declare a variable before use with the format of <<painless-types, type>>
<<painless-identifiers, identifier>>. Specify a comma-separated list of followed by <<painless-identifiers, identifier>>. Declare an
<<painless-identifiers, identifiers>> following the <<painless-types, type>> <<array-type, array type>> variable using an opening `[` token and a closing `]`
to declare multiple variables in a single statement. Use an token for each dimension directly after the identifier. Specify a
<<assignment, assignment>> statement combined with a declaration statement to comma-separated list of identifiers following the type to declare multiple
immediately assign a value to a variable. Variables not immediately assigned a variables in a single statement. Use an <<assignment, assignment operator>>
value will have a default value assigned implicitly based on the combined with a declaration to immediately assign a value to a variable.
<<painless-types, type>>. A variable not immediately assigned a value will have a default value assigned
implicitly based on the type.
*Errors*
* If a variable is used prior to or without declaration.
*Grammar* *Grammar*
[source,ANTLR4] [source,ANTLR4]
---- ----
declaration : type ID assignment? (',' ID assignment?)*; declaration : type ID assignment? (',' ID assignment?)*;
type: ID ('[' ']')*; type: ID ('.' ID)* ('[' ']')*;
assignment: '=' expression; assignment: '=' expression;
---- ----
@ -35,27 +37,43 @@ assignment: '=' expression;
---- ----
<1> int x; <1> int x;
<2> List y; <2> List y;
<3> int x, y, z; <3> int x, y = 5, z;
<4> def[] d; <4> def d;
<5> int i = 10; <5> int i = 10;
<6> float[] f;
<7> Map[][] m;
---- ----
+ +
<1> declare a variable of type `int` and identifier `x` <1> declare `int x`;
<2> declare a variable of type `List` and identifier `y` assign default `null` to `x`
<3> declare three variables of type `int` and identifiers `x`, `y`, `z` <2> declare `List y`;
<4> declare a variable of type `def[]` and identifier `d` assign default `null` to `y`
<5> declare a variable of type `int` and identifier `i`; <3> declare `int x`;
assign the integer literal `10` to `i` assign default `int 0` to `x`;
declare `int y`;
assign `int 5` to `y`;
declare `int z`;
assign default `int 0` to `z`;
<4> declare `def d`;
assign default `null` to `d`
<5> declare `int i`;
assign `int 10` to `i`
<6> declare `float[] f`;
assign default `null` to `f`
<7> declare `Map[][] m`;
assign default `null` to `m`
[[assignment]] [[assignment]]
==== Assignment ==== Assignment
Use the `equals` operator (`=`) to assign a value to a variable. Any expression Use the *assignment operator* to store a value in a variable. Any operation
that produces a value can be assigned to any variable as long as the that produces a value can be assigned to any variable as long as the
<<painless-types, types>> are the same or the resultant <<painless-types, types>> are the same or the resultant type can be
<<painless-types, type>> can be implicitly <<painless-casting, cast>> to <<painless-casting, implicitly cast>> to the variable type.
the variable <<painless-types, type>>. Otherwise, an error will occur.
<<reference-types, Reference type>> values are shallow-copied when assigned. *Errors*
* If the type of a value does not match and cannot be implicitly cast to the type of the variable.
*Grammar* *Grammar*
[source,ANTLR4] [source,ANTLR4]
@ -65,7 +83,7 @@ assignment: ID '=' expression
*Examples* *Examples*
* Variable assignment with an <<integers, integer literal>>. * Variable assignment with an integer literal.
+ +
[source,Painless] [source,Painless]
---- ----
@ -73,10 +91,11 @@ assignment: ID '=' expression
<2> i = 10; <2> i = 10;
---- ----
+ +
<1> declare `int i` <1> declare `int i`;
<2> assign `10` to `i` assign default `int 0` to `i`
<2> assign `int 10` to `i`
+ +
* <<declaration, Declaration>> combined with immediate variable assignment. * Declaration combined with immediate assignment.
+ +
[source,Painless] [source,Painless]
---- ----
@ -84,11 +103,12 @@ assignment: ID '=' expression
<2> double j = 2.0; <2> double j = 2.0;
---- ----
+ +
<1> declare `int i`; assign `10` to `i` <1> declare `int i`;
<2> declare `double j`; assign `2.0` to `j` assign `int 10` to `i`
<2> declare `double j`;
assign `double 2.0` to `j`
+ +
* Assignment of one variable to another using * Assignment of one variable to another using primitive types.
<<primitive-types, primitive types>>.
+ +
[source,Painless] [source,Painless]
---- ----
@ -96,11 +116,13 @@ assignment: ID '=' expression
<2> int j = i; <2> int j = i;
---- ----
+ +
<1> declare `int i`; assign `10` to `i` <1> declare `int i`;
<2> declare `int j`; assign `j` to `i` assign `int 10` to `i`
<2> declare `int j`;
access `i` -> `int 10`;
assign `int 10` to `j`
+ +
* Assignment with <<reference-types, reference types>> using the * Assignment with reference types using the *new instance operator*.
<<constructor-call, new operator>>.
+ +
[source,Painless] [source,Painless]
---- ----
@ -108,12 +130,15 @@ assignment: ID '=' expression
<2> Map m = new HashMap(); <2> Map m = new HashMap();
---- ----
+ +
<1> declare `ArrayList l`; assign a newly-allocated `Arraylist` to `l` <1> declare `ArrayList l`;
<2> declare `Map m`; assign a newly-allocated `HashMap` to `m` allocate `ArrayList` instance -> `ArrayList reference`;
with an implicit cast to `Map` assign `ArrayList reference` to `l`
<2> declare `Map m`;
allocate `HashMap` instance -> `HashMap reference`;
implicit cast `HashMap reference` to `Map reference` -> `Map reference`;
assign `Map reference` to `m`
+ +
* Assignment of one variable to another using * Assignment of one variable to another using reference types.
<<reference-types, reference types>>.
+ +
[source,Painless] [source,Painless]
---- ----
@ -123,8 +148,52 @@ assignment: ID '=' expression
<4> m = k; <4> m = k;
---- ----
+ +
<1> declare `List l`; assign a newly-allocated `Arraylist` to `l` <1> declare `List l`;
with an implicit cast to `List` allocate `ArrayList` instance -> `ArrayList reference`;
<2> declare `List k`; assign a shallow-copy of `l` to `k` implicit cast `ArrayList reference` to `List reference` -> `List reference`;
assign `List reference` to `l`
<2> declare `List k`;
access `l` -> `List reference`;
assign `List reference` to `k`;
(note `l` and `k` refer to the same instance known as a shallow-copy)
<3> declare `List m`; <3> declare `List m`;
<4> assign a shallow-copy of `k` to `m` assign default `null` to `m`
<4> access `k` -> `List reference`;
assign `List reference` to `m`;
(note `l`, `k`, and `m` refer to the same instance)
+
* Assignment with an array type variable using the *new array operator*.
+
[source,Painless]
----
<1> int[] ia1;
<2> ia1 = new int[2];
<3> ia1[0] = 1;
<4> int[] ib1 = ia1;
<5> int[][] ic2 = new int[2][5];
<6> ic2[1][3] = 2;
<7> ic2[0] = ia1;
----
+
<1> declare `int[] ia1`;
assign default `null` to `ia1`
<2> allocate `1-d int array` instance with `length [2]`
-> `1-d int array reference`;
assign `1-d int array reference` to `ia1`
<3> access `ia1` -> `1-d int array reference`;
assign `int 1` to `index [0]` of `1-d int array reference`
<4> declare `int[] ib1`;
access `ia1` -> `1-d int array reference`;
assign `1-d int array reference` to `ib1`;
(note `ia1` and `ib1` refer to the same instance known as a shallow copy)
<5> declare `int[][] ic2`;
allocate `2-d int array` instance with `length [2, 5]`
-> `2-d int array reference`;
assign `2-d int array reference` to `ic2`
<6> access `ic2` -> `2-d int array reference`;
assign `int 2` to `index [1, 3]` of `2-d int array reference`
<7> access `ia1` -> `1-d int array reference`;
access `ic2` -> `2-d int array reference`;
assign `1-d int array reference` to
`index [0]` of `2-d int array reference`;
(note `ia1`, `ib1`, and `index [0]` of `ic2` refer to the same instance)

View File

@ -40,6 +40,8 @@ string:: <<text,`text`>> and <<keyword,`keyword`>>
<<parent-join>>:: Defines parent/child relation for documents within the same index <<parent-join>>:: Defines parent/child relation for documents within the same index
<<feature>>:: Record numeric features to boost hits at query time.
[float] [float]
=== Multi-fields === Multi-fields
@ -86,6 +88,6 @@ include::types/percolator.asciidoc[]
include::types/parent-join.asciidoc[] include::types/parent-join.asciidoc[]
include::types/feature.asciidoc[]

View File

@ -0,0 +1,59 @@
[[feature]]
=== Feature datatype
A `feature` field can index numbers so that they can later be used to boost
documents in queries with a <<query-dsl-feature-query,`feature`>> query.
[source,js]
--------------------------------------------------
PUT my_index
{
"mappings": {
"_doc": {
"properties": {
"pagerank": {
"type": "feature" <1>
},
"url_length": {
"type": "feature",
"positive_score_impact": false <2>
}
}
}
}
}
PUT my_index/_doc/1
{
"pagerank": 8,
"url_length": 22
}
GET my_index/_search
{
"query": {
"feature": {
"field": "pagerank"
}
}
}
--------------------------------------------------
// CONSOLE
<1> Feature fields must use the `feature` field type
<2> Features that correlate negatively with the score need to declare it
NOTE: `feature` fields only support single-valued fields and strictly positive
values. Multi-valued fields and negative values will be rejected.
NOTE: `feature` fields do not support querying, sorting or aggregating. They may
only be used within <<query-dsl-feature-query,`feature`>> queries.
NOTE: `feature` fields only preserve 9 significant bits for the precision, which
translates to a relative error of about 0.4%.
Features that correlate negatively with the score should set
`positive_score_impact` to `false` (defaults to `true`). This will be used by
the <<query-dsl-feature-query,`feature`>> query to modify the scoring formula
in such a way that the score decreases with the value of the feature instead of
increasing. For instance in web search, the url length is a commonly used
feature which correlates negatively with scores.

View File

@ -0,0 +1,181 @@
[[query-dsl-feature-query]]
=== Feature Query
The `feature` query is a specialized query that only works on
<<feature,`feature`>> fields. Its goal is to boost the score of documents based
on the values of numeric features. It is typically put in a `should` clause of
a <<query-dsl-bool-query,`bool`>> query so that its score is added to the score
of the query.
Compared to using <<query-dsl-function-score-query,`function_score`>> or other
ways to modify the score, this query has the benefit of being able to
efficiently skip non-competitive hits when
<<search-uri-request,`track_total_hits`>> is set to `false`. Speedups may be
spectacular.
Here is an example:
[source,js]
--------------------------------------------------
PUT test
{
"mappings": {
"_doc": {
"properties": {
"pagerank": {
"type": "feature"
},
"url_length": {
"type": "feature",
"positive_score_impact": false
}
}
}
}
}
PUT test/_doc/1
{
"pagerank": 10,
"url_length": 50
}
PUT test/_doc/2
{
"pagerank": 100,
"url_length": 20
}
POST test/_refresh
GET test/_search
{
"query": {
"feature": {
"field": "pagerank"
}
}
}
GET test/_search
{
"query": {
"feature": {
"field": "url_length"
}
}
}
--------------------------------------------------
// CONSOLE
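For illustration, the typical setup described above might look like the sketch
below: the `feature` query sits in a `should` clause so its score is added to
the score of the main query, and `track_total_hits` is disabled so
non-competitive hits can be skipped. The `body` field is hypothetical and is
not part of the `test` index created above, so this snippet is not runnable
as-is.

[source,js]
--------------------------------------------------
GET test/_search
{
  "track_total_hits": false,
  "query": {
    "bool": {
      "must": {
        "match": { "body": "elasticsearch" }
      },
      "should": {
        "feature": {
          "field": "pagerank"
        }
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE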
[float]
=== Supported functions
The `feature` query supports 3 functions in order to boost scores using the
values of features. If you do not know where to start, we recommend that you
start with the `saturation` function, which is the default when no function is
provided.
[float]
==== Saturation
This function gives a score that is equal to `S / (S + pivot)` where `S` is the
value of the feature and `pivot` is a configurable pivot value so that the
result will be less than +0.5+ if `S` is less than pivot and greater than +0.5+
otherwise. Scores are always in +(0, 1)+.
If the feature has a negative score impact then the function will be computed as
`pivot / (S + pivot)`, which decreases when `S` increases.
[source,js]
--------------------------------------------------
GET test/_search
{
"query": {
"feature": {
"field": "pagerank",
"saturation": {
"pivot": 8
}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
If +pivot+ is not supplied then Elasticsearch will compute a default value that
will be approximately equal to the geometric mean of all feature values that
exist in the index. We recommend this if you haven't had the opportunity to
train a good pivot value.
[source,js]
--------------------------------------------------
GET test/_search
{
"query": {
"feature": {
"field": "pagerank",
"saturation": {}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
[float]
==== Logarithm
This function gives a score that is equal to `log(scaling_factor + S)` where
`S` is the value of the feature and `scaling_factor` is a configurable scaling
factor. Scores are unbounded.
This function only supports features that have a positive score impact.
[source,js]
--------------------------------------------------
GET test/_search
{
"query": {
"feature": {
"field": "pagerank",
"log": {
"scaling_factor": 4
}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
[float]
==== Sigmoid
This function is an extension of `saturation` which adds a configurable
exponent. Scores are computed as `S^exp^ / (S^exp^ + pivot^exp^)`. Like for the
`saturation` function, `pivot` is the value of `S` that gives a score of +0.5+
and scores are in +(0, 1)+.
`exponent` must be positive, but is typically in +[0.5, 1]+. A good value should
be computed via training. If you don't have the opportunity to do so, we recommend
that you stick to the `saturation` function instead.
[source,js]
--------------------------------------------------
GET test/_search
{
"query": {
"feature": {
"field": "pagerank",
"sigmoid": {
"pivot": 7,
"exponent": 0.6
}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]

View File

@ -93,6 +93,7 @@ to 'shapes'.
* `type` - Index type where the pre-indexed shape is. * `type` - Index type where the pre-indexed shape is.
* `path` - The field specified as path containing the pre-indexed shape. * `path` - The field specified as path containing the pre-indexed shape.
Defaults to 'shape'. Defaults to 'shape'.
* `routing` - The routing of the shape document if required.
The following is an example of using the Filter with a pre-indexed The following is an example of using the Filter with a pre-indexed
shape: shape:

View File

@ -19,6 +19,11 @@ This query allows a script to act as a filter. Also see the
This query finds queries that are stored as documents that match with This query finds queries that are stored as documents that match with
the specified document. the specified document.
<<query-dsl-feature-query,`feature` query>>::
A query that computes scores based on the values of numeric features and is
able to efficiently skip non-competitive hits.
<<query-dsl-wrapper-query,`wrapper` query>>:: <<query-dsl-wrapper-query,`wrapper` query>>::
A query that accepts other queries as json or yaml string. A query that accepts other queries as json or yaml string.
@ -29,4 +34,6 @@ include::script-query.asciidoc[]
include::percolate-query.asciidoc[] include::percolate-query.asciidoc[]
include::feature-query.asciidoc[]
include::wrapper-query.asciidoc[] include::wrapper-query.asciidoc[]

View File

@ -11,13 +11,38 @@ GET /_search
"query" : { "query" : {
"match_all": {} "match_all": {}
}, },
"docvalue_fields" : ["test1", "test2"] "docvalue_fields" : [
{
"field": "my_ip_field", <1>
"format": "use_field_mapping" <2>
},
{
"field": "my_date_field",
"format": "epoch_millis" <3>
}
]
} }
-------------------------------------------------- --------------------------------------------------
// CONSOLE // CONSOLE
<1> the name of the field
<2> the special `use_field_mapping` format tells Elasticsearch to use the format from the mapping
<3> date fields may use a custom format
Doc value fields can work on fields that are not stored. Doc value fields can work on fields that are not stored.
Note that if the fields parameter specifies fields without docvalues it will try to load the value from the fielddata cache Note that if the fields parameter specifies fields without docvalues it will try to load the value from the fielddata cache
causing the terms for that field to be loaded to memory (cached), which will result in more memory consumption. causing the terms for that field to be loaded to memory (cached), which will result in more memory consumption.
[float]
==== Custom formats
While most fields do not support custom formats, some of them do:
- <<date,Date>> fields can take any <<mapping-date-format,date format>>.
- <<number,Numeric>> fields accept a https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat pattern].
All fields support the special `use_field_mapping` format, which tells
Elasticsearch to use the mappings to figure out a default format.
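For example, a short sketch of requesting a custom numeric format (the
`my_price_field` field and the `#.00` pattern here are only illustrative):

[source,js]
--------------------------------------------------
GET /_search
{
  "query" : {
    "match_all": {}
  },
  "docvalue_fields" : [
    {
      "field": "my_price_field",
      "format": "#.00"
    }
  ]
}
--------------------------------------------------
// NOTCONSOLE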
NOTE: The default is currently to return the same output as
<<search-request-script-fields,script fields>>. However it will change in 7.0
to behave as if the `use_field_mapping` format was provided.

View File

@ -242,7 +242,12 @@ POST test/_search
}, },
"inner_hits": { "inner_hits": {
"_source" : false, "_source" : false,
"docvalue_fields" : ["comments.text.keyword"] "docvalue_fields" : [
{
"field": "comments.text.keyword",
"format": "use_field_mapping"
}
]
} }
} }
} }

View File

@ -7,7 +7,7 @@ scattered to all the relevant shards and then all the results are
gathered back. When doing scatter/gather type execution, there are gathered back. When doing scatter/gather type execution, there are
several ways to do that, specifically with search engines. several ways to do that, specifically with search engines.
One of the questions when executing a distributed search is how much One of the questions when executing a distributed search is how many
results to retrieve from each shard. For example, if we have 10 shards, results to retrieve from each shard. For example, if we have 10 shards,
the 1st shard might hold the most relevant results from 0 till 10, with the 1st shard might hold the most relevant results from 0 till 10, with
other shards results ranking below it. For this reason, when executing a other shards results ranking below it. For this reason, when executing a

View File

@ -0,0 +1,248 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* A {@link FieldMapper} that exposes Lucene's {@link FeatureField}.
*/
public class FeatureFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "feature";
public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new FeatureFieldType();
static {
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
FIELD_TYPE.setHasDocValues(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.freeze();
}
}
public static class Builder extends FieldMapper.Builder<Builder, FeatureFieldMapper> {
public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
builder = this;
}
@Override
public FeatureFieldType fieldType() {
return (FeatureFieldType) super.fieldType();
}
public Builder positiveScoreImpact(boolean v) {
fieldType().setPositiveScoreImpact(v);
return builder;
}
@Override
public FeatureFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new FeatureFieldMapper(
name, fieldType, defaultFieldType,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
FeatureFieldMapper.Builder builder = new FeatureFieldMapper.Builder(name);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("positive_score_impact")) {
builder.positiveScoreImpact(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
}
}
return builder;
}
}
public static final class FeatureFieldType extends MappedFieldType {
private boolean positiveScoreImpact = true;
public FeatureFieldType() {
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
protected FeatureFieldType(FeatureFieldType ref) {
super(ref);
this.positiveScoreImpact = ref.positiveScoreImpact;
}
public FeatureFieldType clone() {
return new FeatureFieldType(this);
}
@Override
public boolean equals(Object o) {
if (super.equals(o) == false) {
return false;
}
FeatureFieldType other = (FeatureFieldType) o;
return Objects.equals(positiveScoreImpact, other.positiveScoreImpact);
}
@Override
public int hashCode() {
int h = super.hashCode();
h = 31 * h + Objects.hashCode(positiveScoreImpact);
return h;
}
@Override
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
super.checkCompatibility(other, conflicts);
if (positiveScoreImpact != ((FeatureFieldType) other).positiveScoreImpact()) {
conflicts.add("mapper [" + name() + "] has different [positive_score_impact] values");
}
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
public boolean positiveScoreImpact() {
return positiveScoreImpact;
}
public void setPositiveScoreImpact(boolean positiveScoreImpact) {
checkIfFrozen();
this.positiveScoreImpact = positiveScoreImpact;
}
@Override
public Query existsQuery(QueryShardContext context) {
return new TermQuery(new Term("_feature", name()));
}
@Override
public Query nullValueQuery() {
if (nullValue() == null) {
return null;
}
return termQuery(nullValue(), null);
}
@Override
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder();
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new UnsupportedOperationException("Queries on [feature] fields are not supported");
}
}
private FeatureFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;
}
@Override
protected FeatureFieldMapper clone() {
return (FeatureFieldMapper) super.clone();
}
@Override
public FeatureFieldType fieldType() {
return (FeatureFieldType) super.fieldType();
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
float value;
if (context.externalValueSet()) {
Object v = context.externalValue();
if (v instanceof Number) {
value = ((Number) v).floatValue();
} else {
value = Float.parseFloat(v.toString());
}
} else if (context.parser().currentToken() == Token.VALUE_NULL) {
// skip
return;
} else {
value = context.parser().floatValue();
}
if (context.doc().getByKey(name()) != null) {
throw new IllegalArgumentException("[feature] fields do not support indexing multiple values for the same field [" + name() +
"] in the same document");
}
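// features that correlate negatively with the score are indexed as the inverse
// of the value, so the score decreases as the original value grows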
if (fieldType().positiveScoreImpact() == false) {
value = 1 / value;
}
context.doc().addWithKey(name(), new FeatureField("_feature", name(), value));
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
if (includeDefaults || fieldType().positiveScoreImpact() == false) {
builder.field("positive_score_impact", fieldType().positiveScoreImpact());
}
}
}

View File

@ -0,0 +1,151 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* This meta field only exists because feature fields index everything into a
* common _feature field and Elasticsearch has a custom codec that complains
* when fields exist in the index and not in mappings.
*/
public class FeatureMetaFieldMapper extends MetadataFieldMapper {
public static final String NAME = "_feature";
public static final String CONTENT_TYPE = "_feature";
public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new FeatureMetaFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
public static class Builder extends MetadataFieldMapper.Builder<Builder, FeatureMetaFieldMapper> {
public Builder(MappedFieldType existing) {
super(NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
}
@Override
public FeatureMetaFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new FeatureMetaFieldMapper(fieldType, context.indexSettings());
}
}
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?,?> parse(String name,
Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
return new Builder(parserContext.mapperService().fullName(NAME));
}
@Override
public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) {
final Settings indexSettings = context.mapperService().getIndexSettings().getSettings();
if (fieldType != null) {
return new FeatureMetaFieldMapper(indexSettings, fieldType);
} else {
return parse(NAME, Collections.emptyMap(), context)
.build(new BuilderContext(indexSettings, new ContentPath(1)));
}
}
}
public static final class FeatureMetaFieldType extends MappedFieldType {
public FeatureMetaFieldType() {
}
protected FeatureMetaFieldType(FeatureMetaFieldType ref) {
super(ref);
}
@Override
public FeatureMetaFieldType clone() {
return new FeatureMetaFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on [_feature]");
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new UnsupportedOperationException("The [_feature] field may not be queried directly");
}
}
private FeatureMetaFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing.clone(), indexSettings);
}
private FeatureMetaFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
}
@Override
public void preParse(ParseContext context) throws IOException {}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
throw new AssertionError("Should never be called");
}
@Override
public void postParse(ParseContext context) throws IOException {}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder;
}
}

View File

@ -19,21 +19,37 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser;
import org.elasticsearch.index.query.FeatureQueryBuilder;
import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import java.util.Collections; import java.util.Collections;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
public class MapperExtrasPlugin extends Plugin implements MapperPlugin { public class MapperExtrasPlugin extends Plugin implements MapperPlugin, SearchPlugin {
@Override @Override
public Map<String, Mapper.TypeParser> getMappers() { public Map<String, Mapper.TypeParser> getMappers() {
Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>(); Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();
mappers.put(ScaledFloatFieldMapper.CONTENT_TYPE, new ScaledFloatFieldMapper.TypeParser()); mappers.put(ScaledFloatFieldMapper.CONTENT_TYPE, new ScaledFloatFieldMapper.TypeParser());
mappers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser()); mappers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
mappers.put(FeatureFieldMapper.CONTENT_TYPE, new FeatureFieldMapper.TypeParser());
return Collections.unmodifiableMap(mappers); return Collections.unmodifiableMap(mappers);
} }
@Override
public Map<String, TypeParser> getMetadataMappers() {
return Collections.singletonMap(FeatureMetaFieldMapper.CONTENT_TYPE, new FeatureMetaFieldMapper.TypeParser());
}
@Override
public List<QuerySpec<?>> getQueries() {
return Collections.singletonList(
new QuerySpec<>(FeatureQueryBuilder.NAME, FeatureQueryBuilder::new, p -> FeatureQueryBuilder.PARSER.parse(p, null)));
}
} }

View File

@ -0,0 +1,354 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.FeatureFieldMapper.FeatureFieldType;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
/**
* Query to run on a [feature] field.
*/
public final class FeatureQueryBuilder extends AbstractQueryBuilder<FeatureQueryBuilder> {
/**
* Scoring function for a [feature] field.
*/
public abstract static class ScoreFunction {
private ScoreFunction() {} // prevent extensions by users
abstract void writeTo(StreamOutput out) throws IOException;
abstract Query toQuery(String feature, boolean positiveScoreImpact) throws IOException;
abstract void doXContent(XContentBuilder builder) throws IOException;
/**
* A scoring function that scores documents as {@code Math.log(scalingFactor + S)}
* where S is the value of the static feature.
*/
public static class Log extends ScoreFunction {
private static final ConstructingObjectParser<Log, Void> PARSER = new ConstructingObjectParser<>(
"log", a -> new Log((Float) a[0]));
static {
PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("scaling_factor"));
}
private final float scalingFactor;
public Log(float scalingFactor) {
this.scalingFactor = scalingFactor;
}
private Log(StreamInput in) throws IOException {
this(in.readFloat());
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
Log that = (Log) obj;
return scalingFactor == that.scalingFactor;
}
@Override
public int hashCode() {
return Float.hashCode(scalingFactor);
}
@Override
void writeTo(StreamOutput out) throws IOException {
out.writeByte((byte) 0);
out.writeFloat(scalingFactor);
}
@Override
void doXContent(XContentBuilder builder) throws IOException {
builder.startObject("log");
builder.field("scaling_factor", scalingFactor);
builder.endObject();
}
@Override
Query toQuery(String feature, boolean positiveScoreImpact) throws IOException {
if (positiveScoreImpact == false) {
throw new IllegalArgumentException("Cannot use the [log] function with a field that has a negative score impact as " +
"it would trigger negative scores");
}
return FeatureField.newLogQuery("_feature", feature, DEFAULT_BOOST, scalingFactor);
}
}
/**
* A scoring function that scores documents as {@code S / (S + pivot)} where S is
* the value of the static feature.
*/
public static class Saturation extends ScoreFunction {
private static final ConstructingObjectParser<Saturation, Void> PARSER = new ConstructingObjectParser<>(
"saturation", a -> new Saturation((Float) a[0]));
static {
PARSER.declareFloat(ConstructingObjectParser.optionalConstructorArg(), new ParseField("pivot"));
}
private final Float pivot;
/** Constructor with a default pivot, computed as the geometric average of
* all feature values in the index. */
public Saturation() {
this((Float) null);
}
public Saturation(float pivot) {
this(Float.valueOf(pivot));
}
private Saturation(Float pivot) {
this.pivot = pivot;
}
private Saturation(StreamInput in) throws IOException {
this(in.readOptionalFloat());
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
Saturation that = (Saturation) obj;
return Objects.equals(pivot, that.pivot);
}
@Override
public int hashCode() {
return Objects.hashCode(pivot);
}
@Override
void writeTo(StreamOutput out) throws IOException {
out.writeByte((byte) 1);
out.writeOptionalFloat(pivot);
}
@Override
void doXContent(XContentBuilder builder) throws IOException {
builder.startObject("saturation");
if (pivot != null) {
builder.field("pivot", pivot);
}
builder.endObject();
}
@Override
Query toQuery(String feature, boolean positiveScoreImpact) throws IOException {
if (pivot == null) {
return FeatureField.newSaturationQuery("_feature", feature);
} else {
return FeatureField.newSaturationQuery("_feature", feature, DEFAULT_BOOST, pivot);
}
}
}
/**
* A scoring function that scores documents as {@code S^exp / (S^exp + pivot^exp)}
* where S is the value of the static feature.
*/
public static class Sigmoid extends ScoreFunction {
private static final ConstructingObjectParser<Sigmoid, Void> PARSER = new ConstructingObjectParser<>(
"sigmoid", a -> new Sigmoid((Float) a[0], ((Float) a[1]).floatValue()));
static {
PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("pivot"));
PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("exponent"));
}
private final float pivot;
private final float exp;
public Sigmoid(float pivot, float exp) {
this.pivot = pivot;
this.exp = exp;
}
private Sigmoid(StreamInput in) throws IOException {
this(in.readFloat(), in.readFloat());
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
Sigmoid that = (Sigmoid) obj;
return pivot == that.pivot
&& exp == that.exp;
}
@Override
public int hashCode() {
return Objects.hash(pivot, exp);
}
@Override
void writeTo(StreamOutput out) throws IOException {
out.writeByte((byte) 2);
out.writeFloat(pivot);
out.writeFloat(exp);
}
@Override
void doXContent(XContentBuilder builder) throws IOException {
builder.startObject("sigmoid");
builder.field("pivot", pivot);
builder.field("exponent", exp);
builder.endObject();
}
@Override
Query toQuery(String feature, boolean positiveScoreImpact) throws IOException {
return FeatureField.newSigmoidQuery("_feature", feature, DEFAULT_BOOST, pivot, exp);
}
}
}
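// each concrete ScoreFunction writes a single identifying byte in writeTo
// (0 = log, 1 = saturation, 2 = sigmoid); read it back here to rebuild the right subclass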
private static ScoreFunction readScoreFunction(StreamInput in) throws IOException {
byte b = in.readByte();
switch (b) {
case 0:
return new ScoreFunction.Log(in);
case 1:
return new ScoreFunction.Saturation(in);
case 2:
return new ScoreFunction.Sigmoid(in);
default:
throw new IOException("Illegal score function id: " + b);
}
}
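// accepts [field], [boost] and [_name] plus at most one of [log], [saturation] and [sigmoid];
// when no function is provided, saturation with a default pivot is used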
public static ConstructingObjectParser<FeatureQueryBuilder, Void> PARSER = new ConstructingObjectParser<>(
"feature", args -> {
final String field = (String) args[0];
final float boost = args[1] == null ? DEFAULT_BOOST : (Float) args[1];
final String queryName = (String) args[2];
long numNonNulls = Arrays.stream(args, 3, args.length).filter(Objects::nonNull).count();
final FeatureQueryBuilder query;
if (numNonNulls > 1) {
throw new IllegalArgumentException("Can only specify one of [log], [saturation] and [sigmoid]");
} else if (numNonNulls == 0) {
query = new FeatureQueryBuilder(field, new ScoreFunction.Saturation());
} else {
ScoreFunction scoreFunction = (ScoreFunction) Arrays.stream(args, 3, args.length)
.filter(Objects::nonNull)
.findAny()
.get();
query = new FeatureQueryBuilder(field, scoreFunction);
}
query.boost(boost);
query.queryName(queryName);
return query;
});
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field"));
PARSER.declareFloat(ConstructingObjectParser.optionalConstructorArg(), BOOST_FIELD);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NAME_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
ScoreFunction.Log.PARSER, new ParseField("log"));
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
ScoreFunction.Saturation.PARSER, new ParseField("saturation"));
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
ScoreFunction.Sigmoid.PARSER, new ParseField("sigmoid"));
}
public static final String NAME = "feature";
private final String field;
private final ScoreFunction scoreFunction;
public FeatureQueryBuilder(String field, ScoreFunction scoreFunction) {
this.field = Objects.requireNonNull(field);
this.scoreFunction = Objects.requireNonNull(scoreFunction);
}
public FeatureQueryBuilder(StreamInput in) throws IOException {
super(in);
this.field = in.readString();
this.scoreFunction = readScoreFunction(in);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(field);
scoreFunction.writeTo(out);
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getName());
builder.field("field", field);
scoreFunction.doXContent(builder);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
final MappedFieldType ft = context.fieldMapper(field);
if (ft == null) {
return new MatchNoDocsQuery();
}
if (ft instanceof FeatureFieldType == false) {
throw new IllegalArgumentException("[feature] query only works on [feature] fields, not [" + ft.typeName() + "]");
}
final FeatureFieldType fft = (FeatureFieldType) ft;
return scoreFunction.toQuery(field, fft.positiveScoreImpact());
}
@Override
protected boolean doEquals(FeatureQueryBuilder other) {
return Objects.equals(field, other.field) && Objects.equals(scoreFunction, other.scoreFunction);
}
@Override
protected int doHashCode() {
return Objects.hash(field, scoreFunction);
}
}

View File

@ -0,0 +1,173 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute;
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.hamcrest.Matchers;
import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
public class FeatureFieldMapperTests extends ESSingleNodeTestCase {
IndexService indexService;
DocumentMapperParser parser;
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
}
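// Lucene's FeatureField encodes the feature value into the term frequency of its single
// indexed token, so comparing frequencies compares the indexed feature values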
private static int getFrequency(TokenStream tk) throws IOException {
TermFrequencyAttribute freqAttribute = tk.addAttribute(TermFrequencyAttribute.class);
tk.reset();
assertTrue(tk.incrementToken());
int freq = freqAttribute.getTermFrequency();
assertFalse(tk.incrementToken());
return freq;
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "feature").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 10)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
assertEquals(1, fields.length);
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
FeatureField featureField1 = (FeatureField) fields[0];
ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 12)
.endObject()),
XContentType.JSON));
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];
int freq1 = getFrequency(featureField1.tokenStream(null, null));
int freq2 = getFrequency(featureField2.tokenStream(null, null));
assertTrue(freq1 < freq2);
}
public void testNegativeScoreImpact() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "feature")
.field("positive_score_impact", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 10)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
assertEquals(1, fields.length);
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
FeatureField featureField1 = (FeatureField) fields[0];
ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 12)
.endObject()),
XContentType.JSON));
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];
int freq1 = getFrequency(featureField1.tokenStream(null, null));
int freq2 = getFrequency(featureField2.tokenStream(null, null));
assertTrue(freq1 > freq2);
}
public void testRejectMultiValuedFields() throws MapperParsingException, IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "feature").endObject().startObject("foo")
.startObject("properties").startObject("field").field("type", "feature").endObject().endObject()
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
MapperParsingException e = null;/*expectThrows(MapperParsingException.class,
() -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", Arrays.asList(10, 20))
.endObject()),
XContentType.JSON)));
assertEquals("[feature] fields do not support indexing multiple values for the same field [field] in the same document",
e.getCause().getMessage());*/
e = expectThrows(MapperParsingException.class,
() -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("foo")
.startObject()
.field("field", 10)
.endObject()
.startObject()
.field("field", 20)
.endObject()
.endArray()
.endObject()),
XContentType.JSON)));
assertEquals("[feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.junit.Before;
public class FeatureFieldTypeTests extends FieldTypeTestCase {
@Override
protected MappedFieldType createDefaultFieldType() {
return new FeatureFieldMapper.FeatureFieldType();
}
@Before
public void setupProperties() {
addModifier(new Modifier("positive_score_impact", false) {
@Override
public void modify(MappedFieldType ft) {
FeatureFieldMapper.FeatureFieldType tft = (FeatureFieldMapper.FeatureFieldType)ft;
tft.setPositiveScoreImpact(tft.positiveScoreImpact() == false);
}
@Override
public void normalizeOther(MappedFieldType other) {
super.normalizeOther(other);
((FeatureFieldMapper.FeatureFieldType) other).setPositiveScoreImpact(true);
}
});
}
}

View File

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.util.Collection;
public class FeatureMetaFieldMapperTests extends ESSingleNodeTestCase {
IndexService indexService;
DocumentMapperParser parser;
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
}
public void testBasics() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "feature").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
assertNotNull(mapper.metadataMapper(FeatureMetaFieldMapper.class));
}
}

View File

@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
public class FeatureMetaFieldTypeTests extends FieldTypeTestCase {
@Override
protected MappedFieldType createDefaultFieldType() {
return new FeatureMetaFieldMapper.FeatureMetaFieldType();
}
}

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.mapper.MapperExtrasPlugin;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.FeatureQueryBuilder.ScoreFunction;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.either;
public class FeatureQueryBuilderTests extends AbstractQueryTestCase<FeatureQueryBuilder> {
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
for (String type : getCurrentTypes()) {
mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type,
"my_feature_field", "type=feature",
"my_negative_feature_field", "type=feature,positive_score_impact=false"))), MapperService.MergeReason.MAPPING_UPDATE);
}
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singleton(MapperExtrasPlugin.class);
}
@Override
protected FeatureQueryBuilder doCreateTestQueryBuilder() {
ScoreFunction function;
switch (random().nextInt(3)) {
case 0:
function = new ScoreFunction.Log(1 + randomFloat());
break;
case 1:
if (randomBoolean()) {
function = new ScoreFunction.Saturation();
} else {
function = new ScoreFunction.Saturation(randomFloat());
}
break;
case 2:
function = new ScoreFunction.Sigmoid(randomFloat(), randomFloat());
break;
default:
throw new AssertionError();
}
return new FeatureQueryBuilder("my_feature_field", function);
}
@Override
protected void doAssertLuceneQuery(FeatureQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
Class<?> expectedClass = FeatureField.newSaturationQuery("", "", 1, 1).getClass();
assertThat(query, either(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(expectedClass)));
}
@Override
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/30605")
public void testUnknownField() {
super.testUnknownField();
}
public void testDefaultScoreFunction() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"feature\" : {\n" +
" \"field\": \"my_feature_field\"\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertEquals(FeatureField.newSaturationQuery("_feature", "my_feature_field"), parsedQuery);
}
public void testIllegalField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"feature\" : {\n" +
" \"field\": \"" + STRING_FIELD_NAME + "\"\n" +
" }\n" +
"}";
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(query).toQuery(createShardContext()));
assertEquals("[feature] query only works on [feature] fields, not [text]", e.getMessage());
}
public void testIllegalCombination() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"feature\" : {\n" +
" \"field\": \"my_negative_feature_field\",\n" +
" \"log\" : {\n" +
" \"scaling_factor\": 4.5\n" +
" }\n" +
" }\n" +
"}";
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(query).toQuery(createShardContext()));
assertEquals(
"Cannot use the [log] function with a field that has a negative score impact as it would trigger negative scores",
e.getMessage());
}
}

View File

@ -0,0 +1,160 @@
setup:
- skip:
version: " - 6.99.99"
reason: "The feature field/query was introduced in 7.0.0"
- do:
indices.create:
index: test
body:
settings:
number_of_replicas: 0
mappings:
_doc:
properties:
pagerank:
type: feature
url_length:
type: feature
positive_score_impact: false
- do:
index:
index: test
type: _doc
id: 1
body:
pagerank: 10
url_length: 50
- do:
index:
index: test
type: _doc
id: 2
body:
pagerank: 100
url_length: 20
- do:
indices.refresh: {}
---
"Positive log":
- do:
search:
body:
query:
feature:
field: pagerank
log:
scaling_factor: 3
- match:
hits.total: 2
- match:
hits.hits.0._id: "2"
- match:
hits.hits.1._id: "1"
---
"Positive saturation":
- do:
search:
body:
query:
feature:
field: pagerank
saturation:
pivot: 20
- match:
hits.total: 2
- match:
hits.hits.0._id: "2"
- match:
hits.hits.1._id: "1"
---
"Positive sigmoid":
- do:
search:
body:
query:
feature:
field: pagerank
sigmoid:
pivot: 20
exponent: 0.6
- match:
hits.total: 2
- match:
hits.hits.0._id: "2"
- match:
hits.hits.1._id: "1"
---
"Negative log":
- do:
catch: bad_request
search:
body:
query:
feature:
field: url_length
log:
scaling_factor: 3
---
"Negative saturation":
- do:
search:
body:
query:
feature:
field: url_length
saturation:
pivot: 20
- match:
hits.total: 2
- match:
hits.hits.0._id: "2"
- match:
hits.hits.1._id: "1"
---
"Negative sigmoid":
- do:
search:
body:
query:
feature:
field: url_length
sigmoid:
pivot: 20
exponent: 0.6
- match:
hits.total: 2
- match:
hits.hits.0._id: "2"
- match:
hits.hits.1._id: "1"

View File

@ -56,7 +56,7 @@ public class Netty4HttpPipeliningHandler extends ChannelDuplexHandler {
@Override @Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) { public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
if (msg instanceof LastHttpContent) { if (msg instanceof LastHttpContent) {
HttpPipelinedRequest<LastHttpContent> pipelinedRequest = aggregator.read(((LastHttpContent) msg).retain()); HttpPipelinedRequest<LastHttpContent> pipelinedRequest = aggregator.read(((LastHttpContent) msg));
ctx.fireChannelRead(pipelinedRequest); ctx.fireChannelRead(pipelinedRequest);
} else { } else {
ctx.fireChannelRead(msg); ctx.fireChannelRead(msg);

View File

@ -56,7 +56,7 @@ public class NioHttpPipeliningHandler extends ChannelDuplexHandler {
@Override @Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) { public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
if (msg instanceof LastHttpContent) { if (msg instanceof LastHttpContent) {
HttpPipelinedRequest<LastHttpContent> pipelinedRequest = aggregator.read(((LastHttpContent) msg).retain()); HttpPipelinedRequest<LastHttpContent> pipelinedRequest = aggregator.read(((LastHttpContent) msg));
ctx.fireChannelRead(pipelinedRequest); ctx.fireChannelRead(pipelinedRequest);
} else { } else {
ctx.fireChannelRead(msg); ctx.fireChannelRead(msg);

View File

@ -29,9 +29,9 @@ plugins {
dependencies { dependencies {
compile "junit:junit:${versions.junit}" compile "junit:junit:${versions.junit}"
compile "org.hamcrest:hamcrest-core:${versions.hamcrest}" compile "org.hamcrest:hamcrest-core:${versions.hamcrest}"
compile "org.hamcrest:hamcrest-library:${versions.hamcrest}"
// needs to be on the classpath for JarHell compile project(':libs:elasticsearch-core')
testRuntime project(':libs:elasticsearch-core')
// pulls in the jar built by this project and its dependencies // pulls in the jar built by this project and its dependencies
packagingTest project(path: project.path, configuration: 'runtime') packagingTest project(path: project.path, configuration: 'runtime')

View File

@ -19,13 +19,20 @@
package org.elasticsearch.packaging; package org.elasticsearch.packaging;
import org.junit.Test; import org.elasticsearch.packaging.test.OssTarTests;
import org.elasticsearch.packaging.test.OssZipTests;
import org.elasticsearch.packaging.test.DefaultTarTests;
import org.elasticsearch.packaging.test.DefaultZipTests;
/** import org.junit.runner.RunWith;
* This class doesn't have any tests yet import org.junit.runners.Suite;
*/ import org.junit.runners.Suite.SuiteClasses;
public class PackagingTests {
@Test @RunWith(Suite.class)
public void testDummy() {} @SuiteClasses({
} DefaultTarTests.class,
DefaultZipTests.class,
OssTarTests.class,
OssZipTests.class
})
public class PackagingTests {}

View File

@ -0,0 +1,40 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging;
import org.junit.runner.JUnitCore;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
* Ensures that the current JVM is running on a virtual machine before delegating to {@link JUnitCore}. We just check for the existence
* of a special file that we create during VM provisioning.
*/
public class VMTestRunner {
public static void main(String[] args) {
if (Files.exists(Paths.get("/is_vagrant_vm"))) {
JUnitCore.main(args);
} else {
throw new RuntimeException("This filesystem does not have an expected marker file indicating it's a virtual machine. These " +
"tests should only run in a virtual machine because they're destructive.");
}
}
}
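
For illustration, a minimal sketch of how this runner could be driven from Java inside the VM; the sketch class name is made up and the test class is one of the suites added elsewhere in this change:

--------------------------------------------
package org.elasticsearch.packaging;

/**
 * Sketch only: delegates a single test class to JUnit through VMTestRunner.main.
 * Outside a provisioned VM (no /is_vagrant_vm marker file) this throws a RuntimeException.
 */
class VMTestRunnerUsageSketch {
    public static void main(String[] args) {
        VMTestRunner.main(new String[] { "org.elasticsearch.packaging.test.DefaultTarTests" });
    }
}
--------------------------------------------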

View File

@ -0,0 +1,65 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.test;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.Installation;
import static org.elasticsearch.packaging.util.Cleanup.cleanEverything;
import static org.elasticsearch.packaging.util.Archives.installArchive;
import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assume.assumeThat;
/**
* Tests that apply to the archive distributions (tar, zip). To add a case for a distribution, subclass and
* override {@link ArchiveTestCase#distribution()}. These tests should be the same across all archive distributions
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public abstract class ArchiveTestCase {
private static Installation installation;
/** The {@link Distribution} that should be tested in this case */
protected abstract Distribution distribution();
@BeforeClass
public static void cleanup() {
installation = null;
cleanEverything();
}
@Before
public void onlyCompatibleDistributions() {
assumeThat(distribution().packaging.compatible, is(true));
}
@Test
public void test10Install() {
installation = installArchive(distribution());
verifyArchiveInstallation(installation, distribution());
}
}

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.test;
import org.elasticsearch.packaging.util.Distribution;
public class DefaultTarTests extends ArchiveTestCase {
@Override
protected Distribution distribution() {
return Distribution.DEFAULT_TAR;
}
}

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.test;
import org.elasticsearch.packaging.util.Distribution;
public class DefaultZipTests extends ArchiveTestCase {
@Override
protected Distribution distribution() {
return Distribution.DEFAULT_ZIP;
}
}

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.test;
import org.elasticsearch.packaging.util.Distribution;
public class OssTarTests extends ArchiveTestCase {
@Override
protected Distribution distribution() {
return Distribution.OSS_TAR;
}
}

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.test;
import org.elasticsearch.packaging.util.Distribution;
public class OssZipTests extends ArchiveTestCase {
@Override
protected Distribution distribution() {
return Distribution.OSS_ZIP;
}
}

View File

@ -0,0 +1,239 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Stream;
import static org.elasticsearch.packaging.util.FileMatcher.Fileness.Directory;
import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File;
import static org.elasticsearch.packaging.util.FileMatcher.file;
import static org.elasticsearch.packaging.util.FileMatcher.p644;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.elasticsearch.packaging.util.FileMatcher.p755;
import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion;
import static org.elasticsearch.packaging.util.FileUtils.getDefaultArchiveInstallPath;
import static org.elasticsearch.packaging.util.FileUtils.getPackagingArchivesDir;
import static org.elasticsearch.packaging.util.FileUtils.lsGlob;
import static org.elasticsearch.packaging.util.FileUtils.mv;
import static org.elasticsearch.packaging.util.Platforms.isDPKG;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.collection.IsEmptyCollection.empty;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
/**
* Installation and verification logic for archive distributions
*/
public class Archives {
public static Installation installArchive(Distribution distribution) {
return installArchive(distribution, getDefaultArchiveInstallPath(), getCurrentVersion());
}
public static Installation installArchive(Distribution distribution, Path fullInstallPath, String version) {
final Shell sh = new Shell();
final Path distributionFile = getPackagingArchivesDir().resolve(distribution.filename(version));
final Path baseInstallPath = fullInstallPath.getParent();
final Path extractedPath = baseInstallPath.resolve("elasticsearch-" + version);
assertThat("distribution file must exist", Files.exists(distributionFile), is(true));
assertThat("elasticsearch must not already be installed", lsGlob(baseInstallPath, "elasticsearch*"), empty());
if (distribution.packaging == Distribution.Packaging.TAR) {
if (Platforms.LINUX) {
sh.run("tar", "-C", baseInstallPath.toString(), "-xzpf", distributionFile.toString());
} else {
throw new RuntimeException("Distribution " + distribution + " is not supported on windows");
}
} else if (distribution.packaging == Distribution.Packaging.ZIP) {
if (Platforms.LINUX) {
sh.run("unzip", distributionFile.toString(), "-d", baseInstallPath.toString());
} else {
sh.run("powershell.exe", "-Command",
"Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; " +
"[IO.Compression.ZipFile]::ExtractToDirectory('" + distributionFile + "', '" + baseInstallPath + "')");
}
} else {
throw new RuntimeException("Distribution " + distribution + " is not a known archive type");
}
assertThat("archive was extracted", Files.exists(extractedPath), is(true));
mv(extractedPath, fullInstallPath);
assertThat("extracted archive moved to install location", Files.exists(fullInstallPath));
final List<Path> installations = lsGlob(baseInstallPath, "elasticsearch*");
assertThat("only the intended installation exists", installations, hasSize(1));
assertThat("only the intended installation exists", installations.get(0), is(fullInstallPath));
if (Platforms.LINUX) {
setupArchiveUsersLinux(fullInstallPath);
}
return new Installation(fullInstallPath);
}
private static void setupArchiveUsersLinux(Path installPath) {
final Shell sh = new Shell();
if (sh.runIgnoreExitCode("getent", "group", "elasticsearch").isSuccess() == false) {
if (isDPKG()) {
sh.run("addgroup", "--system", "elasticsearch");
} else {
sh.run("groupadd", "-r", "elasticsearch");
}
}
if (sh.runIgnoreExitCode("id", "elasticsearch").isSuccess() == false) {
if (isDPKG()) {
sh.run("adduser",
"--quiet",
"--system",
"--no-create-home",
"--ingroup", "elasticsearch",
"--disabled-password",
"--shell", "/bin/false",
"elasticsearch");
} else {
sh.run("useradd",
"--system",
"-M",
"--gid", "elasticsearch",
"--shell", "/sbin/nologin",
"--comment", "elasticsearch user",
"elasticsearch");
}
}
sh.run("chown", "-R", "elasticsearch:elasticsearch", installPath.toString());
}
public static void verifyArchiveInstallation(Installation installation, Distribution distribution) {
// on Windows for now we leave the installation owned by the vagrant user that the tests run as. Since the vagrant account
// is a local administrator, the files really end up being owned by the local administrators group. In the future we'll
// install and run elasticsearch with a role user on Windows
final String owner = Platforms.WINDOWS
? "BUILTIN\\Administrators"
: "elasticsearch";
verifyOssInstallation(installation, distribution, owner);
if (distribution.flavor == Distribution.Flavor.DEFAULT) {
verifyDefaultInstallation(installation, distribution, owner);
}
}
private static void verifyOssInstallation(Installation es, Distribution distribution, String owner) {
Stream.of(
es.home,
es.config,
es.plugins,
es.modules,
es.logs
).forEach(dir -> assertThat(dir, file(Directory, owner, owner, p755)));
assertThat(Files.exists(es.data), is(false));
assertThat(Files.exists(es.scripts), is(false));
assertThat(es.home.resolve("bin"), file(Directory, owner, owner, p755));
assertThat(es.home.resolve("lib"), file(Directory, owner, owner, p755));
assertThat(Files.exists(es.config.resolve("elasticsearch.keystore")), is(false));
Stream.of(
"bin/elasticsearch",
"bin/elasticsearch-env",
"bin/elasticsearch-keystore",
"bin/elasticsearch-plugin",
"bin/elasticsearch-translog"
).forEach(executable -> {
assertThat(es.home.resolve(executable), file(File, owner, owner, p755));
if (distribution.packaging == Distribution.Packaging.ZIP) {
assertThat(es.home.resolve(executable + ".bat"), file(File, owner));
}
});
if (distribution.packaging == Distribution.Packaging.ZIP) {
Stream.of(
"bin/elasticsearch-service.bat",
"bin/elasticsearch-service-mgr.exe",
"bin/elasticsearch-service-x64.exe"
).forEach(executable -> assertThat(es.home.resolve(executable), file(File, owner)));
}
Stream.of(
"elasticsearch.yml",
"jvm.options",
"log4j2.properties"
).forEach(config -> assertThat(es.config.resolve(config), file(File, owner, owner, p660)));
Stream.of(
"NOTICE.txt",
"LICENSE.txt",
"README.textile"
).forEach(doc -> assertThat(es.home.resolve(doc), file(File, owner, owner, p644)));
}
private static void verifyDefaultInstallation(Installation es, Distribution distribution, String owner) {
Stream.of(
"bin/elasticsearch-certgen",
"bin/elasticsearch-certutil",
"bin/elasticsearch-croneval",
"bin/elasticsearch-migrate",
"bin/elasticsearch-saml-metadata",
"bin/elasticsearch-setup-passwords",
"bin/elasticsearch-sql-cli",
"bin/elasticsearch-syskeygen",
"bin/elasticsearch-users",
"bin/x-pack-env",
"bin/x-pack-security-env",
"bin/x-pack-watcher-env"
).forEach(executable -> {
assertThat(es.home.resolve(executable), file(File, owner, owner, p755));
if (distribution.packaging == Distribution.Packaging.ZIP) {
assertThat(es.home.resolve(executable + ".bat"), file(File, owner));
}
});
// at this time we only install the current version of archive distributions, but if that changes we'll need to pass
// the version through here
assertThat(es.home.resolve("bin/elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, owner, owner, p755));
Stream.of(
"users",
"users_roles",
"roles.yml",
"role_mapping.yml",
"log4j2.properties"
).forEach(config -> assertThat(es.config.resolve(config), file(File, owner, owner, p660)));
}
}

View File

@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.packaging.util.FileUtils.getTempDir;
import static org.elasticsearch.packaging.util.FileUtils.lsGlob;
import static org.elasticsearch.packaging.util.Platforms.isAptGet;
import static org.elasticsearch.packaging.util.Platforms.isDPKG;
import static org.elasticsearch.packaging.util.Platforms.isRPM;
import static org.elasticsearch.packaging.util.Platforms.isSystemd;
import static org.elasticsearch.packaging.util.Platforms.isYUM;
public class Cleanup {
private static final List<String> ELASTICSEARCH_FILES_LINUX = Arrays.asList(
"/usr/share/elasticsearch",
"/etc/elasticsearch",
"/var/lib/elasticsearch",
"/var/log/elasticsearch",
"/etc/default/elasticsearch",
"/etc/sysconfig/elasticsearch",
"/var/run/elasticsearch",
"/usr/share/doc/elasticsearch",
"/usr/lib/systemd/system/elasticsearch.conf",
"/usr/lib/tmpfiles.d/elasticsearch.conf",
"/usr/lib/sysctl.d/elasticsearch.conf"
);
// todo
private static final List<String> ELASTICSEARCH_FILES_WINDOWS = Collections.emptyList();
public static void cleanEverything() {
final Shell sh = new Shell();
// kill elasticsearch processes
if (Platforms.WINDOWS) {
// the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here
sh.runIgnoreExitCode("powershell.exe", "-Command",
"Get-WmiObject Win32_Process | " +
"Where-Object { $_.CommandLine -Match 'org.elasticsearch.bootstrap.Elasticsearch' } | " +
"ForEach-Object { $_.Terminate() }");
} else {
sh.runIgnoreExitCode("pkill", "-u", "elasticsearch");
sh.runIgnoreExitCode("bash", "-c",
"ps aux | grep -i 'org.elasticsearch.bootstrap.Elasticsearch' | awk {'print $2'} | xargs kill -9");
}
if (Platforms.LINUX) {
purgePackagesLinux();
}
// remove elasticsearch users
if (Platforms.LINUX) {
sh.runIgnoreExitCode("userdel", "elasticsearch");
sh.runIgnoreExitCode("groupdel", "elasticsearch");
}
// delete files that may still exist
lsGlob(getTempDir(), "elasticsearch*").forEach(FileUtils::rm);
final List<String> filesToDelete = Platforms.WINDOWS
? ELASTICSEARCH_FILES_WINDOWS
: ELASTICSEARCH_FILES_LINUX;
filesToDelete.stream()
.map(Paths::get)
.filter(Files::exists)
.forEach(FileUtils::rm);
// disable elasticsearch service
// todo add this for windows when adding tests for service installation
if (Platforms.LINUX && isSystemd()) {
sh.run("systemctl", "unmask", "systemd-sysctl.service");
}
}
private static void purgePackagesLinux() {
final Shell sh = new Shell();
if (isRPM()) {
sh.runIgnoreExitCode("rpm", "--quiet", "-e", "elasticsearch", "elasticsearch-oss");
}
if (isYUM()) {
sh.runIgnoreExitCode("yum", "remove", "-y", "elasticsearch", "elasticsearch-oss");
}
if (isDPKG()) {
sh.runIgnoreExitCode("dpkg", "--purge", "elasticsearch", "elasticsearch-oss");
}
if (isAptGet()) {
sh.runIgnoreExitCode("apt-get", "--quiet", "--yes", "purge", "elasticsearch", "elasticsearch-oss");
}
}
}

View File

@ -0,0 +1,76 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
public enum Distribution {
OSS_TAR(Packaging.TAR, Flavor.OSS),
OSS_ZIP(Packaging.ZIP, Flavor.OSS),
OSS_DEB(Packaging.DEB, Flavor.OSS),
OSS_RPM(Packaging.RPM, Flavor.OSS),
DEFAULT_TAR(Packaging.TAR, Flavor.DEFAULT),
DEFAULT_ZIP(Packaging.ZIP, Flavor.DEFAULT),
DEFAULT_DEB(Packaging.DEB, Flavor.DEFAULT),
DEFAULT_RPM(Packaging.RPM, Flavor.DEFAULT);
public final Packaging packaging;
public final Flavor flavor;
Distribution(Packaging packaging, Flavor flavor) {
this.packaging = packaging;
this.flavor = flavor;
}
public String filename(String version) {
return flavor.name + "-" + version + packaging.extension;
}
public enum Packaging {
TAR(".tar.gz", Platforms.LINUX),
ZIP(".zip", true),
DEB(".deb", Platforms.isDPKG()),
RPM(".rpm", Platforms.isRPM());
/** The extension of this distribution's file */
public final String extension;
/** Whether the distribution is intended for use on the platform the current JVM is running on */
public final boolean compatible;
Packaging(String extension, boolean compatible) {
this.extension = extension;
this.compatible = compatible;
}
}
public enum Flavor {
OSS("elasticsearch-oss"),
DEFAULT("elasticsearch");
public final String name;
Flavor(String name) {
this.name = name;
}
}
}
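
As a quick illustration of how filename() resolves archive names (the sketch class and the version string are just example values, not taken from the build):

--------------------------------------------
import org.elasticsearch.packaging.util.Distribution;

// Sketch only: filename() is flavor name + "-" + version + packaging extension.
class DistributionFilenameSketch {
    public static void main(String[] args) {
        String version = "7.0.0"; // example version
        System.out.println(Distribution.OSS_TAR.filename(version));     // elasticsearch-oss-7.0.0.tar.gz
        System.out.println(Distribution.DEFAULT_ZIP.filename(version)); // elasticsearch-7.0.0.zip
    }
}
--------------------------------------------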

View File

@ -0,0 +1,137 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.PosixFileAttributes;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.packaging.util.FileUtils.getBasicFileAttributes;
import static org.elasticsearch.packaging.util.FileUtils.getFileOwner;
import static org.elasticsearch.packaging.util.FileUtils.getPosixFileAttributes;
import static java.nio.file.attribute.PosixFilePermissions.fromString;
/**
* Asserts that a file at a path matches its status as Directory/File, and its owner. If on a posix system, also checks that the
* permission set is what we expect.
*
* This class saves information about its failed matches in instance variables and so instances should not be reused
*/
public class FileMatcher extends TypeSafeMatcher<Path> {
public enum Fileness { File, Directory }
public static final Set<PosixFilePermission> p755 = fromString("rwxr-xr-x");
public static final Set<PosixFilePermission> p660 = fromString("rw-rw----");
public static final Set<PosixFilePermission> p644 = fromString("rw-r--r--");
private final Fileness fileness;
private final String owner;
private final String group;
private final Set<PosixFilePermission> posixPermissions;
private String mismatch;
public FileMatcher(Fileness fileness, String owner, String group, Set<PosixFilePermission> posixPermissions) {
this.fileness = Objects.requireNonNull(fileness);
this.owner = Objects.requireNonNull(owner);
this.group = group;
this.posixPermissions = posixPermissions;
}
@Override
protected boolean matchesSafely(Path path) {
if (Files.exists(path) == false) {
mismatch = "Does not exist";
return false;
}
if (Platforms.WINDOWS) {
final BasicFileAttributes attributes = getBasicFileAttributes(path);
final String attributeViewOwner = getFileOwner(path);
if (fileness.equals(Fileness.Directory) != attributes.isDirectory()) {
mismatch = "Is " + (attributes.isDirectory() ? "a directory" : "a file");
return false;
}
if (attributeViewOwner.contains(owner) == false) {
mismatch = "Owned by " + attributeViewOwner;
return false;
}
} else {
final PosixFileAttributes attributes = getPosixFileAttributes(path);
if (fileness.equals(Fileness.Directory) != attributes.isDirectory()) {
mismatch = "Is " + (attributes.isDirectory() ? "a directory" : "a file");
return false;
}
if (owner.equals(attributes.owner().getName()) == false) {
mismatch = "Owned by " + attributes.owner().getName();
return false;
}
if (group != null && group.equals(attributes.group().getName()) == false) {
mismatch = "Owned by group " + attributes.group().getName();
return false;
}
if (posixPermissions != null && posixPermissions.equals(attributes.permissions()) == false) {
mismatch = "Has permissions " + attributes.permissions();
return false;
}
}
return true;
}
@Override
public void describeMismatchSafely(Path path, Description description) {
description.appendText("path ").appendValue(path);
if (mismatch != null) {
description.appendText(mismatch);
}
}
@Override
public void describeTo(Description description) {
description.appendValue("file/directory: ").appendValue(fileness)
.appendText(" with owner ").appendValue(owner)
.appendText(" with group ").appendValue(group)
.appendText(" with posix permissions ").appendValueList("[", ",", "]", posixPermissions);
}
public static FileMatcher file(Fileness fileness, String owner) {
return file(fileness, owner, null, null);
}
public static FileMatcher file(Fileness fileness, String owner, String group, Set<PosixFilePermission> permissions) {
return new FileMatcher(fileness, owner, group, permissions);
}
}
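
A minimal usage sketch of the matcher and its permission constants combined with assertThat; the sketch class and the path below are hypothetical, while the real tests resolve paths from an Installation:

--------------------------------------------
import java.nio.file.Path;
import java.nio.file.Paths;

import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File;
import static org.elasticsearch.packaging.util.FileMatcher.file;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.hamcrest.MatcherAssert.assertThat;

class FileMatcherUsageSketch {
    public static void main(String[] args) {
        // hypothetical config file location
        Path config = Paths.get("/tmp/elasticsearch/config/elasticsearch.yml");
        // asserts: regular file, owned by elasticsearch:elasticsearch, permissions rw-rw----
        assertThat(config, file(File, "elasticsearch", "elasticsearch", p660));
    }
}
--------------------------------------------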

View File

@ -0,0 +1,134 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
import org.elasticsearch.core.internal.io.IOUtils;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileOwnerAttributeView;
import java.nio.file.attribute.PosixFileAttributes;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.text.IsEmptyString.isEmptyOrNullString;
/**
* Wrappers and convenience methods for common filesystem operations
*/
public class FileUtils {
public static List<Path> lsGlob(Path directory, String glob) {
List<Path> paths = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory, glob)) {
for (Path path : stream) {
paths.add(path);
}
return paths;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static void rm(Path... paths) {
try {
IOUtils.rm(paths);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static Path mv(Path source, Path target) {
try {
return Files.move(source, target);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static String slurp(Path file) {
try {
return String.join("\n", Files.readAllLines(file));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Gets the owner of a file in a way that should be supported by all filesystems that have a concept of file owner
*/
public static String getFileOwner(Path path) {
try {
FileOwnerAttributeView view = Files.getFileAttributeView(path, FileOwnerAttributeView.class);
return view.getOwner().getName();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Gets attributes that are supported by all filesystems
*/
public static BasicFileAttributes getBasicFileAttributes(Path path) {
try {
return Files.readAttributes(path, BasicFileAttributes.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Gets attributes that are supported by posix filesystems
*/
public static PosixFileAttributes getPosixFileAttributes(Path path) {
try {
return Files.readAttributes(path, PosixFileAttributes.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// vagrant creates /tmp for us in windows so we use that to avoid long paths
public static Path getTempDir() {
return Paths.get("/tmp");
}
public static Path getDefaultArchiveInstallPath() {
return getTempDir().resolve("elasticsearch");
}
public static String getCurrentVersion() {
return slurp(getPackagingArchivesDir().resolve("version"));
}
public static Path getPackagingArchivesDir() {
String fromEnv = System.getenv("PACKAGING_ARCHIVES");
assertThat(fromEnv, not(isEmptyOrNullString()));
return Paths.get(fromEnv);
}
}
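
A short sketch of how these helpers combine; the sketch class is made up and it assumes the PACKAGING_ARCHIVES environment variable is set, as it is in the Vagrant VMs:

--------------------------------------------
import java.nio.file.Path;
import java.util.List;

import org.elasticsearch.packaging.util.FileUtils;

import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion;
import static org.elasticsearch.packaging.util.FileUtils.getTempDir;
import static org.elasticsearch.packaging.util.FileUtils.lsGlob;

class FileUtilsUsageSketch {
    public static void main(String[] args) {
        // remove anything left over from a previous run under the temp dir
        List<Path> leftovers = lsGlob(getTempDir(), "elasticsearch*");
        leftovers.forEach(FileUtils::rm);

        // version string read from $PACKAGING_ARCHIVES/version
        String version = getCurrentVersion();
        System.out.println("testing version " + version);
    }
}
--------------------------------------------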

View File

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
import java.nio.file.Path;
/**
* Represents an installation of Elasticsearch
*/
public class Installation {
public final Path home;
public final Path config;
public final Path data;
public final Path logs;
public final Path plugins;
public final Path modules;
public final Path scripts;
public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path scripts) {
this.home = home;
this.config = config;
this.data = data;
this.logs = logs;
this.plugins = plugins;
this.modules = modules;
this.scripts = scripts;
}
public Installation(Path home) {
this(
home,
home.resolve("config"),
home.resolve("data"),
home.resolve("logs"),
home.resolve("plugins"),
home.resolve("modules"),
home.resolve("scripts")
);
}
}
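
For reference, a sketch of the single-argument constructor: every other path is derived from the home directory, which here is only an example location:

--------------------------------------------
import java.nio.file.Paths;

import org.elasticsearch.packaging.util.Installation;

class InstallationUsageSketch {
    public static void main(String[] args) {
        // hypothetical home directory
        Installation es = new Installation(Paths.get("/tmp/elasticsearch"));
        System.out.println(es.config);  // /tmp/elasticsearch/config
        System.out.println(es.logs);    // /tmp/elasticsearch/logs
        System.out.println(es.plugins); // /tmp/elasticsearch/plugins
    }
}
--------------------------------------------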

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
public class Platforms {
public static final String OS_NAME = System.getProperty("os.name");
public static final boolean LINUX = OS_NAME.startsWith("Linux");
public static final boolean WINDOWS = OS_NAME.startsWith("Windows");
public static boolean isDPKG() {
if (WINDOWS) {
return false;
}
return new Shell().runIgnoreExitCode("which", "dpkg").isSuccess();
}
public static boolean isAptGet() {
if (WINDOWS) {
return false;
}
return new Shell().runIgnoreExitCode("which", "apt-get").isSuccess();
}
public static boolean isRPM() {
if (WINDOWS) {
return false;
}
return new Shell().runIgnoreExitCode("which", "rpm").isSuccess();
}
public static boolean isYUM() {
if (WINDOWS) {
return false;
}
return new Shell().runIgnoreExitCode("which", "yum").isSuccess();
}
public static boolean isSystemd() {
if (WINDOWS) {
return false;
}
return new Shell().runIgnoreExitCode("which", "systemctl").isSuccess();
}
public static boolean isSysVInit() {
if (WINDOWS) {
return false;
}
return new Shell().runIgnoreExitCode("which", "service").isSuccess();
}
}

View File

@ -0,0 +1,193 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.util;
import org.elasticsearch.common.SuppressForbidden;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
/**
* Wrapper to run shell commands and collect their outputs in a less verbose way
*/
public class Shell {
final Map<String, String> env;
final Path workingDirectory;
public Shell() {
this(emptyMap(), null);
}
public Shell(Map<String, String> env) {
this(env, null);
}
public Shell(Path workingDirectory) {
this(emptyMap(), workingDirectory);
}
public Shell(Map<String, String> env, Path workingDirectory) {
this.env = new HashMap<>(env);
this.workingDirectory = workingDirectory;
}
public Result run(String... command) {
Result result = runIgnoreExitCode(command);
if (result.isSuccess() == false) {
throw new RuntimeException("Command was not successful: [" + String.join(" ", command) + "] result: " + result.toString());
}
return result;
}
public Result runIgnoreExitCode(String... command) {
ProcessBuilder builder = new ProcessBuilder();
builder.command(command);
if (workingDirectory != null) {
setWorkingDirectory(builder, workingDirectory);
}
if (env != null && env.isEmpty() == false) {
for (Map.Entry<String, String> entry : env.entrySet()) {
builder.environment().put(entry.getKey(), entry.getValue());
}
}
try {
Process process = builder.start();
StringBuilder stdout = new StringBuilder();
StringBuilder stderr = new StringBuilder();
Thread stdoutThread = new Thread(new StreamCollector(process.getInputStream(), stdout));
Thread stderrThread = new Thread(new StreamCollector(process.getErrorStream(), stderr));
stdoutThread.start();
stderrThread.start();
stdoutThread.join();
stderrThread.join();
int exitCode = process.waitFor();
return new Result(exitCode, stdout.toString(), stderr.toString());
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@SuppressForbidden(reason = "ProcessBuilder expects java.io.File")
private static void setWorkingDirectory(ProcessBuilder builder, Path path) {
builder.directory(path.toFile());
}
public String toString() {
return new StringBuilder()
.append("<")
.append(this.getClass().getName())
.append(" ")
.append("env = [")
.append(env)
.append("]")
.append("workingDirectory = [")
.append(workingDirectory)
.append("]")
.append(">")
.toString();
}
public static class Result {
public final int exitCode;
public final String stdout;
public final String stderr;
public Result(int exitCode, String stdout, String stderr) {
this.exitCode = exitCode;
this.stdout = stdout;
this.stderr = stderr;
}
public boolean isSuccess() {
return exitCode == 0;
}
public String toString() {
return new StringBuilder()
.append("<")
.append(this.getClass().getName())
.append(" ")
.append("exitCode = [")
.append(exitCode)
.append("]")
.append(" ")
.append("stdout = [")
.append(stdout)
.append("]")
.append(" ")
.append("stderr = [")
.append(stderr)
.append("]")
.append(">")
.toString();
}
}
private static class StreamCollector implements Runnable {
private final InputStream input;
private final Appendable appendable;
StreamCollector(InputStream input, Appendable appendable) {
this.input = Objects.requireNonNull(input);
this.appendable = Objects.requireNonNull(appendable);
}
public void run() {
try {
BufferedReader reader = new BufferedReader(reader(input));
String line;
while ((line = reader.readLine()) != null) {
appendable.append(line);
appendable.append("\n");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@SuppressForbidden(reason = "the system's default character set is a best guess of what subprocesses will use")
private static InputStreamReader reader(InputStream inputStream) {
return new InputStreamReader(inputStream);
}
}
}
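
A usage sketch mirroring the pattern in Platforms and Archives: runIgnoreExitCode for probing, run for commands that must succeed. The sketch class and the probed command are illustrative only:

--------------------------------------------
import org.elasticsearch.packaging.util.Shell;

class ShellUsageSketch {
    public static void main(String[] args) {
        Shell sh = new Shell();

        // probe: a non-zero exit code is reported via the Result, not thrown
        Shell.Result probe = sh.runIgnoreExitCode("which", "dpkg");

        if (probe.isSuccess()) {
            // run() throws a RuntimeException if the exit code is non-zero
            Shell.Result version = sh.run("dpkg", "--version");
            System.out.println(version.stdout);
        }
    }
}
--------------------------------------------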

View File

@ -45,9 +45,8 @@ setup:
"Nested doc version and seqIDs": "Nested doc version and seqIDs":
- skip: - skip:
# fixed in 6.0.1 version: " - 6.3.99"
version: " - 6.0.0" reason: "object notation for docvalue_fields was introduced in 6.4"
reason: "version and seq IDs where not accurate in previous versions"
- do: - do:
index: index:
@ -61,7 +60,7 @@ setup:
- do: - do:
search: search:
body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": ["_seq_no"]} }}, "version": true, "docvalue_fields" : ["_seq_no"] } body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }
- match: { hits.total: 1 } - match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" } - match: { hits.hits.0._index: "test" }
@ -84,7 +83,7 @@ setup:
- do: - do:
search: search:
body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": ["_seq_no"]} }}, "version": true, "docvalue_fields" : ["_seq_no"] } body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }
- match: { hits.total: 1 } - match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" } - match: { hits.hits.0._index: "test" }

View File

@ -133,7 +133,53 @@ setup:
--- ---
"docvalue_fields": "docvalue_fields":
- skip:
version: " - 6.3.99"
reason: format option was added in 6.4
features: warnings
- do: - do:
warnings:
- 'Doc-value field [count] is not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with the doc value field in order to opt in for the future behaviour and ease the migration to 7.0.'
search:
body:
docvalue_fields: [ "count" ]
- match: { hits.hits.0.fields.count: [1] }
---
"docvalue_fields as url param":
- skip:
version: " - 6.3.99"
reason: format option was added in 6.4
features: warnings
- do:
warnings:
- 'Doc-value field [count] is not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with the doc value field in order to opt in for the future behaviour and ease the migration to 7.0.'
search: search:
docvalue_fields: [ "count" ] docvalue_fields: [ "count" ]
- match: { hits.hits.0.fields.count: [1] } - match: { hits.hits.0.fields.count: [1] }
---
"docvalue_fields with default format":
- skip:
version: " - 6.3.99"
reason: format option was added in 6.4
- do:
search:
body:
docvalue_fields:
- field: "count"
format: "use_field_mapping"
- match: { hits.hits.0.fields.count: [1] }
---
"docvalue_fields with explicit format":
- skip:
version: " - 6.3.99"
reason: format option was added in 6.4
- do:
search:
body:
docvalue_fields:
- field: "count"
format: "#.0"
- match: { hits.hits.0.fields.count: ["1.0"] }

View File

@@ -62,6 +62,9 @@ setup:
 ---
 "Docvalues_fields size limit":
+  - skip:
+      version: " - 6.3.99"
+      reason: "The object notation for docvalue_fields is only supported on 6.4+"
   - do:
       catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./
       search:
@@ -69,7 +72,13 @@ setup:
         body:
           query:
             match_all: {}
-          docvalue_fields: ["one", "two", "three"]
+          docvalue_fields:
+            - field: "one"
+              format: "use_field_mapping"
+            - field: "two"
+              format: "use_field_mapping"
+            - field: "three"
+              format: "use_field_mapping"
 ---
 "Script_fields size limit":

View File

@@ -51,6 +51,9 @@ setup:
 ---
 "Verify created repository":
+  - skip:
+      version: " - 6.99.99"
+      reason: AwaitsFix for https://github.com/elastic/elasticsearch/issues/30807
   - do:
       snapshot.verify_repository:
         repository: test_repo_get_2

View File

@@ -153,7 +153,7 @@ final class ExpandSearchPhase extends SearchPhase {
                 }
             }
             if (options.getDocValueFields() != null) {
-                options.getDocValueFields().forEach(groupSource::docValueField);
+                options.getDocValueFields().forEach(ff -> groupSource.docValueField(ff.field, ff.format));
             }
             if (options.getStoredFieldsContext() != null && options.getStoredFieldsContext().fieldNames() != null) {
                 options.getStoredFieldsContext().fieldNames().forEach(groupSource::storedField);

View File

@@ -290,11 +290,21 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
      *
      * @param name The field to get from the docvalue
      */
-    public SearchRequestBuilder addDocValueField(String name) {
-        sourceBuilder().docValueField(name);
+    public SearchRequestBuilder addDocValueField(String name, String format) {
+        sourceBuilder().docValueField(name, format);
         return this;
     }
+    /**
+     * Adds a docvalue based field to load and return. The field does not have to be stored,
+     * but its recommended to use non analyzed or numeric fields.
+     *
+     * @param name The field to get from the docvalue
+     */
+    public SearchRequestBuilder addDocValueField(String name) {
+        return addDocValueField(name, null);
+    }
     /**
      * Adds a stored field to load and return (note, it must be stored) as part of the search request.
      */
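A usage sketch for the new overload; client construction is elided and `client` is assumed to be an existing transport or node client:

--------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

class AddDocValueFieldExample {
    static SearchResponse run(Client client) {
        // Ask for the "count" doc-value field formatted according to its mapping;
        // the single-argument overload keeps the old, unformatted behaviour.
        return client.prepareSearch("test")
                .addDocValueField("count", "use_field_mapping")
                .get();
    }
}
--------------------------------------------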

View File

@@ -25,7 +25,11 @@ import org.elasticsearch.common.unit.TimeValue;
 public interface AckedClusterStateTaskListener extends ClusterStateTaskListener {
     /**
-     * Called to determine which nodes the acknowledgement is expected from
+     * Called to determine which nodes the acknowledgement is expected from.
+     *
+     * As this method will be called multiple times to determine the set of acking nodes,
+     * it is crucial for it to return consistent results: Given the same listener instance
+     * and the same node parameter, the method implementation should return the same result.
      *
      * @param discoveryNode a node
      * @return true if the node is expected to send ack back, false otherwise
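A sketch of what the new contract implies for implementers: compute the set of nodes that must ack once, up front, instead of deriving it from mutable state on each call. The class and field names below are illustrative only, not part of the change:

--------------------------------------------
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.elasticsearch.cluster.AckedClusterStateTaskListener;
import org.elasticsearch.cluster.node.DiscoveryNode;

// Illustrative only: a listener that fixes its acking set at construction time,
// so repeated mustAck(node) calls always agree with each other.
abstract class FixedAckSetListener implements AckedClusterStateTaskListener {
    private final Set<String> ackingNodeIds;

    FixedAckSetListener(Set<String> ackingNodeIds) {
        this.ackingNodeIds = Collections.unmodifiableSet(new HashSet<>(ackingNodeIds));
    }

    @Override
    public boolean mustAck(DiscoveryNode discoveryNode) {
        return ackingNodeIds.contains(discoveryNode.getId()); // same input, same answer, every time
    }
}
--------------------------------------------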

View File

@@ -61,7 +61,7 @@ public abstract class AckedClusterStateUpdateTask<Response> extends ClusterState
      * @param e optional error that might have been thrown
      */
     public void onAllNodesAcked(@Nullable Exception e) {
-        listener.onResponse(newResponse(true));
+        listener.onResponse(newResponse(e == null));
     }
     protected abstract Response newResponse(boolean acknowledged);

View File

@@ -363,7 +363,7 @@ public class MetaDataMappingService extends AbstractComponent {
         @Override
         public void onAllNodesAcked(@Nullable Exception e) {
-            listener.onResponse(new ClusterStateUpdateResponse(true));
+            listener.onResponse(new ClusterStateUpdateResponse(e == null));
         }
         @Override

View File

@@ -563,7 +563,7 @@ public class MasterService extends AbstractLifecycleComponent {
         private final AckedClusterStateTaskListener ackedTaskListener;
         private final CountDown countDown;
-        private final DiscoveryNodes nodes;
+        private final DiscoveryNode masterNode;
         private final long clusterStateVersion;
         private final Future<?> ackTimeoutCallback;
         private Exception lastFailure;
@@ -572,15 +572,14 @@
                                 ThreadPool threadPool) {
             this.ackedTaskListener = ackedTaskListener;
             this.clusterStateVersion = clusterStateVersion;
-            this.nodes = nodes;
+            this.masterNode = nodes.getMasterNode();
             int countDown = 0;
             for (DiscoveryNode node : nodes) {
-                if (ackedTaskListener.mustAck(node)) {
+                //we always wait for at least the master node
+                if (node.equals(masterNode) || ackedTaskListener.mustAck(node)) {
                     countDown++;
                 }
             }
-            //we always wait for at least 1 node (the master)
-            countDown = Math.max(1, countDown);
             logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion);
             this.countDown = new CountDown(countDown);
             this.ackTimeoutCallback = threadPool.schedule(ackedTaskListener.ackTimeout(), ThreadPool.Names.GENERIC, () -> onTimeout());
@@ -588,11 +587,8 @@
         @Override
         public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
-            if (!ackedTaskListener.mustAck(node)) {
-                //we always wait for the master ack anyway
-                if (!node.equals(nodes.getMasterNode())) {
-                    return;
-                }
+            if (node.equals(masterNode) == false && ackedTaskListener.mustAck(node) == false) {
+                return;
             }
             if (e == null) {
                 logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);

View File

@@ -44,6 +44,7 @@ import java.util.Collections;
 import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.function.Supplier;
 import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
@@ -66,13 +67,16 @@ public class NodesFaultDetection extends FaultDetection {
     private final ConcurrentMap<DiscoveryNode, NodeFD> nodesFD = newConcurrentMap();
-    private volatile long clusterStateVersion = ClusterState.UNKNOWN_VERSION;
+    private final Supplier<ClusterState> clusterStateSupplier;
     private volatile DiscoveryNode localNode;
-    public NodesFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName) {
+    public NodesFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService,
+                               Supplier<ClusterState> clusterStateSupplier, ClusterName clusterName) {
         super(settings, threadPool, transportService, clusterName);
+        this.clusterStateSupplier = clusterStateSupplier;
         logger.debug("[node ] uses ping_interval [{}], ping_timeout [{}], ping_retries [{}]", pingInterval, pingRetryTimeout,
             pingRetryCount);
@@ -208,15 +212,18 @@
             return NodeFD.this.equals(nodesFD.get(node));
         }
+        private PingRequest newPingRequest() {
+            return new PingRequest(node, clusterName, localNode, clusterStateSupplier.get().version());
+        }
         @Override
         public void run() {
             if (!running()) {
                 return;
             }
-            final PingRequest pingRequest = new PingRequest(node, clusterName, localNode, clusterStateVersion);
             final TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.PING)
                 .withTimeout(pingRetryTimeout).build();
-            transportService.sendRequest(node, PING_ACTION_NAME, pingRequest, options, new TransportResponseHandler<PingResponse>() {
+            transportService.sendRequest(node, PING_ACTION_NAME, newPingRequest(), options, new TransportResponseHandler<PingResponse>() {
                 @Override
                 public PingResponse newInstance() {
                     return new PingResponse();
@@ -254,7 +261,7 @@
                     }
                 } else {
                     // resend the request, not reschedule, rely on send timeout
-                    transportService.sendRequest(node, PING_ACTION_NAME, pingRequest, options, this);
+                    transportService.sendRequest(node, PING_ACTION_NAME, newPingRequest(), options, this);
                 }
             }
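The shape of the fix is a familiar one: instead of caching a value (the cluster state version) that can go stale between pings, hold a Supplier and read it at the moment each ping is built. A stripped-down sketch in plain Java, not the Elasticsearch classes themselves:

--------------------------------------------
import java.util.function.Supplier;

class PingVersionExample {
    static final class ClusterStateStub {
        final long version;
        ClusterStateStub(long version) { this.version = version; }
    }

    private final Supplier<ClusterStateStub> clusterStateSupplier;

    PingVersionExample(Supplier<ClusterStateStub> clusterStateSupplier) {
        this.clusterStateSupplier = clusterStateSupplier;
    }

    // Each ping (and each resend) reads the current version, so the fault-detection
    // request never carries a version older than the state already applied locally.
    long newPingVersion() {
        return clusterStateSupplier.get().version;
    }
}
--------------------------------------------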

View File

@@ -205,7 +205,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
         this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, this::clusterState, masterService, clusterName);
         this.masterFD.addListener(new MasterNodeFailureListener());
-        this.nodesFD = new NodesFaultDetection(settings, threadPool, transportService, clusterName);
+        this.nodesFD = new NodesFaultDetection(settings, threadPool, transportService, this::clusterState, clusterName);
         this.nodesFD.addListener(new NodeFaultDetectionListener());
         this.pendingStatesQueue = new PendingClusterStatesQueue(logger, MAX_PENDING_CLUSTER_STATES_SETTING.get(settings));

View File

@@ -29,6 +29,7 @@ import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
 import org.apache.lucene.spatial.query.SpatialArgs;
 import org.apache.lucene.spatial.query.SpatialOperation;
 import org.apache.lucene.util.SetOnce;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetResponse;
@@ -77,6 +78,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
     private static final ParseField SHAPE_TYPE_FIELD = new ParseField("type");
     private static final ParseField SHAPE_INDEX_FIELD = new ParseField("index");
     private static final ParseField SHAPE_PATH_FIELD = new ParseField("path");
+    private static final ParseField SHAPE_ROUTING_FIELD = new ParseField("routing");
     private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
     private final String fieldName;
@@ -89,8 +91,10 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
     private final String indexedShapeId;
     private final String indexedShapeType;
     private String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME;
     private String indexedShapePath = DEFAULT_SHAPE_FIELD_NAME;
+    private String indexedShapeRouting;
     private ShapeRelation relation = DEFAULT_SHAPE_RELATION;
@@ -166,6 +170,11 @@
         indexedShapeType = in.readOptionalString();
         indexedShapeIndex = in.readOptionalString();
         indexedShapePath = in.readOptionalString();
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
+            indexedShapeRouting = in.readOptionalString();
+        } else {
+            indexedShapeRouting = null;
+        }
     }
     relation = ShapeRelation.readFromStream(in);
     strategy = in.readOptionalWriteable(SpatialStrategy::readFromStream);
@@ -188,6 +197,11 @@
         out.writeOptionalString(indexedShapeType);
         out.writeOptionalString(indexedShapeIndex);
         out.writeOptionalString(indexedShapePath);
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
+            out.writeOptionalString(indexedShapeRouting);
+        } else if (indexedShapeRouting != null) {
+            throw new IllegalStateException("indexed shape routing cannot be serialized to older nodes");
+        }
     }
     relation.writeTo(out);
     out.writeOptionalWriteable(strategy);
@@ -285,6 +299,26 @@
         return indexedShapePath;
     }
+    /**
+     * Sets the optional routing to the indexed Shape that will be used in the query
+     *
+     * @param indexedShapeRouting indexed shape routing
+     * @return this
+     */
+    public GeoShapeQueryBuilder indexedShapeRouting(String indexedShapeRouting) {
+        this.indexedShapeRouting = indexedShapeRouting;
+        return this;
+    }
+    /**
+     * @return the optional routing to the indexed Shape that will be used in the
+     * Query
+     */
+    public String indexedShapeRouting() {
+        return indexedShapeRouting;
+    }
     /**
      * Sets the relation of query shape and indexed shape.
      *
@@ -473,6 +507,9 @@
             if (indexedShapePath != null) {
                 builder.field(SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath);
             }
+            if (indexedShapeRouting != null) {
+                builder.field(SHAPE_ROUTING_FIELD.getPreferredName(), indexedShapeRouting);
+            }
             builder.endObject();
         }
@@ -498,6 +535,7 @@
         String type = null;
         String index = null;
         String shapePath = null;
+        String shapeRouting = null;
         XContentParser.Token token;
         String currentFieldName = null;
@@ -544,6 +582,8 @@
                     index = parser.text();
                 } else if (SHAPE_PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                     shapePath = parser.text();
+                } else if (SHAPE_ROUTING_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+                    shapeRouting = parser.text();
                 }
             } else {
                 throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
@@ -581,6 +621,9 @@
         if (shapePath != null) {
             builder.indexedShapePath(shapePath);
         }
+        if (shapeRouting != null) {
+            builder.indexedShapeRouting(shapeRouting);
+        }
         if (shapeRelation != null) {
             builder.relation(shapeRelation);
         }
@@ -602,6 +645,7 @@
                 && Objects.equals(indexedShapeIndex, other.indexedShapeIndex)
                 && Objects.equals(indexedShapePath, other.indexedShapePath)
                 && Objects.equals(indexedShapeType, other.indexedShapeType)
+                && Objects.equals(indexedShapeRouting, other.indexedShapeRouting)
                 && Objects.equals(relation, other.relation)
                 && Objects.equals(shape, other.shape)
                 && Objects.equals(supplier, other.supplier)
@@ -612,7 +656,7 @@
     @Override
     protected int doHashCode() {
         return Objects.hash(fieldName, indexedShapeId, indexedShapeIndex,
-            indexedShapePath, indexedShapeType, relation, shape, strategy, ignoreUnmapped, supplier);
+            indexedShapePath, indexedShapeType, indexedShapeRouting, relation, shape, strategy, ignoreUnmapped, supplier);
     }
     @Override
@@ -629,6 +673,7 @@
         SetOnce<ShapeBuilder> supplier = new SetOnce<>();
         queryRewriteContext.registerAsyncAction((client, listener) -> {
             GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
+            getRequest.routing(indexedShapeRouting);
             fetch(client, getRequest, indexedShapePath, ActionListener.wrap(builder-> {
                 supplier.set(builder);
                 listener.onResponse(null);
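A usage sketch for the new option: when the pre-indexed shape lives in an index with custom routing, the query can now carry that routing so the GET that fetches the shape reaches the right shard. Index, type, id, path and routing values below are illustrative only:

--------------------------------------------
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class IndexedShapeRoutingExample {
    static GeoShapeQueryBuilder buildQuery() {
        // Query the "location" field against the shape stored under shapes/_doc/deu,
        // telling the shape fetch to use the routing the shape document was indexed with.
        return QueryBuilders.geoShapeQuery("location", "deu", "_doc")
                .indexedShapeIndex("shapes")
                .indexedShapePath("geometry")
                .indexedShapeRouting("europe");
    }
}
--------------------------------------------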

View File

@@ -33,6 +33,7 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -45,6 +46,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import java.util.stream.Collectors;
 import static org.elasticsearch.common.xcontent.XContentParser.Token.END_OBJECT;
@@ -65,7 +67,8 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
         PARSER.declareBoolean(InnerHitBuilder::setVersion, SearchSourceBuilder.VERSION_FIELD);
         PARSER.declareBoolean(InnerHitBuilder::setTrackScores, SearchSourceBuilder.TRACK_SCORES_FIELD);
         PARSER.declareStringArray(InnerHitBuilder::setStoredFieldNames, SearchSourceBuilder.STORED_FIELDS_FIELD);
-        PARSER.declareStringArray(InnerHitBuilder::setDocValueFields, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD);
+        PARSER.declareObjectArray(InnerHitBuilder::setDocValueFields,
+                (p,c) -> FieldAndFormat.fromXContent(p), SearchSourceBuilder.DOCVALUE_FIELDS_FIELD);
         PARSER.declareField((p, i, c) -> {
             try {
                 Set<ScriptField> scriptFields = new HashSet<>();
@@ -102,7 +105,7 @@
     private StoredFieldsContext storedFieldsContext;
     private QueryBuilder query = DEFAULT_INNER_HIT_QUERY;
     private List<SortBuilder<?>> sorts;
-    private List<String> docValueFields;
+    private List<FieldAndFormat> docValueFields;
     private Set<ScriptField> scriptFields;
     private HighlightBuilder highlightBuilder;
     private FetchSourceContext fetchSourceContext;
@@ -134,7 +137,18 @@
         version = in.readBoolean();
         trackScores = in.readBoolean();
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
-        docValueFields = (List<String>) in.readGenericValue();
+        if (in.getVersion().before(Version.V_6_4_0)) {
+            List<String> fieldList = (List<String>) in.readGenericValue();
+            if (fieldList == null) {
+                docValueFields = null;
+            } else {
+                docValueFields = fieldList.stream()
+                        .map(field -> new FieldAndFormat(field, null))
+                        .collect(Collectors.toList());
+            }
+        } else {
+            docValueFields = in.readBoolean() ? in.readList(FieldAndFormat::new) : null;
+        }
         if (in.readBoolean()) {
             int size = in.readVInt();
             scriptFields = new HashSet<>(size);
@@ -174,7 +188,16 @@
         out.writeBoolean(version);
         out.writeBoolean(trackScores);
         out.writeOptionalWriteable(storedFieldsContext);
-        out.writeGenericValue(docValueFields);
+        if (out.getVersion().before(Version.V_6_4_0)) {
+            out.writeGenericValue(docValueFields == null
+                    ? null
+                    : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
+        } else {
+            out.writeBoolean(docValueFields != null);
+            if (docValueFields != null) {
+                out.writeList(docValueFields);
+            }
+        }
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {
@@ -248,7 +271,9 @@
         out.writeBoolean(version);
         out.writeBoolean(trackScores);
         out.writeOptionalWriteable(storedFieldsContext);
-        out.writeGenericValue(docValueFields);
+        out.writeGenericValue(docValueFields == null
+                ? null
+                : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {
@@ -390,14 +415,14 @@
     /**
      * Gets the docvalue fields.
      */
-    public List<String> getDocValueFields() {
+    public List<FieldAndFormat> getDocValueFields() {
         return docValueFields;
     }
     /**
      * Sets the stored fields to load from the docvalue and return.
      */
-    public InnerHitBuilder setDocValueFields(List<String> docValueFields) {
+    public InnerHitBuilder setDocValueFields(List<FieldAndFormat> docValueFields) {
         this.docValueFields = docValueFields;
         return this;
     }
@@ -405,14 +430,21 @@
     /**
      * Adds a field to load from the docvalue and return.
      */
-    public InnerHitBuilder addDocValueField(String field) {
+    public InnerHitBuilder addDocValueField(String field, String format) {
         if (docValueFields == null) {
             docValueFields = new ArrayList<>();
         }
-        docValueFields.add(field);
+        docValueFields.add(new FieldAndFormat(field, null));
         return this;
     }
+    /**
+     * Adds a field to load from doc values and return.
+     */
+    public InnerHitBuilder addDocValueField(String field) {
+        return addDocValueField(field, null);
+    }
     public Set<ScriptField> getScriptFields() {
         return scriptFields;
     }
@@ -489,8 +521,15 @@
         }
         if (docValueFields != null) {
             builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String docValueField : docValueFields) {
-                builder.value(docValueField);
+            for (FieldAndFormat docValueField : docValueFields) {
+                if (docValueField.format == null) {
+                    builder.value(docValueField.field);
+                } else {
+                    builder.startObject()
+                            .field("field", docValueField.field)
+                            .field("format", docValueField.format)
+                            .endObject();
+                }
             }
             builder.endArray();
         }
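The read/write changes above follow the usual wire-compatibility pattern: when talking to a pre-6.4 node the list degrades to plain field names, on 6.4+ it travels as a list of FieldAndFormat objects. A generic sketch of the legacy-form conversion (types simplified; not the actual InnerHitBuilder code):

--------------------------------------------
import java.util.List;
import java.util.stream.Collectors;

class WireCompatSketch {
    static final class FieldAndFormat {
        final String field;
        final String format;
        FieldAndFormat(String field, String format) { this.field = field; this.format = format; }
    }

    // Legacy wire form: only the field names, which is all an older node understands.
    // The format is silently dropped, exactly as in the version-gated writeTo above.
    static List<String> toLegacyWireForm(List<FieldAndFormat> docValueFields) {
        return docValueFields == null
                ? null
                : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList());
    }
}
--------------------------------------------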

View File

@@ -231,6 +231,7 @@ public class Node implements Closeable {
     private final Lifecycle lifecycle = new Lifecycle();
     private final Injector injector;
     private final Settings settings;
+    private final Settings originalSettings;
     private final Environment environment;
     private final NodeEnvironment nodeEnvironment;
     private final PluginsService pluginsService;
@@ -261,6 +262,7 @@
             logger.info("initializing ...");
         }
         try {
+            originalSettings = environment.settings();
             Settings tmpSettings = Settings.builder().put(environment.settings())
                 .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build();
@@ -568,7 +570,14 @@
     }
     /**
-     * The settings that were used to create the node.
+     * The original settings that were used to create the node
+     */
+    public Settings originalSettings() {
+        return originalSettings;
+    }
+    /**
+     * The settings that are used by this node. Contains original settings as well as additional settings provided by plugins.
      */
     public Settings settings() {
         return this.settings;

View File

@@ -70,6 +70,8 @@ public interface ClusterPlugin {
      * Returns a map of {@link ClusterState.Custom} supplier that should be invoked to initialize the initial clusterstate.
      * This allows custom clusterstate extensions to be always present and prevents invariants where clusterstates are published
      * but customs are not initialized.
+     *
+     * TODO: Remove this whole concept of InitialClusterStateCustomSupplier, it's not used anymore
      */
     default Map<String, Supplier<ClusterState.Custom>> getInitialClusterStateCustomSupplier() { return Collections.emptyMap(); }
 }

View File

@@ -214,7 +214,7 @@ public class RestSearchAction extends BaseRestHandler {
         if (Strings.hasText(sDocValueFields)) {
             String[] sFields = Strings.splitStringByCommaToArray(sDocValueFields);
             for (String field : sFields) {
-                searchSourceBuilder.docValueField(field);
+                searchSourceBuilder.docValueField(field, null);
             }
         }
     }

View File

@@ -49,17 +49,17 @@ public interface DocValueFormat extends NamedWriteable {
     /** Format a long value. This is used by terms and histogram aggregations
      * to format keys for fields that use longs as a doc value representation
      * such as the {@code long} and {@code date} fields. */
-    String format(long value);
+    Object format(long value);
     /** Format a double value. This is used by terms and stats aggregations
      * to format keys for fields that use numbers as a doc value representation
      * such as the {@code long}, {@code double} or {@code date} fields. */
-    String format(double value);
+    Object format(double value);
     /** Format a binary value. This is used by terms aggregations to format
      * keys for fields that use binary doc value representations such as the
      * {@code keyword} and {@code ip} fields. */
-    String format(BytesRef value);
+    Object format(BytesRef value);
     /** Parse a value that was formatted with {@link #format(long)} back to the
      * original long value. */
@@ -85,13 +85,13 @@
         }
         @Override
-        public String format(long value) {
-            return Long.toString(value);
+        public Long format(long value) {
+            return value;
         }
         @Override
-        public String format(double value) {
-            return Double.toString(value);
+        public Double format(double value) {
+            return value;
         }
         @Override
@@ -235,13 +235,13 @@
         }
         @Override
-        public String format(long value) {
-            return java.lang.Boolean.valueOf(value != 0).toString();
+        public Boolean format(long value) {
+            return java.lang.Boolean.valueOf(value != 0);
         }
         @Override
-        public String format(double value) {
-            return java.lang.Boolean.valueOf(value != 0).toString();
+        public Boolean format(double value) {
+            return java.lang.Boolean.valueOf(value != 0);
         }
         @Override
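The practical effect of widening the return type is that the built-in formats can hand back native JSON types instead of strings. A small sketch of what callers now see, using the existing DocValueFormat.RAW and DocValueFormat.BOOLEAN singletons:

--------------------------------------------
import org.elasticsearch.search.DocValueFormat;

class DocValueFormatReturnTypes {
    public static void main(String[] args) {
        Object rawLong = DocValueFormat.RAW.format(42L);     // now a Long (42); previously the String "42"
        Object rawDouble = DocValueFormat.RAW.format(1.5d);  // now a Double (1.5)
        Object bool = DocValueFormat.BOOLEAN.format(1L);     // now a Boolean (true); previously "true"
        // Callers that still need a string simply call toString() on the result,
        // which is exactly what the aggregation changes below do.
        System.out.println(rawLong + " " + rawDouble + " " + bool);
    }
}
--------------------------------------------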

View File

@@ -407,8 +407,8 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
             final long high = nextTransition;
             final DocValueFormat format = ft.docValueFormat(null, null);
-            final String formattedLow = format.format(low);
-            final String formattedHigh = format.format(high);
+            final Object formattedLow = format.format(low);
+            final Object formattedHigh = format.format(high);
             if (ft.isFieldWithinQuery(reader, formattedLow, formattedHigh,
                     true, false, tz, null, context) == Relation.WITHIN) {
                 // All values in this reader have the same offset despite daylight saving times.

View File

@@ -107,7 +107,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
         @Override
         public String getKeyAsString() {
-            return format.format(key);
+            return format.format(key).toString();
         }
         @Override
@@ -138,7 +138,7 @@
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            String keyAsString = format.format(key);
+            String keyAsString = format.format(key).toString();
             if (keyed) {
                 builder.startObject(keyAsString);
             } else {

View File

@@ -103,7 +103,7 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<Inte
         @Override
         public String getKeyAsString() {
-            return format.format(key);
+            return format.format(key).toString();
         }
         @Override
@@ -134,7 +134,7 @@
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            String keyAsString = format.format(key);
+            String keyAsString = format.format(key).toString();
             if (keyed) {
                 builder.startObject(keyAsString);
             } else {

View File

@@ -155,7 +155,7 @@ public final class InternalBinaryRange
         @Override
         public String getFromAsString() {
-            return from == null ? null : format.format(from);
+            return from == null ? null : format.format(from).toString();
         }
         @Override
@@ -165,7 +165,7 @@
         @Override
         public String getToAsString() {
-            return to == null ? null : format.format(to);
+            return to == null ? null : format.format(to).toString();
         }
         @Override

View File

@@ -98,7 +98,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
             if (Double.isInfinite(from)) {
                 return null;
             } else {
-                return format.format(from);
+                return format.format(from).toString();
             }
         }
@@ -107,7 +107,7 @@
             if (Double.isInfinite(to)) {
                 return null;
             } else {
-                return format.format(to);
+                return format.format(to).toString();
             }
         }

View File

@@ -78,7 +78,7 @@ public class SignificantLongTerms extends InternalMappedSignificantTerms<Signifi
         @Override
         public String getKeyAsString() {
-            return format.format(term);
+            return format.format(term).toString();
         }
         @Override

View File

@@ -83,7 +83,7 @@ public class SignificantStringTerms extends InternalMappedSignificantTerms<Signi
         @Override
         public String getKeyAsString() {
-            return format.format(termBytes);
+            return format.format(termBytes).toString();
         }
         @Override

View File

@@ -153,12 +153,12 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
         }
         public long getBackgroundFrequency(BytesRef termBytes) throws IOException {
-            String value = config.format().format(termBytes);
+            String value = config.format().format(termBytes).toString();
             return getBackgroundFrequency(value);
         }
         public long getBackgroundFrequency(long termNum) throws IOException {
-            String value = config.format().format(termNum);
+            String value = config.format().format(termNum).toString();
             return getBackgroundFrequency(value);
         }

View File

@@ -135,7 +135,7 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory<Signific
     }
     public long getBackgroundFrequency(BytesRef termBytes) throws IOException {
-        String value = format.format(termBytes);
+        String value = format.format(termBytes).toString();
         return getBackgroundFrequency(value);
     }

View File

@@ -63,7 +63,7 @@ public class DoubleTerms extends InternalMappedTerms<DoubleTerms, DoubleTerms.Bu
         @Override
         public String getKeyAsString() {
-            return format.format(term);
+            return format.format(term).toString();
         }
         @Override
@@ -90,7 +90,7 @@
         protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
             builder.field(CommonFields.KEY.getPreferredName(), term);
             if (format != DocValueFormat.RAW) {
-                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term));
+                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term).toString());
             }
             return builder;
         }

View File

@@ -63,7 +63,7 @@ public class LongTerms extends InternalMappedTerms<LongTerms, LongTerms.Bucket>
         @Override
         public String getKeyAsString() {
-            return format.format(term);
+            return format.format(term).toString();
         }
         @Override
@@ -90,7 +90,7 @@
         protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
             builder.field(CommonFields.KEY.getPreferredName(), term);
             if (format != DocValueFormat.RAW) {
-                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term));
+                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term).toString());
             }
             return builder;
         }

View File

@@ -80,7 +80,7 @@ public class StringTerms extends InternalMappedTerms<StringTerms, StringTerms.Bu
         @Override
         public String getKeyAsString() {
-            return format.format(termBytes);
+            return format.format(termBytes).toString();
         }
         @Override

View File

@@ -48,7 +48,7 @@ public abstract class InternalNumericMetricsAggregation extends InternalAggregat
         @Override
         public String getValueAsString() {
-            return format.format(value());
+            return format.format(value()).toString();
         }
         @Override
@@ -79,7 +79,7 @@
         public abstract double value(String name);
         public String valueAsString(String name) {
-            return format.format(value(name));
+            return format.format(value(name)).toString();
         }
         @Override

View File

@@ -113,7 +113,7 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.field(CommonFields.VALUE.getPreferredName(), count != 0 ? getValue() : null);
         if (count != 0 && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(getValue()));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(getValue()).toString());
         }
         return builder;
     }

View File

@@ -85,7 +85,7 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
         boolean hasValue = !Double.isInfinite(max);
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? max : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(max));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(max).toString());
         }
         return builder;
     }

View File

@@ -85,7 +85,7 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
         boolean hasValue = !Double.isInfinite(min);
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? min : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(min));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(min).toString());
         }
         return builder;
     }

View File

@@ -137,7 +137,7 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
                 builder.field(CommonFields.KEY.getPreferredName(), keys[i]);
                 builder.field(CommonFields.VALUE.getPreferredName(), value);
                 if (format != DocValueFormat.RAW) {
-                    builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+                    builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
                 }
                 builder.endObject();
             }

View File

@@ -120,7 +120,7 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
                 builder.field(CommonFields.KEY.getPreferredName(), keys[i]);
                 builder.field(CommonFields.VALUE.getPreferredName(), value);
                 if (format != DocValueFormat.RAW) {
-                    builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+                    builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
                 }
                 builder.endObject();
             }

View File

@@ -95,7 +95,7 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue i
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.field(CommonFields.VALUE.getPreferredName(), sum);
         if (format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(sum));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(sum).toString());
         }
         return builder;
     }

View File

@@ -39,6 +39,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
@@ -70,7 +71,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
     private List<SortBuilder<?>> sorts = null;
     private HighlightBuilder highlightBuilder;
     private StoredFieldsContext storedFieldsContext;
-    private List<String> fieldDataFields;
+    private List<FieldAndFormat> docValueFields;
     private Set<ScriptField> scriptFields;
     private FetchSourceContext fetchSourceContext;
@@ -91,7 +92,7 @@
             new HighlightBuilder(clone.highlightBuilder, clone.highlightBuilder.highlightQuery(), clone.highlightBuilder.fields());
         this.storedFieldsContext = clone.storedFieldsContext == null ? null :
             new StoredFieldsContext(clone.storedFieldsContext);
-        this.fieldDataFields = clone.fieldDataFields == null ? null : new ArrayList<>(clone.fieldDataFields);
+        this.docValueFields = clone.docValueFields == null ? null : new ArrayList<>(clone.docValueFields);
         this.scriptFields = clone.scriptFields == null ? null : new HashSet<>(clone.scriptFields);
         this.fetchSourceContext = clone.fetchSourceContext == null ? null :
             new FetchSourceContext(clone.fetchSourceContext.fetchSource(), clone.fetchSourceContext.includes(),
@@ -112,9 +113,9 @@
         fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         if (in.readBoolean()) {
             int size = in.readVInt();
-            fieldDataFields = new ArrayList<>(size);
+            docValueFields = new ArrayList<>(size);
             for (int i = 0; i < size; i++) {
-                fieldDataFields.add(in.readString());
+                docValueFields.add(new FieldAndFormat(in));
             }
         }
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
@@ -143,12 +144,12 @@
     protected void doWriteTo(StreamOutput out) throws IOException {
         out.writeBoolean(explain);
         out.writeOptionalWriteable(fetchSourceContext);
-        boolean hasFieldDataFields = fieldDataFields != null;
+        boolean hasFieldDataFields = docValueFields != null;
         out.writeBoolean(hasFieldDataFields);
         if (hasFieldDataFields) {
-            out.writeVInt(fieldDataFields.size());
-            for (String fieldName : fieldDataFields) {
-                out.writeString(fieldName);
+            out.writeVInt(docValueFields.size());
+            for (FieldAndFormat ff : docValueFields) {
+                ff.writeTo(out);
             }
         }
         out.writeOptionalWriteable(storedFieldsContext);
@@ -404,40 +405,33 @@
     }
     /**
-     * Adds a field to load from the field data cache and return as part of
+     * Adds a field to load from doc values and return as part of
      * the search request.
      */
-    public TopHitsAggregationBuilder fieldDataField(String fieldDataField) {
-        if (fieldDataField == null) {
-            throw new IllegalArgumentException("[fieldDataField] must not be null: [" + name + "]");
+    public TopHitsAggregationBuilder docValueField(String docValueField, String format) {
+        if (docValueField == null) {
+            throw new IllegalArgumentException("[docValueField] must not be null: [" + name + "]");
         }
-        if (fieldDataFields == null) {
-            fieldDataFields = new ArrayList<>();
+        if (docValueFields == null) {
+            docValueFields = new ArrayList<>();
        }
-        fieldDataFields.add(fieldDataField);
+        docValueFields.add(new FieldAndFormat(docValueField, format));
         return this;
     }
     /**
-     * Adds fields to load from the field data cache and return as part of
+     * Adds a field to load from doc values and return as part of
      * the search request.
      */
-    public TopHitsAggregationBuilder fieldDataFields(List<String> fieldDataFields) {
-        if (fieldDataFields == null) {
-            throw new IllegalArgumentException("[fieldDataFields] must not be null: [" + name + "]");
-        }
-        if (this.fieldDataFields == null) {
-            this.fieldDataFields = new ArrayList<>();
-        }
-        this.fieldDataFields.addAll(fieldDataFields);
-        return this;
+    public TopHitsAggregationBuilder docValueField(String docValueField) {
+        return docValueField(docValueField, null);
     }
     /**
      * Gets the field-data fields.
      */
-    public List<String> fieldDataFields() {
-        return fieldDataFields;
+    public List<FieldAndFormat> fieldDataFields() {
+        return docValueFields;
     }
     /**
@@ -587,7 +581,7 @@
             optionalSort = SortBuilder.buildSort(sorts, context.getQueryShardContext());
         }
         return new TopHitsAggregatorFactory(name, from, size, explain, version, trackScores, optionalSort, highlightBuilder,
-            storedFieldsContext, fieldDataFields, fields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData);
+            storedFieldsContext, docValueFields, fields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData);
     }
     @Override
@@ -603,10 +597,15 @@
         if (storedFieldsContext != null) {
             storedFieldsContext.toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder);
         }
-        if (fieldDataFields != null) {
+        if (docValueFields != null) {
             builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String fieldDataField : fieldDataFields) {
-                builder.value(fieldDataField);
+            for (FieldAndFormat dvField : docValueFields) {
+                builder.startObject()
+                    .field("field", dvField.field);
+                if (dvField.format != null) {
+                    builder.field("format", dvField.format);
+                }
+                builder.endObject();
             }
             builder.endArray();
         }
@@ -725,14 +724,9 @@
             } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                 List<String> fieldDataFields = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                    if (token == XContentParser.Token.VALUE_STRING) {
-                        fieldDataFields.add(parser.text());
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING
-                            + "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
-                    }
+                    FieldAndFormat ff = FieldAndFormat.fromXContent(parser);
+                    factory.docValueField(ff.field, ff.format);
                 }
-                factory.fieldDataFields(fieldDataFields);
             } else if (SearchSourceBuilder.SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                 List<SortBuilder<?>> sorts = SortBuilder.fromXContent(parser);
                 factory.sorts(sorts);
@@ -752,7 +746,7 @@
     @Override
     protected int doHashCode() {
-        return Objects.hash(explain, fetchSourceContext, fieldDataFields, storedFieldsContext, from, highlightBuilder,
+        return Objects.hash(explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
             scriptFields, size, sorts, trackScores, version);
     }
@@ -761,7 +755,7 @@
         TopHitsAggregationBuilder other = (TopHitsAggregationBuilder) obj;
         return Objects.equals(explain, other.explain)
             && Objects.equals(fetchSourceContext, other.fetchSourceContext)
-            && Objects.equals(fieldDataFields, other.fieldDataFields)
+            && Objects.equals(docValueFields, other.docValueFields)
             && Objects.equals(storedFieldsContext, other.storedFieldsContext)
             && Objects.equals(from, other.from)
             && Objects.equals(highlightBuilder, other.highlightBuilder)

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
@@ -47,13 +48,13 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
     private final Optional<SortAndFormats> sort;
     private final HighlightBuilder highlightBuilder;
     private final StoredFieldsContext storedFieldsContext;
-    private final List<String> docValueFields;
+    private final List<FieldAndFormat> docValueFields;
     private final List<ScriptFieldsContext.ScriptField> scriptFields;
     private final FetchSourceContext fetchSourceContext;
     TopHitsAggregatorFactory(String name, int from, int size, boolean explain, boolean version, boolean trackScores,
             Optional<SortAndFormats> sort, HighlightBuilder highlightBuilder, StoredFieldsContext storedFieldsContext,
-            List<String> docValueFields, List<ScriptFieldsContext.ScriptField> scriptFields, FetchSourceContext fetchSourceContext,
+            List<FieldAndFormat> docValueFields, List<ScriptFieldsContext.ScriptField> scriptFields, FetchSourceContext fetchSourceContext,
             SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactories, Map<String, Object> metaData)
             throws IOException {
         super(name, context, parent, subFactories, metaData);

View File

@@ -85,7 +85,7 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl
         boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
         }
         return builder;
     }

View File

@@ -108,7 +108,7 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
         boolean hasValue = !Double.isInfinite(value);
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
         }
         builder.startArray(KEYS_FIELD.getPreferredName());
         for (String key : keys) {

View File

@@ -97,7 +97,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
     @Override
     public String percentileAsString(double percent) {
-        return format.format(percentile(percent));
+        return format.format(percentile(percent)).toString();
     }
     DocValueFormat formatter() {
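The .toString() call sites above follow from DocValueFormat.format now returning an Object rather than a String; a minimal sketch of the difference, assuming the built-in RAW format passes numeric values through unchanged (the value 42 is arbitrary):

--------------------------------------------
import org.elasticsearch.search.DocValueFormat;

public class DocValueFormatSketch {
    public static void main(String[] args) {
        DocValueFormat format = DocValueFormat.RAW;
        // format(...) now yields the raw object (a Long here), not a String,
        // so callers that need text have to call toString() themselves.
        Object raw = format.format(42L);
        System.out.println(raw.getClass().getSimpleName() + " -> " + raw.toString());
    }
}
--------------------------------------------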

View File

@@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
 import org.elasticsearch.search.collapse.CollapseBuilder;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.internal.SearchContext;
@@ -64,6 +65,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
+import java.util.stream.Collectors;
 import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
@@ -162,7 +164,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
     private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;
     private StoredFieldsContext storedFieldsContext;
-    private List<String> docValueFields;
+    private List<FieldAndFormat> docValueFields;
     private List<ScriptField> scriptFields;
     private FetchSourceContext fetchSourceContext;
@@ -197,7 +199,22 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
         explain = in.readOptionalBoolean();
         fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
-        docValueFields = (List<String>) in.readGenericValue();
+        if (in.getVersion().before(Version.V_6_4_0)) {
+            List<String> dvFields = (List<String>) in.readGenericValue();
+            if (dvFields == null) {
+                docValueFields = null;
+            } else {
+                docValueFields = dvFields.stream()
+                    .map(field -> new FieldAndFormat(field, null))
+                    .collect(Collectors.toList());
+            }
+        } else {
+            if (in.readBoolean()) {
+                docValueFields = in.readList(FieldAndFormat::new);
+            } else {
+                docValueFields = null;
+            }
+        }
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
         from = in.readVInt();
         highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
@@ -246,7 +263,16 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         out.writeOptionalWriteable(aggregations);
         out.writeOptionalBoolean(explain);
         out.writeOptionalWriteable(fetchSourceContext);
-        out.writeGenericValue(docValueFields);
+        if (out.getVersion().before(Version.V_6_4_0)) {
+            out.writeGenericValue(docValueFields == null
+                ? null
+                : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
+        } else {
+            out.writeBoolean(docValueFields != null);
+            if (docValueFields != null) {
+                out.writeList(docValueFields);
+            }
+        }
         out.writeOptionalWriteable(storedFieldsContext);
         out.writeVInt(from);
         out.writeOptionalWriteable(highlightBuilder);
@@ -764,22 +790,30 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
     /**
      * Gets the docvalue fields.
      */
-    public List<String> docValueFields() {
+    public List<FieldAndFormat> docValueFields() {
         return docValueFields;
     }
     /**
-     * Adds a field to load from the docvalue and return as part of the
+     * Adds a field to load from the doc values and return as part of the
      * search request.
      */
-    public SearchSourceBuilder docValueField(String name) {
+    public SearchSourceBuilder docValueField(String name, @Nullable String format) {
         if (docValueFields == null) {
             docValueFields = new ArrayList<>();
         }
-        docValueFields.add(name);
+        docValueFields.add(new FieldAndFormat(name, format));
         return this;
     }
+    /**
+     * Adds a field to load from the doc values and return as part of the
+     * search request.
+     */
+    public SearchSourceBuilder docValueField(String name) {
+        return docValueField(name, null);
+    }
     /**
      * Adds a script field under the given name with the provided script.
      *
@@ -1076,12 +1110,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
             } else if (DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                 docValueFields = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                    if (token == XContentParser.Token.VALUE_STRING) {
-                        docValueFields.add(parser.text());
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
-                            "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
-                    }
+                    docValueFields.add(FieldAndFormat.fromXContent(parser));
                 }
             } else if (INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@@ -1177,8 +1206,13 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         if (docValueFields != null) {
             builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String docValueField : docValueFields) {
-                builder.value(docValueField);
+            for (FieldAndFormat docValueField : docValueFields) {
+                builder.startObject()
+                    .field("field", docValueField.field);
+                if (docValueField.format != null) {
+                    builder.field("format", docValueField.format);
+                }
+                builder.endObject();
             }
             builder.endArray();
         }
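As a usage sketch of the builder API above (the field names are hypothetical, and "use_field_mapping" is the sentinel introduced by this change): the old single-argument overload keeps working and simply leaves the format null, while the new overload records a format per field.

--------------------------------------------
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;

public class DocValueFieldRequestSketch {
    public static void main(String[] args) {
        SearchSourceBuilder source = new SearchSourceBuilder()
                .docValueField("plain_field")   // deprecated path, format stays null
                .docValueField("mapped_field", DocValueFieldsContext.USE_DEFAULT_FORMAT);
        for (FieldAndFormat ff : source.docValueFields()) {
            System.out.println(ff.field + " -> " + ff.format);
        }
    }
}
--------------------------------------------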

View File

@@ -18,23 +18,111 @@
  */
 package org.elasticsearch.search.fetch.subphase;
+import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+import java.io.IOException;
 import java.util.List;
+import java.util.Objects;
 /**
  * All the required context to pull a field from the doc values.
  */
 public class DocValueFieldsContext {
-    private final List<String> fields;
-    public DocValueFieldsContext(List<String> fields) {
+    public static final String USE_DEFAULT_FORMAT = "use_field_mapping";
+    /**
+     * Wrapper around a field name and the format that should be used to
+     * display values of this field.
+     */
+    public static final class FieldAndFormat implements Writeable {
+        private static final ConstructingObjectParser<FieldAndFormat, Void> PARSER = new ConstructingObjectParser<>("script",
+                a -> new FieldAndFormat((String) a[0], (String) a[1]));
+        static {
+            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field"));
+            PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("format"));
+        }
+        /**
+         * Parse a {@link FieldAndFormat} from some {@link XContent}.
+         */
+        public static FieldAndFormat fromXContent(XContentParser parser) throws IOException {
+            Token token = parser.currentToken();
+            if (token.isValue()) {
+                return new FieldAndFormat(parser.text(), null);
+            } else {
+                return PARSER.apply(parser, null);
+            }
+        }
+        /** The name of the field. */
+        public final String field;
+        /** The format of the field, or {@code null} if defaults should be used. */
+        public final String format;
+        /** Sole constructor. */
+        public FieldAndFormat(String field, @Nullable String format) {
+            this.field = Objects.requireNonNull(field);
+            this.format = format;
+        }
+        /** Serialization constructor. */
+        public FieldAndFormat(StreamInput in) throws IOException {
+            this.field = in.readString();
+            if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
+                format = in.readOptionalString();
+            } else {
+                format = null;
+            }
+        }
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(field);
+            if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
+                out.writeOptionalString(format);
+            }
+        }
+        @Override
+        public int hashCode() {
+            int h = field.hashCode();
+            h = 31 * h + Objects.hashCode(format);
+            return h;
+        }
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null || getClass() != obj.getClass()) {
+                return false;
+            }
+            FieldAndFormat other = (FieldAndFormat) obj;
+            return field.equals(other.field) && Objects.equals(format, other.format);
+        }
+    }
+    private final List<FieldAndFormat> fields;
+    public DocValueFieldsContext(List<FieldAndFormat> fields) {
         this.fields = fields;
     }
     /**
      * Returns the required docvalue fields
      */
-    public List<String> fields() {
+    public List<FieldAndFormat> fields() {
         return this.fields;
     }
 }
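A small sketch of how the new wrapper is meant to be carried by the context; the field names are hypothetical, and a null format stands for the legacy, unformatted behaviour:

--------------------------------------------
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;

import java.util.Arrays;

public class DocValueFieldsContextSketch {
    public static void main(String[] args) {
        // One entry keeps the legacy behaviour (null format), the other opts in
        // to formatting based on the field's mapping.
        FieldAndFormat legacy = new FieldAndFormat("raw_field", null);
        FieldAndFormat mapped = new FieldAndFormat("date_field", DocValueFieldsContext.USE_DEFAULT_FORMAT);
        DocValueFieldsContext context = new DocValueFieldsContext(Arrays.asList(legacy, mapped));
        for (FieldAndFormat ff : context.fields()) {
            System.out.println(ff.field + " -> " + ff.format);
        }
    }
}
--------------------------------------------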

View File

@@ -20,19 +20,32 @@ package org.elasticsearch.search.fetch.subphase;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
+import org.apache.lucene.index.SortedNumericDocValues;
 import org.elasticsearch.common.document.DocumentField;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
+import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.internal.SearchContext;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
+import java.util.List;
+import java.util.Objects;
 /**
  * Query sub phase which pulls data from doc values
@@ -41,6 +54,8 @@ import java.util.HashMap;
  */
 public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
+    private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(DocValueFieldsFetchSubPhase.class));
     @Override
     public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
@@ -48,9 +63,10 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
             // retrieve the `doc_value` associated with the collapse field
             String name = context.collapse().getFieldType().name();
             if (context.docValueFieldsContext() == null) {
-                context.docValueFieldsContext(new DocValueFieldsContext(Collections.singletonList(name)));
-            } else if (context.docValueFieldsContext().fields().contains(name) == false) {
-                context.docValueFieldsContext().fields().add(name);
+                context.docValueFieldsContext(new DocValueFieldsContext(
+                        Collections.singletonList(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT))));
+            } else if (context.docValueFieldsContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) {
+                context.docValueFieldsContext().fields().add(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT));
             }
         }
@@ -59,24 +75,51 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
         }
         hits = hits.clone(); // don't modify the incoming hits
-        Arrays.sort(hits, (a, b) -> Integer.compare(a.docId(), b.docId()));
+        Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));
-        for (String field : context.docValueFieldsContext().fields()) {
+        for (FieldAndFormat fieldAndFormat : context.docValueFieldsContext().fields()) {
+            String field = fieldAndFormat.field;
             MappedFieldType fieldType = context.mapperService().fullName(field);
             if (fieldType != null) {
+                final IndexFieldData<?> indexFieldData = context.getForField(fieldType);
+                final DocValueFormat format;
+                if (fieldAndFormat.format == null) {
+                    DEPRECATION_LOGGER.deprecated("Doc-value field [" + fieldAndFormat.field + "] is not using a format. The output will " +
+                            "change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass " +
+                            "[format={}] with the doc value field in order to opt in for the future behaviour and ease the migration to " +
+                            "7.0.", DocValueFieldsContext.USE_DEFAULT_FORMAT);
+                    format = null;
+                } else {
+                    String formatDesc = fieldAndFormat.format;
+                    if (Objects.equals(formatDesc, DocValueFieldsContext.USE_DEFAULT_FORMAT)) {
+                        formatDesc = null;
+                    }
+                    format = fieldType.docValueFormat(formatDesc, null);
+                }
                 LeafReaderContext subReaderContext = null;
                 AtomicFieldData data = null;
-                ScriptDocValues<?> values = null;
+                ScriptDocValues<?> scriptValues = null; // legacy
+                SortedBinaryDocValues binaryValues = null; // binary / string / ip fields
+                SortedNumericDocValues longValues = null; // int / date fields
+                SortedNumericDoubleValues doubleValues = null; // floating-point fields
                 for (SearchHit hit : hits) {
                     // if the reader index has changed we need to get a new doc values reader instance
                     if (subReaderContext == null || hit.docId() >= subReaderContext.docBase + subReaderContext.reader().maxDoc()) {
                         int readerIndex = ReaderUtil.subIndex(hit.docId(), context.searcher().getIndexReader().leaves());
                         subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
-                        data = context.getForField(fieldType).load(subReaderContext);
-                        values = data.getScriptValues();
+                        data = indexFieldData.load(subReaderContext);
+                        if (format == null) {
+                            scriptValues = data.getScriptValues();
+                        } else if (indexFieldData instanceof IndexNumericFieldData) {
+                            if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
+                                doubleValues = ((AtomicNumericFieldData) data).getDoubleValues();
+                            } else {
+                                longValues = ((AtomicNumericFieldData) data).getLongValues();
+                            }
+                        } else {
+                            binaryValues = data.getBytesValues();
+                        }
                     }
-                    int subDocId = hit.docId() - subReaderContext.docBase;
-                    values.setNextDocId(subDocId);
                     if (hit.fieldsOrNull() == null) {
                         hit.fields(new HashMap<>(2));
                     }
@@ -85,7 +128,33 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
                         hitField = new DocumentField(field, new ArrayList<>(2));
                         hit.getFields().put(field, hitField);
                     }
-                    hitField.getValues().addAll(values);
+                    final List<Object> values = hitField.getValues();
+                    int subDocId = hit.docId() - subReaderContext.docBase;
+                    if (scriptValues != null) {
+                        scriptValues.setNextDocId(subDocId);
+                        values.addAll(scriptValues);
+                    } else if (binaryValues != null) {
+                        if (binaryValues.advanceExact(subDocId)) {
+                            for (int i = 0, count = binaryValues.docValueCount(); i < count; ++i) {
+                                values.add(format.format(binaryValues.nextValue()));
+                            }
+                        }
+                    } else if (longValues != null) {
+                        if (longValues.advanceExact(subDocId)) {
+                            for (int i = 0, count = longValues.docValueCount(); i < count; ++i) {
+                                values.add(format.format(longValues.nextValue()));
+                            }
+                        }
+                    } else if (doubleValues != null) {
+                        if (doubleValues.advanceExact(subDocId)) {
+                            for (int i = 0, count = doubleValues.docValueCount(); i < count; ++i) {
+                                values.add(format.format(doubleValues.nextValue()));
+                            }
+                        }
+                    } else {
+                        throw new AssertionError("Unreachable code");
+                    }
                 }
             }
         }

View File

@@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.Settings.Builder;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.test.ESTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -38,8 +39,9 @@ public class UpdateSettingsRequestStreamableTests extends AbstractStreamableTest
     protected UpdateSettingsRequest mutateInstance(UpdateSettingsRequest request) {
         UpdateSettingsRequest mutation = copyRequest(request);
         List<Runnable> mutators = new ArrayList<>();
-        mutators.add(() -> mutation.masterNodeTimeout(randomTimeValue()));
-        mutators.add(() -> mutation.timeout(randomTimeValue()));
+        mutators.add(() -> mutation
+            .masterNodeTimeout(randomValueOtherThan(request.masterNodeTimeout().getStringRep(), ESTestCase::randomTimeValue)));
+        mutators.add(() -> mutation.timeout(randomValueOtherThan(request.masterNodeTimeout().getStringRep(), ESTestCase::randomTimeValue)));
         mutators.add(() -> mutation.settings(mutateSettings(request.settings())));
         mutators.add(() -> mutation.indices(mutateIndices(request.indices())));
         mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(request.indicesOptions(),
@@ -72,7 +74,7 @@ public class UpdateSettingsRequestStreamableTests extends AbstractStreamableTest
     private static UpdateSettingsRequest copyRequest(UpdateSettingsRequest request) {
         UpdateSettingsRequest result = new UpdateSettingsRequest(request.settings(), request.indices());
-        result.masterNodeTimeout(request.timeout());
+        result.masterNodeTimeout(request.masterNodeTimeout());
         result.timeout(request.timeout());
         result.indicesOptions(request.indicesOptions());
         result.setPreserveExisting(request.isPreserveExisting());

View File

@@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
+import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.ClusterState;
@@ -33,8 +34,16 @@ import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceA
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.discovery.DiscoverySettings;
+import org.elasticsearch.discovery.zen.PublishClusterStateAction;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.TransportService;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.stream.Stream;
 import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -43,6 +52,11 @@ import static org.hamcrest.Matchers.equalTo;
 @ClusterScope(scope = TEST, minNumDataNodes = 2)
 public class AckClusterUpdateSettingsIT extends ESIntegTestCase {
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return Arrays.asList(MockTransportService.TestPlugin.class);
+    }
     @Override
     protected Settings nodeSettings(int nodeOrdinal) {
         return Settings.builder()
@@ -156,4 +170,32 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase {
         assertThat(openIndexResponse.isAcknowledged(), equalTo(false));
         ensureGreen("test"); // make sure that recovery from disk has completed, so that check index doesn't fail.
     }
+    public void testAckingFailsIfNotPublishedToAllNodes() {
+        String masterNode = internalCluster().getMasterName();
+        String nonMasterNode = Stream.of(internalCluster().getNodeNames())
+            .filter(node -> node.equals(masterNode) == false).findFirst().get();
+        MockTransportService masterTransportService =
+            (MockTransportService) internalCluster().getInstance(TransportService.class, masterNode);
+        MockTransportService nonMasterTransportService =
+            (MockTransportService) internalCluster().getInstance(TransportService.class, nonMasterNode);
+        logger.info("blocking cluster state publishing from master [{}] to non master [{}]", masterNode, nonMasterNode);
+        if (randomBoolean() && internalCluster().numMasterNodes() != 2) {
+            masterTransportService.addFailToSendNoConnectRule(nonMasterTransportService, PublishClusterStateAction.SEND_ACTION_NAME);
+        } else {
+            masterTransportService.addFailToSendNoConnectRule(nonMasterTransportService, PublishClusterStateAction.COMMIT_ACTION_NAME);
+        }
+        CreateIndexResponse response = client().admin().indices().prepareCreate("test").get();
+        assertFalse(response.isAcknowledged());
+        logger.info("waiting for cluster to reform");
+        masterTransportService.clearRule(nonMasterTransportService);
+        ensureStableCluster(internalCluster().size());
+        assertAcked(client().admin().indices().prepareDelete("test"));
+    }
 }

View File

@@ -53,7 +53,6 @@ import java.util.concurrent.CountDownLatch;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 /**
@@ -256,8 +255,8 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
         internalCluster().setDisruptionScheme(isolatePreferredMaster);
         isolatePreferredMaster.startDisrupting();
-        assertAcked(client(randomFrom(nonPreferredNodes)).admin().indices().prepareCreate("test").setSettings(
-            Settings.builder().put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1).put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0)));
+        client(randomFrom(nonPreferredNodes)).admin().indices().prepareCreate("test").setSettings(
+            Settings.builder().put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1).put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0)).get();
         internalCluster().clearDisruptionScheme(false);
         internalCluster().setDisruptionScheme(isolateAllNodes);

View File

@@ -175,17 +175,19 @@ public class ZenFaultDetectionTests extends ESTestCase {
         final Settings pingSettings = Settings.builder()
             .put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry)
             .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m").build();
-        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(buildNodesForA(true)).build();
+        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong())
+            .nodes(buildNodesForA(true)).build();
         NodesFaultDetection nodesFDA = new NodesFaultDetection(Settings.builder().put(settingsA).put(pingSettings).build(),
-            threadPool, serviceA, clusterState.getClusterName());
+            threadPool, serviceA, () -> clusterState, clusterState.getClusterName());
         nodesFDA.setLocalNode(nodeA);
         NodesFaultDetection nodesFDB = new NodesFaultDetection(Settings.builder().put(settingsB).put(pingSettings).build(),
-            threadPool, serviceB, clusterState.getClusterName());
+            threadPool, serviceB, () -> clusterState, clusterState.getClusterName());
         nodesFDB.setLocalNode(nodeB);
         final CountDownLatch pingSent = new CountDownLatch(1);
         nodesFDB.addListener(new NodesFaultDetection.Listener() {
             @Override
             public void onPingReceived(NodesFaultDetection.PingRequest pingRequest) {
+                assertThat(pingRequest.clusterStateVersion(), equalTo(clusterState.version()));
                 pingSent.countDown();
             }
         });

View File

@@ -150,7 +150,6 @@ public class SingleNodeDiscoveryIT extends ESIntegTestCase {
                 internalCluster().getClusterName(),
                 configurationSource,
                 0,
-                false,
                 "other",
                 Arrays.asList(getTestTransportPlugin(), MockHttpTransport.TestPlugin.class),
                 Function.identity())) {

View File

@@ -36,8 +36,8 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase {
     public void testValueFormat() {
         MappedFieldType ft = createDefaultFieldType();
-        assertEquals("false", ft.docValueFormat(null, null).format(0));
-        assertEquals("true", ft.docValueFormat(null, null).format(1));
+        assertEquals(false, ft.docValueFormat(null, null).format(0));
+        assertEquals(true, ft.docValueFormat(null, null).format(1));
     }
     public void testValueForSearch() {

View File

@@ -59,6 +59,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
     private static String indexedShapeType;
     private static String indexedShapePath;
    private static String indexedShapeIndex;
+    private static String indexedShapeRouting;
     private static ShapeBuilder indexedShapeToReturn;
     @Override
@@ -85,6 +86,10 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
                 indexedShapePath = randomAlphaOfLengthBetween(3, 20);
                 builder.indexedShapePath(indexedShapePath);
             }
+            if (randomBoolean()) {
+                indexedShapeRouting = randomAlphaOfLengthBetween(3, 20);
+                builder.indexedShapeRouting(indexedShapeRouting);
+            }
         }
         if (randomBoolean()) {
             SpatialStrategy strategy = randomFrom(SpatialStrategy.values());
@@ -112,6 +117,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
         assertThat(indexedShapeType, notNullValue());
         assertThat(getRequest.id(), equalTo(indexedShapeId));
         assertThat(getRequest.type(), equalTo(indexedShapeType));
+        assertThat(getRequest.routing(), equalTo(indexedShapeRouting));
         String expectedShapeIndex = indexedShapeIndex == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_INDEX_NAME : indexedShapeIndex;
         assertThat(getRequest.index(), equalTo(expectedShapeIndex));
         String expectedShapePath = indexedShapePath == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_FIELD_NAME : indexedShapePath;
@@ -136,6 +142,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
         indexedShapeType = null;
         indexedShapePath = null;
         indexedShapeIndex = null;
+        indexedShapeRouting = null;
     }
     @Override
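A sketch of the query-side feature this test now exercises; the field, shape id, type, index, path, and routing values are all hypothetical, and it assumes indexedShapeRouting is the public setter used by the test above:

--------------------------------------------
import org.elasticsearch.index.query.GeoShapeQueryBuilder;

public class IndexedShapeRoutingSketch {
    public static void main(String[] args) throws Exception {
        // Reference a pre-indexed shape and route the GET that fetches it
        // with a custom routing value.
        GeoShapeQueryBuilder query = new GeoShapeQueryBuilder("location", "deu", "countries");
        query.indexedShapeIndex("shapes");
        query.indexedShapePath("geometry");
        query.indexedShapeRouting("user-123");
        System.out.println(query.fieldName());
    }
}
--------------------------------------------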

Some files were not shown because too many files have changed in this diff.