Merge remote-tracking branch 'es/master' into ccr

* es/master:
  Watcher: Fold two smoke test projects into smoke-test-watcher (#30137)
  In the field capabilities API, deprecate support for providing fields in the request body. (#30157)
  Set JAVA_HOME before forking setup commands (#29647)
  Remove animal sniffer from low-level REST client (#29646)
  Cleanup .gitignore (#30145)
  Do not add noop from local translog to translog again (#29637)
  Build: Assert jar LICENSE and NOTICE files match
  Correct transport compression algorithm in docs (#29645)
  [Test] Fix docs check for DEB package in packaging tests (#30126)
  Painless: Docs Clean Up (#29592)
  Fixes Eclipse build for sql jdbc project (#30114)
  Remove reference to `not_analyzed`.
  [Docs] Add community analysis plugin (#29612)
Martijn van Groningen 2018-04-26 09:25:19 +02:00
commit 4bcecd34a1
No known key found for this signature in database
GPG Key ID: AB236F4FCF2AF12A
53 changed files with 495 additions and 400 deletions

.gitignore

@@ -20,10 +20,8 @@ nbactions.xml
 .gradle/
 build/
-# maven stuff (to be removed when trunk becomes 4.x)
-*-execution-hints.log
-target/
-dependency-reduced-pom.xml
+# vscode stuff
+.vscode/
 
 # testing stuff
 **/.local*
@@ -43,4 +41,3 @@ html_docs
 # random old stuff that we should look at the necessity of...
 /tmp/
 eclipse-build


@@ -20,6 +20,7 @@
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.BuildPlugin
+import org.elasticsearch.gradle.LoggedExec
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionCollection
 import org.elasticsearch.gradle.VersionProperties
@@ -30,6 +31,7 @@ import org.gradle.api.tasks.wrapper.Wrapper.DistributionType
 import org.gradle.util.GradleVersion
 import org.gradle.util.DistributionLocator
+import java.nio.file.Files
 import java.nio.file.Path
 import java.security.MessageDigest
@@ -459,6 +461,59 @@ gradle.projectsEvaluated {
 }
 
+static void assertLinesInFile(final Path path, final List<String> expectedLines) {
+  final List<String> actualLines = Files.readAllLines(path)
+  int line = 0
+  for (final String expectedLine : expectedLines) {
+    final String actualLine = actualLines.get(line)
+    if (expectedLine != actualLine) {
+      throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]")
+    }
+    line++
+  }
+}
+
+/*
+ * Check that all generated JARs have our NOTICE.txt and an appropriate
+ * LICENSE.txt in them. We configurate this in gradle but we'd like to
+ * be extra paranoid.
+ */
+subprojects { project ->
+  project.tasks.withType(Jar).whenTaskAdded { jarTask ->
+    final Task extract = project.task("extract${jarTask.name.capitalize()}", type: LoggedExec) {
+      dependsOn jarTask
+      ext.destination = project.buildDir.toPath().resolve("jar-extracted/${jarTask.name}")
+      commandLine "${->new File(rootProject.compilerJavaHome, 'bin/jar')}",
+        'xf', "${-> jarTask.outputs.files.singleFile}", 'META-INF/LICENSE.txt', 'META-INF/NOTICE.txt'
+      workingDir destination
+      doFirst {
+        project.delete(destination)
+        Files.createDirectories(destination)
+      }
+    }
+    final Task checkNotice = project.task("verify${jarTask.name.capitalize()}Notice") {
+      dependsOn extract
+      doLast {
+        final List<String> noticeLines = Files.readAllLines(project.noticeFile.toPath())
+        final Path noticePath = extract.destination.resolve('META-INF/NOTICE.txt')
+        assertLinesInFile(noticePath, noticeLines)
+      }
+    }
+    project.check.dependsOn checkNotice
+    final Task checkLicense = project.task("verify${jarTask.name.capitalize()}License") {
+      dependsOn extract
+      doLast {
+        final List<String> licenseLines = Files.readAllLines(project.licenseFile.toPath())
+        final Path licensePath = extract.destination.resolve('META-INF/LICENSE.txt')
+        assertLinesInFile(licensePath, licenseLines)
+      }
+    }
+    project.check.dependsOn checkLicense
+  }
+}
+
 /* Remove assemble on all qa projects because we don't need to publish
  * artifacts for them. */
 gradle.projectsEvaluated {


@@ -563,16 +563,17 @@ class ClusterFormationTasks {
     /** Adds a task to execute a command to help setup the cluster */
     static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) {
-        return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) {
-            workingDir node.cwd
+        return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) { Exec exec ->
+            exec.workingDir node.cwd
+            exec.environment 'JAVA_HOME', node.getJavaHome()
             if (Os.isFamily(Os.FAMILY_WINDOWS)) {
-                executable 'cmd'
-                args '/C', 'call'
+                exec.executable 'cmd'
+                exec.args '/C', 'call'
                 // On Windows the comma character is considered a parameter separator:
                 // argument are wrapped in an ExecArgWrapper that escapes commas
-                args execArgs.collect { a -> new EscapeCommaWrapper(arg: a) }
+                exec.args execArgs.collect { a -> new EscapeCommaWrapper(arg: a) }
             } else {
-                commandLine execArgs
+                exec.commandLine execArgs
             }
         }
     }


@@ -20,7 +20,6 @@
 import org.elasticsearch.gradle.precommit.PrecommitTasks
 
 apply plugin: 'elasticsearch.build'
-apply plugin: 'ru.vyarus.animalsniffer'
 apply plugin: 'nebula.maven-base-publish'
 apply plugin: 'nebula.maven-scm'
@@ -52,8 +51,6 @@ dependencies {
   testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
   testCompile "org.elasticsearch:securemock:${versions.securemock}"
   testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}"
-  testCompile "org.codehaus.mojo:animal-sniffer-annotations:1.15"
-  signature "org.codehaus.mojo.signature:java17:1.0@signature"
 }
 
 forbiddenApisMain {


@@ -24,7 +24,6 @@ import com.sun.net.httpserver.HttpHandler;
 import com.sun.net.httpserver.HttpsConfigurator;
 import com.sun.net.httpserver.HttpsServer;
 import org.apache.http.HttpHost;
-import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement;
 import org.elasticsearch.mocksocket.MockHttpServer;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -46,8 +45,6 @@ import static org.junit.Assert.fail;
 /**
  * Integration test to validate the builder builds a client with the correct configuration
  */
-//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
-@IgnoreJRERequirement
 public class RestClientBuilderIntegTests extends RestClientTestCase {
 
     private static HttpsServer httpsServer;
@@ -60,8 +57,6 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
         httpsServer.start();
     }
 
-    //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
-    @IgnoreJRERequirement
     private static class ResponseHandler implements HttpHandler {
         @Override
         public void handle(HttpExchange httpExchange) throws IOException {


@@ -23,7 +23,6 @@ import com.sun.net.httpserver.HttpExchange;
 import com.sun.net.httpserver.HttpHandler;
 import com.sun.net.httpserver.HttpServer;
 import org.apache.http.HttpHost;
-import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement;
 import org.elasticsearch.mocksocket.MockHttpServer;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -48,8 +47,6 @@ import static org.junit.Assert.assertTrue;
  * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}.
  * Works against real http servers, multiple hosts. Also tests failover by randomly shutting down hosts.
  */
-//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
-@IgnoreJRERequirement
 public class RestClientMultipleHostsIntegTests extends RestClientTestCase {
 
     private static HttpServer[] httpServers;
@@ -90,8 +87,6 @@ public class RestClientMultipleHostsIntegTests extends RestClientTestCase {
         return httpServer;
     }
 
-    //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
-    @IgnoreJRERequirement
     private static class ResponseHandler implements HttpHandler {
 
         private final int statusCode;


@@ -33,7 +33,6 @@ import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
 import org.apache.http.util.EntityUtils;
-import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement;
 import org.elasticsearch.mocksocket.MockHttpServer;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -64,8 +63,6 @@ import static org.junit.Assert.fail;
  * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}.
  * Works against a real http server, one single host.
  */
-//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
-@IgnoreJRERequirement
 public class RestClientSingleHostIntegTests extends RestClientTestCase {
 
     private static HttpServer httpServer;
@@ -91,8 +88,6 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         return httpServer;
     }
 
-    //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
-    @IgnoreJRERequirement
     private static class ResponseHandler implements HttpHandler {
 
         private final int statusCode;


@@ -201,8 +201,7 @@ subprojects {
       }
       final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
       final Path licensePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/LICENSE.txt")
-      final List<String> actualLines = Files.readAllLines(licensePath)
-      assertLinesInFile(licensePath, actualLines, licenseLines)
+      assertLinesInFile(licensePath, licenseLines)
     }
   }
   check.dependsOn checkLicense
@@ -213,8 +212,7 @@ subprojects {
     doLast {
      final List<String> noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch")
      final Path noticePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/NOTICE.txt")
-     final List<String> actualLines = Files.readAllLines(noticePath)
-     assertLinesInFile(noticePath, actualLines, noticeLines)
+     assertLinesInFile(noticePath, noticeLines)
     }
   }
   check.dependsOn checkNotice
@@ -304,4 +302,3 @@ configure(subprojects.findAll { it.name.contains('zip') }) {
     }
   }
 }


@@ -460,14 +460,3 @@ subprojects {
     return result
   }
 }
-
-static void assertLinesInFile(final Path path, final List<String> actualLines, final List<String> expectedLines) {
-  int line = 0
-  for (final String expectedLine : expectedLines) {
-    final String actualLine = actualLines.get(line)
-    if (expectedLine != actualLine) {
-      throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]")
-    }
-    line++
-  }
-}


@@ -415,8 +415,7 @@ subprojects {
           "License: " + expectedLicense)
       final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
       final List<String> expectedLines = header + licenseLines.collect { " " + it }
-      final List<String> actualLines = Files.readAllLines(copyrightPath)
-      assertLinesInFile(copyrightPath, actualLines, expectedLines)
+      assertLinesInFile(copyrightPath, expectedLines)
     }
   }
 } else {
@@ -432,8 +431,7 @@ subprojects {
     }
     final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
     final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt")
-    final List<String> actualLines = Files.readAllLines(licensePath)
-    assertLinesInFile(licensePath, actualLines, licenseLines)
+    assertLinesInFile(licensePath, licenseLines)
   }
 }
 }
@@ -444,8 +442,7 @@ subprojects {
   doLast {
     final List<String> noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch")
     final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt")
-    final List<String> actualLines = Files.readAllLines(noticePath)
-    assertLinesInFile(noticePath, actualLines, noticeLines)
+    assertLinesInFile(noticePath, noticeLines)
   }
 }
 check.dependsOn checkNotice


@@ -1,12 +1,12 @@
 [[painless-comments]]
 === Comments
 
-Painless supports both single-line and multi-line comments. Comments can be
-included anywhere within a script. Use the `//` token anywhere on a line to
-specify a single-line comment. All characters from the `//` token to the end
-of the line are ignored. Use an opening `/*` token and a closing `*/` token
-to specify a multi-line comment. Multi-line comments can start anywhere on a
-line, and all characters in between the `/*` token and `*/` token are ignored.
+Use the `//` token anywhere on a line to specify a single-line comment. All
+characters from the `//` token to the end of the line are ignored. Use an
+opening `/*` token and a closing `*/` token to specify a multi-line comment.
+Multi-line comments can start anywhere on a line, and all characters in between
+the `/*` token and `*/` token are ignored. Comments can be included anywhere
+within a script.
 
 *Grammar*
 [source,ANTLR4]
@@ -17,17 +17,17 @@ MULTI_LINE_COMMENT: '/*' .*? '*/';
 
 *Examples*
 
-Single-line comments.
+* Single-line comments.
++
 [source,Painless]
 ----
 // single-line comment
 
 int value; // single-line comment
 ----
-
-Multi-line comments.
++
+* Multi-line comments.
++
 [source,Painless]
 ----
 /* multi-


@@ -0,0 +1,29 @@
[[painless-identifiers]]
=== Identifiers
Specify identifiers to <<declaration, declare>>, <<assignment, assign>>, and
<<painless-operators, use>> variables, <<dot-operator, access fields>>, and
<<dot-operator, call methods>>. <<painless-keywords, Keywords>> and
<<painless-types, types>> cannot be used as identifiers.
*Grammar*
[source,ANTLR4]
----
ID: [_a-zA-Z] [_a-zA-Z-0-9]*;
----
*Examples*
* Variations of identifiers.
+
[source,Painless]
----
a
Z
id
list
list0
MAP25
_map25
Map_25
----


@@ -2,8 +2,8 @@
 === Keywords
 
 The keywords in the table below are reserved for built-in language
-features. These keywords cannot be used as <<identifiers, identifiers>> or
-<<painless-types, types>>.
+features. These keywords cannot be used as
+<<painless-identifiers, identifiers>> or <<painless-types, types>>.
 
 [cols="^1,^1,^1,^1,^1"]
 |====


@@ -23,6 +23,8 @@ include::painless-keywords.asciidoc[]
 include::painless-literals.asciidoc[]
 
+include::painless-identifiers.asciidoc[]
+
 include::painless-variables.asciidoc[]
 include::painless-types.asciidoc[]


@@ -24,18 +24,18 @@ HEX: '-'? '0' [xX] [0-9a-fA-F]+ [lL]?;
 *Examples*
 
-Integer literals.
+* Integer literals.
++
 [source,Painless]
 ----
-0 <1>
-0D <2>
-1234L <3>
--90f <4>
--022 <5>
-0xF2A <6>
+<1> 0
+<2> 0D
+<3> 1234L
+<4> -90f
+<5> -022
+<6> 0xF2A
 ----
++
 <1> `int 0`
 <2> `double 0.0`
 <3> `long 1234`
@@ -61,17 +61,17 @@ EXPONENT: ( [eE] [+\-]? [0-9]+ );
 *Examples*
 
-Floating point literals.
+* Floating point literals.
++
 [source,Painless]
 ----
-0.0 <1>
-1E6 <2>
-0.977777 <3>
--126.34 <4>
-89.9F <5>
+<1> 0.0
+<2> 1E6
+<3> 0.977777
+<4> -126.34
+<5> 89.9F
 ----
++
 <1> `double 0.0`
 <2> `double 1000000.0` in exponent notation
 <3> `double 0.977777`
@@ -81,12 +81,11 @@ Floating point literals.
 [[strings]]
 ==== Strings
 
-Use string literals to specify string values of the
-<<string-type, String type>> with either single-quotes or double-quotes.
-Use a `\"` token to include a double-quote as part of a double-quoted string
-literal. Use a `\'` token to include a single-quote as part of a single-quoted
-string literal. Use a `\\` token to include a backslash as part of any string
-literal.
+Use string literals to specify <<string-type, String>> values with
+either single-quotes or double-quotes. Use a `\"` token to include a
+double-quote as part of a double-quoted string literal. Use a `\'` token to
+include a single-quote as part of a single-quoted string literal. Use a `\\`
+token to include a backslash as part of any string literal.
 
 *Grammar*
 [source,ANTLR4]
@@ -97,22 +96,22 @@ STRING: ( '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' )
 *Examples*
 
-String literals using single-quotes.
+* String literals using single-quotes.
++
 [source,Painless]
 ----
 'single-quoted string literal'
-'\'single-quoted string with escaped single-quotes\' and backslash \\'
-'single-quoted string with non-escaped "double-quotes"'
+'\'single-quoted with escaped single-quotes\' and backslash \\'
+'single-quoted with non-escaped "double-quotes"'
 ----
-
-String literals using double-quotes.
++
+* String literals using double-quotes.
++
 [source,Painless]
 ----
 "double-quoted string literal"
-"\"double-quoted string with escaped double-quotes\" and backslash: \\"
-"double-quoted string with non-escaped 'single-quotes'"
+"\"double-quoted with escaped double-quotes\" and backslash: \\"
+"double-quoted with non-escaped 'single-quotes'"
 ----
 
 [[characters]]
@@ -126,16 +125,16 @@ or an error will occur.
 *Examples*
 
-Casting string literals into <<primitive-types, char>> values.
+* Casting string literals into <<primitive-types, char>> values.
++
 [source,Painless]
 ----
 (char)"C"
 (char)'c'
 ----
++
-Casting a <<string-type, String>> value into a <<primitive-types, char>> value.
+* Casting a <<string-type, String>> value into a <<primitive-types, char>> value.
++
 [source,Painless]
 ----
 String s = "s";


@@ -704,6 +704,7 @@ e = ~d; // sets e the negation of d
 The cast operator can be used to explicitly convert one type to another. See casting [MARK] for more information.
 
+[[constructor-call]]
 ==== Constructor Call
 
 A constructor call is a special type of method call [MARK] used to allocate a reference type instance using the new operator. The format is the new operator followed by a type, an opening parenthesis, arguments if any, and a closing parenthesis. Arguments are a series of zero-to-many expressions delimited by commas. Auto-boxing and auto-unboxing will be applied automatically for arguments passed into a constructor call. See boxing and unboxing [MARK] for more information on this topic. Constructor argument types can always be resolved at run-time; if appropriate type conversions (casting) cannot be applied an error will occur. Once a reference type instance has been allocated, its members may be used as part of other expressions.


@@ -1,122 +1,130 @@
 [[painless-variables]]
 === Variables
 
-Variables in Painless must be declared and can be
-statically or <<dynamic-types, dynamically typed>>.
-
-[[identifiers]]
-==== Identifiers
-
-Specify variable identifiers using the following grammar. Variable identifiers
-must start with a letter or underscore. You cannot use
-<<painless-keywords, keywords>> or <<painless-types, types>> as identifiers.
-
-*Grammar:*
-[source,ANTLR4]
-----
-ID: [_a-zA-Z] [_a-zA-Z-0-9]*;
-----
-
-*Examples:*
-[source,Java]
-----
-a
-Z
-id
-list
-list0
-MAP25
-_map25
-----
+<<declaration, Declare>> variables to <<assignment, assign>> values for
+<<painless-operators, use>> in expressions. Specify variables as a
+<<primitive-types, primitive type>>, <<reference-types, reference type>>, or
+<<dynamic-types, dynamic type>>. Variable operations follow the structure of a
+standard JVM in relation to instruction execution and memory usage.
 
 [[declaration]]
 ==== Declaration
 
-Variables must be declared before you use them. The format is `type-name
-identifier-name`. To declare multiple variables of the same type, specify a
-comma-separated list of identifier names. You can immediately assign a value to
-a variable when you declare it.
+Declare variables before use with the format of <<painless-types, type>>
+<<painless-identifiers, identifier>>. Specify a comma-separated list of
+<<painless-identifiers, identifiers>> following the <<painless-types, type>>
+to declare multiple variables in a single statement. Use an
+<<assignment, assignment>> statement combined with a declaration statement to
+immediately assign a value to a variable. Variables not immediately assigned a
+value will have a default value assigned implicitly based on the
+<<painless-types, type>>.
 
-*Grammar:*
+*Grammar*
 [source,ANTLR4]
 ----
+declaration : type ID assignment? (',' ID assignment?)*;
 type: ID ('[' ']')*;
-declaration : type ID (',' ID)*;
+assignment: '=' expression;
 ----
 
-*Examples:*
-[source,Java]
-----
-int x; // Declare a variable with type int and id x
-List y; // Declare a variable with type List and id y
-int x, y, z; // Declare variables with type int and ids x, y, and z
-def[] d; // Declare the variable d with type def[]
-int i = 10; // Declare the int variable i and set it to the int literal 10
-----
-
-[[variable-assignment]]
+*Examples*
+
+* Different variations of variable declaration.
++
+[source,Painless]
+----
+<1> int x;
+<2> List y;
+<3> int x, y, z;
+<4> def[] d;
+<5> int i = 10;
+----
++
+<1> declare a variable of type `int` and identifier `x`
+<2> declare a variable of type `List` and identifier `y`
+<3> declare three variables of type `int` and identifiers `x`, `y`, `z`
+<4> declare a variable of type `def[]` and identifier `d`
+<5> declare a variable of type `int` and identifier `i`;
+    assign the integer literal `10` to `i`
+
+[[assignment]]
 ==== Assignment
 
-Use the equals operator (`=`) to assign a value to a variable. The format is
-`identifier-name = value`. Any value expression can be assigned to any variable
-as long as the types match or the expression's type can be implicitly cast to
-the variable's type. An error occurs if the types do not match.
+Use the `equals` operator (`=`) to assign a value to a variable. Any expression
+that produces a value can be assigned to any variable as long as the
+<<painless-types, types>> are the same or the resultant
+<<painless-types, type>> can be implicitly <<painless-casting, cast>> to
+the variable <<painless-types, type>>. Otherwise, an error will occur.
+<<reference-types, Reference type>> values are shallow-copied when assigned.
 
-*Grammar:*
+*Grammar*
 [source,ANTLR4]
 ----
 assignment: ID '=' expression
 ----
 
-*Examples:*
-
-Assigning a literal of the appropriate type directly to a declared variable.
-[source,Java]
+*Examples*
+
+* Variable assignment with an <<integers, integer literal>>.
++
+[source,Painless]
 ----
-int i;   // Declare an int i
-i = 10;  // Set the int i to the int literal 10
+<1> int i;
+<2> i = 10;
 ----
-
-Immediately assigning a value when declaring a variable.
-
-[source,Java]
++
+<1> declare `int i`
+<2> assign `10` to `i`
++
+* <<declaration, Declaration>> combined with immediate variable assignment.
++
+[source,Painless]
 ----
-int i = 10; // Declare the int variable i and set it the int literal 1
-double j = 2.0; // Declare the double variable j and set it to the double
-                // literal 2.0
+<1> int i = 10;
+<2> double j = 2.0;
 ----
-
-Assigning a variable of one primitive type to another variable of the same
-type.
-
-[source,Java]
++
+<1> declare `int i`; assign `10` to `i`
+<2> declare `double j`; assign `2.0` to `j`
++
+* Assignment of one variable to another using
+<<primitive-types, primitive types>>.
++
+[source,Painless]
 ----
-int i = 10; // Declare the int variable i and set it to the int literal 10
-int j = i;  // Declare the int variable j and set it to the int variable i
+<1> int i = 10;
+<2> int j = i;
 ----
-
-Assigning a reference type to a new heap allocation with the `new` operator.
-
-[source,Java]
++
+<1> declare `int i`; assign `10` to `i`
+<2> declare `int j`; assign `j` to `i`
++
+* Assignment with <<reference-types, reference types>> using the
+<<constructor-call, new operator>>.
++
+[source,Painless]
 ----
-ArrayList l = new ArrayList();  // Declare an ArrayList variable l and set it
-                                // to a newly allocated ArrayList
-Map m = new HashMap(); // Declare a Map variable m and set it
-                       // to a newly allocated HashMap
+<1> ArrayList l = new ArrayList();
+<2> Map m = new HashMap();
 ----
-
-Assigning a variable of one reference type to another variable of the same type.
-
-[source,Java]
++
+<1> declare `ArrayList l`; assign a newly-allocated `Arraylist` to `l`
+<2> declare `Map m`; assign a newly-allocated `HashMap` to `m`
+    with an implicit cast to `Map`
++
+* Assignment of one variable to another using
+<<reference-types, reference types>>.
++
+[source,Painless]
 ----
-List l = new ArrayList(); // Declare List variable l and set it a newly
-                          // allocated ArrayList
-List k = l;  // Declare List variable k and set it to the
-             // value of the List variable l
-List m;                   // Declare List variable m and set it the
-                          // default value null
-m = k;                    // Set the value of List variable m to the value
-                          // of List variable k
+<1> List l = new ArrayList();
+<2> List k = l;
+<3> List m;
+<4> m = k;
 ----
++
+<1> declare `List l`; assign a newly-allocated `Arraylist` to `l`
+    with an implicit cast to `List`
+<2> declare `List k`; assign a shallow-copy of `l` to `k`
+<3> declare `List m`;
+<4> assign a shallow-copy of `k` to `m`


@@ -53,6 +53,7 @@ A number of analysis plugins have been contributed by our community:
 * https://github.com/duydo/elasticsearch-analysis-vietnamese[Vietnamese Analysis Plugin] (by Duy Do)
 * https://github.com/ofir123/elasticsearch-network-analysis[Network Addresses Analysis Plugin] (by Ofir123)
 * https://github.com/medcl/elasticsearch-analysis-string2int[String2Integer Analysis Plugin] (by Medcl)
+* https://github.com/ZarHenry96/elasticsearch-dandelion-plugin[Dandelion Analysis Plugin] (by ZarHenry96)
 
 include::analysis-icu.asciidoc[]


@@ -81,7 +81,7 @@ Defaults to `true`.
 <<mapping-index,`index`>>::
 
-    Should the field be searchable? Accepts `not_analyzed` (default) and `no`.
+    Should the field be searchable? Accepts `true` (default) and `false`.
 
 <<null-value,`null_value`>>::
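As a quick illustration of the corrected wording above, turning indexing off for a field now uses the boolean form. The sketch below is illustrative only; the index, type, and field names are made up and not part of the commit:

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "_doc": {
      "properties": {
        "session_id": {
          "type": "keyword",
          "index": false
        }
      }
    }
  }
}
--------------------------------------------------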


@@ -0,0 +1,12 @@
[[breaking-changes-6.4]]
== Breaking changes in 6.4
[[breaking_64_api_changes]]
=== API changes
==== Field capabilities request format
In the past, `fields` could be provided either as a parameter, or as part of the request
body. Specifying `fields` in the request body is now deprecated, and instead they should
always be supplied through a request parameter. In 7.0.0, the field capabilities API will
not accept `fields` supplied in the request body.
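For reference, the parameter form that replaces the request body looks along these lines (a sketch mirroring the field capabilities docs; the index and field names are illustrative):

[source,js]
--------------------------------------------------
GET twitter/_field_caps?fields=rating,title
--------------------------------------------------

The response is the same as for the deprecated body form.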


@@ -41,7 +41,7 @@ addressable from the outside. Defaults to the actual port assigned via
 |`transport.tcp.connect_timeout` |The socket connect timeout setting (in
 time setting format). Defaults to `30s`.
 
-|`transport.tcp.compress` |Set to `true` to enable compression (LZF)
+|`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`)
 between all nodes. Defaults to `false`.
 
 |`transport.ping_schedule` | Schedule a regular ping message to ensure that connections are kept alive. Defaults to `5s` in the transport client and `-1` (disabled) elsewhere.


@@ -20,7 +20,7 @@ GET twitter/_field_caps?fields=rating
 // CONSOLE
 // TEST[setup:twitter]
 
-Alternatively the `fields` option can also be defined in the request body:
+Alternatively the `fields` option can also be defined in the request body. deprecated[6.4.0, Please use a request parameter instead.]
 
 [source,js]
 --------------------------------------------------
@@ -30,6 +30,7 @@ POST _field_caps
 }
 --------------------------------------------------
 // CONSOLE
+// TEST[warning:Specifying a request body is deprecated -- the [fields] request parameter should be used instead.]
 
 This is equivalent to the previous request.


@@ -191,8 +191,8 @@ setup() {
     assert_file_not_exist "/usr/share/elasticsearch"
 
-    assert_file_not_exist "/usr/share/doc/elasticsearch"
-    assert_file_not_exist "/usr/share/doc/elasticsearch/copyright"
+    assert_file_not_exist "/usr/share/doc/elasticsearch-oss"
+    assert_file_not_exist "/usr/share/doc/elasticsearch-oss/copyright"
 }
 
 @test "[DEB] package has been completly removed" {


@@ -116,9 +116,10 @@ verify_package_installation() {
         # Env file
         assert_file "/etc/default/elasticsearch" f root elasticsearch 660
 
-        # Doc files
-        assert_file "/usr/share/doc/elasticsearch" d root root 755
-        assert_file "/usr/share/doc/elasticsearch/copyright" f root root 644
+        # Machine-readable debian/copyright file
+        local copyrightDir=$(readlink -f /usr/share/doc/$PACKAGE_NAME)
+        assert_file $copyrightDir d root root 755
+        assert_file "$copyrightDir/copyright" f root root 644
     fi
 
     if is_rpm; then


@@ -254,6 +254,7 @@ clean_before_test() {
         "/etc/sysconfig/elasticsearch" \
         "/var/run/elasticsearch" \
         "/usr/share/doc/elasticsearch" \
+        "/usr/share/doc/elasticsearch-oss" \
         "/tmp/elasticsearch" \
         "/usr/lib/systemd/system/elasticsearch.conf" \
         "/usr/lib/tmpfiles.d/elasticsearch.conf" \


@@ -1310,8 +1310,10 @@ public class InternalEngine extends Engine {
         final long seqNo = noOp.seqNo();
         try {
             final NoOpResult noOpResult = new NoOpResult(noOp.seqNo());
-            final Translog.Location location = translog.add(new Translog.NoOp(noOp.seqNo(), noOp.primaryTerm(), noOp.reason()));
-            noOpResult.setTranslogLocation(location);
+            if (noOp.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) {
+                final Translog.Location location = translog.add(new Translog.NoOp(noOp.seqNo(), noOp.primaryTerm(), noOp.reason()));
+                noOpResult.setTranslogLocation(location);
+            }
             noOpResult.setTook(System.nanoTime() - noOp.startTime());
             noOpResult.freeze();
             return noOpResult;


@@ -57,11 +57,16 @@ public class RestFieldCapabilitiesAction extends BaseRestHandler {
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request,
                                               final NodeClient client) throws IOException {
-        if (request.hasContentOrSourceParam() && request.hasParam("fields")) {
+        if (request.hasContentOrSourceParam()) {
+            deprecationLogger.deprecated("Specifying a request body is deprecated -- the" +
+                " [fields] request parameter should be used instead.");
+            if (request.hasParam("fields")) {
                 throw new IllegalArgumentException("can't specify a request body and [fields]" +
                     " request parameter, either specify a request body or the" +
                     " [fields] request parameter");
             }
+        }
 
         final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
         final FieldCapabilitiesRequest fieldRequest;
         if (request.hasContentOrSourceParam()) {


@@ -3609,15 +3609,13 @@ public class InternalEngineTests extends EngineTestCase {
             noOpEngine.recoverFromTranslog();
             final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get());
             final String reason = randomAlphaOfLength(16);
-            noOpEngine.noOp(
-                new Engine.NoOp(
-                    maxSeqNo + 1,
-                    primaryTerm.get(),
-                    randomFrom(PRIMARY, REPLICA, PEER_RECOVERY, LOCAL_TRANSLOG_RECOVERY),
-                    System.nanoTime(),
-                    reason));
+            noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason));
             assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 1)));
-            assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(1 + gapsFilled));
+            assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled));
+            noOpEngine.noOp(
+                new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(), randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason));
+            assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 2)));
+            assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1));
             // skip to the op that we added to the translog
             Translog.Operation op;
             Translog.Operation last = null;
@@ -3629,7 +3627,7 @@ public class InternalEngineTests extends EngineTestCase {
             assertNotNull(last);
             assertThat(last, instanceOf(Translog.NoOp.class));
             final Translog.NoOp noOp = (Translog.NoOp) last;
-            assertThat(noOp.seqNo(), equalTo((long) (maxSeqNo + 1)));
+            assertThat(noOp.seqNo(), equalTo((long) (maxSeqNo + 2)));
             assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get()));
             assertThat(noOp.reason(), equalTo(reason));
         } finally {


@@ -1669,6 +1669,16 @@ public class IndexShardTests extends IndexShardTestCase {
         IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());
         assertDocCount(newShard, 1);
         assertDocCount(shard, 2);
+
+        for (int i = 0; i < 2; i++) {
+            newShard = reinitShard(newShard, ShardRoutingHelper.initWithSameId(primaryShardRouting,
+                RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE));
+            newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
+            assertTrue(newShard.recoverFromStore());
+            try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) {
+                assertThat(snapshot.totalOperations(), equalTo(2));
+            }
+        }
         closeShards(newShard, shard);
     }


@@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestFieldCapabilitiesAction;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.usage.UsageService;
import org.junit.Before;
import java.io.IOException;
import java.util.Collections;
import static org.mockito.Mockito.mock;
public class RestFieldCapabilitiesActionTests extends ESTestCase {
private RestFieldCapabilitiesAction action;
@Before
public void setUpAction() {
action = new RestFieldCapabilitiesAction(Settings.EMPTY, mock(RestController.class));
}
public void testRequestBodyIsDeprecated() throws IOException {
String content = "{ \"fields\": [\"title\"] }";
RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
.withPath("/_field_caps")
.withContent(new BytesArray(content), XContentType.JSON)
.build();
action.prepareRequest(request, mock(NodeClient.class));
assertWarnings("Specifying a request body is deprecated -- the" +
" [fields] request parameter should be used instead.");
}
}


@@ -23,10 +23,8 @@ subprojects {
   ext.licenseName = 'Elastic License'
   ext.licenseUrl = "https://raw.githubusercontent.com/elastic/elasticsearch/${licenseCommit}/licenses/ELASTIC-LICENSE.txt"
 
-  plugins.withType(BuildPlugin).whenPluginAdded {
-    project.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt')
-    project.noticeFile = xpackRootProject.file('NOTICE.txt')
-  }
+  project.ext.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt')
+  project.ext.noticeFile = xpackRootProject.file('NOTICE.txt')
 
   plugins.withType(PluginBuildPlugin).whenPluginAdded {
     project.esplugin.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt')


@@ -31,6 +31,20 @@ configurations {
 task testJar(type: Jar) {
   appendix 'test'
   from sourceSets.test.output
+  /*
+   * Stick the license and notice file in the jar. This isn't strictly
+   * needed because we don't publish it but it makes our super-paranoid
+   * tests happy.
+   */
+  metaInf {
+    from(project.licenseFile.parent) {
+      include project.licenseFile.name
+      rename { 'LICENSE.txt' }
+    }
+    from(project.noticeFile.parent) {
+      include project.noticeFile.name
+    }
+  }
 }
 
 artifacts {
   testArtifacts testJar


@@ -45,12 +45,24 @@ jar {
 }
 
 dependencies {
+  // Eclipse doesn't know how to deal with these bundled deependencies so make them compile
+  // dependencies if we are running in Eclipse
+  if (isEclipse) {
+    compile (xpackProject('plugin:sql:sql-shared-client')) {
+      transitive = false
+    }
+    compile (xpackProject('plugin:sql:sql-proto')) {
+      transitive = false
+    }
+  } else {
     bundled (xpackProject('plugin:sql:sql-shared-client')) {
       transitive = false
     }
     bundled (xpackProject('plugin:sql:sql-proto')) {
       transitive = false
     }
+  }
   compile (project(':server')) {
     transitive = false
   }


@@ -45,9 +45,12 @@ dependencyLicenses {
  * can be easilly shipped around and used.
  */
 jar {
-  from {
+  from({
     configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
     configurations.runtime.collect { it.isDirectory() ? it : zipTree(it) }
+  }) {
+    // We don't need the META-INF from the things we bundle. For now.
+    exclude 'META-INF/*'
   }
   manifest {
     attributes 'Main-Class': 'org.elasticsearch.xpack.sql.cli.Cli'


@@ -1,14 +0,0 @@
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: xpackModule('core'), configuration: 'runtime')
testCompile project(path: xpackModule('watcher'), configuration: 'runtime')
testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
}
integTestCluster {
setting 'xpack.security.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
}


@@ -1,75 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.is;
/** Runs rest tests against external cluster */
public class WatcherWithMustacheIT extends ESClientYamlSuiteTestCase {
public WatcherWithMustacheIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Before
public void startWatcher() throws Exception {
final List<String> watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES);
assertBusy(() -> {
try {
getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap());
for (String template : watcherTemplates) {
ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template",
singletonMap("name", template), emptyList(), emptyMap());
assertThat(templateExistsResponse.getStatusCode(), is(200));
}
ClientYamlTestResponse response =
getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap());
String state = (String) response.evaluate("stats.0.watcher_state");
assertThat(state, is("started"));
} catch (IOException e) {
throw new AssertionError(e);
}
});
}
@After
public void stopWatcher() throws Exception {
assertBusy(() -> {
try {
getAdminExecutionContext().callApi("xpack.watcher.stop", emptyMap(), emptyList(), emptyMap());
ClientYamlTestResponse response =
getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap());
String state = (String) response.evaluate("stats.0.watcher_state");
assertThat(state, is("stopped"));
} catch (IOException e) {
throw new AssertionError(e);
}
});
}
}


@@ -1,13 +0,0 @@
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: xpackModule('core'), configuration: 'runtime')
testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
}
integTestCluster {
setting 'xpack.security.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
}


@@ -1,74 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.is;
/** Runs rest tests against external cluster */
public class WatcherWithPainlessIT extends ESClientYamlSuiteTestCase {
public WatcherWithPainlessIT(ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Before
public void startWatcher() throws Exception {
final List<String> watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES);
assertBusy(() -> {
try {
getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap());
for (String template : watcherTemplates) {
ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template",
singletonMap("name", template), emptyList(), emptyMap());
assertThat(templateExistsResponse.getStatusCode(), is(200));
}
ClientYamlTestResponse response =
getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap());
String state = (String) response.evaluate("stats.0.watcher_state");
assertThat(state, is("started"));
} catch (IOException e) {
throw new AssertionError(e);
}
});
}
@After
public void stopWatcher() throws Exception {
assertBusy(() -> {
try {
getAdminExecutionContext().callApi("xpack.watcher.stop", emptyMap(), emptyList(), emptyMap());
ClientYamlTestResponse response =
getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap());
String state = (String) response.evaluate("stats.0.watcher_state");
assertThat(state, is("stopped"));
} catch (IOException e) {
throw new AssertionError(e);
}
});
}
}


@@ -9,6 +9,8 @@ apply plugin: 'elasticsearch.rest-test'
 dependencies {
   testCompile project(path: xpackModule('core'), configuration: 'runtime')
   testCompile project(path: xpackModule('watcher'), configuration: 'runtime')
+  testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
+  testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
 }
 
 integTestCluster {


@@ -0,0 +1,95 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField;
import org.junit.After;
import org.junit.Before;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.is;
/** Runs rest tests against external cluster */
public class WatcherRestIT extends ESClientYamlSuiteTestCase {
public WatcherRestIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Before
public void startWatcher() throws Exception {
assertBusy(() -> {
ClientYamlTestResponse response =
getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap());
String state = (String) response.evaluate("stats.0.watcher_state");
switch (state) {
case "stopped":
ClientYamlTestResponse startResponse =
getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap());
boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged");
assertThat(isAcknowledged, is(true));
break;
case "stopping":
throw new AssertionError("waiting until stopping state reached stopped state to start again");
case "starting":
throw new AssertionError("waiting until starting state reached started state");
case "started":
// all good here, we are done
break;
default:
throw new AssertionError("unknown state[" + state + "]");
}
});
assertBusy(() -> {
for (String template : WatcherIndexTemplateRegistryField.TEMPLATE_NAMES) {
ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template",
singletonMap("name", template), emptyList(), emptyMap());
assertThat(templateExistsResponse.getStatusCode(), is(200));
}
});
}
@After
public void stopWatcher() throws Exception {
assertBusy(() -> {
ClientYamlTestResponse response =
getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap());
String state = (String) response.evaluate("stats.0.watcher_state");
switch (state) {
case "stopped":
// all good here, we are done
break;
case "stopping":
throw new AssertionError("waiting until stopping state reached stopped state");
case "starting":
throw new AssertionError("waiting until starting state reached started state to stop");
case "started":
ClientYamlTestResponse stopResponse =
getAdminExecutionContext().callApi("xpack.watcher.stop", emptyMap(), emptyList(), emptyMap());
boolean isAcknowledged = (boolean) stopResponse.evaluate("acknowledged");
assertThat(isAcknowledged, is(true));
break;
default:
throw new AssertionError("unknown state[" + state + "]");
}
});
}
}


@@ -30,7 +30,7 @@ import java.util.Map;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 
-public class WatcherTemplateTests extends ESTestCase {
+public class WatcherTemplateIT extends ESTestCase {
 
     private TextTemplateEngine textTemplateEngine;