/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.types.Path

esplugin {
  description 'An easy, safe and fast scripting language for Elasticsearch'
  classname 'org.elasticsearch.painless.PainlessPlugin'
}

dependencies {
  compile 'org.antlr:antlr4-runtime:4.5.1-1'
  compile 'org.ow2.asm:asm-debug-all:5.1'
}
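
// The asm-debug-all jar is checked against the shared 'asm' license file
// rather than one named after the full artifact.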
dependencyLicenses {
  mapping from: /asm-.*/, to: 'asm'
}
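
// The script compilation circuit breaker defaults to 15 compilations per
// minute, which is far too low for the REST tests, so raise it well above
// what they need.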
integTestCluster {
  setting 'script.max_compilations_per_minute', '1000'
}

/* Build Javadoc for the Java classes in Painless's public API that are in the
 * Painless plugin */
task apiJavadoc(type: Javadoc) {
  source = sourceSets.main.allJava
  include '**/org/elasticsearch/painless/api/'
  destinationDir = new File(docsDir, 'apiJavadoc')
}
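
// Bundle the API Javadoc into its own jar and hook it into the regular
// assemble task so it is always built.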
task apiJavadocJar(type: Jar) {
  classifier = 'apiJavadoc'
  from apiJavadoc
}

assemble.dependsOn apiJavadocJar

// Reference documentation for Painless's public API.
task generatePainlessApi(type: JavaExec) {
  main = 'org.elasticsearch.painless.PainlessDocGenerator'
  classpath = sourceSets.test.runtimeClasspath
  args file('../../docs/reference/painless-api-reference')
}

/**********************************************
 *            Parser regeneration             *
 **********************************************/
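
// The antlr4-runtime compile dependency above is enough to run the generated
// code; regenerating the lexer and parser needs the full ANTLR tool, which is
// kept in its own 'regenerate' configuration.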
configurations {
  regenerate
}

dependencies {
  regenerate 'org.antlr:antlr4:4.5.1-1'
}

String grammarPath = 'src/main/antlr'
String outputPath = 'src/main/java/org/elasticsearch/painless/antlr'
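
// Throw away previously generated token files and lexer/parser sources
// before regenerating them.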
task cleanGenerated(type: Delete) {
  delete fileTree(grammarPath) {
    include '*.tokens'
  }
  delete fileTree(outputPath) {
    include 'Painless*.java'
  }
}
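
// Run the ANTLR tool over the lexer grammar. The encoding and locale are
// pinned so the generated sources come out the same on every machine.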
task regenLexer(type: JavaExec) {
  dependsOn cleanGenerated
  main = 'org.antlr.v4.Tool'
  classpath = configurations.regenerate
  systemProperty 'file.encoding', 'UTF-8'
  systemProperty 'user.language', 'en'
  systemProperty 'user.country', 'US'
  systemProperty 'user.variant', ''
  args '-Werror',
       '-package', 'org.elasticsearch.painless.antlr',
       '-o', outputPath,
       "${file(grammarPath)}/PainlessLexer.g4"
}

task regenParser(type: JavaExec) {
  dependsOn regenLexer
  main = 'org.antlr.v4.Tool'
  classpath = configurations.regenerate
  systemProperty 'file.encoding', 'UTF-8'
  systemProperty 'user.language', 'en'
  systemProperty 'user.country', 'US'
  systemProperty 'user.variant', ''
  args '-Werror',
       '-package', 'org.elasticsearch.painless.antlr',
       '-no-listener',
       '-visitor',
       // '-Xlog',
       '-o', outputPath,
       "${file(grammarPath)}/PainlessParser.g4"
}
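
// `regen` is the entry point: it regenerates the lexer and parser and then
// post-processes the generated sources to match the project's conventions.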
task regen {
  dependsOn regenParser
  doLast {
    // move token files to the grammar directory for use with IDEs
    ant.move(file: "${outputPath}/PainlessLexer.tokens", toDir: grammarPath)
    ant.move(file: "${outputPath}/PainlessParser.tokens", toDir: grammarPath)
    // make the generated classes package private
    ant.replaceregexp(match: 'public ((interface|class) \\QPainless\\E\\w+)',
                      replace: '\\1',
                      encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'Painless*.java')
    }
    // make the lexer abstract
    ant.replaceregexp(match: '(class \\QPainless\\ELexer)',
                      replace: 'abstract \\1',
                      encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'PainlessLexer.java')
    }
    // nuke timestamps/filenames in generated files
    ant.replaceregexp(match: '\\Q// Generated from \\E.*',
                      replace: '\\/\\/ ANTLR GENERATED CODE: DO NOT EDIT',
                      encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'Painless*.java')
    }
    // remove tabs in antlr generated files
    ant.replaceregexp(match: '\t', flags: 'g', replace: ' ', encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'Painless*.java')
    }
    // fix line endings
    ant.fixcrlf(srcdir: outputPath) {
      patternset(includes: 'Painless*.java')
    }
  }
}