Merge branch 'master' into index-lifecycle
Commit c7a2c357a3
@@ -782,9 +782,12 @@ class BuildPlugin implements Plugin<Project> {
    }
  }

- // TODO: remove this once joda time is removed from scriptin in 7.0
+ // TODO: remove this once joda time is removed from scripting in 7.0
  systemProperty 'es.scripting.use_java_time', 'true'

+ // TODO: remove this once ctx isn't added to update script params in 7.0
+ systemProperty 'es.scripting.update.ctx_in_params', 'false'

  // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
  if (project.inFipsJvm) {
    systemProperty 'javax.net.ssl.trustStorePassword', 'password'
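The flags above are ordinary JVM system properties forwarded to test JVMs. As a hedged illustration of how code on the receiving side can read them (the `ScriptingFlags` class and method names are hypothetical, not part of this change; Elasticsearch's own `Booleans.parseBoolean` is stricter than `Boolean.parseBoolean`):

[source,Java]
----
public class ScriptingFlags {
    // Hypothetical helper: reads the toggles that BuildPlugin passes to test JVMs.
    public static boolean useJavaTime() {
        return Boolean.parseBoolean(System.getProperty("es.scripting.use_java_time", "false"));
    }

    public static boolean ctxInParams() {
        // UpdateHelper (later in this diff) defaults this flag to true.
        return Boolean.parseBoolean(System.getProperty("es.scripting.update.ctx_in_params", "true"));
    }
}
----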
@@ -686,6 +686,7 @@
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionScriptEngine.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]StoredExpressionTests.java" checks="LineLength" />
+<suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
<suppress files="modules[/\\]reindex[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]reindex[/\\]TransportUpdateByQueryAction.java" checks="LineLength" />
<suppress files="plugins[/\\]analysis-icu[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]IcuCollationTokenFilterFactory.java" checks="LineLength" />
<suppress files="plugins[/\\]analysis-icu[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]IcuFoldingTokenFilterFactory.java" checks="LineLength" />
@@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
-lucene = 7.5.0-snapshot-608f0277b0
+lucene = 7.5.0-snapshot-13b9e28f9d

# optional dependencies
spatial4j = 0.7
@@ -30,6 +30,14 @@ apply plugin: 'com.github.johnrengelman.shadow'
group = 'org.elasticsearch.client'
archivesBaseName = 'elasticsearch-rest-high-level-client'

publishing {
  publications {
    nebula {
      artifactId = archivesBaseName
    }
  }
}

//we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions)
Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE)
test.dependsOn(copyRestSpec)
@@ -40,6 +40,7 @@ integTestCluster {

  // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
  systemProperty 'es.scripting.use_java_time', 'false'
+ systemProperty 'es.scripting.update.ctx_in_params', 'false'
}

// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed
@@ -14,6 +14,8 @@ specialized code may define new ways to use a Painless script.
|====
| Name | Painless Documentation
| Elasticsearch Documentation
+| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
+| {ref}/script-processor.html[Elasticsearch Documentation]
| Update | <<painless-update-context, Painless Documentation>>
| {ref}/docs-update.html[Elasticsearch Documentation]
| Update by query | <<painless-update-by-query-context, Painless Documentation>>

@@ -44,12 +46,12 @@ specialized code may define new ways to use a Painless script.
| {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| Bucket aggregation | <<painless-bucket-agg-context, Painless Documentation>>
| {ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation]
-| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
-| {ref}/script-processor.html[Elasticsearch Documentation]
| Watcher condition | <<painless-watcher-condition-context, Painless Documentation>>
| {xpack-ref}/condition-script.html[Elasticsearch Documentation]
| Watcher transform | <<painless-watcher-transform-context, Painless Documentation>>
| {xpack-ref}/transform-script.html[Elasticsearch Documentation]
|====

+include::painless-contexts/painless-context-examples.asciidoc[]

include::painless-contexts/index.asciidoc[]
@@ -1,3 +1,5 @@
+include::painless-ingest-processor-context.asciidoc[]
+
include::painless-update-context.asciidoc[]

include::painless-update-by-query-context.asciidoc[]

@@ -28,8 +30,6 @@ include::painless-metric-agg-reduce-context.asciidoc[]

include::painless-bucket-agg-context.asciidoc[]

-include::painless-ingest-processor-context.asciidoc[]
-
include::painless-watcher-condition-context.asciidoc[]

include::painless-watcher-transform-context.asciidoc[]
@@ -0,0 +1,80 @@
[[painless-context-examples]]
=== Context examples

To run the examples, index the sample seat data into Elasticsearch. The examples
must be run sequentially to work correctly.

. Download the
https://download.elastic.co/demos/painless/contexts/seats.json[seat data]. This
data set contains booking information for a collection of plays. Each document
represents a single seat for a play at a particular theater on a specific date
and time.
+
Each document contains the following fields:
+
`theatre` ({ref}/keyword.html[`keyword`])::
        The name of the theater the play is in.
`play` ({ref}/text.html[`text`])::
        The name of the play.
`actors` ({ref}/text.html[`text`])::
        A list of actors in the play.
`row` ({ref}/number.html[`integer`])::
        The row of the seat.
`number` ({ref}/number.html[`integer`])::
        The number of the seat within a row.
`cost` ({ref}/number.html[`double`])::
        The cost of the ticket for the seat.
`sold` ({ref}/boolean.html[`boolean`])::
        Whether or not the seat is sold.
`datetime` ({ref}/date.html[`date`])::
        The date and time of the play as a date object.
`date` ({ref}/keyword.html[`keyword`])::
        The date of the play as a keyword.
`time` ({ref}/keyword.html[`keyword`])::
        The time of the play as a keyword.

. {defguide}/running-elasticsearch.html[Start] Elasticsearch. Note these
examples assume Elasticsearch and Kibana are running locally. To use the Console
editor with a remote Kibana instance, click the settings icon and enter the
Console URL. To submit a cURL request to a remote Elasticsearch instance, edit
the request URL.

. Create {ref}/mapping.html[mappings] for the sample data:
+
[source,js]
----
PUT /seats
{
  "mappings": {
    "seat": {
      "properties": {
        "theatre":  { "type": "keyword" },
        "play":     { "type": "text" },
        "actors":   { "type": "text" },
        "row":      { "type": "integer" },
        "number":   { "type": "integer" },
        "cost":     { "type": "double" },
        "sold":     { "type": "boolean" },
        "datetime": { "type": "date" },
        "date":     { "type": "keyword" },
        "time":     { "type": "keyword" }
      }
    }
  }
}
----
+
// CONSOLE

. Run the <<painless-ingest-processor-context, ingest processor context>>
example. This sets up a script ingest processor used on each document as the
seat data is indexed.

. Index the seat data:
+
[source,js]
----
curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/<local-file-path>/seats.json"
----
// NOTCONSOLE
@@ -27,7 +27,7 @@ to modify documents upon insertion.
{ref}/mapping-type-field.html[`ctx['_type']`]::
        Modify this to change the type for the current document.

-`ctx` (`Map`, read-only)::
+`ctx` (`Map`)::
        Modify the values in the `Map/List` structure to add, modify, or delete
        the fields of a document.
@@ -38,4 +38,158 @@ void::

*API*

The standard <<painless-api-reference, Painless API>> is available.

*Example*

To run this example, first follow the steps in
<<painless-context-examples, context examples>>.

The seat data contains:

* A date in the format `YYYY-MM-DD` where the second digit of both month and day
  is optional.
* A time in the format HH:MM* where the second digit of both hours and minutes
  is optional. The star (*) represents either the `String` `AM` or `PM`.

The following ingest script processes the date and time `Strings` and stores the
result in a `datetime` field.

[source,Painless]
----
String[] split(String s, char d) {                                   <1>
    int count = 0;

    for (char c : s.toCharArray()) {                                 <2>
        if (c == d) {
            ++count;
        }
    }

    if (count == 0) {
        return new String[] {s};                                     <3>
    }

    String[] r = new String[count + 1];                              <4>
    int i0 = 0, i1 = 0;
    count = 0;

    for (char c : s.toCharArray()) {                                 <5>
        if (c == d) {
            r[count++] = s.substring(i0, i1);
            i0 = i1 + 1;
        }

        ++i1;
    }

    r[count] = s.substring(i0, i1);                                  <6>

    return r;
}

String[] dateSplit = split(ctx.date, (char)"-");                     <7>
String year = dateSplit[0].trim();
String month = dateSplit[1].trim();

if (month.length() == 1) {                                           <8>
    month = "0" + month;
}

String day = dateSplit[2].trim();

if (day.length() == 1) {                                             <9>
    day = "0" + day;
}

boolean pm = ctx.time.substring(ctx.time.length() - 2).equals("PM"); <10>
String[] timeSplit = split(
        ctx.time.substring(0, ctx.time.length() - 2), (char)":");    <11>
int hours = Integer.parseInt(timeSplit[0].trim());
int minutes = Integer.parseInt(timeSplit[1].trim());

if (pm) {                                                            <12>
    hours += 12;
}

String dts = year + "-" + month + "-" + day + "T" +
        (hours < 10 ? "0" + hours : "" + hours) + ":" +
        (minutes < 10 ? "0" + minutes : "" + minutes) +
        ":00+08:00";                                                 <13>

ZonedDateTime dt = ZonedDateTime.parse(
        dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);                <14>
ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;        <15>
----
<1> Creates a `split` <<painless-functions, function>> to split a
    <<string-type, `String`>> type value using a <<primitive-types, `char`>>
    type value as the delimiter. This is useful for pulling the individual
    pieces of the date and time `Strings` out of the original seat data.
<2> The first pass through each `char` in the `String` collects how many new
    `Strings` the original is split into.
<3> Returns the original `String` if there are no instances of the delimiting
    `char`.
<4> Creates an <<array-type, array type>> value to collect the split `Strings`
    into based on the number of `char` delimiters found in the first pass.
<5> The second pass through each `char` in the `String` collects each split
    substring into an array type value of `Strings`.
<6> Collects the last substring into the array type value of `Strings`.
<7> Uses the `split` function to separate the date `String` from the seat data
    into year, month, and day `Strings`.
    Note::
    * The use of a `String` type value to `char` type value
      <<string-character-casting, cast>> as part of the second argument since
      character literals do not exist.
    * The use of the `ctx` ingest processor context variable to retrieve the
      data from the `date` field.
<8> Appends the <<string-literals, string literal>> `"0"` value to a single
    digit month since the format of the seat data allows for this case.
<9> Appends the <<string-literals, string literal>> `"0"` value to a single
    digit day since the format of the seat data allows for this case.
<10> Sets the <<primitive-types, `boolean type`>>
     <<painless-variables, variable>> to `true` if the time `String` is a time
     in the afternoon or evening.
     Note::
     * The use of the `ctx` ingest processor context variable to retrieve the
       data from the `time` field.
<11> Uses the `split` function to separate the time `String` from the seat data
     into hours and minutes `Strings`.
     Note::
     * The use of the `substring` method to remove the `AM` or `PM` portion of
       the time `String`.
     * The use of a `String` type value to `char` type value
       <<string-character-casting, cast>> as part of the second argument since
       character literals do not exist.
     * The use of the `ctx` ingest processor context variable to retrieve the
       data from the `time` field.
<12> If the time `String` is an afternoon or evening value, adds the
     <<integer-literals, integer literal>> `12` to the existing hours to move to
     a 24-hour based time.
<13> Builds a new time `String` that is parsable using existing API methods.
<14> Creates a `ZonedDateTime` <<reference-types, reference type>> value by using
     the API method `parse` to parse the new time `String`.
<15> Sets the datetime field `datetime` to the number of milliseconds retrieved
     from the API method `getLong`.
     Note::
     * The use of the `ctx` ingest processor context variable to set the field
       `datetime`. Manipulate each document's fields with the `ctx` variable as
       each document is indexed.

Submit the following request:

[source,js]
----
PUT /_ingest/pipeline/seats
{
  "description": "update datetime for seats",
  "processors": [
    {
      "script": {
        "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;"
      }
    }
  ]
}
----
// CONSOLE
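As a cross-check of the arithmetic above — the unit test later in this diff asserts that `2018-9-1` at `3:00 PM` with the fixed `+08:00` offset becomes epoch millis `1535785200000` — here is a minimal plain-Java sketch of the same normalization (the class and method names are illustrative, not part of this change):

[source,Java]
----
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoField;

public class SeatDatetime {
    // Convert a seat's date ("2018-9-1") and time ("3:00 PM") to epoch
    // milliseconds, mirroring the Painless ingest script above.
    static long toEpochMillis(String date, String time) {
        String[] d = date.split("-");
        String year = d[0].trim();
        String month = d[1].trim().length() == 1 ? "0" + d[1].trim() : d[1].trim();
        String day = d[2].trim().length() == 1 ? "0" + d[2].trim() : d[2].trim();

        boolean pm = time.endsWith("PM");
        String[] t = time.substring(0, time.length() - 2).split(":");
        int hours = Integer.parseInt(t[0].trim());
        int minutes = Integer.parseInt(t[1].trim());
        if (pm) {
            hours += 12; // like the script, assumes no "12:xx" inputs
        }

        // Build an ISO offset string with the script's fixed +08:00 offset.
        String dts = year + "-" + month + "-" + day + "T"
            + (hours < 10 ? "0" + hours : "" + hours) + ":"
            + (minutes < 10 ? "0" + minutes : "" + minutes) + ":00+08:00";
        ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L;
    }

    public static void main(String[] args) {
        System.out.println(toEpochMillis("2018-9-1", "3:00 PM")); // 1535785200000
    }
}
----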
@@ -5,7 +5,7 @@ Keywords are reserved tokens for built-in language features.

*Errors*

-If a keyword is used as an <<painless-identifiers, identifier>>.
+* If a keyword is used as an <<painless-identifiers, identifier>>.

*Keywords*
@@ -0,0 +1 @@
fded6bb485b8b01bb2a9280162fd14d4d3ce4510

@@ -1 +0,0 @@
bd7d8078a2d0ad11a24f54156cc015630c96858a
@@ -25,6 +25,7 @@ esplugin {
integTestCluster {
  module project.project(':modules:mapper-extras')
  systemProperty 'es.scripting.use_java_time', 'true'
+ systemProperty 'es.scripting.update.ctx_in_params', 'false'
}

dependencies {
@@ -0,0 +1,311 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless;

/**
 * These tests run the Painless scripts used in the context docs against
 * slightly modified data designed around unit tests rather than a fully-
 * running Elasticsearch server.
 */
public class ContextExampleTests extends ScriptTestCase {

    // **** Docs Generator Code ****

    /*

    import java.io.FileWriter;
    import java.io.IOException;

    public class Generator {

        public final static String[] theatres = new String[] {"Down Port", "Graye", "Skyline", "Courtyard"};
        public final static String[] plays = new String[] {"Driving", "Pick It Up", "Sway and Pull", "Harriot",
                "The Busline", "Ants Underground", "Exploria", "Line and Single", "Shafted", "Sunnyside Down",
                "Test Run", "Auntie Jo"};
        public final static String[] actors = new String[] {"James Holland", "Krissy Smith", "Joe Muir", "Ryan Earns",
                "Joel Madigan", "Jessica Brown", "Baz Knight", "Jo Hangum", "Rachel Grass", "Phoebe Miller", "Sarah Notch",
                "Brayden Green", "Joshua Iller", "Jon Hittle", "Rob Kettleman", "Laura Conrad", "Simon Hower", "Nora Blue",
                "Mike Candlestick", "Jacey Bell"};

        public static void writeSeat(FileWriter writer, int id, String theatre, String play, String[] actors,
                String date, String time, int row, int number, double cost, boolean sold) throws IOException {
            StringBuilder builder = new StringBuilder();
            builder.append("{ \"create\" : { \"_index\" : \"seats\", \"_type\" : \"seat\", \"_id\" : \"");
            builder.append(id);
            builder.append("\" } }\n");
            builder.append("{ \"theatre\" : \"");
            builder.append(theatre);
            builder.append("\", \"play\" : \"");
            builder.append(play);
            builder.append("\", \"actors\": [ \"");
            for (String actor : actors) {
                builder.append(actor);
                if (actor.equals(actors[actors.length - 1]) == false) {
                    builder.append("\", \"");
                }
            }
            builder.append("\" ], \"date\": \"");
            builder.append(date);
            builder.append("\", \"time\": \"");
            builder.append(time);
            builder.append("\", \"row\": ");
            builder.append(row);
            builder.append(", \"number\": ");
            builder.append(number);
            builder.append(", \"cost\": ");
            builder.append(cost);
            builder.append(", \"sold\": ");
            builder.append(sold ? "true" : "false");
            builder.append(" }\n");
            writer.write(builder.toString());
        }

        public static void main(String args[]) throws IOException {
            FileWriter writer = new FileWriter("/home/jdconrad/test/seats.json");
            int id = 0;

            for (int playCount = 0; playCount < 12; ++playCount) {
                String play = plays[playCount];
                String theatre;
                String[] actor;
                int startMonth;
                int endMonth;
                String time;

                if (playCount == 0) {
                    theatre = theatres[0];
                    actor = new String[] {actors[0], actors[1], actors[2], actors[3]};
                    startMonth = 4;
                    endMonth = 5;
                    time = "3:00PM";
                } else if (playCount == 1) {
                    theatre = theatres[0];
                    actor = new String[] {actors[4], actors[5], actors[6], actors[7], actors[8], actors[9]};
                    startMonth = 4;
                    endMonth = 6;
                    time = "8:00PM";
                } else if (playCount == 2) {
                    theatre = theatres[0];
                    actor = new String[] {actors[0], actors[1], actors[2], actors[3],
                            actors[4], actors[5], actors[6], actors[7]};
                    startMonth = 6;
                    endMonth = 8;
                    time = "3:00 PM";
                } else if (playCount == 3) {
                    theatre = theatres[0];
                    actor = new String[] {actors[9], actors[10], actors[11], actors[12], actors[13], actors[14],
                            actors[15], actors[16], actors[17], actors[18], actors[19]};
                    startMonth = 7;
                    endMonth = 8;
                    time = "8:00PM";
                } else if (playCount == 4) {
                    theatre = theatres[0];
                    actor = new String[] {actors[13], actors[14], actors[15], actors[17], actors[18], actors[19]};
                    startMonth = 8;
                    endMonth = 10;
                    time = "3:00PM";
                } else if (playCount == 5) {
                    theatre = theatres[0];
                    actor = new String[] {actors[8], actors[9], actors[10], actors[11], actors[12]};
                    startMonth = 8;
                    endMonth = 10;
                    time = "8:00PM";
                } else if (playCount == 6) {
                    theatre = theatres[1];
                    actor = new String[] {actors[10], actors[11], actors[12], actors[13], actors[14], actors[15], actors[16]};
                    startMonth = 4;
                    endMonth = 5;
                    time = "11:00AM";
                } else if (playCount == 7) {
                    theatre = theatres[1];
                    actor = new String[] {actors[17], actors[18]};
                    startMonth = 6;
                    endMonth = 9;
                    time = "2:00PM";
                } else if (playCount == 8) {
                    theatre = theatres[1];
                    actor = new String[] {actors[0], actors[1], actors[2], actors[3], actors[16]};
                    startMonth = 10;
                    endMonth = 11;
                    time = "11:00AM";
                } else if (playCount == 9) {
                    theatre = theatres[2];
                    actor = new String[] {actors[1], actors[2], actors[3], actors[17], actors[18], actors[19]};
                    startMonth = 3;
                    endMonth = 6;
                    time = "4:00PM";
                } else if (playCount == 10) {
                    theatre = theatres[2];
                    actor = new String[] {actors[2], actors[3], actors[4], actors[5]};
                    startMonth = 7;
                    endMonth = 8;
                    time = "7:30PM";
                } else if (playCount == 11) {
                    theatre = theatres[2];
                    actor = new String[] {actors[7], actors[13], actors[14], actors[15], actors[16], actors[17]};
                    startMonth = 9;
                    endMonth = 12;
                    time = "5:40PM";
                } else {
                    throw new RuntimeException("too many plays");
                }

                int rows;
                int number;

                if (playCount < 6) {
                    rows = 3;
                    number = 12;
                } else if (playCount < 9) {
                    rows = 5;
                    number = 9;
                } else if (playCount < 12) {
                    rows = 11;
                    number = 15;
                } else {
                    throw new RuntimeException("too many seats");
                }

                for (int month = startMonth; month <= endMonth; ++month) {
                    for (int day = 1; day <= 14; ++day) {
                        for (int row = 1; row <= rows; ++row) {
                            for (int count = 1; count <= number; ++count) {
                                String date = "2018-" + month + "-" + day;
                                double cost = (25 - row) * 1.25;

                                writeSeat(writer, ++id, theatre, play, actor, date, time, row, count, cost, false);
                            }
                        }
                    }
                }
            }

            writer.write("\n");
            writer.close();
        }
    }

    */

    // **** Initial Mappings ****

    /*

    curl -X PUT "localhost:9200/seats" -H 'Content-Type: application/json' -d'
    {
        "mappings": {
            "seat": {
                "properties": {
                    "theatre": { "type": "keyword" },
                    "play": { "type": "text" },
                    "actors": { "type": "text" },
                    "row": { "type": "integer" },
                    "number": { "type": "integer" },
                    "cost": { "type": "double" },
                    "sold": { "type": "boolean" },
                    "datetime": { "type": "date" },
                    "date": { "type": "keyword" },
                    "time": { "type": "keyword" }
                }
            }
        }
    }
    '

    */

    // Create Ingest to Modify Dates:

    /*

    curl -X PUT "localhost:9200/_ingest/pipeline/seats" -H 'Content-Type: application/json' -d'
    {
        "description": "update datetime for seats",
        "processors": [
            {
                "script": {
                    "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;"
                }
            }
        ]
    }
    '

    */

    public void testIngestProcessorScript() {
        assertEquals(1535785200000L,
            exec("String[] split(String s, char d) {" +
                 "    int count = 0;" +
                 "    for (char c : s.toCharArray()) {" +
                 "        if (c == d) {" +
                 "            ++count;" +
                 "        }" +
                 "    }" +
                 "    if (count == 0) {" +
                 "        return new String[] {s};" +
                 "    }" +
                 "    String[] r = new String[count + 1];" +
                 "    int i0 = 0, i1 = 0;" +
                 "    count = 0;" +
                 "    for (char c : s.toCharArray()) {" +
                 "        if (c == d) {" +
                 "            r[count++] = s.substring(i0, i1);" +
                 "            i0 = i1 + 1;" +
                 "        }" +
                 "        ++i1;" +
                 "    }" +
                 "    r[count] = s.substring(i0, i1);" +
                 "    return r;" +
                 "}" +
                 "def x = ['date': '2018-9-1', 'time': '3:00 PM'];" +
                 "String[] dateSplit = split(x.date, (char)'-');" +
                 "String year = dateSplit[0].trim();" +
                 "String month = dateSplit[1].trim();" +
                 "if (month.length() == 1) {" +
                 "    month = '0' + month;" +
                 "}" +
                 "String day = dateSplit[2].trim();" +
                 "if (day.length() == 1) {" +
                 "    day = '0' + day;" +
                 "}" +
                 "boolean pm = x.time.substring(x.time.length() - 2).equals('PM');" +
                 "String[] timeSplit = split(x.time.substring(0, x.time.length() - 2), (char)':');" +
                 "int hours = Integer.parseInt(timeSplit[0].trim());" +
                 "String minutes = timeSplit[1].trim();" +
                 "if (pm) {" +
                 "    hours += 12;" +
                 "}" +
                 "String dts = year + '-' + month + '-' + day + " +
                 "'T' + (hours < 10 ? '0' + hours : '' + hours) + ':' + minutes + ':00+08:00';" +
                 "ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);" +
                 "return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L"
            )
        );
    }

    // Post Generated Data:

    /*

    curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/home/jdconrad/test/seats.json"

    */
}
@@ -132,7 +132,7 @@
body:
  script:
    lang: painless
-    source: "for (def key : params.keySet()) { ctx._source[key] = params[key]}"
+    source: "ctx._source.ctx = ctx"
    params: { bar: 'xxx' }

- match: { error.root_cause.0.type: "remote_transport_exception" }
@@ -48,9 +48,9 @@ import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
-import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.UpdateScript;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.threadpool.ThreadPool;
@@ -746,7 +746,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
private final Script script;
private final Map<String, Object> params;

-private ExecutableScript executable;
+private UpdateScript executable;
private Map<String, Object> context;

public ScriptApplier(WorkerBulkByScrollTaskState taskWorker,
@@ -766,7 +766,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
    return request;
}
if (executable == null) {
-    ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT);
+    UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
    executable = factory.newInstance(params);
}
if (context == null) {
@@ -787,8 +787,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
OpType oldOpType = OpType.INDEX;
context.put("op", oldOpType.toString());

-executable.setNextVar("ctx", context);
-executable.run();
+executable.execute(context);

String newOp = (String) context.remove("op");
if (newOp == null) {
@@ -26,8 +26,10 @@ import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.UpdateScript;
import org.junit.Before;

+import java.util.Collections;
import java.util.Map;
import java.util.function.Consumer;
@@ -54,10 +56,16 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
protected <T extends ActionRequest> T applyScript(Consumer<Map<String, Object>> scriptBody) {
    IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar"));
    ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0);
-    ExecutableScript executableScript = new SimpleExecutableScript(scriptBody);
-    ExecutableScript.Factory factory = params -> executableScript;
-    when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(factory);
-    when(scriptService.compile(any(), eq(ExecutableScript.UPDATE_CONTEXT))).thenReturn(factory);
+    UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) {
+        @Override
+        public void execute(Map<String, Object> ctx) {
+            scriptBody.accept(ctx);
+        }
+    };
+    UpdateScript.Factory factory = params -> updateScript;
+    ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody);
+    when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript);
+    when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory);
    AbstractAsyncBulkByScrollAction<Request> action = action(scriptService, request().setScript(mockScript("")));
    RequestWrapper<?> result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc);
    return (result != null) ? (T) result.self() : null;
@@ -0,0 +1 @@
a010e852be8d56efe1906e6da5292e4541239724

@@ -1 +0,0 @@
7a37816def72a748416c4ae8b0f6817e30efb99f

@@ -0,0 +1 @@
88e0ed90d433a9088528485cd4f59311735d92a4

@@ -1 +0,0 @@
ca7437178cdbf7b8bfe0d75c75e3c8eb93925724

@@ -0,0 +1 @@
0daec9ac3c4bba5f91b1bc413c651b7a98313982

@@ -1 +0,0 @@
3f5dec44f380d6d58bc1c8aec51964fcb5390b60

@@ -0,0 +1 @@
f5af81eec04c1da0d6969cff18f360ff379b1bf7

@@ -1 +0,0 @@
453bf1d60df0415439095624e0b3e42492ad4716

@@ -0,0 +1 @@
9e649088ee298293aa95a05391dff9cb0582648e

@@ -1 +0,0 @@
70095a45257bca9f46629b5fb6cedf9eff5e2b07

@@ -0,0 +1 @@
47fb370054ba7413d050f13c177edf01180c31ca

@@ -1 +0,0 @@
7199d6962d268b7877f7b5160e98e4ff21cce5c7

@@ -0,0 +1 @@
bc0708acbac195772b67b5ad2e9c4683d27ff450

@@ -1 +0,0 @@
12aff508d39d206a1aead5013ecd11882062eb06
@@ -27,7 +27,9 @@ import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.apache.logging.log4j.core.appender.CountingNoOpAppender;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.logging.log4j.spi.ExtendedLogger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Randomness;
@@ -298,8 +300,8 @@ public class EvilLoggerTests extends ESTestCase {
public void testPrefixLogger() throws IOException, IllegalAccessException, UserException {
    setupLogging("prefix");

-    final String prefix = randomBoolean() ? null : randomAlphaOfLength(16);
-    final Logger logger = Loggers.getLogger("prefix", prefix);
+    final String prefix = randomAlphaOfLength(16);
+    final Logger logger = new PrefixLogger((ExtendedLogger) LogManager.getLogger("prefix_test"), "prefix_test", prefix);
    logger.info("test");
    logger.info("{}", "test");
    final Exception e = new Exception("exception");
@@ -319,13 +321,8 @@ public class EvilLoggerTests extends ESTestCase {
final int expectedLogLines = 3;
assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength));
for (int i = 0; i < expectedLogLines; i++) {
-    if (prefix == null) {
-        assertThat("Contents of [" + path + "] are wrong",
-            events.get(i), startsWith("[" + getTestName() + "] test"));
-    } else {
-        assertThat("Contents of [" + path + "] are wrong",
-            events.get(i), startsWith("[" + getTestName() + "][" + prefix + "] test"));
-    }
+    assertThat("Contents of [" + path + "] are wrong",
+        events.get(i), startsWith("[" + getTestName() + "]" + prefix + " test"));
}
}
@@ -334,8 +331,8 @@ public class EvilLoggerTests extends ESTestCase {

final int prefixes = 1 << 19; // to ensure enough markers that the GC should collect some when we force a GC below
for (int i = 0; i < prefixes; i++) {
-    Loggers.getLogger("prefix" + i, "prefix" + i); // this has the side effect of caching a marker with this prefix
+    // this has the side effect of caching a marker with this prefix
+    new PrefixLogger((ExtendedLogger) LogManager.getLogger("prefix" + i), "prefix" + i, "prefix" + i);
}

System.gc(); // this will free the weakly referenced keys in the marker cache
@@ -360,7 +357,6 @@ public class EvilLoggerTests extends ESTestCase {
    }
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32546")
public void testNoNodeNameWarning() throws IOException, UserException {
    setupLogging("no_node_name");
@@ -376,7 +372,11 @@
    + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users "
    + "who customize log4j2.properties but will stop this behavior in 7.0. You should manually replace "
    + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:");
-assertThat(events.get(1), endsWith("no_node_name/log4j2.properties"));
+if (Constants.WINDOWS) {
+    assertThat(events.get(1), endsWith("no_node_name\\log4j2.properties"));
+} else {
+    assertThat(events.get(1), endsWith("no_node_name/log4j2.properties"));
+}
}

private void setupLogging(final String config) throws IOException, UserException {
@@ -0,0 +1 @@
c547b30525ad80d0ceeaa40c2d3a901c7e76fd46

@@ -1 +0,0 @@
d27958843ca118db2ffd2c242ae3761bd5a47328

@@ -0,0 +1 @@
9c327295d54d5abd2684e00c3aefe58aa1caace7

@@ -1 +0,0 @@
7ea220ba8e4accb8b04e280463042ad470e23bc0

@@ -0,0 +1 @@
73dd7703a94ec2357581f65ee7c1c4d618ff310f

@@ -1 +0,0 @@
471096d6e92338b208aa91f3a85feb2f9cfc4afd

@@ -0,0 +1 @@
1c3802fa30990a1758f2df19d17fe2c95fc45870

@@ -1 +0,0 @@
f0af947c60d24f779c22f774e81ebd7dd91cc932

@@ -0,0 +1 @@
8d7abdbb7900d7e6a76c391d8be07217c0d882ca

@@ -1 +0,0 @@
fbc83ac5a0139ed7e7faf6c95a2718f46f28c641

@@ -0,0 +1 @@
011f78ae9d9a386fcf20ceea29ba30e75fb512e8

@@ -1 +0,0 @@
30adfe493982b0db059dc243e269eea38d850d46

@@ -0,0 +1 @@
c3dd461a7cebdcacc77304660218513e10f89adb

@@ -1 +0,0 @@
656f304261d9aad05070fb68593beffafe9147e3

@@ -0,0 +1 @@
d63101181708d78eccc441b0d1193dd91d1a0bf1

@@ -1 +0,0 @@
8bf22ad81a7480c255b55bada401eb131bfdb4df

@@ -0,0 +1 @@
22e56fbd44d6a47d7dddbdda3c17ce22ad0a6680

@@ -1 +0,0 @@
edb3de4d68a34c1e1ca08f79fe4d103b10e98ad1

@@ -0,0 +1 @@
36b38a1d71045f5bee5dc40526f8d57084dbdc00

@@ -1 +0,0 @@
7ece30d5f1e18d96f61644451c858c3d9960558f

@@ -0,0 +1 @@
21eb8b111bcb94f4abb8c6402dfd10f51ecc0b38

@@ -1 +0,0 @@
ad3bd0c2ed96556193c7215bef328e689d0b157f

@@ -0,0 +1 @@
d60081c5641ed21aea82d5d0976b40e1f184c8e5

@@ -1 +0,0 @@
8a6bd97e39ee5af60126adbe8c8375dc41b1ea8e

@@ -0,0 +1 @@
2d42b373546aa8923d25e4e9a673dd186064f9bd

@@ -1 +0,0 @@
07e748d2d80000a7a213f3405b82b6e26b452948

@@ -0,0 +1 @@
7f31607959e5a2ed84ab2d9a007a3f76e9a2d38c

@@ -1 +0,0 @@
fd737bd5562f3943618ee7e73a0aaffb6319fdb2

@@ -0,0 +1 @@
f7619348f0619867c52f4801531c70358f49873a

@@ -1 +0,0 @@
ff3f260d1dc8c18bc67f3c33aa84a0ad290daac5
@@ -19,6 +19,11 @@

package org.elasticsearch.action.update;

+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.LongSupplier;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.DocWriteResponse;
@@ -42,21 +47,22 @@ import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.UpdateScript;
import org.elasticsearch.search.lookup.SourceLookup;

-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.function.LongSupplier;
+import static org.elasticsearch.common.Booleans.parseBoolean;

/**
 * Helper for translating an update request to an index, delete request or update response.
 */
public class UpdateHelper extends AbstractComponent {

+    /** Whether scripts should add the ctx variable to the params map. */
+    private static final boolean CTX_IN_PARAMS =
+        parseBoolean(System.getProperty("es.scripting.update.ctx_in_params"), true);
+
    private final ScriptService scriptService;

    public UpdateHelper(Settings settings, ScriptService scriptService) {
@@ -279,10 +285,18 @@ public class UpdateHelper extends AbstractComponent {
private Map<String, Object> executeScript(Script script, Map<String, Object> ctx) {
    try {
        if (scriptService != null) {
-            ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT);
-            ExecutableScript executableScript = factory.newInstance(script.getParams());
-            executableScript.setNextVar(ContextFields.CTX, ctx);
-            executableScript.run();
+            UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
+            final Map<String, Object> params;
+            if (CTX_IN_PARAMS) {
+                params = new HashMap<>(script.getParams());
+                params.put(ContextFields.CTX, ctx);
+                deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. " +
+                    "Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage.");
+            } else {
+                params = script.getParams();
+            }
+            UpdateScript executableScript = factory.newInstance(params);
+            executableScript.execute(ctx);
        }
    } catch (Exception e) {
        throw new IllegalArgumentException("failed to execute script", e);
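A standalone sketch of the params-wrapping decision above (the `CtxInParamsDemo` class and `buildParams` helper are hypothetical; the real code lives inside `UpdateHelper.executeScript`):

[source,Java]
----
import java.util.HashMap;
import java.util.Map;

class CtxInParamsDemo {
    // Mirrors UpdateHelper's branch above: with the legacy flag on (the default),
    // the ctx map is also exposed to scripts through params.ctx.
    static Map<String, Object> buildParams(Map<String, Object> scriptParams,
                                           Map<String, Object> ctx,
                                           boolean ctxInParams) {
        if (ctxInParams) {
            Map<String, Object> params = new HashMap<>(scriptParams);
            params.put("ctx", ctx); // deprecated access path: params.ctx
            return params;
        }
        return scriptParams; // scripts must use the ctx argument directly
    }
}
----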
@@ -345,10 +345,7 @@ final class Bootstrap {
if (foreground && maybeConsoleAppender != null) {
    Loggers.removeAppender(rootLogger, maybeConsoleAppender);
}
-Logger logger = Loggers.getLogger(Bootstrap.class);
-if (INSTANCE.node != null) {
-    logger = Loggers.getLogger(Bootstrap.class, Node.NODE_NAME_SETTING.get(INSTANCE.node.settings()));
-}
+Logger logger = LogManager.getLogger(Bootstrap.class);
// HACK, it sucks to do this, but we will run users out of disk space otherwise
if (e instanceof CreationException) {
    // guice: log the shortened exc to the log file
@@ -62,14 +62,29 @@ public final class ESLoggerFactory {
    return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix);
}

+/**
+ * Get or build a logger.
+ * @deprecated Prefer {@link LogManager#getLogger}
+ */
+@Deprecated
public static Logger getLogger(Class<?> clazz) {
    return getLogger(null, clazz);
}

+/**
+ * Get or build a logger.
+ * @deprecated Prefer {@link LogManager#getLogger}
+ */
+@Deprecated
public static Logger getLogger(String name) {
    return getLogger(null, name);
}

+/**
+ * Get the root logger.
+ * @deprecated Prefer {@link LogManager#getRootLogger}
+ */
+@Deprecated
public static Logger getRootLogger() {
    return LogManager.getRootLogger();
}
@@ -67,11 +67,11 @@ public class Loggers {
}

public static Logger getLogger(Class<?> clazz, Settings settings, String... prefixes) {
-    return Loggers.getLogger(clazz, prefixes);
+    return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz);
}

public static Logger getLogger(String loggerName, Settings settings, String... prefixes) {
-    return Loggers.getLogger(loggerName, prefixes);
+    return ESLoggerFactory.getLogger(formatPrefix(prefixes), loggerName);
}

public static Logger getLogger(Logger parentLogger, String s) {
@@ -82,22 +82,24 @@ public class Loggers {
    return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s);
}

/**
 * Get or build a logger.
 * @deprecated Prefer {@link LogManager#getLogger}
 */
@Deprecated
public static Logger getLogger(String s) {
    return ESLoggerFactory.getLogger(s);
}

/**
 * Get or build a logger.
 * @deprecated Prefer {@link LogManager#getLogger}
 */
@Deprecated
public static Logger getLogger(Class<?> clazz) {
    return ESLoggerFactory.getLogger(clazz);
}

public static Logger getLogger(Class<?> clazz, String... prefixes) {
    return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz);
}

public static Logger getLogger(String name, String... prefixes) {
    return ESLoggerFactory.getLogger(formatPrefix(prefixes), name);
}

private static String formatPrefix(String... prefixes) {
    String prefix = null;
    if (prefixes != null && prefixes.length > 0) {
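For callers, the deprecations above amount to a one-line migration; a minimal sketch (the `MyComponent` class is hypothetical):

[source,Java]
----
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class MyComponent {
    // Deprecated style, still routed through ESLoggerFactory:
    // private static final Logger logger = Loggers.getLogger(MyComponent.class);

    // Preferred style after this change: go straight to Log4j's LogManager.
    private static final Logger logger = LogManager.getLogger(MyComponent.class);

    void start() {
        logger.info("starting component");
    }
}
----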
@@ -20,19 +20,18 @@
package org.elasticsearch.index.analysis;

import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.synonym.SolrSynonymParser;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
-import org.elasticsearch.common.logging.Loggers;

import java.io.IOException;

public class ESSolrSynonymParser extends SolrSynonymParser {
+    private static final Logger logger = LogManager.getLogger(ESSolrSynonymParser.class);

    private final boolean lenient;
-    private static final Logger logger =
-        Loggers.getLogger(ESSolrSynonymParser.class, "ESSolrSynonymParser");

    public ESSolrSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) {
        super(dedup, expand, analyzer);
@@ -20,19 +20,18 @@
package org.elasticsearch.index.analysis;

import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.synonym.WordnetSynonymParser;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
-import org.elasticsearch.common.logging.Loggers;

import java.io.IOException;

public class ESWordnetSynonymParser extends WordnetSynonymParser {
+    private static final Logger logger = LogManager.getLogger(ESWordnetSynonymParser.class);

    private final boolean lenient;
-    private static final Logger logger =
-        Loggers.getLogger(ESSolrSynonymParser.class, "ESWordnetSynonymParser");

    public ESWordnetSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) {
        super(dedup, expand, analyzer);
@@ -276,7 +276,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        context.doc().add(field);
    }
} catch (Exception e) {
-    throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
+    throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(),
+        fieldType().typeName());
}
multiFields.parse(this, context);
return null;
@@ -510,7 +510,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
    indexShape(context, shape);
} catch (Exception e) {
    if (ignoreMalformed.value() == false) {
-        throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
+        throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(),
+            fieldType().typeName());
    }
    context.addIgnoredField(fieldType.name());
}
@ -46,7 +46,4 @@ public interface ExecutableScript {
|
|||
}
|
||||
|
||||
ScriptContext<Factory> CONTEXT = new ScriptContext<>("executable", Factory.class);
|
||||
|
||||
// TODO: remove these once each has its own script interface
|
||||
ScriptContext<Factory> UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class);
|
||||
}
|
||||
|
|
|
@@ -46,10 +46,10 @@ public class ScriptModule {
SearchScript.SCRIPT_SORT_CONTEXT,
SearchScript.TERMS_SET_QUERY_CONTEXT,
ExecutableScript.CONTEXT,
+UpdateScript.CONTEXT,
BucketAggregationScript.CONTEXT,
BucketAggregationSelectorScript.CONTEXT,
SignificantTermsHeuristicScoreScript.CONTEXT,
-ExecutableScript.UPDATE_CONTEXT,
IngestScript.CONTEXT,
FilterScript.CONTEXT,
SimilarityScript.CONTEXT,
@@ -285,7 +285,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
// TODO: fix this through some API or something, that's wrong
// special exception to prevent expressions from compiling as update or mapping scripts
boolean expression = "expression".equals(lang);
-boolean notSupported = context.name.equals(ExecutableScript.UPDATE_CONTEXT.name);
+boolean notSupported = context.name.equals(UpdateScript.CONTEXT.name);
if (expression && notSupported) {
    throw new UnsupportedOperationException("scripts of type [" + script.getType() + "]," +
        " operation [" + context.name + "] and lang [" + lang + "] are not supported");
@@ -0,0 +1,52 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script;

import java.util.Map;

/**
 * An update script.
 */
public abstract class UpdateScript {

    public static final String[] PARAMETERS = { "ctx" };

    /** The context used to compile {@link UpdateScript} factories. */
    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("update", Factory.class);

    /** The generic runtime parameters for the script. */
    private final Map<String, Object> params;

    public UpdateScript(Map<String, Object> params) {
        this.params = params;
    }

    /** Return the parameters for this script. */
    public Map<String, Object> getParams() {
        return params;
    }

    public abstract void execute(Map<String, Object> ctx);

    public interface Factory {
        UpdateScript newInstance(Map<String, Object> params);
    }
}
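A minimal sketch of how a concrete script could plug into the new `Factory` contract (the `UppercaseNameScript` class is hypothetical, for illustration only; real implementations are generated by script engines such as Painless, or hand-built as in MockScriptEngine later in this diff):

[source,Java]
----
import java.util.Map;

import org.elasticsearch.script.UpdateScript;

// Hypothetical UpdateScript: uppercases the "name" field of the document source.
class UppercaseNameScript extends UpdateScript {
    UppercaseNameScript(Map<String, Object> params) {
        super(params);
    }

    @Override
    @SuppressWarnings("unchecked")
    public void execute(Map<String, Object> ctx) {
        // ctx carries the update context; _source is the document's field map.
        Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
        Object name = source.get("name");
        if (name instanceof String) {
            source.put("name", ((String) name).toUpperCase());
        }
    }
}

// Usage: the Factory functional interface matches the constructor reference.
// UpdateScript.Factory factory = UppercaseNameScript::new;
----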
@@ -180,7 +180,7 @@ public class BucketSortPipelineAggregator extends PipelineAggregator {
private static class TopNPriorityQueue extends PriorityQueue<ComparableBucket> {

    private TopNPriorityQueue(int n) {
-        super(n, false);
+        super(n);
    }

    @Override
@@ -142,7 +142,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
    .endObject());
MapperParsingException ex = expectThrows(MapperParsingException.class,
    () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)));
-assertEquals("failed to parse [field]", ex.getMessage());
+assertEquals("failed to parse field [field] of type [boolean]", ex.getMessage());
}

public void testMultiFields() throws IOException {
@@ -125,6 +125,35 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
        e.getMessage());
}

+public void testUnexpectedFieldMappingType() throws Exception {
+    DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
+    String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        .startObject("foo").field("type", "long").endObject()
+        .startObject("bar").field("type", "boolean").endObject()
+        .startObject("geo").field("type", "geo_shape").endObject()
+        .endObject().endObject().endObject());
+    DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
+    {
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo", true).endObject());
+        MapperException exception = expectThrows(MapperException.class,
+            () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
+        assertThat(exception.getMessage(), containsString("failed to parse field [foo] of type [long]"));
+    }
+    {
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("bar", "bar").endObject());
+        MapperException exception = expectThrows(MapperException.class,
+            () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON)));
+        assertThat(exception.getMessage(), containsString("failed to parse field [bar] of type [boolean]"));
+    }
+    {
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("geo", 123).endObject());
+        MapperException exception = expectThrows(MapperException.class,
+            () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON)));
+        assertThat(exception.getMessage(), containsString("failed to parse field [geo] of type [geo_shape]"));
+    }
+}
+
public void testDotsWithDynamicNestedMapper() throws Exception {
    DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
    String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -167,7 +167,7 @@ public class ScriptServiceTests extends ESTestCase {

assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
-assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
+assertCompileAccepted("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT);
}
@@ -187,7 +187,7 @@ public class ScriptServiceTests extends ESTestCase {

assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
-assertCompileRejected("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
+assertCompileRejected("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT);
}

public void testAllowNoScriptTypeSettings() throws IOException {
@@ -93,6 +93,7 @@ public class UpdateIT extends ESIntegTestCase {
}

Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
+params.remove("ctx");
source.putAll(params);

return ctx;
@@ -96,6 +96,18 @@ public class MockScriptEngine implements ScriptEngine {
                }
            };
            return context.factoryClazz.cast(factory);
        } else if (context.instanceClazz.equals(UpdateScript.class)) {
            UpdateScript.Factory factory = parameters -> new UpdateScript(parameters) {
                @Override
                public void execute(Map<String, Object> ctx) {
                    final Map<String, Object> vars = new HashMap<>();
                    vars.put("ctx", ctx);
                    vars.put("params", parameters);
                    vars.putAll(parameters);
                    script.apply(vars);
                }
            };
            return context.factoryClazz.cast(factory);
        } else if (context.instanceClazz.equals(BucketAggregationScript.class)) {
            BucketAggregationScript.Factory factory = parameters -> new BucketAggregationScript(parameters) {
                @Override

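The mock engine above binds one generic script function into each typed script context by building a lambda factory and casting it through `context.factoryClazz`. A trimmed-down sketch of that pattern follows; the names (`Context`, `UpdateFactory`, `UpdateLike`, `compile`) are stand-ins for illustration, not the real ES classes:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class ScriptContextSketch {

    /** Minimal stand-in for a script context: just the factory class token. */
    static final class Context<F> {
        final Class<F> factoryClazz;

        Context(Class<F> factoryClazz) {
            this.factoryClazz = factoryClazz;
        }
    }

    /** Stand-in for an UpdateScript.Factory-like interface. */
    interface UpdateFactory {
        UpdateLike newInstance(Map<String, Object> params);
    }

    /** Stand-in for an UpdateScript-like script object. */
    interface UpdateLike {
        void execute(Map<String, Object> ctx);
    }

    /** Builds a lambda factory and casts it through the context's class token. */
    static <F> F compile(Context<F> context, Function<Map<String, Object>, Object> script) {
        UpdateFactory factory = params -> ctx -> {
            Map<String, Object> vars = new HashMap<>();
            vars.put("ctx", ctx);    // expose the update context to the script body
            vars.putAll(params);     // flatten params in, as the mock engine does
            script.apply(vars);
        };
        return context.factoryClazz.cast(factory);
    }

    public static void main(String[] args) {
        Context<UpdateFactory> context = new Context<>(UpdateFactory.class);
        UpdateFactory factory = compile(context, vars -> {
            @SuppressWarnings("unchecked")
            Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx");
            ctx.put("op", "none");   // a script choosing to no-op the update
            return ctx;
        });
        Map<String, Object> ctx = new HashMap<>();
        Map<String, Object> params = new HashMap<>();
        params.put("tag", "demo");
        factory.newInstance(params).execute(ctx);
        System.out.println(ctx);     // {op=none}
    }
}
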
@@ -71,7 +71,7 @@ public class OverallBucketsProvider {
    static class TopNScores extends PriorityQueue<Double> {

        TopNScores(int n) {
            super(n, false);
            super(n);
        }

        @Override

@@ -25,6 +25,10 @@ grant {
    permission java.util.PropertyPermission "sun.security.krb5.debug","write";
    permission java.util.PropertyPermission "java.security.debug","write";
    permission java.util.PropertyPermission "sun.security.spnego.debug","write";

    // needed for kerberos file permission tests to access user information
    permission java.lang.RuntimePermission "accessUserInformation";
    permission java.lang.RuntimePermission "getFileStoreAttributes";
};

grant codeBase "${codebase.xmlsec-2.0.8.jar}" {

@@ -23,17 +23,19 @@ import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData;
import org.ietf.jgss.GSSException;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.AclEntry;
import java.nio.file.attribute.AclEntryPermission;
import java.nio.file.attribute.AclEntryType;
import java.nio.file.attribute.AclFileAttributeView;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFilePermissions;
import java.nio.file.attribute.UserPrincipal;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;

import javax.security.auth.login.LoginException;

@@ -112,7 +114,6 @@ public class KerberosRealmTests extends KerberosRealmTestCase {
        final String keytabPathCase = randomFrom("keytabPathAsDirectory", "keytabFileDoesNotExist", "keytabPathWithNoReadPermissions");
        final String expectedErrorMessage;
        final String keytabPath;
        final Set<PosixFilePermission> filePerms;
        switch (keytabPathCase) {
            case "keytabPathAsDirectory":
                final String dirName = randomAlphaOfLength(5);

@@ -125,14 +126,29 @@ public class KerberosRealmTests extends KerberosRealmTestCase {
                expectedErrorMessage = "configured service key tab file [" + keytabPath + "] does not exist";
                break;
            case "keytabPathWithNoReadPermissions":
                filePerms = PosixFilePermissions.fromString("---------");
                final String keytabFileName = randomAlphaOfLength(5) + ".keytab";
                final FileAttribute<Set<PosixFilePermission>> fileAttributes = PosixFilePermissions.asFileAttribute(filePerms);
                try (SeekableByteChannel byteChannel = Files.newByteChannel(dir.resolve(keytabFileName),
                        EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), fileAttributes)) {
                    byteChannel.write(ByteBuffer.wrap(randomByteArrayOfLength(10)));
                final String fileName = randomAlphaOfLength(5);
                final Path keytabFilePath = Files.createTempFile(dir, fileName, ".keytab");
                Files.write(keytabFilePath, randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8));
                final Set<String> supportedAttributes = keytabFilePath.getFileSystem().supportedFileAttributeViews();
                if (supportedAttributes.contains("posix")) {
                    final PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(keytabFilePath, PosixFileAttributeView.class);
                    fileAttributeView.setPermissions(PosixFilePermissions.fromString("---------"));
                } else if (supportedAttributes.contains("acl")) {
                    final UserPrincipal principal = Files.getOwner(keytabFilePath);
                    final AclFileAttributeView view = Files.getFileAttributeView(keytabFilePath, AclFileAttributeView.class);
                    final AclEntry entry = AclEntry.newBuilder()
                            .setType(AclEntryType.DENY)
                            .setPrincipal(principal)
                            .setPermissions(AclEntryPermission.READ_DATA, AclEntryPermission.READ_ATTRIBUTES).build();
                    final List<AclEntry> acl = view.getAcl();
                    acl.add(0, entry);
                    view.setAcl(acl);
                } else {
                    throw new UnsupportedOperationException(
                            String.format(Locale.ROOT, "Don't know how to make file [%s] non-readable on a file system with attributes [%s]",
                                    keytabFilePath, supportedAttributes));
                }
                keytabPath = dir.resolve(keytabFileName).toString();
                keytabPath = keytabFilePath.toString();
                expectedErrorMessage = "configured service key tab file [" + keytabPath + "] must have read permission";
                break;
            default:

@@ -0,0 +1 @@
73dd7703a94ec2357581f65ee7c1c4d618ff310f

@@ -1 +0,0 @@
471096d6e92338b208aa91f3a85feb2f9cfc4afd

@@ -62,20 +62,10 @@ public class Locate extends ScalarFunction {

    @Override
    protected ProcessorDefinition makeProcessorDefinition() {
        LocateFunctionProcessorDefinition processorDefinition;
        if (start == null) {
            processorDefinition = new LocateFunctionProcessorDefinition(location(), this,
                ProcessorDefinitions.toProcessorDefinition(pattern),
                ProcessorDefinitions.toProcessorDefinition(source));
        }
        else {
            processorDefinition = new LocateFunctionProcessorDefinition(location(), this,
                ProcessorDefinitions.toProcessorDefinition(pattern),
                ProcessorDefinitions.toProcessorDefinition(source),
                ProcessorDefinitions.toProcessorDefinition(start));
        }

        return processorDefinition;
        return new LocateFunctionProcessorDefinition(location(), this,
            ProcessorDefinitions.toProcessorDefinition(pattern),
            ProcessorDefinitions.toProcessorDefinition(source),
            start == null ? null : ProcessorDefinitions.toProcessorDefinition(start));
    }

    @Override

@@ -21,20 +21,12 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition {

    public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern,
            ProcessorDefinition source, ProcessorDefinition start) {
        super(location, expression, Arrays.asList(pattern, source, start));
        super(location, expression, start == null ? Arrays.asList(pattern, source) : Arrays.asList(pattern, source, start));
        this.pattern = pattern;
        this.source = source;
        this.start = start;
    }

    public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern,
            ProcessorDefinition source) {
        super(location, expression, Arrays.asList(pattern, source));
        this.pattern = pattern;
        this.source = source;
        this.start = null;
    }

    @Override
    public final ProcessorDefinition replaceChildren(List<ProcessorDefinition> newChildren) {
        int childrenSize = newChildren.size();

@@ -68,9 +60,6 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition {

    protected ProcessorDefinition replaceChildren(ProcessorDefinition newPattern, ProcessorDefinition newSource,
            ProcessorDefinition newStart) {
        if (newStart == null) {
            return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource);
        }
        return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource, newStart);
    }

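The two hunks above collapse a pair of constructors into one that tolerates a null `start`, building the children list conditionally so the tree machinery always sees the node's true arity. A minimal, hypothetical illustration of that pattern (not ES code):

import java.util.Arrays;
import java.util.List;

public class OptionalChildNode {
    private final List<Object> children;

    OptionalChildNode(Object pattern, Object source, Object start) {
        // Omit the optional child entirely rather than storing a null slot.
        this.children = start == null
                ? Arrays.asList(pattern, source)
                : Arrays.asList(pattern, source, start);
    }

    List<Object> children() {
        return children;
    }

    public static void main(String[] args) {
        System.out.println(new OptionalChildNode("pat", "src", null).children()); // [pat, src]
        System.out.println(new OptionalChildNode("pat", "src", 3).children());    // [pat, src, 3]
    }
}
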
@@ -38,50 +38,34 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
        return (LocateFunctionProcessorDefinition) (new Locate(randomLocation(),
                randomStringLiteral(),
                randomStringLiteral(),
                frequently() ? randomIntLiteral() : null)
                randomFrom(true, false) ? randomIntLiteral() : null)
                .makeProcessorDefinition());
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32554")
    @Override
    public void testTransform() {
        // test transforming only the properties (location, expression),
        // skipping the children (the two parameters of the binary function) which are tested separately
        LocateFunctionProcessorDefinition b1 = randomInstance();
        Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomLocateFunctionExpression());
        LocateFunctionProcessorDefinition newB;
        if (b1.start() == null) {
            newB = new LocateFunctionProcessorDefinition(
                b1.location(),
                newExpression,
                b1.pattern(),
                b1.source());
        } else {
            newB = new LocateFunctionProcessorDefinition(
                b1.location(),
                newExpression,
                b1.pattern(),
                b1.source(),
                b1.start());
        }
        LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition(
            b1.location(),
            newExpression,
            b1.pattern(),
            b1.source(),
            b1.start());

        assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));

        LocateFunctionProcessorDefinition b2 = randomInstance();
        Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation());
        if (b2.start() == null) {
            newB = new LocateFunctionProcessorDefinition(
                newLoc,
                b2.expression(),
                b2.pattern(),
                b2.source());
        } else {
            newB = new LocateFunctionProcessorDefinition(
                newLoc,
                b2.expression(),
                b2.pattern(),
                b2.source(),
                b2.start());
        }
        newB = new LocateFunctionProcessorDefinition(
            newLoc,
            b2.expression(),
            b2.pattern(),
            b2.source(),
            b2.start());

        assertEquals(newB,
            b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class));
    }

@@ -93,15 +77,9 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
        ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()));
        ProcessorDefinition newStart;

        LocateFunctionProcessorDefinition newB;
        if (b.start() == null) {
            newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source());
            newStart = null;
        }
        else {
            newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source(), b.start());
            newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()));
        }
        LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition(
            b.location(), b.expression(), b.pattern(), b.source(), b.start());
        newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()));
        LocateFunctionProcessorDefinition transformed = null;

        // generate all the combinations of possible children modifications and test all of them

@@ -132,7 +110,8 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
                    comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(),
                            () -> randomStringLiteral())) : f.pattern(),
                    comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(),
                            () -> randomStringLiteral())) : f.source()));
                            () -> randomStringLiteral())) : f.source(),
                    null));
            }
        }
    } else {

@@ -155,13 +134,7 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase

    @Override
    protected LocateFunctionProcessorDefinition copy(LocateFunctionProcessorDefinition instance) {
        return instance.start() == null ?
            new LocateFunctionProcessorDefinition(instance.location(),
                instance.expression(),
                instance.pattern(),
                instance.source())
            :
            new LocateFunctionProcessorDefinition(instance.location(),
        return new LocateFunctionProcessorDefinition(instance.location(),
            instance.expression(),
            instance.pattern(),
            instance.source(),

@@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;

import java.io.IOException;
import java.util.Locale;

public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase<StringProcessor> {
    public static StringProcessor randomStringFunctionProcessor() {

@@ -73,6 +74,19 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase

        stringCharInputValidation(proc);
    }

    public void testLCaseWithTRLocale() {
        Locale.setDefault(Locale.forLanguageTag("tr"));
        StringProcessor proc = new StringProcessor(StringOperation.LCASE);

        // ES-SQL is not locale sensitive (so far). The obvious test for this is the Turkish language, uppercase letter I conversion
        // in non-Turkish locale the lowercasing would create i and an additional dot, while in Turkish Locale it would only create "i"
        // unicode 0069 = i
        assertEquals("\u0069\u0307", proc.process("\u0130"));
        // unicode 0049 = I (regular capital letter i)
        // in Turkish locale this would be lowercased to a "i" without dot (unicode 0131)
        assertEquals("\u0069", proc.process("\u0049"));
    }

    public void testUCase() {
        StringProcessor proc = new StringProcessor(StringOperation.UCASE);

@@ -81,9 +95,21 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase
        assertEquals("SOMELOWERCASE", proc.process("SomeLoweRCasE"));
        assertEquals("FULLUPPERCASE", proc.process("FULLUPPERCASE"));
        assertEquals("A", proc.process('a'));

        // special uppercasing for small letter sharp "s" resulting "SS"
        assertEquals("\u0053\u0053", proc.process("\u00df"));

        stringCharInputValidation(proc);
    }

    public void testUCaseWithTRLocale() {
        Locale.setDefault(Locale.forLanguageTag("tr"));
        StringProcessor proc = new StringProcessor(StringOperation.UCASE);

        // ES-SQL is not Locale sensitive (so far).
        // in Turkish locale, small letter "i" is uppercased to "I" with a dot above (unicode 0130), otherwise to "I" (unicode 0049)
        assertEquals("\u0049", proc.process("\u0069"));
    }

    public void testLength() {
        StringProcessor proc = new StringProcessor(StringOperation.LENGTH);

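For context on the two Turkish-locale tests above: it is the JDK's own case mapping that makes the Turkish locale special, and that is the behavior the assertions pin down. This standalone snippet (not part of the change) shows the asymmetry:

import java.util.Locale;

public class TurkishCasingDemo {
    public static void main(String[] args) {
        Locale tr = Locale.forLanguageTag("tr");
        // Lowercasing "I": dotless ı (U+0131) under tr, plain i under the root locale.
        System.out.println("I".toLowerCase(tr));           // ı
        System.out.println("I".toLowerCase(Locale.ROOT));  // i
        // Uppercasing "i": dotted İ (U+0130) under tr, plain I under the root locale.
        System.out.println("i".toUpperCase(tr));           // İ
        System.out.println("i".toUpperCase(Locale.ROOT));  // I
    }
}
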
@@ -0,0 +1,8 @@
import org.elasticsearch.gradle.test.RestIntegTestTask

// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737
if (project.inFipsJvm) {
    tasks.withType(RestIntegTestTask) {
        enabled = false
    }
}

@@ -7,12 +7,14 @@ package org.elasticsearch.xpack.qa.sql.jdbc;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.junit.Assume;
import org.junit.ClassRule;

import java.sql.Connection;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

/**
 * Tests comparing sql queries executed against our jdbc client

@@ -25,7 +27,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
    public static LocalH2 H2 = new LocalH2((c) -> c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'"));

    @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
    public static List<Object[]> readScriptSpec() throws Exception {
        Parser parser = specParser();
        List<Object[]> tests = new ArrayList<>();
        tests.addAll(readScriptSpec("/select.sql-spec", parser));

@@ -35,6 +37,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
        tests.addAll(readScriptSpec("/agg.sql-spec", parser));
        tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser));
        tests.addAll(readScriptSpec("/string-functions.sql-spec", parser));
        tests.addAll(readScriptSpec("/case-functions.sql-spec", parser));
        return tests;
    }

@@ -56,6 +59,12 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {

    @Override
    protected final void doTest() throws Throwable {
        boolean goodLocale = !(Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr").build())
                || Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr-TR").build()));
        if (fileName.startsWith("case-functions")) {
            Assume.assumeTrue(goodLocale);
        }

        try (Connection h2 = H2.get();
             Connection es = esJdbc()) {

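The `doTest` override above uses JUnit's `Assume` so that locale-sensitive spec files are reported as skipped rather than failed when the JVM's default locale is Turkish. A minimal, hypothetical sketch of the same guard in an ordinary JUnit 4 test:

import java.util.Locale;

import org.junit.Assume;
import org.junit.Test;

public class LocaleGuardedTest {

    @Test
    public void caseFunctionSpec() {
        // A false assumption makes JUnit skip the test instead of failing it.
        boolean goodLocale = Locale.getDefault().getLanguage().equals("tr") == false;
        Assume.assumeTrue(goodLocale);
        // ... locale-sensitive assertions would run here ...
    }
}
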
@@ -0,0 +1,13 @@
// Next 4 SELECTs in this file are related to https://github.com/elastic/elasticsearch/issues/32589
// H2 is Locale sensitive, while ES-SQL is not (so far)
selectInsertWithLcaseAndLengthWithOrderBy
SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10;

upperCasingTheSecondLetterFromTheRightFromFirstName
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;

upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;

upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;

@@ -22,7 +22,8 @@ SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp"
ltrimFilter
SELECT LTRIM(first_name) lt FROM "test_emp" WHERE LTRIM(first_name) = 'Bob';

//Unsupported yet
// Unsupported yet
// Functions combined with 'LIKE' should perform the match inside a Painless script, whereas at the moment it's handled as a regular `match` query in ES.
//ltrimFilterWithLike
//SELECT LTRIM("first_name") lt FROM "test_emp" WHERE LTRIM("first_name") LIKE '%a%';

@@ -93,10 +94,6 @@ SELECT "first_name" orig, REPEAT("first_name",2) reps FROM "test_emp" WHERE ASCI
selectInsertWithLcase
SELECT "first_name" orig, INSERT("first_name",2,1000,LCASE("first_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC LIMIT 10;

// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
// selectInsertWithLcaseAndLengthWithOrderBy
//SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10;

selectInsertWithUcaseWithGroupByAndOrderBy
SELECT INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) modified, COUNT(*) count FROM "test_emp" WHERE ASCII("first_name")=65 GROUP BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ORDER BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ASC LIMIT 10;

@@ -141,14 +138,3 @@ SELECT RIGHT("first_name",2) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;

selectRightWithGroupByAndOrderBy
SELECT RIGHT("first_name",2) f, COUNT(*) count FROM "test_emp" GROUP BY RIGHT("first_name",2) ORDER BY RIGHT("first_name",2) LIMIT 10;

// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
// upperCasingTheSecondLetterFromTheRightFromFirstName
// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;

// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
// upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy
// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;

upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;