Merge branch 'master' of github.com:elastic/elasticsearch

Alpar Torok 2018-08-09 21:58:03 +03:00
commit 5bbed5ed9a
74 changed files with 234 additions and 148 deletions

View File

@ -782,9 +782,12 @@ class BuildPlugin implements Plugin<Project> {
}
}
// TODO: remove this once joda time is removed from scriptin in 7.0
// TODO: remove this once joda time is removed from scripting in 7.0
systemProperty 'es.scripting.use_java_time', 'true'
// TODO: remove this once ctx isn't added to update script params in 7.0
systemProperty 'es.scripting.update.ctx_in_params', 'false'
// Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
if (project.inFipsJvm) {
systemProperty 'javax.net.ssl.trustStorePassword', 'password'

View File

@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 7.5.0-snapshot-608f0277b0
lucene = 7.5.0-snapshot-13b9e28f9d
# optional dependencies
spatial4j = 0.7

View File

@ -30,6 +30,14 @@ apply plugin: 'com.github.johnrengelman.shadow'
group = 'org.elasticsearch.client'
archivesBaseName = 'elasticsearch-rest-high-level-client'
publishing {
publications {
nebula {
artifactId = archivesBaseName
}
}
}
//we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions)
Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE)
test.dependsOn(copyRestSpec)

View File

@ -40,6 +40,7 @@ integTestCluster {
// TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
systemProperty 'es.scripting.use_java_time', 'false'
systemProperty 'es.scripting.update.ctx_in_params', 'false'
}
// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed

View File

@ -0,0 +1 @@
fded6bb485b8b01bb2a9280162fd14d4d3ce4510

View File

@ -1 +0,0 @@
bd7d8078a2d0ad11a24f54156cc015630c96858a

View File

@ -25,6 +25,7 @@ esplugin {
integTestCluster {
module project.project(':modules:mapper-extras')
systemProperty 'es.scripting.use_java_time', 'true'
systemProperty 'es.scripting.update.ctx_in_params', 'false'
}
dependencies {

View File

@ -132,7 +132,7 @@
body:
script:
lang: painless
source: "for (def key : params.keySet()) { ctx._source[key] = params[key]}"
source: "ctx._source.ctx = ctx"
params: { bar: 'xxx' }
- match: { error.root_cause.0.type: "remote_transport_exception" }

View File

@ -48,9 +48,9 @@ import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.UpdateScript;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.threadpool.ThreadPool;
@ -746,7 +746,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
private final Script script;
private final Map<String, Object> params;
private ExecutableScript executable;
private UpdateScript executable;
private Map<String, Object> context;
public ScriptApplier(WorkerBulkByScrollTaskState taskWorker,
@ -766,7 +766,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
return request;
}
if (executable == null) {
ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT);
UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
executable = factory.newInstance(params);
}
if (context == null) {
@ -787,8 +787,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
OpType oldOpType = OpType.INDEX;
context.put("op", oldOpType.toString());
executable.setNextVar("ctx", context);
executable.run();
executable.execute(context);
String newOp = (String) context.remove("op");
if (newOp == null) {

View File

@ -26,8 +26,10 @@ import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.UpdateScript;
import org.junit.Before;
import java.util.Collections;
import java.util.Map;
import java.util.function.Consumer;
@ -54,10 +56,16 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
protected <T extends ActionRequest> T applyScript(Consumer<Map<String, Object>> scriptBody) {
IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar"));
ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0);
ExecutableScript executableScript = new SimpleExecutableScript(scriptBody);
ExecutableScript.Factory factory = params -> executableScript;
when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(factory);
when(scriptService.compile(any(), eq(ExecutableScript.UPDATE_CONTEXT))).thenReturn(factory);
UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) {
@Override
public void execute(Map<String, Object> ctx) {
scriptBody.accept(ctx);
}
};
UpdateScript.Factory factory = params -> updateScript;
ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody);
when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript);
when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory);
AbstractAsyncBulkByScrollAction<Request> action = action(scriptService, request().setScript(mockScript("")));
RequestWrapper<?> result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc);
return (result != null) ? (T) result.self() : null;

View File

@ -0,0 +1 @@
a010e852be8d56efe1906e6da5292e4541239724

View File

@ -1 +0,0 @@
7a37816def72a748416c4ae8b0f6817e30efb99f

View File

@ -0,0 +1 @@
88e0ed90d433a9088528485cd4f59311735d92a4

View File

@ -1 +0,0 @@
ca7437178cdbf7b8bfe0d75c75e3c8eb93925724

View File

@ -0,0 +1 @@
0daec9ac3c4bba5f91b1bc413c651b7a98313982

View File

@ -1 +0,0 @@
3f5dec44f380d6d58bc1c8aec51964fcb5390b60

View File

@ -0,0 +1 @@
f5af81eec04c1da0d6969cff18f360ff379b1bf7

View File

@ -1 +0,0 @@
453bf1d60df0415439095624e0b3e42492ad4716

View File

@ -0,0 +1 @@
9e649088ee298293aa95a05391dff9cb0582648e

View File

@ -1 +0,0 @@
70095a45257bca9f46629b5fb6cedf9eff5e2b07

View File

@ -0,0 +1 @@
47fb370054ba7413d050f13c177edf01180c31ca

View File

@ -1 +0,0 @@
7199d6962d268b7877f7b5160e98e4ff21cce5c7

View File

@ -0,0 +1 @@
bc0708acbac195772b67b5ad2e9c4683d27ff450

View File

@ -1 +0,0 @@
12aff508d39d206a1aead5013ecd11882062eb06

View File

@ -28,6 +28,7 @@ import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.apache.logging.log4j.core.appender.CountingNoOpAppender;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Randomness;
@ -360,7 +361,6 @@ public class EvilLoggerTests extends ESTestCase {
}
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32546")
public void testNoNodeNameWarning() throws IOException, UserException {
setupLogging("no_node_name");
@ -376,7 +376,11 @@ public class EvilLoggerTests extends ESTestCase {
+ "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users "
+ "who customize log4j2.properties but will stop this behavior in 7.0. You should manually replace "
+ "`%node_name` with `\\[%node_name\\]%marker ` in these locations:");
assertThat(events.get(1), endsWith("no_node_name/log4j2.properties"));
if (Constants.WINDOWS) {
assertThat(events.get(1), endsWith("no_node_name\\log4j2.properties"));
} else {
assertThat(events.get(1), endsWith("no_node_name/log4j2.properties"));
}
}
private void setupLogging(final String config) throws IOException, UserException {

View File

@ -0,0 +1 @@
c547b30525ad80d0ceeaa40c2d3a901c7e76fd46

View File

@ -1 +0,0 @@
d27958843ca118db2ffd2c242ae3761bd5a47328

View File

@ -0,0 +1 @@
9c327295d54d5abd2684e00c3aefe58aa1caace7

View File

@ -1 +0,0 @@
7ea220ba8e4accb8b04e280463042ad470e23bc0

View File

@ -0,0 +1 @@
73dd7703a94ec2357581f65ee7c1c4d618ff310f

View File

@ -1 +0,0 @@
471096d6e92338b208aa91f3a85feb2f9cfc4afd

View File

@ -0,0 +1 @@
1c3802fa30990a1758f2df19d17fe2c95fc45870

View File

@ -1 +0,0 @@
f0af947c60d24f779c22f774e81ebd7dd91cc932

View File

@ -0,0 +1 @@
8d7abdbb7900d7e6a76c391d8be07217c0d882ca

View File

@ -1 +0,0 @@
fbc83ac5a0139ed7e7faf6c95a2718f46f28c641

View File

@ -0,0 +1 @@
011f78ae9d9a386fcf20ceea29ba30e75fb512e8

View File

@ -1 +0,0 @@
30adfe493982b0db059dc243e269eea38d850d46

View File

@ -0,0 +1 @@
c3dd461a7cebdcacc77304660218513e10f89adb

View File

@ -1 +0,0 @@
656f304261d9aad05070fb68593beffafe9147e3

View File

@ -0,0 +1 @@
d63101181708d78eccc441b0d1193dd91d1a0bf1

View File

@ -1 +0,0 @@
8bf22ad81a7480c255b55bada401eb131bfdb4df

View File

@ -0,0 +1 @@
22e56fbd44d6a47d7dddbdda3c17ce22ad0a6680

View File

@ -1 +0,0 @@
edb3de4d68a34c1e1ca08f79fe4d103b10e98ad1

View File

@ -0,0 +1 @@
36b38a1d71045f5bee5dc40526f8d57084dbdc00

View File

@ -1 +0,0 @@
7ece30d5f1e18d96f61644451c858c3d9960558f

View File

@ -0,0 +1 @@
21eb8b111bcb94f4abb8c6402dfd10f51ecc0b38

View File

@ -1 +0,0 @@
ad3bd0c2ed96556193c7215bef328e689d0b157f

View File

@ -0,0 +1 @@
d60081c5641ed21aea82d5d0976b40e1f184c8e5

View File

@ -1 +0,0 @@
8a6bd97e39ee5af60126adbe8c8375dc41b1ea8e

View File

@ -0,0 +1 @@
2d42b373546aa8923d25e4e9a673dd186064f9bd

View File

@ -1 +0,0 @@
07e748d2d80000a7a213f3405b82b6e26b452948

View File

@ -0,0 +1 @@
7f31607959e5a2ed84ab2d9a007a3f76e9a2d38c

View File

@ -1 +0,0 @@
fd737bd5562f3943618ee7e73a0aaffb6319fdb2

View File

@ -0,0 +1 @@
f7619348f0619867c52f4801531c70358f49873a

View File

@ -1 +0,0 @@
ff3f260d1dc8c18bc67f3c33aa84a0ad290daac5

View File

@ -19,6 +19,11 @@
package org.elasticsearch.action.update;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.LongSupplier;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.DocWriteResponse;
@ -42,21 +47,22 @@ import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.UpdateScript;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.LongSupplier;
import static org.elasticsearch.common.Booleans.parseBoolean;
/**
* Helper for translating an update request to an index, delete request or update response.
*/
public class UpdateHelper extends AbstractComponent {
/** Whether scripts should add the ctx variable to the params map. */
private static final boolean CTX_IN_PARAMS =
parseBoolean(System.getProperty("es.scripting.update.ctx_in_params"), true);
private final ScriptService scriptService;
public UpdateHelper(Settings settings, ScriptService scriptService) {
@ -279,10 +285,18 @@ public class UpdateHelper extends AbstractComponent {
private Map<String, Object> executeScript(Script script, Map<String, Object> ctx) {
try {
if (scriptService != null) {
ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT);
ExecutableScript executableScript = factory.newInstance(script.getParams());
executableScript.setNextVar(ContextFields.CTX, ctx);
executableScript.run();
UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
final Map<String, Object> params;
if (CTX_IN_PARAMS) {
params = new HashMap<>(script.getParams());
params.put(ContextFields.CTX, ctx);
deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. " +
"Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage.");
} else {
params = script.getParams();
}
UpdateScript executableScript = factory.newInstance(params);
executableScript.execute(ctx);
}
} catch (Exception e) {
throw new IllegalArgumentException("failed to execute script", e);
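
The hunk above replaces the generic ExecutableScript (where ctx was handed over via setNextVar and the script then run) with the dedicated UpdateScript context, copying ctx into the params map only while the legacy es.scripting.update.ctx_in_params flag is still enabled. A minimal caller-side sketch of the two invocation paths, using only the calls visible in this hunk (the surrounding plumbing is assumed):

// Before: generic ExecutableScript compiled against the shared UPDATE_CONTEXT
ExecutableScript.Factory oldFactory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT);
ExecutableScript oldScript = oldFactory.newInstance(script.getParams());
oldScript.setNextVar("ctx", ctx);   // ctx handed over as a named variable
oldScript.run();

// After: dedicated UpdateScript context, ctx passed straight to execute()
UpdateScript.Factory newFactory = scriptService.compile(script, UpdateScript.CONTEXT);
UpdateScript newScript = newFactory.newInstance(script.getParams());
newScript.execute(ctx);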

View File

@ -46,7 +46,4 @@ public interface ExecutableScript {
}
ScriptContext<Factory> CONTEXT = new ScriptContext<>("executable", Factory.class);
// TODO: remove these once each has its own script interface
ScriptContext<Factory> UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class);
}

View File

@ -46,10 +46,10 @@ public class ScriptModule {
SearchScript.SCRIPT_SORT_CONTEXT,
SearchScript.TERMS_SET_QUERY_CONTEXT,
ExecutableScript.CONTEXT,
UpdateScript.CONTEXT,
BucketAggregationScript.CONTEXT,
BucketAggregationSelectorScript.CONTEXT,
SignificantTermsHeuristicScoreScript.CONTEXT,
ExecutableScript.UPDATE_CONTEXT,
IngestScript.CONTEXT,
FilterScript.CONTEXT,
SimilarityScript.CONTEXT,

View File

@ -285,7 +285,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
// TODO: fix this through some API or something, that's wrong
// special exception to prevent expressions from compiling as update or mapping scripts
boolean expression = "expression".equals(lang);
boolean notSupported = context.name.equals(ExecutableScript.UPDATE_CONTEXT.name);
boolean notSupported = context.name.equals(UpdateScript.CONTEXT.name);
if (expression && notSupported) {
throw new UnsupportedOperationException("scripts of type [" + script.getType() + "]," +
" operation [" + context.name + "] and lang [" + lang + "] are not supported");

View File

@ -0,0 +1,52 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import java.util.Map;
/**
* An update script.
*/
public abstract class UpdateScript {
public static final String[] PARAMETERS = { "ctx" };
/** The context used to compile {@link UpdateScript} factories. */
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("update", Factory.class);
/** The generic runtime parameters for the script. */
private final Map<String, Object> params;
public UpdateScript(Map<String, Object> params) {
this.params = params;
}
/** Return the parameters for this script. */
public Map<String, Object> getParams() {
return params;
}
public abstract void execute(Map<String, Object> ctx);
public interface Factory {
UpdateScript newInstance(Map<String, Object> params);
}
}
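
A script engine that supports this new context returns a Factory producing UpdateScript instances. A minimal hand-written sketch follows (the _source and op keys mirror their usages elsewhere in this commit; the transformation itself is hypothetical):

UpdateScript.Factory factory = params -> new UpdateScript(params) {
    @Override
    public void execute(Map<String, Object> ctx) {
        // Mutate the document source in place, as an update script would.
        @SuppressWarnings("unchecked")
        Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
        source.put("touched", true);
        // Keep the default operation for the document.
        ctx.put("op", "index");
    }
};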

View File

@ -180,7 +180,7 @@ public class BucketSortPipelineAggregator extends PipelineAggregator {
private static class TopNPriorityQueue extends PriorityQueue<ComparableBucket> {
private TopNPriorityQueue(int n) {
super(n, false);
super(n);
}
@Override

View File

@ -167,7 +167,7 @@ public class ScriptServiceTests extends ESTestCase {
assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT);
}
@ -187,7 +187,7 @@ public class ScriptServiceTests extends ESTestCase {
assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
assertCompileRejected("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
assertCompileRejected("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT);
}
public void testAllowNoScriptTypeSettings() throws IOException {

View File

@ -93,6 +93,7 @@ public class UpdateIT extends ESIntegTestCase {
}
Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
params.remove("ctx");
source.putAll(params);
return ctx;

View File

@ -96,6 +96,18 @@ public class MockScriptEngine implements ScriptEngine {
}
};
return context.factoryClazz.cast(factory);
} else if (context.instanceClazz.equals(UpdateScript.class)) {
UpdateScript.Factory factory = parameters -> new UpdateScript(parameters) {
@Override
public void execute(Map<String, Object> ctx) {
final Map<String, Object> vars = new HashMap<>();
vars.put("ctx", ctx);
vars.put("params", parameters);
vars.putAll(parameters);
script.apply(vars);
}
};
return context.factoryClazz.cast(factory);
} else if (context.instanceClazz.equals(BucketAggregationScript.class)) {
BucketAggregationScript.Factory factory = parameters -> new BucketAggregationScript(parameters) {
@Override

View File

@ -71,7 +71,7 @@ public class OverallBucketsProvider {
static class TopNScores extends PriorityQueue<Double> {
TopNScores(int n) {
super(n, false);
super(n);
}
@Override

View File

@ -0,0 +1 @@
73dd7703a94ec2357581f65ee7c1c4d618ff310f

View File

@ -1 +0,0 @@
471096d6e92338b208aa91f3a85feb2f9cfc4afd

View File

@ -62,20 +62,10 @@ public class Locate extends ScalarFunction {
@Override
protected ProcessorDefinition makeProcessorDefinition() {
LocateFunctionProcessorDefinition processorDefinition;
if (start == null) {
processorDefinition = new LocateFunctionProcessorDefinition(location(), this,
ProcessorDefinitions.toProcessorDefinition(pattern),
ProcessorDefinitions.toProcessorDefinition(source));
}
else {
processorDefinition = new LocateFunctionProcessorDefinition(location(), this,
ProcessorDefinitions.toProcessorDefinition(pattern),
ProcessorDefinitions.toProcessorDefinition(source),
ProcessorDefinitions.toProcessorDefinition(start));
}
return processorDefinition;
return new LocateFunctionProcessorDefinition(location(), this,
ProcessorDefinitions.toProcessorDefinition(pattern),
ProcessorDefinitions.toProcessorDefinition(source),
start == null ? null : ProcessorDefinitions.toProcessorDefinition(start));
}
@Override

View File

@ -21,20 +21,12 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition {
public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern,
ProcessorDefinition source, ProcessorDefinition start) {
super(location, expression, Arrays.asList(pattern, source, start));
super(location, expression, start == null ? Arrays.asList(pattern, source) : Arrays.asList(pattern, source, start));
this.pattern = pattern;
this.source = source;
this.start = start;
}
public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern,
ProcessorDefinition source) {
super(location, expression, Arrays.asList(pattern, source));
this.pattern = pattern;
this.source = source;
this.start = null;
}
@Override
public final ProcessorDefinition replaceChildren(List<ProcessorDefinition> newChildren) {
int childrenSize = newChildren.size();
@ -68,9 +60,6 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition {
protected ProcessorDefinition replaceChildren(ProcessorDefinition newPattern, ProcessorDefinition newSource,
ProcessorDefinition newStart) {
if (newStart == null) {
return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource);
}
return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource, newStart);
}

View File

@ -38,50 +38,34 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
return (LocateFunctionProcessorDefinition) (new Locate(randomLocation(),
randomStringLiteral(),
randomStringLiteral(),
frequently() ? randomIntLiteral() : null)
randomFrom(true, false) ? randomIntLiteral() : null)
.makeProcessorDefinition());
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32554")
@Override
public void testTransform() {
// test transforming only the properties (location, expression),
// skipping the children (the two parameters of the binary function) which are tested separately
LocateFunctionProcessorDefinition b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomLocateFunctionExpression());
LocateFunctionProcessorDefinition newB;
if (b1.start() == null) {
newB = new LocateFunctionProcessorDefinition(
b1.location(),
newExpression,
b1.pattern(),
b1.source());
} else {
newB = new LocateFunctionProcessorDefinition(
b1.location(),
newExpression,
b1.pattern(),
b1.source(),
b1.start());
}
LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition(
b1.location(),
newExpression,
b1.pattern(),
b1.source(),
b1.start());
assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));
LocateFunctionProcessorDefinition b2 = randomInstance();
Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation());
if (b2.start() == null) {
newB = new LocateFunctionProcessorDefinition(
newLoc,
b2.expression(),
b2.pattern(),
b2.source());
} else {
newB = new LocateFunctionProcessorDefinition(
newLoc,
b2.expression(),
b2.pattern(),
b2.source(),
b2.start());
}
newB = new LocateFunctionProcessorDefinition(
newLoc,
b2.expression(),
b2.pattern(),
b2.source(),
b2.start());
assertEquals(newB,
b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class));
}
@ -93,15 +77,9 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()));
ProcessorDefinition newStart;
LocateFunctionProcessorDefinition newB;
if (b.start() == null) {
newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source());
newStart = null;
}
else {
newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source(), b.start());
newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()));
}
LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition(
b.location(), b.expression(), b.pattern(), b.source(), b.start());
newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()));
LocateFunctionProcessorDefinition transformed = null;
// generate all the combinations of possible children modifications and test all of them
@ -132,7 +110,8 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(),
() -> randomStringLiteral())) : f.pattern(),
comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(),
() -> randomStringLiteral())) : f.source()));
() -> randomStringLiteral())) : f.source(),
null));
}
}
} else {
@ -155,13 +134,7 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase
@Override
protected LocateFunctionProcessorDefinition copy(LocateFunctionProcessorDefinition instance) {
return instance.start() == null ?
new LocateFunctionProcessorDefinition(instance.location(),
instance.expression(),
instance.pattern(),
instance.source())
:
new LocateFunctionProcessorDefinition(instance.location(),
return new LocateFunctionProcessorDefinition(instance.location(),
instance.expression(),
instance.pattern(),
instance.source(),

View File

@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;
import java.io.IOException;
import java.util.Locale;
public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase<StringProcessor> {
public static StringProcessor randomStringFunctionProcessor() {
@ -73,6 +74,19 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas
stringCharInputValidation(proc);
}
public void testLCaseWithTRLocale() {
Locale.setDefault(Locale.forLanguageTag("tr"));
StringProcessor proc = new StringProcessor(StringOperation.LCASE);
// ES-SQL is not locale sensitive (so far). The obvious test for this is the Turkish language, uppercase letter I conversion
// in non-Turkish locale the lowercasing would create i and an additional dot, while in Turkish Locale it would only create "i"
// unicode 0069 = i
assertEquals("\u0069\u0307", proc.process("\u0130"));
// unicode 0049 = I (regular capital letter i)
// in Turkish locale this would be lowercased to a "i" without dot (unicode 0131)
assertEquals("\u0069", proc.process("\u0049"));
}
public void testUCase() {
StringProcessor proc = new StringProcessor(StringOperation.UCASE);
@ -81,9 +95,21 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas
assertEquals("SOMELOWERCASE", proc.process("SomeLoweRCasE"));
assertEquals("FULLUPPERCASE", proc.process("FULLUPPERCASE"));
assertEquals("A", proc.process('a'));
// special uppercasing for small letter sharp "s" resulting "SS"
assertEquals("\u0053\u0053", proc.process("\u00df"));
stringCharInputValidation(proc);
}
public void testUCaseWithTRLocale() {
Locale.setDefault(Locale.forLanguageTag("tr"));
StringProcessor proc = new StringProcessor(StringOperation.UCASE);
// ES-SQL is not Locale sensitive (so far).
// in Turkish locale, small letter "i" is uppercased to "I" with a dot above (unicode 130), otherwise in "i" (unicode 49)
assertEquals("\u0049", proc.process("\u0069"));
}
public void testLength() {
StringProcessor proc = new StringProcessor(StringOperation.LENGTH);
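
The two new *WithTRLocale tests pin down the classic Turkish dotted/dotless i problem: with the JVM default locale set to Turkish, plain String case conversion produces different code points than in other locales, while ES-SQL's StringProcessor is expected to stay locale-insensitive (presumably by converting against a fixed root locale; that detail is not shown in this diff). A small stand-alone illustration of the underlying JDK behaviour:

import java.util.Locale;

public class TurkishCaseDemo {
    public static void main(String[] args) {
        // Lowercase i uppercased under the Turkish locale becomes dotted capital İ (U+0130) ...
        System.out.println("i".toUpperCase(new Locale("tr")));  // İ
        // ... while a locale-insensitive conversion yields plain capital I (U+0049),
        // which is what the UCASE assertion above expects from ES-SQL.
        System.out.println("i".toUpperCase(Locale.ROOT));       // I
    }
}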

View File

@ -7,12 +7,14 @@ package org.elasticsearch.xpack.qa.sql.jdbc;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.junit.Assume;
import org.junit.ClassRule;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
/**
* Tests comparing sql queries executed against our jdbc client
@ -25,7 +27,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
public static LocalH2 H2 = new LocalH2((c) -> c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'"));
@ParametersFactory(argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
public static List<Object[]> readScriptSpec() throws Exception {
Parser parser = specParser();
List<Object[]> tests = new ArrayList<>();
tests.addAll(readScriptSpec("/select.sql-spec", parser));
@ -35,6 +37,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
tests.addAll(readScriptSpec("/agg.sql-spec", parser));
tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser));
tests.addAll(readScriptSpec("/string-functions.sql-spec", parser));
tests.addAll(readScriptSpec("/case-functions.sql-spec", parser));
return tests;
}
@ -56,6 +59,12 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
@Override
protected final void doTest() throws Throwable {
boolean goodLocale = !(Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr").build())
|| Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr-TR").build()));
if (fileName.startsWith("case-functions")) {
Assume.assumeTrue(goodLocale);
}
try (Connection h2 = H2.get();
Connection es = esJdbc()) {

View File

@ -0,0 +1,13 @@
// Next 4 SELECTs in this file are related to https://github.com/elastic/elasticsearch/issues/32589
// H2 is Locale sensitive, while ES-SQL is not (so far)
selectInsertWithLcaseAndLengthWithOrderBy
SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10;
upperCasingTheSecondLetterFromTheRightFromFirstName
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;
upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;
upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;

View File

@ -22,7 +22,8 @@ SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp"
ltrimFilter
SELECT LTRIM(first_name) lt FROM "test_emp" WHERE LTRIM(first_name) = 'Bob';
//Unsupported yet
// Unsupported yet
// Functions combined with 'LIKE' should perform the match inside a Painless script, whereas at the moment it's handled as a regular `match` query in ES.
//ltrimFilterWithLike
//SELECT LTRIM("first_name") lt FROM "test_emp" WHERE LTRIM("first_name") LIKE '%a%';
@ -93,10 +94,6 @@ SELECT "first_name" orig, REPEAT("first_name",2) reps FROM "test_emp" WHERE ASCI
selectInsertWithLcase
SELECT "first_name" orig, INSERT("first_name",2,1000,LCASE("first_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC LIMIT 10;
// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
// selectInsertWithLcaseAndLengthWithOrderBy
//SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10;
selectInsertWithUcaseWithGroupByAndOrderBy
SELECT INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) modified, COUNT(*) count FROM "test_emp" WHERE ASCII("first_name")=65 GROUP BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ORDER BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ASC LIMIT 10;
@ -141,14 +138,3 @@ SELECT RIGHT("first_name",2) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;
selectRightWithGroupByAndOrderBy
SELECT RIGHT("first_name",2) f, COUNT(*) count FROM "test_emp" GROUP BY RIGHT("first_name",2) ORDER BY RIGHT("first_name",2) LIMIT 10;
// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
// upperCasingTheSecondLetterFromTheRightFromFirstName
// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;
// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
// upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy
// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;
upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere
SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;