Merge branch 'master' into docs/add_console_to_search_request_options
commit eca53d909c

@@ -169,6 +169,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
     }

     public BulkRequest add(DeleteRequest request, @Nullable Object payload) {
+        Objects.requireNonNull(request, "'request' must not be null");
         requests.add(request);
         addPayload(payload);
         sizeInBytes += REQUEST_OVERHEAD;
@@ -88,15 +88,13 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
     }

     /**
-     * Elasticsearch 3.0 no longer supports indices with pre Lucene v5.0 (Elasticsearch v2.0.0.beta1) segments. All indices
-     * that were created before Elasticsearch v2.0.0.beta1 should be upgraded using upgrade API before they can
-     * be open by this version of elasticsearch.
-     */
+     * Elasticsearch 5.0 no longer supports indices with pre Lucene v5.0 (Elasticsearch v2.0.0.beta1) segments. All indices
+     * that were created before Elasticsearch v2.0.0.beta1 should be reindexed in Elasticsearch 2.x
+     * before they can be opened by this version of elasticsearch. */
     private void checkSupportedVersion(IndexMetaData indexMetaData) {
         if (indexMetaData.getState() == IndexMetaData.State.OPEN && isSupportedVersion(indexMetaData) == false) {
-            throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created before v2.0.0.beta1 and wasn't upgraded."
-                + " This index should be opened using a version before " + Version.CURRENT.minimumCompatibilityVersion()
-                + " and upgraded using the upgrade API.");
+            throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created before v2.0.0.beta1."
+                + " It should be reindexed in Elasticsearch 2.x before upgrading to " + Version.CURRENT + ".");
         }
     }

@@ -210,4 +210,11 @@ public class BulkRequestTests extends ESTestCase {
                 "script or doc is missing",
                 "source is missing"));
     }
+
+    public void testCannotAddNullRequests() throws Exception {
+        BulkRequest bulkRequest = new BulkRequest();
+        expectThrows(NullPointerException.class, () -> bulkRequest.add((IndexRequest) null));
+        expectThrows(NullPointerException.class, () -> bulkRequest.add((UpdateRequest) null));
+        expectThrows(NullPointerException.class, () -> bulkRequest.add((DeleteRequest) null));
+    }
 }
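Aside on the change above: the guard and the new test follow the standard JDK fail-fast idiom. A minimal standalone sketch of that behavior, using a hypothetical NullGuardDemo class on the plain JDK (not Elasticsearch code), looks like this:

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public class NullGuardDemo {
    private final List<Object> requests = new ArrayList<>();

    // Same pattern as the BulkRequest#add change above: reject null before touching any state.
    public NullGuardDemo add(Object request) {
        Objects.requireNonNull(request, "'request' must not be null");
        requests.add(request);
        return this;
    }

    public static void main(String[] args) {
        try {
            new NullGuardDemo().add(null);
        } catch (NullPointerException e) {
            System.out.println(e.getMessage()); // prints: 'request' must not be null
        }
    }
}

This is the failure mode testCannotAddNullRequests asserts for null IndexRequest, UpdateRequest and DeleteRequest arguments.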
@@ -11,21 +11,22 @@ The idea is to use the results from the previous page to help the retrieval of t
 Suppose that the query to retrieve the first page looks like this:
 [source,js]
 --------------------------------------------------
-curl -XGET 'localhost:9200/twitter/tweet/_search'
+GET twitter/tweet/_search
 {
-    size: "10"
+    "size": 10,
     "query": {
         "match" : {
             "title" : "elasticsearch"
         }
     },
     "sort": [
         {"age": "asc"},
         {"date": "asc"},
         {"_uid": "desc"}
     ]
 }
-'
 --------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]

 NOTE: A field with one unique value per document should be used as the tiebreaker of the sort specification.
 Otherwise the sort order for documents that have the same sort values would be undefined. The recommended way is to use
@@ -38,22 +39,23 @@ For instance we can use the `sort values` of the last document and pass it to `s

 [source,js]
 --------------------------------------------------
-curl -XGET 'localhost:9200/twitter/tweet/_search'
+GET twitter/tweet/_search
 {
-    "size": 10
+    "size": 10,
     "query": {
         "match" : {
             "title" : "elasticsearch"
         }
     },
-    "search_after": [18, "tweet#654323"],
+    "search_after": [1463538857, "tweet#654323"],
     "sort": [
         {"age": "asc"},
         {"date": "asc"},
         {"_uid": "desc"}
     ]
 }
-'
 --------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]

 NOTE: The parameter `from` must be set to 0 (or -1) when `search_after` is used.
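The two requests above show a single page transition. As a schematic illustration of the paging loop the documentation describes, here is a hedged Java sketch; SearchClient and Page are hypothetical stand-ins for illustration only, not an Elasticsearch API:

import java.util.List;

public class SearchAfterLoop {

    // Hypothetical minimal client abstraction, for illustration only.
    interface SearchClient {
        Page search(int size, Object[] searchAfter);
    }

    // One sort-value tuple per hit, e.g. [age, date, _uid], in the same order as the "sort" clause.
    interface Page {
        List<Object[]> hitSortValues();
    }

    static void drainAllPages(SearchClient client, int pageSize) {
        Object[] searchAfter = null; // first page: no search_after
        while (true) {
            Page page = client.search(pageSize, searchAfter);
            List<Object[]> hits = page.hitSortValues();
            if (hits.isEmpty()) {
                break; // no more results
            }
            // ... process the hits of this page ...
            // the next request's search_after is the sort values of the last hit returned
            searchAfter = hits.get(hits.size() - 1);
        }
    }
}

As the NOTE above says, `from` stays at 0 (or -1) on every request; only the search_after values advance.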
@@ -5,8 +5,33 @@ Allows to add one or more sort on specific fields. Each sort can be
 reversed as well. The sort is defined on a per field level, with special
 field name for `_score` to sort by score, and `_doc` to sort by index order.

+Assuming the following index mapping:
+
+[source,js]
+--------------------------------------------------
+PUT /my_index
+{
+    "mappings": {
+        "my_type": {
+            "properties": {
+                "post_date": { "type": "date" },
+                "user": {
+                    "type": "keyword"
+                },
+                "name": {
+                    "type": "keyword"
+                },
+                "age": { "type": "integer" }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+
 [source,js]
 --------------------------------------------------
+GET /my_index/my_type/_search
 {
     "sort" : [
         { "post_date" : {"order" : "asc"}},
@@ -20,6 +45,8 @@ field name for `_score` to sort by score, and `_doc` to sort by index order.
     }
 }
 --------------------------------------------------
+// CONSOLE
+// TEST[continued]

 NOTE: `_doc` has no real use-case besides being the most efficient sort order.
 So if you don't care about the order in which documents are returned, then you
@@ -60,20 +87,28 @@ to. The `mode` option can have the following values:
 ===== Sort mode example usage

 In the example below the field price has multiple prices per document.
-In this case the result hits will be sort by price ascending based on
+In this case the result hits will be sorted by price ascending based on
 the average price per document.

 [source,js]
 --------------------------------------------------
-curl -XPOST 'localhost:9200/_search' -d '{
+PUT /my_index/my_type/1?refresh
+{
+    "product": "chocolate",
+    "price": [20, 4]
+}
+
+POST /_search
+{
     "query" : {
-        ...
+        "term" : { "product" : "chocolate" }
     },
     "sort" : [
         {"price" : {"order" : "asc", "mode" : "avg"}}
     ]
-}'
+}
 --------------------------------------------------
+// CONSOLE

 [[nested-sorting]]
 ==== Sorting within nested objects.
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/_search' -d '{
|
||||
POST /_search
|
||||
{
|
||||
"query" : {
|
||||
...
|
||||
"term" : { "product" : "chocolate" }
|
||||
},
|
||||
"sort" : [
|
||||
{
|
||||
|
@@ -117,8 +153,9 @@ curl -XPOST 'localhost:9200/_search' -d '{
             }
         }
     ]
-}'
+}
 --------------------------------------------------
+// CONSOLE

 Nested sorting is also supported when sorting by
 scripts and sorting by geo distance.
@@ -132,15 +169,17 @@ will be used for missing docs as the sort value). For example:

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
-        { "price" : {"missing" : "_last"} },
+        { "price" : {"missing" : "_last"} }
     ],
     "query" : {
-        "term" : { "user" : "kimchy" }
+        "term" : { "product" : "chocolate" }
     }
 }
 --------------------------------------------------
+// CONSOLE

 NOTE: If a nested inner object doesn't match with
 the `nested_filter` then a missing value is used.
@@ -155,15 +194,17 @@ example of how it can be used:

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
-        { "price" : {"unmapped_type" : "long"} },
+        { "price" : {"unmapped_type" : "long"} }
     ],
     "query" : {
-        "term" : { "user" : "kimchy" }
+        "term" : { "product" : "chocolate" }
     }
 }
 --------------------------------------------------
+// CONSOLE

 If any of the indices that are queried doesn't have a mapping for `price`
 then Elasticsearch will handle it as if there was a mapping of type
@@ -176,6 +217,7 @@ Allow to sort by `_geo_distance`. Here is an example:

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
         {
@@ -193,6 +235,7 @@ Allow to sort by `_geo_distance`. Here is an example:
     }
 }
 --------------------------------------------------
+// CONSOLE



@@ -209,6 +252,7 @@ The following formats are supported in providing the coordinates:

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
         {
@@ -227,6 +271,7 @@ The following formats are supported in providing the coordinates:
     }
 }
 --------------------------------------------------
+// CONSOLE

 ===== Lat Lon as String

@@ -234,6 +279,7 @@ Format in `lat,lon`.

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
         {
@@ -249,11 +295,13 @@ Format in `lat,lon`.
     }
 }
 --------------------------------------------------
+// CONSOLE

 ===== Geohash

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
         {
@@ -269,6 +317,7 @@ Format in `lat,lon`.
     }
 }
 --------------------------------------------------
+// CONSOLE

 ===== Lat Lon as Array

@@ -277,6 +326,7 @@ conform with http://geojson.org/[GeoJSON].

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "sort" : [
         {
@@ -292,6 +342,7 @@ conform with http://geojson.org/[GeoJSON].
     }
 }
 --------------------------------------------------
+// CONSOLE


 ==== Multiple reference points
@@ -316,9 +367,10 @@ Allow to sort based on custom scripts, here is an example:

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "query" : {
-        ....
+        "term" : { "user" : "kimchy" }
     },
     "sort" : {
         "_script" : {
@@ -334,6 +386,7 @@ Allow to sort based on custom scripts, here is an example:
     }
 }
 --------------------------------------------------
+// CONSOLE


 ==== Track Scores
@@ -343,6 +396,7 @@ When sorting on a field, scores are not computed. By setting

 [source,js]
 --------------------------------------------------
+GET /_search
 {
     "track_scores": true,
     "sort" : [
@@ -355,6 +409,7 @@ When sorting on a field, scores are not computed. By setting
     }
 }
 --------------------------------------------------
+// CONSOLE

 ==== Memory Considerations

@@ -29,6 +29,11 @@ import org.objectweb.asm.Opcodes;
 import org.objectweb.asm.commons.GeneratorAdapter;
 import org.objectweb.asm.commons.Method;

+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.List;
+
 import static org.elasticsearch.painless.WriterConstants.ADDEXACT_INT;
 import static org.elasticsearch.painless.WriterConstants.ADDEXACT_LONG;
 import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_DOUBLE;
@@ -48,6 +53,8 @@ import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_DOUBLE;
 import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_FLOAT;
 import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_INT;
 import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_LONG;
+import static org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE;
+import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS;
 import static org.elasticsearch.painless.WriterConstants.MULEXACT_INT;
 import static org.elasticsearch.painless.WriterConstants.MULEXACT_LONG;
 import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_DOUBLE;
@@ -66,6 +73,7 @@ import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_ST
 import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_CONSTRUCTOR;
 import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TOSTRING;
 import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TYPE;
+import static org.elasticsearch.painless.WriterConstants.STRING_TYPE;
 import static org.elasticsearch.painless.WriterConstants.SUBEXACT_INT;
 import static org.elasticsearch.painless.WriterConstants.SUBEXACT_LONG;
 import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_DOUBLE;
@@ -97,6 +105,9 @@ import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_FLOAT
  */
 public final class MethodWriter extends GeneratorAdapter {

+    private final Deque<List<org.objectweb.asm.Type>> stringConcatArgs = (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE == null) ?
+        null : new ArrayDeque<>();
+
     MethodWriter(int access, Method method, org.objectweb.asm.Type[] exceptions, ClassVisitor cv) {
         super(Opcodes.ASM5, cv.visitMethod(access, method.getName(), method.getDescriptor(), null, getInternalNames(exceptions)),
             access, method.getName(), method.getDescriptor());
@@ -173,13 +184,32 @@ public final class MethodWriter extends GeneratorAdapter {
     }

     public void writeNewStrings() {
+        if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) {
+            // Java 9+: we just push our argument collector onto deque
+            stringConcatArgs.push(new ArrayList<>());
+        } else {
+            // Java 8: create a StringBuilder in bytecode
             newInstance(STRINGBUILDER_TYPE);
             dup();
             invokeConstructor(STRINGBUILDER_TYPE, STRINGBUILDER_CONSTRUCTOR);
+        }
     }

-    public void writeAppendStrings(final Sort sort) {
-        switch (sort) {
+    public void writeAppendStrings(final Type type) {
+        if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) {
+            // Java 9+: record type information
+            stringConcatArgs.peek().add(type.type);
+            // prevent too many concat args.
+            // If there are too many, do the actual concat:
+            if (stringConcatArgs.peek().size() >= MAX_INDY_STRING_CONCAT_ARGS) {
+                writeToStrings();
+                writeNewStrings();
+                // add the return value type as new first param for next concat:
+                stringConcatArgs.peek().add(STRING_TYPE);
+            }
+        } else {
+            // Java 8: push a StringBuilder append
+            switch (type.sort) {
                 case BOOL: invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); break;
                 case CHAR: invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); break;
                 case BYTE:
@@ -192,10 +222,19 @@ public final class MethodWriter extends GeneratorAdapter {
                 default: invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT);
             }
+        }
     }

     public void writeToStrings() {
+        if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) {
+            // Java 9+: use type information and push invokeDynamic
+            final String desc = org.objectweb.asm.Type.getMethodDescriptor(STRING_TYPE,
+                stringConcatArgs.pop().stream().toArray(org.objectweb.asm.Type[]::new));
+            invokeDynamic("concat", desc, INDY_STRING_CONCAT_BOOTSTRAP_HANDLE);
+        } else {
+            // Java 8: call toString() on StringBuilder
             invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_TOSTRING);
+        }
     }

     public void writeBinaryInstruction(final CompilerSettings settings, final Definition definition,
                                        final String location,
@@ -450,4 +489,12 @@ public final class MethodWriter extends GeneratorAdapter {
         }
     }

+    @Override
+    public void visitEnd() {
+        if (stringConcatArgs != null && !stringConcatArgs.isEmpty()) {
+            throw new IllegalStateException("String concat bytecode not completed.");
+        }
+        super.visitEnd();
+    }
+
 }
@@ -27,6 +27,7 @@ import org.objectweb.asm.Type;
 import org.objectweb.asm.commons.Method;

+import java.lang.invoke.CallSite;
 import java.lang.invoke.MethodHandle;
 import java.lang.invoke.MethodHandles;
 import java.lang.invoke.MethodType;
 import java.util.Map;
@@ -81,6 +82,27 @@ public final class WriterConstants {
     public final static Method DEF_GT_CALL = getAsmMethod(boolean.class, "gt" , Object.class, Object.class);
     public final static Method DEF_GTE_CALL = getAsmMethod(boolean.class, "gte", Object.class, Object.class);

+    /** dynamic invokedynamic bootstrap for indy string concats (Java 9+) */
+    public final static Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE;
+    static {
+        Handle bs;
+        try {
+            final Class<?> factory = Class.forName("java.lang.invoke.StringConcatFactory");
+            final String methodName = "makeConcat";
+            final MethodType type = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class);
+            // ensure it is there:
+            MethodHandles.publicLookup().findStatic(factory, methodName, type);
+            bs = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(factory), methodName, type.toMethodDescriptorString());
+        } catch (ReflectiveOperationException e) {
+            // not Java 9 - we set it null, so MethodWriter uses StringBuilder:
+            bs = null;
+        }
+        INDY_STRING_CONCAT_BOOTSTRAP_HANDLE = bs;
+    }
+
+    public final static int MAX_INDY_STRING_CONCAT_ARGS = 200;
+
+    public final static Type STRING_TYPE = Type.getType(String.class);
     public final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class);

     public final static Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, "<init>");
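The static initializer above only checks that java.lang.invoke.StringConcatFactory#makeConcat exists and records an ASM Handle for it. As a rough sketch of what that bootstrap produces at run time (requires a Java 9+ JVM; the class name ConcatBootstrapDemo and the (String, int) -> String shape are illustrative, not taken from the patch):

import java.lang.invoke.CallSite;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.invoke.StringConcatFactory;

public class ConcatBootstrapDemo {
    public static void main(String[] args) throws Throwable {
        // Ask the factory for a concatenation call site shaped (String, int) -> String,
        // the same kind of call site an invokedynamic "concat" instruction would request.
        CallSite cs = StringConcatFactory.makeConcat(
            MethodHandles.lookup(), "concat",
            MethodType.methodType(String.class, String.class, int.class));
        String result = (String) cs.dynamicInvoker().invoke("count = ", 42);
        System.out.println(result); // count = 42
    }
}

When the class is absent (Java 8), the handle stays null and MethodWriter falls back to the StringBuilder sequence, as the diff above shows.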
@@ -503,13 +503,13 @@ public final class EBinary extends AExpression {
             left.write(settings, definition, adapter);

             if (!(left instanceof EBinary) || ((EBinary)left).operation != Operation.ADD || left.actual.sort != Sort.STRING) {
-                adapter.writeAppendStrings(left.actual.sort);
+                adapter.writeAppendStrings(left.actual);
             }

             right.write(settings, definition, adapter);

             if (!(right instanceof EBinary) || ((EBinary)right).operation != Operation.ADD || right.actual.sort != Sort.STRING) {
-                adapter.writeAppendStrings(right.actual.sort);
+                adapter.writeAppendStrings(right.actual);
             }

             if (!cat) {
@@ -266,13 +266,13 @@ public final class EChain extends AExpression {
         if (cat) {
             adapter.writeDup(link.size, 1);
             link.load(settings, definition, adapter);
-            adapter.writeAppendStrings(link.after.sort);
+            adapter.writeAppendStrings(link.after);

             expression.write(settings, definition, adapter);

             if (!(expression instanceof EBinary) ||
                 ((EBinary)expression).operation != Operation.ADD || expression.actual.sort != Sort.STRING) {
-                adapter.writeAppendStrings(expression.actual.sort);
+                adapter.writeAppendStrings(expression.actual);
             }

             adapter.writeToStrings();
@@ -19,6 +19,8 @@

 package org.elasticsearch.painless;

+import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS;
+
 import java.util.Locale;

 public class StringTests extends ScriptTestCase {
@@ -70,14 +72,27 @@ public class StringTests extends ScriptTestCase {
     }

     public void testAppendMany() {
+        for (int i = MAX_INDY_STRING_CONCAT_ARGS - 5; i < MAX_INDY_STRING_CONCAT_ARGS + 5; i++) {
+            doTestAppendMany(i);
+        }
+    }
+
+    private void doTestAppendMany(int count) {
         StringBuilder script = new StringBuilder("String s = \"cat\"; return s");
         StringBuilder result = new StringBuilder("cat");
-        for (int i = 0; i < 200 /* indy limit */ + 10; i++) {
+        for (int i = 1; i < count; i++) {
             final String s = String.format(Locale.ROOT, "%03d", i);
             script.append(" + '").append(s).append("'.toString()");
             result.append(s);
         }
-        assertEquals(result.toString(), exec(script.toString()));
+        final String s = script.toString();
+        assertTrue("every string part should be separatly pushed to stack.",
+            Debugger.toString(s).contains(String.format(Locale.ROOT, "LDC \"%03d\"", count/2)));
+        assertEquals(result.toString(), exec(s));
+    }
+
+    public void testNestedConcats() {
+        assertEquals("foo1010foo", exec("String s = 'foo'; String x = '10'; return s + Integer.parseInt(x + x) + s;"));
     }

     public void testStringAPI() {
@@ -1,5 +1,9 @@
 ---
 "Discovery stats":
+  - skip:
+      version: "5.0.0 - "
+      reason: Tracked in issue 18433
+
   - do:
       cluster.state: {}
