Remove runtime fields (#63418)

We are not going to release runtime fields with 7.10, hence we are removing them from the 7.10 branch.
Luca Cavanna 2020-10-07 20:39:41 +02:00 committed by GitHub
parent 58eaba9950
commit 659988a77f
GPG Key ID: 4AEE18F83AFDEB23
129 changed files with 0 additions and 13604 deletions
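For context while reading the diffs below: the feature being removed maps a field with "type": "runtime", a "runtime_type", and a Painless "script" that calls emit() to produce values at search time. A minimal sketch of that pre-removal syntax, modeled on the PermissionsIT test deleted further down (the index name "example", the class name, and the low-level REST client wiring here are illustrative, not part of this commit):

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.io.IOException;

public class RuntimeFieldMappingSketch {
    // Create an index whose "year" value is computed at search time from the indexed
    // "date" field, using the 7.10-era runtime field syntax removed by this commit.
    static Response createIndexWithRuntimeField(RestClient client) throws IOException {
        Request createIndex = new Request("PUT", "/example");
        createIndex.setJsonEntity(
            "{\n"
                + "  \"mappings\" : {\n"
                + "    \"properties\" : {\n"
                + "      \"date\" : {\"type\" : \"keyword\"},\n"
                + "      \"year\" : {\n"
                + "        \"type\" : \"runtime\",\n"
                + "        \"runtime_type\" : \"keyword\",\n"
                + "        \"script\" : \"emit(doc['date'].value.substring(0,4))\"\n"
                + "      }\n"
                + "    }\n"
                + "  }\n"
                + "}\n"
        );
        return client.performRequest(createIndex);
    }
}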


@@ -127,11 +127,6 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " \"keyword\":{\"type\":\"keyword\"}"
+ " }"
+ " },"
+ " \"airline_lowercase_rt\": { "
+ " \"type\":\"runtime\","
+ " \"runtime_type\": \"keyword\","
+ " \"script\" : { \"source\": \"emit(params._source.airline.toLowerCase())\" }"
+ " },"
+ " \"responsetime\": { \"type\":\"float\"}"
+ " }"
+ " }"
@@ -297,13 +292,6 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
.execute();
}
public void testLookbackOnlyWithRuntimeFields() throws Exception {
new LookbackOnlyTestHelper("test-lookback-only-with-runtime-fields", "airline-data")
.setAirlineVariant("airline_lowercase_rt")
.setShouldSucceedProcessing(true)
.execute();
}
public void testLookbackonlyWithNestedFields() throws Exception {
String jobId = "test-lookback-only-with-nested-fields";
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);


@@ -1,23 +0,0 @@
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'x-pack-runtime-fields'
description 'A module which adds support for runtime fields'
classname 'org.elasticsearch.xpack.runtimefields.RuntimeFields'
extendedPlugins = ['x-pack-core', 'lang-painless']
}
archivesBaseName = 'x-pack-runtime-fields'
compileJava.options.compilerArgs << "-Xlint:-rawtypes"
compileTestJava.options.compilerArgs << "-Xlint:-rawtypes"
dependencies {
compileOnly project(":server")
compileOnly project(':modules:lang-painless:spi')
compileOnly project(path: xpackModule('core'), configuration: 'default')
}
dependencyLicenses {
ignoreSha 'x-pack-core'
}


@@ -1 +0,0 @@
// Empty project so we can pick up its subproject


@@ -1,54 +0,0 @@
apply plugin: 'elasticsearch.yaml-rest-test'
restResources {
restApi {
includeXpack 'async_search', 'graph', '*_point_in_time'
}
restTests {
includeCore '*'
includeXpack 'async_search', 'graph'
}
}
testClusters.yamlRestTest {
testDistribution = 'DEFAULT'
setting 'xpack.license.self_generated.type', 'trial'
}
yamlRestTest {
systemProperty 'tests.rest.suite',
[
'async_search',
'field_caps',
'graph',
'msearch',
'search',
'search.aggregation',
'search.highlight',
'search.inner_hits',
'search_shards',
'suggest',
].join(',')
systemProperty 'tests.rest.blacklist',
[
/////// TO FIX ///////
'search.highlight/40_keyword_ignore/Plain Highligher should skip highlighting ignored keyword values', // The plain highlighter is incompatible with runtime fields. Worth fixing?
'search/115_multiple_field_collapsing/two levels fields collapsing', // Broken. Gotta fix.
'field_caps/30_filter/Field caps with index filter', // We don't support filtering field caps on runtime fields. What should we do?
'search.aggregation/10_histogram/*', // runtime doesn't support sub-fields. Maybe it should?
'search/140_pre_filter_search_shards/pre_filter_shard_size with shards that have no hit',
/////// TO FIX ///////
/////// NOT SUPPORTED ///////
'search.aggregation/280_rare_terms/*', // Requires an index and we won't have it
// Runtime fields don't have global ords
'search.aggregation/20_terms/string profiler via global ordinals',
'search.aggregation/20_terms/Global ordinals are loaded with the global_ordinals execution hint',
'search.aggregation/170_cardinality_metric/profiler string',
// The dynamic template causes a type _doc to be created; these tests use another type, but only one type is allowed
'search.aggregation/51_filter_with_types/*',
'search/171_terms_query_with_types/*',
'msearch/12_basic_with_types/*'
/////// NOT SUPPORTED ///////
].join(',')
}


@@ -1,294 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.rest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection;
import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite;
import org.elasticsearch.test.rest.yaml.section.DoSection;
import org.elasticsearch.test.rest.yaml.section.ExecutableSection;
import org.elasticsearch.test.rest.yaml.section.SetupSection;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.hamcrest.Matchers.equalTo;
public class CoreTestsWithRuntimeFieldsIT extends ESClientYamlSuiteTestCase {
public CoreTestsWithRuntimeFieldsIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
/**
* Builds test parameters similarly to {@link ESClientYamlSuiteTestCase#createParameters()},
* replacing the body of index creation commands so that fields are {@code runtime}s
* that load from {@code source} instead of their original type. Test configurations that
* are not modified to contain runtime fields are not returned, as they are tested
* elsewhere.
*/
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
Map<String, ClientYamlTestSuite> suites = new HashMap<>();
List<Object[]> result = new ArrayList<>();
for (Object[] orig : ESClientYamlSuiteTestCase.createParameters()) {
assert orig.length == 1;
ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0];
ClientYamlTestSuite suite = suites.computeIfAbsent(candidate.getTestPath(), k -> modifiedSuite(candidate));
if (suite == null) {
// The setup section contains an unsupported option
continue;
}
if (false == modifySection(candidate.getTestSection().getExecutableSections())) {
// The test section contains an unsupported option
continue;
}
ClientYamlTestSection modified = new ClientYamlTestSection(
candidate.getTestSection().getLocation(),
candidate.getTestSection().getName(),
candidate.getTestSection().getSkipSection(),
candidate.getTestSection().getExecutableSections()
);
result.add(new Object[] { new ClientYamlTestCandidate(suite, modified) });
}
return result;
}
/**
* Modify the setup section to add a dynamic template that replaces
* field configurations with scripts that load from source,
* <strong>and</strong> replace field configurations in {@code indices.create}
* with scripts that load from source.
*/
private static ClientYamlTestSuite modifiedSuite(ClientYamlTestCandidate candidate) {
if (false == modifySection(candidate.getSetupSection().getExecutableSections())) {
return null;
}
List<ExecutableSection> setup = new ArrayList<>(candidate.getSetupSection().getExecutableSections().size() + 1);
setup.add(ADD_TEMPLATE);
setup.addAll(candidate.getSetupSection().getExecutableSections());
return new ClientYamlTestSuite(
candidate.getApi(),
candidate.getName(),
new SetupSection(candidate.getSetupSection().getSkipSection(), setup),
candidate.getTeardownSection(),
Collections.emptyList()
);
}
/**
* Replace field configuration in {@code indices.create} with scripts
* that load from the source.
*/
private static boolean modifySection(List<ExecutableSection> executables) {
for (ExecutableSection section : executables) {
if (false == (section instanceof DoSection)) {
continue;
}
DoSection doSection = (DoSection) section;
if (false == doSection.getApiCallSection().getApi().equals("indices.create")) {
continue;
}
for (Map<?, ?> body : doSection.getApiCallSection().getBodies()) {
Object settings = body.get("settings");
if (settings instanceof Map && ((Map<?, ?>) settings).containsKey("sort.field")) {
/*
* You can't sort the index on a runtime_keyword and it is
* hard to figure out if the sort was a runtime_keyword so
* let's just skip this test.
*/
continue;
}
Object mappings = body.get("mappings");
if (false == (mappings instanceof Map)) {
continue;
}
Object properties = ((Map<?, ?>) mappings).get("properties");
if (false == (properties instanceof Map)) {
continue;
}
for (Map.Entry<?, ?> property : ((Map<?, ?>) properties).entrySet()) {
if (false == property.getValue() instanceof Map) {
continue;
}
@SuppressWarnings("unchecked")
Map<String, Object> propertyMap = (Map<String, Object>) property.getValue();
String name = property.getKey().toString();
String type = Objects.toString(propertyMap.get("type"));
if ("nested".equals(type)) {
// Our loading scripts can't be made to manage nested fields so we have to skip those tests.
return false;
}
if ("false".equals(Objects.toString(propertyMap.get("doc_values")))) {
// If doc_values is false we can't emulate with scripts. `null` and `true` are fine.
continue;
}
if ("false".equals(Objects.toString(propertyMap.get("index")))) {
// If index is false we can't emulate with scripts
continue;
}
if ("true".equals(Objects.toString(propertyMap.get("store")))) {
// If store is true we can't emulate with scripts
continue;
}
if (propertyMap.containsKey("ignore_above")) {
// Scripts don't support ignore_above so we skip those fields
continue;
}
if (propertyMap.containsKey("ignore_malformed")) {
// Our source reading script doesn't emulate ignore_malformed
continue;
}
String toLoad = painlessToLoadFromSource(name, type);
if (toLoad == null) {
continue;
}
propertyMap.put("type", "runtime");
propertyMap.put("runtime_type", type);
propertyMap.put("script", toLoad);
propertyMap.remove("store");
propertyMap.remove("index");
propertyMap.remove("doc_values");
}
}
}
return true;
}
private static String painlessToLoadFromSource(String name, String type) {
String emit = PAINLESS_TO_EMIT.get(type);
if (emit == null) {
return null;
}
StringBuilder b = new StringBuilder();
b.append("def v = params._source['").append(name).append("'];\n");
b.append("if (v instanceof Iterable) {\n");
b.append(" for (def vv : ((Iterable) v)) {\n");
b.append(" if (vv != null) {\n");
b.append(" def value = vv;\n");
b.append(" ").append(emit).append("\n");
b.append(" }\n");
b.append(" }\n");
b.append("} else {\n");
b.append(" if (v != null) {\n");
b.append(" def value = v;\n");
b.append(" ").append(emit).append("\n");
b.append(" }\n");
b.append("}\n");
return b.toString();
}
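/*
 * For illustration: for a keyword property named "airline" (a hypothetical field name),
 * painlessToLoadFromSource("airline", "keyword") builds roughly the following Painless
 * source, with the keyword emit statement substituted into both branches:
 *
 *   def v = params._source['airline'];
 *   if (v instanceof Iterable) {
 *     for (def vv : ((Iterable) v)) {
 *       if (vv != null) {
 *         def value = vv;
 *         emit(value.toString());
 *       }
 *     }
 *   } else {
 *     if (v != null) {
 *       def value = v;
 *       emit(value.toString());
 *     }
 *   }
 */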
private static final Map<String, String> PAINLESS_TO_EMIT = org.elasticsearch.common.collect.Map.of(
BooleanFieldMapper.CONTENT_TYPE,
"emit(parse(value));",
DateFieldMapper.CONTENT_TYPE,
"emit(parse(value.toString()));",
NumberType.DOUBLE.typeName(),
"emit(value instanceof Number ? ((Number) value).doubleValue() : Double.parseDouble(value.toString()));",
KeywordFieldMapper.CONTENT_TYPE,
"emit(value.toString());",
IpFieldMapper.CONTENT_TYPE,
"emit(value.toString());",
NumberType.LONG.typeName(),
"emit(value instanceof Number ? ((Number) value).longValue() : Long.parseLong(value.toString()));"
);
private static final ExecutableSection ADD_TEMPLATE = new ExecutableSection() {
@Override
public XContentLocation getLocation() {
return new XContentLocation(-1, -1);
}
@Override
public void execute(ClientYamlTestExecutionContext executionContext) throws IOException {
Map<String, String> params = org.elasticsearch.common.collect.Map.of("name", "convert_to_source_only", "create", "true");
List<Map<String, Object>> dynamicTemplates = new ArrayList<>();
for (String type : PAINLESS_TO_EMIT.keySet()) {
if (type.equals("ip")) {
// There isn't a dynamic template to pick up ips. They'll just look like strings.
continue;
}
Map<String, Object> mapping = org.elasticsearch.common.collect.Map.of(
"type",
"runtime",
"runtime_type",
type,
"script",
painlessToLoadFromSource("{name}", type)
);
if (type.contentEquals("keyword")) {
/*
* For "string"-type dynamic mappings emulate our default
* behavior with a top level text field and a `.keyword`
* multi-field. But instead of the default, use a runtime
* field for the multi-field.
*/
mapping = org.elasticsearch.common.collect.Map.of(
"type",
"text",
"fields",
org.elasticsearch.common.collect.Map.of("keyword", mapping)
);
dynamicTemplates.add(
org.elasticsearch.common.collect.Map.of(
type,
org.elasticsearch.common.collect.Map.of("match_mapping_type", "string", "mapping", mapping)
)
);
} else {
dynamicTemplates.add(
org.elasticsearch.common.collect.Map.of(
type,
org.elasticsearch.common.collect.Map.of("match_mapping_type", type, "mapping", mapping)
)
);
}
}
List<Map<String, Object>> bodies = Collections.singletonList(
org.elasticsearch.common.collect.Map.of(
"index_patterns",
"*",
"priority",
Integer.MAX_VALUE - 1,
"template",
org.elasticsearch.common.collect.Map.of(
"settings",
Collections.emptyMap(),
"mappings",
org.elasticsearch.common.collect.Map.of("dynamic_templates", dynamicTemplates)
)
)
);
ClientYamlTestResponse response = executionContext.callApi(
"indices.put_index_template",
params,
bodies,
Collections.emptyMap()
);
assertThat(response.getStatusCode(), equalTo(200));
// There are probably some warnings about overlapping templates. Ignore them.
}
};
}
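To make the dynamic templates concrete: for the "keyword" entry, the index template that ADD_TEMPLATE registers would contain a dynamic template roughly like the following, where the script value is the string produced by painlessToLoadFromSource("{name}", "keyword") (abbreviated here as a placeholder):

{
  "keyword": {
    "match_mapping_type": "string",
    "mapping": {
      "type": "text",
      "fields": {
        "keyword": {
          "type": "runtime",
          "runtime_type": "keyword",
          "script": "<output of painlessToLoadFromSource>"
        }
      }
    }
  }
}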


@@ -1,25 +0,0 @@
apply plugin: 'elasticsearch.java-rest-test'
dependencies {
javaRestTestImplementation project(path: xpackModule('core'))
javaRestTestImplementation project(path: xpackProject('plugin').path, configuration: 'testArtifacts')
}
def clusterCredentials = [username: System.getProperty('tests.rest.cluster.username', 'test_admin'),
password: System.getProperty('tests.rest.cluster.password', 'x-pack-test-password')]
javaRestTest {
systemProperty 'tests.rest.cluster.username', clusterCredentials.username
systemProperty 'tests.rest.cluster.password', clusterCredentials.password
}
testClusters.all {
testDistribution = 'DEFAULT'
setting 'xpack.security.enabled', 'true'
setting 'xpack.watcher.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
extraConfigFile 'roles.yml', file('roles.yml')
user clusterCredentials
user username: "test", password: "x-pack-test-password", role: "test"
}


@@ -1,13 +0,0 @@
test:
indices:
- names: [ 'dls' ]
privileges:
- read
query: "{\"match\": {\"year\": 2016}}"
- names: [ 'fls' ]
privileges:
- read
field_security:
grant: [ '*' ]
except: [ 'year', 'hidden' ]


@@ -1,228 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.AfterClass;
import org.junit.Before;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
public class PermissionsIT extends ESRestTestCase {
private static HighLevelClient highLevelClient;
private static HighLevelClient adminHighLevelClient;
@Override
protected Settings restClientSettings() {
String token = basicAuthHeaderValue("test", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Override
protected Settings restAdminSettings() {
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Before
public void initHighLevelClient() {
if (highLevelClient == null) {
highLevelClient = new HighLevelClient(client());
adminHighLevelClient = new HighLevelClient(adminClient());
}
}
@AfterClass
public static void closeHighLevelClients() throws IOException {
highLevelClient.close();
adminHighLevelClient.close();
highLevelClient = null;
adminHighLevelClient = null;
}
public void testDLS() throws IOException {
Request createIndex = new Request("PUT", "/dls");
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\" : {\n"
+ " \"properties\" : {\n"
+ " \"date\" : {\"type\" : \"keyword\"},\n"
+ " \"year\" : {\n"
+ " \"type\" : \"runtime\", \n"
+ " \"runtime_type\" : \"keyword\",\n"
+ " \"script\" : \"emit(doc['date'].value.substring(0,4))\"\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}\n"
);
assertOK(adminClient().performRequest(createIndex));
Request indexDoc1 = new Request("PUT", "/dls/_doc/1");
indexDoc1.setJsonEntity("{\n" + " \"date\" : \"2009-11-15T14:12:12\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc1));
Request indexDoc2 = new Request("PUT", "/dls/_doc/2");
indexDoc2.setJsonEntity("{\n" + " \"date\" : \"2016-11-15T14:12:12\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc2));
Request indexDoc3 = new Request("PUT", "/dls/_doc/3");
indexDoc3.addParameter("refresh", "true");
indexDoc3.setJsonEntity("{\n" + " \"date\" : \"2018-11-15T14:12:12\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc3));
SearchRequest searchRequest = new SearchRequest("dls");
{
SearchResponse searchResponse = adminHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
assertEquals(3, searchResponse.getHits().getTotalHits().value);
}
{
SearchResponse searchResponse = highLevelClient.search(searchRequest, RequestOptions.DEFAULT);
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
}
public void testFLSProtectsData() throws IOException {
Request createIndex = new Request("PUT", "/fls");
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\" : {\n"
+ " \"properties\" : {\n"
+ " \"hidden\" : {\"type\" : \"keyword\"},\n"
+ " \"hidden_values_count\" : {\n"
+ " \"type\" : \"runtime\", \n"
+ " \"runtime_type\" : \"long\",\n"
+ " \"script\" : \"emit(doc['hidden'].size())\"\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}\n"
);
assertOK(adminClient().performRequest(createIndex));
Request indexDoc1 = new Request("PUT", "/fls/_doc/1");
indexDoc1.setJsonEntity("{\n" + " \"hidden\" : \"should not be read\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc1));
Request indexDoc2 = new Request("PUT", "/fls/_doc/2");
indexDoc2.setJsonEntity("{\n" + " \"hidden\" : \"should not be read\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc2));
Request indexDoc3 = new Request("PUT", "/fls/_doc/3");
indexDoc3.addParameter("refresh", "true");
indexDoc3.setJsonEntity("{\n" + " \"hidden\" : \"should not be read\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc3));
SearchRequest searchRequest = new SearchRequest("fls").source(new SearchSourceBuilder().docValueField("hidden_values_count"));
{
SearchResponse searchResponse = adminHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
assertEquals(3, searchResponse.getHits().getTotalHits().value);
for (SearchHit hit : searchResponse.getHits().getHits()) {
assertEquals(1, hit.getFields().size());
assertEquals(1, (int) hit.getFields().get("hidden_values_count").getValue());
}
}
{
SearchResponse searchResponse = highLevelClient.search(searchRequest, RequestOptions.DEFAULT);
assertEquals(3, searchResponse.getHits().getTotalHits().value);
for (SearchHit hit : searchResponse.getHits().getHits()) {
assertEquals(0, (int) hit.getFields().get("hidden_values_count").getValue());
}
}
}
public void testFLSOnRuntimeField() throws IOException {
Request createIndex = new Request("PUT", "/fls");
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\" : {\n"
+ " \"properties\" : {\n"
+ " \"date\" : {\"type\" : \"keyword\"},\n"
+ " \"year\" : {\n"
+ " \"type\" : \"runtime\", \n"
+ " \"runtime_type\" : \"keyword\",\n"
+ " \"script\" : \"emit(doc['date'].value.substring(0,4))\"\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}\n"
);
assertOK(adminClient().performRequest(createIndex));
Request indexDoc1 = new Request("PUT", "/fls/_doc/1");
indexDoc1.setJsonEntity("{\n" + " \"date\" : \"2009-11-15T14:12:12\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc1));
Request indexDoc2 = new Request("PUT", "/fls/_doc/2");
indexDoc2.setJsonEntity("{\n" + " \"date\" : \"2016-11-15T14:12:12\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc2));
Request indexDoc3 = new Request("PUT", "/fls/_doc/3");
indexDoc3.addParameter("refresh", "true");
indexDoc3.setJsonEntity("{\n" + " \"date\" : \"2018-11-15T14:12:12\"\n" + "}\n");
assertOK(adminClient().performRequest(indexDoc3));
// There is no FLS directly on runtime fields
SearchRequest searchRequest = new SearchRequest("fls").source(new SearchSourceBuilder().docValueField("year"));
SearchResponse searchResponse = highLevelClient.search(searchRequest, RequestOptions.DEFAULT);
assertEquals(3, searchResponse.getHits().getTotalHits().value);
for (SearchHit hit : searchResponse.getHits().getHits()) {
Map<String, DocumentField> fields = hit.getFields();
assertEquals(1, fields.size());
switch (hit.getId()) {
case "1":
assertEquals("2009", fields.get("year").getValue().toString());
break;
case "2":
assertEquals("2016", fields.get("year").getValue().toString());
break;
case "3":
assertEquals("2018", fields.get("year").getValue().toString());
break;
default:
throw new UnsupportedOperationException();
}
}
{
FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest().indices("fls").fields("year");
FieldCapabilitiesResponse fieldCapabilitiesResponse = adminHighLevelClient.fieldCaps(fieldCapsRequest, RequestOptions.DEFAULT);
assertNotNull(fieldCapabilitiesResponse.get().get("year"));
}
{
// Though field_caps filters runtime fields out like ordinary fields
FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest().indices("fls").fields("year");
FieldCapabilitiesResponse fieldCapabilitiesResponse = highLevelClient.fieldCaps(fieldCapsRequest, RequestOptions.DEFAULT);
assertEquals(0, fieldCapabilitiesResponse.get().size());
}
}
private static class HighLevelClient extends RestHighLevelClient {
private HighLevelClient(RestClient restClient) {
super(restClient, (client) -> {}, Collections.emptyList());
}
}
}


@@ -1,44 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
import org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import org.elasticsearch.xpack.runtimefields.mapper.LongFieldScript;
import org.elasticsearch.xpack.runtimefields.mapper.RuntimeFieldMapper;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public final class RuntimeFields extends Plugin implements MapperPlugin, ScriptPlugin {
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(RuntimeFieldMapper.CONTENT_TYPE, RuntimeFieldMapper.PARSER);
}
@Override
public List<ScriptContext<?>> getContexts() {
return org.elasticsearch.common.collect.List.of(
BooleanFieldScript.CONTEXT,
DateFieldScript.CONTEXT,
DoubleFieldScript.CONTEXT,
IpFieldScript.CONTEXT,
LongFieldScript.CONTEXT,
StringFieldScript.CONTEXT
);
}
}


@@ -1,73 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.LeafFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
public abstract class BinaryScriptFieldData implements IndexFieldData<BinaryScriptFieldData.BinaryScriptLeafFieldData> {
private final String fieldName;
protected BinaryScriptFieldData(String fieldName) {
this.fieldName = fieldName;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public BinaryScriptLeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
final XFieldComparatorSource source = new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested);
return new SortField(getFieldName(), source, reverse);
}
@Override
public BucketedSort newBucketedSort(
BigArrays bigArrays,
Object missingValue,
MultiValueMode sortMode,
XFieldComparatorSource.Nested nested,
SortOrder sortOrder,
DocValueFormat format,
int bucketSize,
BucketedSort.ExtraData extra
) {
throw new IllegalArgumentException("only supported on numeric fields");
}
public abstract static class BinaryScriptLeafFieldData implements LeafFieldData {
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public void close() {
}
}
}


@@ -1,37 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
public final class BooleanScriptDocValues extends AbstractSortedNumericDocValues {
private final BooleanFieldScript script;
private int cursor;
BooleanScriptDocValues(BooleanFieldScript script) {
this.script = script;
}
@Override
public boolean advanceExact(int docId) {
script.runForDoc(docId);
cursor = 0;
return script.trues() > 0 || script.falses() > 0;
}
@Override
public long nextValue() {
// Emit all false values before all true values
return cursor++ < script.falses() ? 0 : 1;
}
@Override
public int docValueCount() {
return script.trues() + script.falses();
}
}
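The ordering above surfaces booleans as longs with all false values (0) before all true values (1), which keeps the values sorted as the doc-values contract expects without any buffering. An illustrative consumer of such doc values, not code from this commit (class and method names are arbitrary):

import org.apache.lucene.index.SortedNumericDocValues;

import java.io.IOException;

public class BooleanDocValuesSketch {
    // If the script emitted false, true, false for docId, docValueCount() is 3
    // and nextValue() yields 0, 0, 1.
    static long[] readBooleans(SortedNumericDocValues docValues, int docId) throws IOException {
        if (docValues.advanceExact(docId) == false) {
            return new long[0];
        }
        long[] values = new long[docValues.docValueCount()];
        for (int i = 0; i < values.length; i++) {
            values[i] = docValues.nextValue();
        }
        return values;
    }
}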


@@ -1,96 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.plain.LeafLongFieldData;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
public final class BooleanScriptFieldData extends IndexNumericFieldData {
public static class Builder implements IndexFieldData.Builder {
private final String name;
private final BooleanFieldScript.LeafFactory leafFactory;
public Builder(String name, BooleanFieldScript.LeafFactory leafFactory) {
this.name = name;
this.leafFactory = leafFactory;
}
@Override
public BooleanScriptFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new BooleanScriptFieldData(name, leafFactory);
}
}
private final String fieldName;
private final BooleanFieldScript.LeafFactory leafFactory;
private BooleanScriptFieldData(String fieldName, BooleanFieldScript.LeafFactory leafFactory) {
this.fieldName = fieldName;
this.leafFactory = leafFactory;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return CoreValuesSourceType.BOOLEAN;
}
@Override
public BooleanScriptLeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public BooleanScriptLeafFieldData loadDirect(LeafReaderContext context) {
return new BooleanScriptLeafFieldData(new BooleanScriptDocValues(leafFactory.newInstance(context)));
}
@Override
public NumericType getNumericType() {
return NumericType.BOOLEAN;
}
@Override
protected boolean sortRequiresCustomComparator() {
return true;
}
public static class BooleanScriptLeafFieldData extends LeafLongFieldData {
private final BooleanScriptDocValues booleanScriptDocValues;
BooleanScriptLeafFieldData(BooleanScriptDocValues booleanScriptDocValues) {
super(0, NumericType.BOOLEAN);
this.booleanScriptDocValues = booleanScriptDocValues;
}
@Override
public SortedNumericDocValues getLongValues() {
return booleanScriptDocValues;
}
@Override
public void close() {}
}
}


@@ -1,93 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.plain.LeafLongFieldData;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript;
public final class DateScriptFieldData extends IndexNumericFieldData {
public static class Builder implements IndexFieldData.Builder {
private final String name;
private final DateFieldScript.LeafFactory leafFactory;
public Builder(String name, DateFieldScript.LeafFactory leafFactory) {
this.name = name;
this.leafFactory = leafFactory;
}
@Override
public DateScriptFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new DateScriptFieldData(name, leafFactory);
}
}
private final String fieldName;
private final DateFieldScript.LeafFactory leafFactory;
private DateScriptFieldData(String fieldName, DateFieldScript.LeafFactory leafFactory) {
this.fieldName = fieldName;
this.leafFactory = leafFactory;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return CoreValuesSourceType.DATE;
}
@Override
public DateScriptLeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public DateScriptLeafFieldData loadDirect(LeafReaderContext context) {
return new DateScriptLeafFieldData(new LongScriptDocValues(leafFactory.newInstance(context)));
}
@Override
public NumericType getNumericType() {
return NumericType.DATE;
}
@Override
protected boolean sortRequiresCustomComparator() {
return true;
}
public static class DateScriptLeafFieldData extends LeafLongFieldData {
private final LongScriptDocValues longScriptDocValues;
DateScriptLeafFieldData(LongScriptDocValues longScriptDocValues) {
super(0, NumericType.DATE);
this.longScriptDocValues = longScriptDocValues;
}
@Override
public SortedNumericDocValues getLongValues() {
return longScriptDocValues;
}
}
}


@@ -1,42 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
import java.util.Arrays;
public final class DoubleScriptDocValues extends SortedNumericDoubleValues {
private final DoubleFieldScript script;
private int cursor;
DoubleScriptDocValues(DoubleFieldScript script) {
this.script = script;
}
@Override
public boolean advanceExact(int docId) {
script.runForDoc(docId);
if (script.count() == 0) {
return false;
}
Arrays.sort(script.values(), 0, script.count());
cursor = 0;
return true;
}
@Override
public double nextValue() {
return script.values()[cursor++];
}
@Override
public int docValueCount() {
return script.count();
}
}


@@ -1,96 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.plain.LeafDoubleFieldData;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
public final class DoubleScriptFieldData extends IndexNumericFieldData {
public static class Builder implements IndexFieldData.Builder {
private final String name;
private final DoubleFieldScript.LeafFactory leafFactory;
public Builder(String name, DoubleFieldScript.LeafFactory leafFactory) {
this.name = name;
this.leafFactory = leafFactory;
}
@Override
public DoubleScriptFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new DoubleScriptFieldData(name, leafFactory);
}
}
private final String fieldName;
DoubleFieldScript.LeafFactory leafFactory;
private DoubleScriptFieldData(String fieldName, DoubleFieldScript.LeafFactory leafFactory) {
this.fieldName = fieldName;
this.leafFactory = leafFactory;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return CoreValuesSourceType.NUMERIC;
}
@Override
public DoubleScriptLeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public DoubleScriptLeafFieldData loadDirect(LeafReaderContext context) {
return new DoubleScriptLeafFieldData(new DoubleScriptDocValues(leafFactory.newInstance(context)));
}
@Override
public NumericType getNumericType() {
return NumericType.DOUBLE;
}
@Override
protected boolean sortRequiresCustomComparator() {
return true;
}
public static class DoubleScriptLeafFieldData extends LeafDoubleFieldData {
private final DoubleScriptDocValues doubleScriptDocValues;
DoubleScriptLeafFieldData(DoubleScriptDocValues doubleScriptDocValues) {
super(0);
this.doubleScriptDocValues = doubleScriptDocValues;
}
@Override
public SortedNumericDoubleValues getDoubleValues() {
return doubleScriptDocValues;
}
@Override
public void close() {}
}
}


@@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.util.Arrays;
public final class IpScriptDocValues extends SortedBinaryDocValues {
private final IpFieldScript script;
private int cursor;
IpScriptDocValues(IpFieldScript script) {
this.script = script;
}
@Override
public boolean advanceExact(int docId) {
script.runForDoc(docId);
if (script.count() == 0) {
return false;
}
Arrays.sort(script.values(), 0, script.count());
cursor = 0;
return true;
}
@Override
public BytesRef nextValue() {
return script.values()[cursor++];
}
@Override
public int docValueCount() {
return script.count();
}
}


@@ -1,87 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.net.InetAddress;
public class IpScriptFieldData extends BinaryScriptFieldData {
public static class Builder implements IndexFieldData.Builder {
private final String name;
private final IpFieldScript.LeafFactory leafFactory;
public Builder(String name, IpFieldScript.LeafFactory leafFactory) {
this.name = name;
this.leafFactory = leafFactory;
}
@Override
public IpScriptFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new IpScriptFieldData(name, leafFactory);
}
}
private final IpFieldScript.LeafFactory leafFactory;
private IpScriptFieldData(String fieldName, IpFieldScript.LeafFactory leafFactory) {
super(fieldName);
this.leafFactory = leafFactory;
}
@Override
public BinaryScriptLeafFieldData loadDirect(LeafReaderContext context) throws Exception {
IpFieldScript script = leafFactory.newInstance(context);
return new BinaryScriptLeafFieldData() {
@Override
public ScriptDocValues<String> getScriptValues() {
return new IpScriptDocValues(getBytesValues());
}
@Override
public SortedBinaryDocValues getBytesValues() {
return new org.elasticsearch.xpack.runtimefields.fielddata.IpScriptDocValues(script);
}
};
}
@Override
public ValuesSourceType getValuesSourceType() {
return CoreValuesSourceType.IP;
}
/**
* Doc values implementation for ips. We can't share
* {@link IpFieldMapper.IpFieldType.IpScriptDocValues} because it is based
* on global ordinals and we don't have those.
*/
public static class IpScriptDocValues extends ScriptDocValues.Strings {
public IpScriptDocValues(SortedBinaryDocValues in) {
super(in);
}
@Override
protected String bytesToString(BytesRef bytes) {
InetAddress addr = InetAddressPoint.decode(BytesReference.toBytes(new BytesArray(bytes)));
return InetAddresses.toAddrString(addr);
}
}
}
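The bytesToString override above assumes the script produces InetAddressPoint-encoded bytes, decoding them and formatting the result with InetAddresses. A minimal round-trip sketch under that assumption (the address literal and class name are arbitrary):

import org.apache.lucene.document.InetAddressPoint;
import org.elasticsearch.common.network.InetAddresses;

import java.net.InetAddress;

public class IpEncodingSketch {
    public static void main(String[] args) {
        InetAddress original = InetAddresses.forString("192.168.0.1");
        // Encode to the fixed-width byte form the ip script field hands out as BytesRef values...
        byte[] encoded = InetAddressPoint.encode(original);
        // ...then decode and format it the way bytesToString does.
        InetAddress decoded = InetAddressPoint.decode(encoded);
        System.out.println(InetAddresses.toAddrString(decoded)); // prints 192.168.0.1
    }
}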


@@ -1,42 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.Arrays;
public final class LongScriptDocValues extends AbstractSortedNumericDocValues {
private final AbstractLongFieldScript script;
private int cursor;
LongScriptDocValues(AbstractLongFieldScript script) {
this.script = script;
}
@Override
public boolean advanceExact(int docId) {
script.runForDoc(docId);
if (script.count() == 0) {
return false;
}
Arrays.sort(script.values(), 0, script.count());
cursor = 0;
return true;
}
@Override
public long nextValue() {
return script.values()[cursor++];
}
@Override
public int docValueCount() {
return script.count();
}
}


@@ -1,95 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.plain.LeafLongFieldData;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.runtimefields.mapper.LongFieldScript;
import java.io.IOException;
public final class LongScriptFieldData extends IndexNumericFieldData {
public static class Builder implements IndexFieldData.Builder {
private final String name;
private final LongFieldScript.LeafFactory leafFactory;
public Builder(String name, LongFieldScript.LeafFactory leafFactory) {
this.name = name;
this.leafFactory = leafFactory;
}
@Override
public LongScriptFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new LongScriptFieldData(name, leafFactory);
}
}
private final String fieldName;
private final LongFieldScript.LeafFactory leafFactory;
private LongScriptFieldData(String fieldName, LongFieldScript.LeafFactory leafFactory) {
this.fieldName = fieldName;
this.leafFactory = leafFactory;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return CoreValuesSourceType.NUMERIC;
}
@Override
public LongScriptLeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public LongScriptLeafFieldData loadDirect(LeafReaderContext context) throws IOException {
return new LongScriptLeafFieldData(new LongScriptDocValues(leafFactory.newInstance(context)));
}
@Override
public NumericType getNumericType() {
return NumericType.LONG;
}
@Override
protected boolean sortRequiresCustomComparator() {
return true;
}
public static class LongScriptLeafFieldData extends LeafLongFieldData {
private final LongScriptDocValues longScriptDocValues;
LongScriptLeafFieldData(LongScriptDocValues longScriptDocValues) {
super(0, NumericType.LONG);
this.longScriptDocValues = longScriptDocValues;
}
@Override
public SortedNumericDocValues getLongValues() {
return longScriptDocValues;
}
}
}


@@ -1,37 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.elasticsearch.index.fielddata.SortingBinaryDocValues;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
public final class StringScriptDocValues extends SortingBinaryDocValues {
private final StringFieldScript script;
StringScriptDocValues(StringFieldScript script) {
this.script = script;
}
@Override
public boolean advanceExact(int docId) {
List<String> results = script.resultsForDoc(docId);
count = results.size();
if (count == 0) {
return false;
}
grow();
int i = 0;
for (String value : results) {
values[i++].copyChars(value);
}
sort();
return true;
}
}


@@ -1,62 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
public class StringScriptFieldData extends BinaryScriptFieldData {
public static class Builder implements IndexFieldData.Builder {
private final String name;
private final StringFieldScript.LeafFactory leafFactory;
public Builder(String name, StringFieldScript.LeafFactory leafFactory) {
this.name = name;
this.leafFactory = leafFactory;
}
@Override
public StringScriptFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new StringScriptFieldData(name, leafFactory);
}
}
private final StringFieldScript.LeafFactory leafFactory;
private StringScriptFieldData(String fieldName, StringFieldScript.LeafFactory leafFactory) {
super(fieldName);
this.leafFactory = leafFactory;
}
@Override
public BinaryScriptLeafFieldData loadDirect(LeafReaderContext context) throws Exception {
StringFieldScript script = leafFactory.newInstance(context);
return new BinaryScriptLeafFieldData() {
@Override
public ScriptDocValues<?> getScriptValues() {
return new ScriptDocValues.Strings(getBytesValues());
}
@Override
public SortedBinaryDocValues getBytesValues() {
return new StringScriptDocValues(script);
}
};
}
@Override
public ValuesSourceType getValuesSourceType() {
return CoreValuesSourceType.BYTES;
}
}


@@ -1,124 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.AggregationScript;
import org.elasticsearch.script.DynamicMap;
import org.elasticsearch.script.ScriptCache;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
/**
* Abstract base for scripts to execute to build scripted fields. Inspired by
* {@link AggregationScript} but hopefully with less historical baggage.
*/
public abstract class AbstractFieldScript {
/**
* The maximum number of values a script should be allowed to emit.
*/
static final int MAX_VALUES = 100;
public static <F> ScriptContext<F> newContext(String name, Class<F> factoryClass) {
return new ScriptContext<>(
name + "_script_field",
factoryClass,
/*
* In an ideal world we wouldn't need the script cache at all
* because we have a hard reference to the script. The trouble
* is that we compile the scripts a few times when performing
* a mapping update. This is unfortunate, but we rely on the
* cache to speed this up.
*/
100,
timeValueMillis(0),
/*
* Disable compilation rate limits for scripted fields so we
* don't prevent mapping updates because we've performed too
* many recently. That'd just be lame.
*/
ScriptCache.UNLIMITED_COMPILATION_RATE.asTuple()
);
}
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = org.elasticsearch.common.collect.Map.of(
"_source",
value -> ((SourceLookup) value).loadSourceIfNeeded()
);
protected final String fieldName;
private final Map<String, Object> params;
private final LeafSearchLookup leafSearchLookup;
public AbstractFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
this.fieldName = fieldName;
this.leafSearchLookup = searchLookup.getLeafSearchLookup(ctx);
params = new HashMap<>(params);
params.put("_source", leafSearchLookup.source());
params.put("_fields", leafSearchLookup.fields());
this.params = new DynamicMap(params, PARAMS_FUNCTIONS);
}
/**
* Set the document to run the script against.
*/
public final void setDocument(int docId) {
this.leafSearchLookup.setDocument(docId);
}
/**
* Expose the {@code params} of the script to the script itself.
*/
public final Map<String, Object> getParams() {
return params;
}
/**
* Expose the {@code _source} to the script.
*/
@SuppressWarnings("unchecked")
protected final Map<String, Object> getSource() {
return leafSearchLookup.source();
}
/**
* Expose field data to the script as {@code doc}.
*/
public final Map<String, ScriptDocValues<?>> getDoc() {
return leafSearchLookup.doc();
}
/**
* Check whether we can add another value to the list of values.
* @param currentSize the current size of the list
*/
protected final void checkMaxSize(int currentSize) {
if (currentSize >= MAX_VALUES) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Runtime field [%s] is emitting [%s] values while the maximum number of values allowed is [%s]",
fieldName,
currentSize + 1,
MAX_VALUES
)
);
}
}
public abstract void execute();
}


@@ -1,59 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Map;
/**
* Common base class for script field scripts that return long values.
*/
public abstract class AbstractLongFieldScript extends AbstractFieldScript {
private long[] values = new long[1];
private int count;
public AbstractLongFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, params, searchLookup, ctx);
}
/**
* Execute the script for the provided {@code docId}.
*/
public final void runForDoc(int docId) {
count = 0;
setDocument(docId);
execute();
}
/**
* Values from the last time {@link #runForDoc(int)} was called. This array
* is mutable and will change with the next call of {@link #runForDoc(int)}.
* It is also oversized and will contain garbage at all indices at and
* above {@link #count()}.
*/
public final long[] values() {
return values;
}
/**
* The number of results produced the last time {@link #runForDoc(int)} was called.
*/
public final int count() {
return count;
}
protected final void emit(long v) {
checkMaxSize(count);
if (values.length < count + 1) {
values = ArrayUtil.grow(values, count + 1);
}
values[count++] = v;
}
}
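The values()/count() contract above is what LongScriptDocValues relies on: after runForDoc(int), only the first count() entries of the oversized array are meaningful. A minimal consumption sketch under that contract (the helper class and method names are arbitrary):

import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;

import java.util.Arrays;

public class LongFieldScriptSketch {
    // Copy out only the live prefix of the backing array; entries at indices
    // at and above count() are leftover garbage from earlier documents.
    static long[] valuesForDoc(AbstractLongFieldScript script, int docId) {
        script.runForDoc(docId);
        return Arrays.copyOf(script.values(), script.count());
    }
}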


@@ -1,220 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanQuery;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup;
import java.time.ZoneId;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES;
/**
* Abstract base {@linkplain MappedFieldType} for scripted fields.
*/
abstract class AbstractScriptFieldType<LeafFactory> extends MappedFieldType {
protected final Script script;
private final TriFunction<String, Map<String, Object>, SearchLookup, LeafFactory> factory;
AbstractScriptFieldType(
String name,
Script script,
TriFunction<String, Map<String, Object>, SearchLookup, LeafFactory> factory,
Map<String, String> meta
) {
super(name, false, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.script = script;
this.factory = factory;
}
protected abstract String runtimeType();
@Override
public final String typeName() {
return RuntimeFieldMapper.CONTENT_TYPE;
}
@Override
public final String familyTypeName() {
return runtimeType();
}
@Override
public final boolean isSearchable() {
return true;
}
@Override
public final boolean isAggregatable() {
return true;
}
/**
* Create a script leaf factory.
*/
protected final LeafFactory leafFactory(SearchLookup searchLookup) {
return factory.apply(name(), script.getParams(), searchLookup);
}
/**
* Create a script leaf factory for queries.
*/
protected final LeafFactory leafFactory(QueryShardContext context) {
/*
* Forking here causes us to count this field in the field data loop
* detection code as though we were resolving field data for this field.
* We're not, but running the query is close enough.
*/
return leafFactory(context.lookup().forkAndTrackFieldReferences(name()));
}
@Override
public final Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ShapeRelation relation,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
) {
if (relation == ShapeRelation.DISJOINT) {
String message = "Field [%s] of type [%s] with runtime type [%s] does not support DISJOINT ranges";
throw new IllegalArgumentException(String.format(Locale.ROOT, message, name(), typeName(), runtimeType()));
}
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, parser, context);
}
protected abstract Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
);
@Override
public Query fuzzyQuery(
Object value,
Fuzziness fuzziness,
int prefixLength,
int maxExpansions,
boolean transpositions,
QueryShardContext context
) {
throw new IllegalArgumentException(unsupported("fuzzy", "keyword and text"));
}
@Override
public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
throw new IllegalArgumentException(unsupported("prefix", "keyword, text and wildcard"));
}
@Override
public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
throw new IllegalArgumentException(unsupported("wildcard", "keyword, text and wildcard"));
}
@Override
public Query regexpQuery(
String value,
int syntaxFlags,
int matchFlags,
int maxDeterminizedStates,
MultiTermQuery.RewriteMethod method,
QueryShardContext context
) {
throw new IllegalArgumentException(unsupported("regexp", "keyword and text"));
}
@Override
public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) {
throw new IllegalArgumentException(unsupported("phrase", "text"));
}
@Override
public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) {
throw new IllegalArgumentException(unsupported("phrase", "text"));
}
@Override
public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) {
throw new IllegalArgumentException(unsupported("phrase prefix", "text"));
}
@Override
public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) {
throw new IllegalArgumentException(unsupported("span prefix", "text"));
}
private String unsupported(String query, String supported) {
return String.format(
Locale.ROOT,
"Can only use %s queries on %s fields - not on [%s] which is of type [%s] with runtime_type [%s]",
query,
supported,
name(),
RuntimeFieldMapper.CONTENT_TYPE,
runtimeType()
);
}
protected final void checkAllowExpensiveQueries(QueryShardContext context) {
if (context.allowExpensiveQueries() == false) {
throw new ElasticsearchException(
"queries cannot be executed against ["
+ RuntimeFieldMapper.CONTENT_TYPE
+ "] fields while ["
+ ALLOW_EXPENSIVE_QUERIES.getKey()
+ "] is set to [false]."
);
}
}
/**
* The format that this field should use. The default implementation is
* {@code null} because most fields don't support formats.
*/
protected String format() {
return null;
}
/**
* The locale that this field's format should use. The default
* implementation is {@code null} because most fields don't
* support formats.
*/
protected Locale formatLocale() {
return null;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
return new DocValueFetcher(docValueFormat(format, null), lookup.doc().getForField(this));
}
}

View File

@ -1,95 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptFactory;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public abstract class BooleanFieldScript extends AbstractFieldScript {
public static final ScriptContext<Factory> CONTEXT = newContext("boolean_script_field", Factory.class);
static List<Whitelist> whitelist() {
return Collections.singletonList(
WhitelistLoader.loadFromResourceFiles(RuntimeFieldsPainlessExtension.class, "boolean_whitelist.txt")
);
}
@SuppressWarnings("unused")
public static final String[] PARAMETERS = {};
public interface Factory extends ScriptFactory {
LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup);
}
public interface LeafFactory {
BooleanFieldScript newInstance(LeafReaderContext ctx);
}
private int trues;
private int falses;
public BooleanFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, params, searchLookup, ctx);
}
/**
* Execute the script for the provided {@code docId}.
*/
public final void runForDoc(int docId) {
trues = 0;
falses = 0;
setDocument(docId);
execute();
}
/**
* How many {@code true} values were returned for this document.
*/
public final int trues() {
return trues;
}
/**
* How many {@code false} values were returned for this document.
*/
public final int falses() {
return falses;
}
protected final void emit(boolean v) {
if (v) {
trues++;
} else {
falses++;
}
}
public static boolean parse(Object str) {
return Booleans.parseBoolean(str.toString());
}
public static class Emit {
private final BooleanFieldScript script;
public Emit(BooleanFieldScript script) {
this.script = script;
}
public void value(boolean v) {
script.emit(v);
}
}
}

View File

@ -1,212 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.fielddata.BooleanScriptFieldData;
import org.elasticsearch.xpack.runtimefields.query.BooleanScriptFieldExistsQuery;
import org.elasticsearch.xpack.runtimefields.query.BooleanScriptFieldTermQuery;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
public class BooleanScriptFieldType extends AbstractScriptFieldType<BooleanFieldScript.LeafFactory> {
BooleanScriptFieldType(String name, Script script, BooleanFieldScript.Factory scriptFactory, Map<String, String> meta) {
super(name, script, scriptFactory::newFactory, meta);
}
@Override
protected String runtimeType() {
return BooleanFieldMapper.CONTENT_TYPE;
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
return null;
}
switch (value.toString()) {
case "F":
return false;
case "T":
return true;
default:
throw new IllegalArgumentException("Expected [T] or [F] but got [" + value + "]");
}
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
}
return DocValueFormat.BOOLEAN;
}
@Override
public BooleanScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
return new BooleanScriptFieldData.Builder(name(), leafFactory(searchLookup.get()));
}
@Override
public Query existsQuery(QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new BooleanScriptFieldExistsQuery(script, leafFactory(context), name());
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
) {
boolean trueAllowed;
boolean falseAllowed;
/*
* gte: true --- true matches
* gt: true ---- none match
* gte: false -- both match
* gt: false --- true matches
*/
if (toBoolean(lowerTerm)) {
if (includeLower) {
trueAllowed = true;
falseAllowed = false;
} else {
trueAllowed = false;
falseAllowed = false;
}
} else {
if (includeLower) {
trueAllowed = true;
falseAllowed = true;
} else {
trueAllowed = true;
falseAllowed = false;
}
}
/*
* This is how the indexed version works:
* lte: true --- both match
* lt: true ---- false matches
* lte: false -- false matches
* lt: false --- none match
*/
if (toBoolean(upperTerm)) {
if (includeUpper) {
trueAllowed &= true;
falseAllowed &= true;
} else {
trueAllowed &= false;
falseAllowed &= true;
}
} else {
if (includeUpper) {
trueAllowed &= false;
falseAllowed &= true;
} else {
trueAllowed &= false;
falseAllowed &= false;
}
}
return termsQuery(trueAllowed, falseAllowed, context);
}
@Override
public Query termQueryCaseInsensitive(Object value, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new BooleanScriptFieldTermQuery(script, leafFactory(context.lookup()), name(), toBoolean(value, true));
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), toBoolean(value, false));
}
@Override
public Query termsQuery(List<?> values, QueryShardContext context) {
if (values.isEmpty()) {
return Queries.newMatchNoDocsQuery("Empty terms query");
}
boolean trueAllowed = false;
boolean falseAllowed = false;
for (Object value : values) {
if (toBoolean(value, false)) {
trueAllowed = true;
} else {
falseAllowed = true;
}
}
return termsQuery(trueAllowed, falseAllowed, context);
}
private Query termsQuery(boolean trueAllowed, boolean falseAllowed, QueryShardContext context) {
if (trueAllowed) {
if (falseAllowed) {
// Either true or false
return existsQuery(context);
}
checkAllowExpensiveQueries(context);
return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), true);
}
if (falseAllowed) {
checkAllowExpensiveQueries(context);
return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), false);
}
return new MatchNoDocsQuery("neither true nor false allowed");
}
private static boolean toBoolean(Object value) {
return toBoolean(value, false);
}
/**
* Convert the term into a boolean. Inspired by {@link BooleanFieldMapper.BooleanFieldType#indexedValueForSearch(Object)}.
*/
private static boolean toBoolean(Object value, boolean caseInsensitive) {
if (value == null) {
return false;
}
if (value instanceof Boolean) {
return (Boolean) value;
}
String sValue;
if (value instanceof BytesRef) {
sValue = ((BytesRef) value).utf8ToString();
} else {
sValue = value.toString();
}
if (caseInsensitive) {
sValue = Strings.toLowercaseAscii(sValue);
}
return Booleans.parseBoolean(sValue);
}
}
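
The rangeQuery truth table above compresses the four bound/inclusivity combinations per end into two booleans. A standalone restatement (hypothetical BooleanRange class, no Elasticsearch types) that can be run to spot-check the table:

import java.util.Arrays;

final class BooleanRange {
    // Returns { trueAllowed, falseAllowed } for a range over boolean terms.
    static boolean[] allowed(boolean lower, boolean includeLower, boolean upper, boolean includeUpper) {
        boolean trueAllowed = lower ? includeLower : true;
        boolean falseAllowed = lower ? false : includeLower;
        trueAllowed &= upper ? includeUpper : false;
        falseAllowed &= upper ? true : includeUpper;
        return new boolean[] { trueAllowed, falseAllowed };
    }

    public static void main(String[] args) {
        // gt: false, lte: true  -> only `true` matches
        System.out.println(Arrays.toString(allowed(false, false, true, true)));  // [true, false]
        // gte: false, lte: true -> both match, which the field type turns into an exists query
        System.out.println(Arrays.toString(allowed(false, true, true, true)));   // [true, true]
    }
}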

View File

@ -1,84 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptFactory;
import org.elasticsearch.search.lookup.SearchLookup;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public abstract class DateFieldScript extends AbstractLongFieldScript {
public static final ScriptContext<Factory> CONTEXT = newContext("date", Factory.class);
static List<Whitelist> whitelist() {
return Collections.singletonList(WhitelistLoader.loadFromResourceFiles(RuntimeFieldsPainlessExtension.class, "date_whitelist.txt"));
}
@SuppressWarnings("unused")
public static final String[] PARAMETERS = {};
public interface Factory extends ScriptFactory {
LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup, DateFormatter formatter);
}
public interface LeafFactory {
DateFieldScript newInstance(LeafReaderContext ctx);
}
private final DateFormatter formatter;
public DateFieldScript(
String fieldName,
Map<String, Object> params,
SearchLookup searchLookup,
DateFormatter formatter,
LeafReaderContext ctx
) {
super(fieldName, params, searchLookup, ctx);
this.formatter = formatter;
}
public static long toEpochMilli(TemporalAccessor v) {
// TemporalAccessor is a nanos API so we have to convert.
long millis = Math.multiplyExact(v.getLong(ChronoField.INSTANT_SECONDS), 1000);
millis = Math.addExact(millis, v.get(ChronoField.NANO_OF_SECOND) / 1_000_000);
return millis;
}
public static class Emit {
private final DateFieldScript script;
public Emit(DateFieldScript script) {
this.script = script;
}
public void emit(long v) {
script.emit(v);
}
}
public static class Parse {
private final DateFieldScript script;
public Parse(DateFieldScript script) {
this.script = script;
}
public long parse(Object str) {
return script.formatter.parseMillis(str.toString());
}
}
}
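
toEpochMilli above converts a nanosecond-resolution TemporalAccessor into epoch milliseconds by hand. A quick standalone check of that arithmetic against java.time (the demo class is hypothetical):

import java.time.Instant;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;

class ToEpochMilliDemo {
    static long toEpochMilli(TemporalAccessor v) {
        long millis = Math.multiplyExact(v.getLong(ChronoField.INSTANT_SECONDS), 1000);
        return Math.addExact(millis, v.get(ChronoField.NANO_OF_SECOND) / 1_000_000);
    }

    public static void main(String[] args) {
        Instant i = Instant.parse("2020-10-07T20:39:41.123456789Z");
        // Sub-millisecond precision is truncated: 123456789 nanos contribute 123 millis.
        System.out.println(toEpochMilli(i));
        System.out.println(i.toEpochMilli()); // java.time's own conversion agrees
    }
}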

View File

@ -1,187 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.fielddata.DateScriptFieldData;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldDistanceFeatureQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldExistsQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldRangeQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldTermQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldTermsQuery;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
public class DateScriptFieldType extends AbstractScriptFieldType<DateFieldScript.LeafFactory> {
private final DateFormatter dateTimeFormatter;
DateScriptFieldType(
String name,
Script script,
DateFieldScript.Factory scriptFactory,
DateFormatter dateTimeFormatter,
Map<String, String> meta
) {
super(name, script, (n, params, ctx) -> scriptFactory.newFactory(n, params, ctx, dateTimeFormatter), meta);
this.dateTimeFormatter = dateTimeFormatter;
}
@Override
protected String runtimeType() {
return DateFieldMapper.CONTENT_TYPE;
}
@Override
public Object valueForDisplay(Object value) {
Long val = (Long) value;
if (val == null) {
return null;
}
return dateTimeFormatter.format(Resolution.MILLISECONDS.toInstant(val).atZone(ZoneOffset.UTC));
}
@Override
public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
DateFormatter dateTimeFormatter = this.dateTimeFormatter;
if (format != null) {
dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale());
}
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone, Resolution.MILLISECONDS);
}
@Override
public DateScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> lookup) {
return new DateScriptFieldData.Builder(name(), leafFactory(lookup.get()));
}
@Override
public Query distanceFeatureQuery(Object origin, String pivot, float boost, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return DateFieldType.handleNow(context, now -> {
long originLong = DateFieldType.parseToLong(
origin,
true,
null,
dateTimeFormatter.toDateMathParser(),
now,
DateFieldMapper.Resolution.MILLISECONDS
);
TimeValue pivotTime = TimeValue.parseTimeValue(pivot, "distance_feature.pivot");
return new LongScriptFieldDistanceFeatureQuery(
script,
leafFactory(context)::newInstance,
name(),
originLong,
pivotTime.getMillis(),
boost
);
});
}
@Override
public Query existsQuery(QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new LongScriptFieldExistsQuery(script, leafFactory(context)::newInstance, name());
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
@Nullable DateMathParser parser,
QueryShardContext context
) {
parser = parser == null ? dateTimeFormatter.toDateMathParser() : parser;
checkAllowExpensiveQueries(context);
return DateFieldType.dateRangeQuery(
lowerTerm,
upperTerm,
includeLower,
includeUpper,
timeZone,
parser,
context,
DateFieldMapper.Resolution.MILLISECONDS,
(l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(context)::newInstance, name(), l, u)
);
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
return DateFieldType.handleNow(context, now -> {
long l = DateFieldType.parseToLong(
value,
false,
null,
dateTimeFormatter.toDateMathParser(),
now,
DateFieldMapper.Resolution.MILLISECONDS
);
checkAllowExpensiveQueries(context);
return new LongScriptFieldTermQuery(script, leafFactory(context)::newInstance, name(), l);
});
}
@Override
public Query termsQuery(List<?> values, QueryShardContext context) {
if (values.isEmpty()) {
return Queries.newMatchAllQuery();
}
return DateFieldType.handleNow(context, now -> {
LongSet terms = new LongHashSet(values.size());
for (Object value : values) {
terms.add(
DateFieldType.parseToLong(
value,
false,
null,
dateTimeFormatter.toDateMathParser(),
now,
DateFieldMapper.Resolution.MILLISECONDS
)
);
}
checkAllowExpensiveQueries(context);
return new LongScriptFieldTermsQuery(script, leafFactory(context)::newInstance, name(), terms);
});
}
@Override
protected String format() {
return dateTimeFormatter.pattern();
}
@Override
protected Locale formatLocale() {
return dateTimeFormatter.locale();
}
}

View File

@ -1,93 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptFactory;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public abstract class DoubleFieldScript extends AbstractFieldScript {
public static final ScriptContext<Factory> CONTEXT = newContext("double_script_field", Factory.class);
static List<Whitelist> whitelist() {
return Collections.singletonList(
WhitelistLoader.loadFromResourceFiles(RuntimeFieldsPainlessExtension.class, "double_whitelist.txt")
);
}
@SuppressWarnings("unused")
public static final String[] PARAMETERS = {};
public interface Factory extends ScriptFactory {
LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup);
}
public interface LeafFactory {
DoubleFieldScript newInstance(LeafReaderContext ctx);
}
private double[] values = new double[1];
private int count;
public DoubleFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, params, searchLookup, ctx);
}
/**
* Execute the script for the provided {@code docId}.
*/
public final void runForDoc(int docId) {
count = 0;
setDocument(docId);
execute();
}
/**
* Values from the last time {@link #runForDoc(int)} was called. This array
* is mutable and will change with the next call of {@link #runForDoc(int)}.
* It is also oversized and will contain garbage at all indices at and
* above {@link #count()}.
*/
public final double[] values() {
return values;
}
/**
* The number of results produced the last time {@link #runForDoc(int)} was called.
*/
public final int count() {
return count;
}
protected final void emit(double v) {
checkMaxSize(count);
if (values.length < count + 1) {
values = ArrayUtil.grow(values, count + 1);
}
values[count++] = v;
}
public static class Emit {
private final DoubleFieldScript script;
public Emit(DoubleFieldScript script) {
this.script = script;
}
public void emit(double v) {
script.emit(v);
}
}
}

View File

@ -1,105 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.fielddata.DoubleScriptFieldData;
import org.elasticsearch.xpack.runtimefields.query.DoubleScriptFieldExistsQuery;
import org.elasticsearch.xpack.runtimefields.query.DoubleScriptFieldRangeQuery;
import org.elasticsearch.xpack.runtimefields.query.DoubleScriptFieldTermQuery;
import org.elasticsearch.xpack.runtimefields.query.DoubleScriptFieldTermsQuery;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
public class DoubleScriptFieldType extends AbstractScriptFieldType<DoubleFieldScript.LeafFactory> {
DoubleScriptFieldType(String name, Script script, DoubleFieldScript.Factory scriptFactory, Map<String, String> meta) {
super(name, script, scriptFactory::newFactory, meta);
}
@Override
protected String runtimeType() {
return NumberType.DOUBLE.typeName();
}
@Override
public Object valueForDisplay(Object value) {
return value; // These should come back as a Double
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
}
if (format == null) {
return DocValueFormat.RAW;
}
return new DocValueFormat.Decimal(format);
}
@Override
public DoubleScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
return new DoubleScriptFieldData.Builder(name(), leafFactory(searchLookup.get()));
}
@Override
public Query existsQuery(QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new DoubleScriptFieldExistsQuery(script, leafFactory(context), name());
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
) {
checkAllowExpensiveQueries(context);
return NumberType.doubleRangeQuery(
lowerTerm,
upperTerm,
includeLower,
includeUpper,
(l, u) -> new DoubleScriptFieldRangeQuery(script, leafFactory(context), name(), l, u)
);
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new DoubleScriptFieldTermQuery(script, leafFactory(context), name(), NumberType.objectToDouble(value));
}
@Override
public Query termsQuery(List<?> values, QueryShardContext context) {
if (values.isEmpty()) {
return Queries.newMatchAllQuery();
}
LongSet terms = new LongHashSet(values.size());
for (Object value : values) {
terms.add(Double.doubleToLongBits(NumberType.objectToDouble(value)));
}
checkAllowExpensiveQueries(context);
return new DoubleScriptFieldTermsQuery(script, leafFactory(context), name(), terms);
}
}
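
termsQuery above stores double terms in a set of longs via Double.doubleToLongBits. A minimal sketch of that round-trip with a plain HashSet instead of hppc (hypothetical demo class):

import java.util.HashSet;
import java.util.Set;

class DoubleTermsDemo {
    public static void main(String[] args) {
        Set<Long> terms = new HashSet<>();
        for (double d : new double[] { 1.5, 2.25, -0.0 }) {
            terms.add(Double.doubleToLongBits(d));
        }
        // Membership is tested on the bit pattern, like the terms query above.
        System.out.println(terms.contains(Double.doubleToLongBits(2.25))); // true
        System.out.println(terms.contains(Double.doubleToLongBits(2.26))); // false
        // Note: doubleToLongBits gives 0.0 and -0.0 different bit patterns.
        System.out.println(terms.contains(Double.doubleToLongBits(0.0)));  // false
    }
}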

View File

@ -1,115 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptFactory;
import org.elasticsearch.search.lookup.SearchLookup;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* Script producing IP addresses. Unlike the other {@linkplain AbstractFieldScript}s
* which deal with their native java objects this converts its values to the same format
* that Lucene uses to store its fields, {@link InetAddressPoint}. There are a few compelling
* reasons to do this:
* <ul>
* <li>{@link Inet4Address}es and {@link Inet6Address} are not comparable with one another.
* That is correct in some contexts, but not for our queries. Our queries must consider the
* IPv4 address equal to the address that it maps to in IPv6 (<a href="https://tools.ietf.org/html/rfc4291">rfc4291</a>).
* <li>{@link InetAddress}es are not ordered, but we need to implement range queries with the
* same ordering as {@link IpFieldMapper}. That also uses {@link InetAddressPoint}
* so it saves us a lot of trouble to use the same representation.
* </ul>
*/
public abstract class IpFieldScript extends AbstractFieldScript {
public static final ScriptContext<Factory> CONTEXT = newContext("ip_script_field", Factory.class);
static List<Whitelist> whitelist() {
return Collections.singletonList(WhitelistLoader.loadFromResourceFiles(RuntimeFieldsPainlessExtension.class, "ip_whitelist.txt"));
}
@SuppressWarnings("unused")
public static final String[] PARAMETERS = {};
public interface Factory extends ScriptFactory {
LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup);
}
public interface LeafFactory {
IpFieldScript newInstance(LeafReaderContext ctx);
}
private BytesRef[] values = new BytesRef[1];
private int count;
public IpFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, params, searchLookup, ctx);
}
/**
* Execute the script for the provided {@code docId}.
*/
public final void runForDoc(int docId) {
count = 0;
setDocument(docId);
execute();
}
/**
* Values from the last time {@link #runForDoc(int)} was called. This array
* is mutable and will change with the next call of {@link #runForDoc(int)}.
* It is also oversized and will contain garbage at all indices at and
* above {@link #count()}.
* <p>
* All values are IPv6 addresses so they are 16 bytes. IPv4 addresses are
* encoded by <a href="https://tools.ietf.org/html/rfc4291">rfc4291</a>.
*/
public final BytesRef[] values() {
return values;
}
/**
* The number of results produced the last time {@link #runForDoc(int)} was called.
*/
public final int count() {
return count;
}
protected final void emit(String v) {
checkMaxSize(count);
if (values.length < count + 1) {
values = ArrayUtil.grow(values, count + 1);
}
values[count++] = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(v)));
}
public static class Emit {
private final IpFieldScript script;
public Emit(IpFieldScript script) {
this.script = script;
}
public void emit(String v) {
script.emit(v);
}
}
}
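
The Javadoc above is the key design note: every emitted value is normalized to Lucene's fixed-width 16-byte InetAddressPoint encoding. A small sketch showing IPv4 being widened to its IPv6-mapped form (assumes lucene-core on the classpath; the demo class is hypothetical):

import org.apache.lucene.document.InetAddressPoint;
import java.net.InetAddress;
import java.util.Arrays;

class IpEncodingDemo {
    public static void main(String[] args) throws Exception {
        byte[] v4 = InetAddressPoint.encode(InetAddress.getByName("192.168.0.1"));
        byte[] v6 = InetAddressPoint.encode(InetAddress.getByName("2001:db8::1"));
        // Both encodings are 16 bytes, so byte-wise comparison gives one consistent ordering.
        System.out.println(v4.length + " " + v6.length); // 16 16
        // The IPv4 address lands in the rfc4291 IPv4-mapped range:
        // ten zero bytes, then 0xff 0xff, then the four IPv4 octets.
        System.out.println(Arrays.toString(v4));
    }
}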

View File

@ -1,174 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.fielddata.IpScriptFieldData;
import org.elasticsearch.xpack.runtimefields.query.IpScriptFieldExistsQuery;
import org.elasticsearch.xpack.runtimefields.query.IpScriptFieldRangeQuery;
import org.elasticsearch.xpack.runtimefields.query.IpScriptFieldTermQuery;
import org.elasticsearch.xpack.runtimefields.query.IpScriptFieldTermsQuery;
import java.net.InetAddress;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
public final class IpScriptFieldType extends AbstractScriptFieldType<IpFieldScript.LeafFactory> {
IpScriptFieldType(String name, Script script, IpFieldScript.Factory scriptFactory, Map<String, String> meta) {
super(name, script, scriptFactory::newFactory, meta);
}
@Override
protected String runtimeType() {
return IpFieldMapper.CONTENT_TYPE;
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
return null;
}
return DocValueFormat.IP.format((BytesRef) value);
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (format != null) {
String message = "Field [%s] of type [%s] with runtime type [%s] does not support custom formats";
throw new IllegalArgumentException(String.format(Locale.ROOT, message, name(), typeName(), runtimeType()));
}
if (timeZone != null) {
String message = "Field [%s] of type [%s] with runtime type [%s] does not support custom time zones";
throw new IllegalArgumentException(String.format(Locale.ROOT, message, name(), typeName(), runtimeType()));
}
return DocValueFormat.IP;
}
@Override
public IpScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
return new IpScriptFieldData.Builder(name(), leafFactory(searchLookup.get()));
}
@Override
public Query existsQuery(QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new IpScriptFieldExistsQuery(script, leafFactory(context), name());
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
) {
checkAllowExpensiveQueries(context);
return IpFieldMapper.IpFieldType.rangeQuery(
lowerTerm,
upperTerm,
includeLower,
includeUpper,
(lower, upper) -> new IpScriptFieldRangeQuery(
script,
leafFactory(context),
name(),
new BytesRef(InetAddressPoint.encode(lower)),
new BytesRef(InetAddressPoint.encode(upper))
)
);
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
checkAllowExpensiveQueries(context);
if (value instanceof InetAddress) {
return inetAddressQuery((InetAddress) value, context);
}
String term = BytesRefs.toString(value);
if (term.contains("/")) {
return cidrQuery(term, context);
}
InetAddress address = InetAddresses.forString(term);
return inetAddressQuery(address, context);
}
private Query inetAddressQuery(InetAddress address, QueryShardContext context) {
return new IpScriptFieldTermQuery(script, leafFactory(context), name(), new BytesRef(InetAddressPoint.encode(address)));
}
@Override
public Query termsQuery(List<?> values, QueryShardContext context) {
checkAllowExpensiveQueries(context);
BytesRefHash terms = new BytesRefHash(values.size(), BigArrays.NON_RECYCLING_INSTANCE);
List<Query> cidrQueries = null;
for (Object value : values) {
if (value instanceof InetAddress) {
terms.add(new BytesRef(InetAddressPoint.encode((InetAddress) value)));
continue;
}
String term = BytesRefs.toString(value);
if (false == term.contains("/")) {
terms.add(new BytesRef(InetAddressPoint.encode(InetAddresses.forString(term))));
continue;
}
if (cidrQueries == null) {
cidrQueries = new ArrayList<>();
}
cidrQueries.add(cidrQuery(term, context));
}
Query termsQuery = new IpScriptFieldTermsQuery(script, leafFactory(context), name(), terms);
if (cidrQueries == null) {
return termsQuery;
}
BooleanQuery.Builder bool = new BooleanQuery.Builder();
bool.add(termsQuery, Occur.SHOULD);
for (Query cidrQuery : cidrQueries) {
bool.add(cidrQuery, Occur.SHOULD);
}
return bool.build();
}
private Query cidrQuery(String term, QueryShardContext context) {
Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term);
InetAddress addr = cidr.v1();
int prefixLength = cidr.v2();
// create the lower value by zeroing out the host portion, upper value by filling it with all ones.
byte lower[] = addr.getAddress();
byte upper[] = addr.getAddress();
for (int i = prefixLength; i < 8 * lower.length; i++) {
int m = 1 << (7 - (i & 7));
lower[i >> 3] &= ~m;
upper[i >> 3] |= m;
}
// Force the terms into IPv6
BytesRef lowerBytes = new BytesRef(InetAddressPoint.encode(InetAddressPoint.decode(lower)));
BytesRef upperBytes = new BytesRef(InetAddressPoint.encode(InetAddressPoint.decode(upper)));
return new IpScriptFieldRangeQuery(script, leafFactory(context), name(), lowerBytes, upperBytes);
}
}
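
cidrQuery above derives the block's lowest and highest addresses with a bit-masking loop. The same loop, lifted into a runnable plain-JDK sketch (hypothetical class) for 192.168.1.0/24:

import java.net.InetAddress;

class CidrBoundsDemo {
    public static void main(String[] args) throws Exception {
        InetAddress addr = InetAddress.getByName("192.168.1.0");
        int prefixLength = 24;
        byte[] lower = addr.getAddress();
        byte[] upper = addr.getAddress();
        for (int i = prefixLength; i < 8 * lower.length; i++) {
            int m = 1 << (7 - (i & 7));
            lower[i >> 3] &= ~m; // clear host bit -> lowest address in the block
            upper[i >> 3] |= m;  // set host bit   -> highest address in the block
        }
        System.out.println(InetAddress.getByAddress(lower).getHostAddress()); // 192.168.1.0
        System.out.println(InetAddress.getByAddress(upper).getHostAddress()); // 192.168.1.255
    }
}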

View File

@ -1,183 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.search.MultiTermQuery.RewriteMethod;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.fielddata.StringScriptFieldData;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldExistsQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldFuzzyQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldPrefixQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldRangeQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldRegexpQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldTermQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldTermsQuery;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldWildcardQuery;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
import static java.util.stream.Collectors.toSet;
public final class KeywordScriptFieldType extends AbstractScriptFieldType<StringFieldScript.LeafFactory> {
KeywordScriptFieldType(String name, Script script, StringFieldScript.Factory scriptFactory, Map<String, String> meta) {
super(name, script, scriptFactory::newFactory, meta);
}
@Override
protected String runtimeType() {
return KeywordFieldMapper.CONTENT_TYPE;
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
return null;
}
// keywords are internally stored as utf8 bytes
BytesRef binaryValue = (BytesRef) value;
return binaryValue.utf8ToString();
}
@Override
public StringScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
return new StringScriptFieldData.Builder(name(), leafFactory(searchLookup.get()));
}
@Override
public Query existsQuery(QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new StringScriptFieldExistsQuery(script, leafFactory(context), name());
}
@Override
public Query fuzzyQuery(
Object value,
Fuzziness fuzziness,
int prefixLength,
int maxExpansions,
boolean transpositions,
QueryShardContext context
) {
checkAllowExpensiveQueries(context);
return StringScriptFieldFuzzyQuery.build(
script,
leafFactory(context),
name(),
BytesRefs.toString(Objects.requireNonNull(value)),
fuzziness.asDistance(BytesRefs.toString(value)),
prefixLength,
transpositions
);
}
@Override
public Query prefixQuery(
String value,
RewriteMethod method,
boolean caseInsensitive,
org.elasticsearch.index.query.QueryShardContext context
) {
checkAllowExpensiveQueries(context);
return new StringScriptFieldPrefixQuery(script, leafFactory(context), name(), value, caseInsensitive);
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
) {
checkAllowExpensiveQueries(context);
return new StringScriptFieldRangeQuery(
script,
leafFactory(context),
name(),
BytesRefs.toString(Objects.requireNonNull(lowerTerm)),
BytesRefs.toString(Objects.requireNonNull(upperTerm)),
includeLower,
includeUpper
);
}
@Override
public Query regexpQuery(
String value,
int syntaxFlags,
int matchFlags,
int maxDeterminizedStates,
RewriteMethod method,
QueryShardContext context
) {
checkAllowExpensiveQueries(context);
if (matchFlags != 0) {
throw new IllegalArgumentException("Match flags not yet implemented [" + matchFlags + "]");
}
return new StringScriptFieldRegexpQuery(
script,
leafFactory(context),
name(),
value,
syntaxFlags,
matchFlags,
maxDeterminizedStates
);
}
@Override
public Query termQueryCaseInsensitive(Object value, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new StringScriptFieldTermQuery(
script,
leafFactory(context),
name(),
BytesRefs.toString(Objects.requireNonNull(value)),
true
);
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new StringScriptFieldTermQuery(
script,
leafFactory(context),
name(),
BytesRefs.toString(Objects.requireNonNull(value)),
false
);
}
@Override
public Query termsQuery(List<?> values, QueryShardContext context) {
checkAllowExpensiveQueries(context);
Set<String> terms = values.stream().map(v -> BytesRefs.toString(Objects.requireNonNull(v))).collect(toSet());
return new StringScriptFieldTermsQuery(script, leafFactory(context), name(), terms);
}
@Override
public Query wildcardQuery(String value, RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new StringScriptFieldWildcardQuery(script, leafFactory(context), name(), value, caseInsensitive);
}
}

View File

@ -1,53 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptFactory;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public abstract class LongFieldScript extends AbstractLongFieldScript {
public static final ScriptContext<Factory> CONTEXT = newContext("long_script_field", Factory.class);
static List<Whitelist> whitelist() {
return Collections.singletonList(WhitelistLoader.loadFromResourceFiles(RuntimeFieldsPainlessExtension.class, "long_whitelist.txt"));
}
@SuppressWarnings("unused")
public static final String[] PARAMETERS = {};
public interface Factory extends ScriptFactory {
LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup);
}
public interface LeafFactory {
LongFieldScript newInstance(LeafReaderContext ctx);
}
public LongFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, params, searchLookup, ctx);
}
public static class Emit {
private final LongFieldScript script;
public Emit(LongFieldScript script) {
this.script = script;
}
public void emit(long v) {
script.emit(v);
}
}
}

View File

@ -1,114 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.fielddata.LongScriptFieldData;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldExistsQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldRangeQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldTermQuery;
import org.elasticsearch.xpack.runtimefields.query.LongScriptFieldTermsQuery;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
public class LongScriptFieldType extends AbstractScriptFieldType<LongFieldScript.LeafFactory> {
LongScriptFieldType(String name, Script script, LongFieldScript.Factory scriptFactory, Map<String, String> meta) {
super(name, script, scriptFactory::newFactory, meta);
}
@Override
protected String runtimeType() {
return NumberType.LONG.typeName();
}
@Override
public Object valueForDisplay(Object value) {
return value; // These should come back as a Long
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
}
if (format == null) {
return DocValueFormat.RAW;
}
return new DocValueFormat.Decimal(format);
}
@Override
public LongScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
return new LongScriptFieldData.Builder(name(), leafFactory(searchLookup.get()));
}
@Override
public Query existsQuery(QueryShardContext context) {
checkAllowExpensiveQueries(context);
return new LongScriptFieldExistsQuery(script, leafFactory(context)::newInstance, name());
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
ZoneId timeZone,
DateMathParser parser,
QueryShardContext context
) {
checkAllowExpensiveQueries(context);
return NumberType.longRangeQuery(
lowerTerm,
upperTerm,
includeLower,
includeUpper,
(l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(context)::newInstance, name(), l, u)
);
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
if (NumberType.hasDecimalPart(value)) {
return Queries.newMatchNoDocsQuery("Value [" + value + "] has a decimal part");
}
checkAllowExpensiveQueries(context);
return new LongScriptFieldTermQuery(script, leafFactory(context)::newInstance, name(), NumberType.objectToLong(value, true));
}
@Override
public Query termsQuery(List<?> values, QueryShardContext context) {
if (values.isEmpty()) {
return Queries.newMatchAllQuery();
}
LongSet terms = new LongHashSet(values.size());
for (Object value : values) {
if (NumberType.hasDecimalPart(value)) {
continue;
}
terms.add(NumberType.objectToLong(value, true));
}
if (terms.isEmpty()) {
return Queries.newMatchNoDocsQuery("All values have a decimal part");
}
checkAllowExpensiveQueries(context);
return new LongScriptFieldTermsQuery(script, leafFactory(context)::newInstance, name(), terms);
}
}

View File

@ -1,263 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptType;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.BiFunction;
public final class RuntimeFieldMapper extends ParametrizedFieldMapper {
public static final String CONTENT_TYPE = "runtime";
public static final TypeParser PARSER = new TypeParser((name, parserContext) -> new Builder(name, new ScriptCompiler() {
@Override
public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) {
return parserContext.scriptService().compile(script, context);
}
}));
private final String runtimeType;
private final Script script;
private final ScriptCompiler scriptCompiler;
protected RuntimeFieldMapper(
String simpleName,
AbstractScriptFieldType<?> mappedFieldType,
MultiFields multiFields,
CopyTo copyTo,
String runtimeType,
Script script,
ScriptCompiler scriptCompiler
) {
super(simpleName, mappedFieldType, multiFields, copyTo);
this.runtimeType = runtimeType;
this.script = script;
this.scriptCompiler = scriptCompiler;
}
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new RuntimeFieldMapper.Builder(simpleName(), scriptCompiler).init(this);
}
@Override
protected void parseCreateField(ParseContext context) {
// there is no lucene field
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
public static class Builder extends ParametrizedFieldMapper.Builder {
static final Map<String, BiFunction<Builder, BuilderContext, AbstractScriptFieldType<?>>> FIELD_TYPE_RESOLVER =
org.elasticsearch.common.collect.Map.of(BooleanFieldMapper.CONTENT_TYPE, (builder, context) -> {
builder.formatAndLocaleNotSupported();
BooleanFieldScript.Factory factory = builder.scriptCompiler.compile(builder.script.getValue(), BooleanFieldScript.CONTEXT);
return new BooleanScriptFieldType(
builder.buildFullName(context),
builder.script.getValue(),
factory,
builder.meta.getValue()
);
}, DateFieldMapper.CONTENT_TYPE, (builder, context) -> {
DateFieldScript.Factory factory = builder.scriptCompiler.compile(builder.script.getValue(), DateFieldScript.CONTEXT);
String format = builder.format.getValue();
if (format == null) {
format = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern();
}
Locale locale = builder.locale.getValue();
if (locale == null) {
locale = Locale.ROOT;
}
DateFormatter dateTimeFormatter = DateFormatter.forPattern(format).withLocale(locale);
return new DateScriptFieldType(
builder.buildFullName(context),
builder.script.getValue(),
factory,
dateTimeFormatter,
builder.meta.getValue()
);
}, NumberType.DOUBLE.typeName(), (builder, context) -> {
builder.formatAndLocaleNotSupported();
DoubleFieldScript.Factory factory = builder.scriptCompiler.compile(builder.script.getValue(), DoubleFieldScript.CONTEXT);
return new DoubleScriptFieldType(
builder.buildFullName(context),
builder.script.getValue(),
factory,
builder.meta.getValue()
);
}, IpFieldMapper.CONTENT_TYPE, (builder, context) -> {
builder.formatAndLocaleNotSupported();
IpFieldScript.Factory factory = builder.scriptCompiler.compile(builder.script.getValue(), IpFieldScript.CONTEXT);
return new IpScriptFieldType(builder.buildFullName(context), builder.script.getValue(), factory, builder.meta.getValue());
}, KeywordFieldMapper.CONTENT_TYPE, (builder, context) -> {
builder.formatAndLocaleNotSupported();
StringFieldScript.Factory factory = builder.scriptCompiler.compile(builder.script.getValue(), StringFieldScript.CONTEXT);
return new KeywordScriptFieldType(
builder.buildFullName(context),
builder.script.getValue(),
factory,
builder.meta.getValue()
);
}, NumberType.LONG.typeName(), (builder, context) -> {
builder.formatAndLocaleNotSupported();
LongFieldScript.Factory factory = builder.scriptCompiler.compile(builder.script.getValue(), LongFieldScript.CONTEXT);
return new LongScriptFieldType(builder.buildFullName(context), builder.script.getValue(), factory, builder.meta.getValue());
});
private static RuntimeFieldMapper toType(FieldMapper in) {
return (RuntimeFieldMapper) in;
}
private final Parameter<Map<String, String>> meta = Parameter.metaParam();
private final Parameter<String> runtimeType = Parameter.stringParam(
"runtime_type",
true,
mapper -> toType(mapper).runtimeType,
null
).setValidator(runtimeType -> {
if (runtimeType == null) {
throw new IllegalArgumentException("runtime_type must be specified for " + CONTENT_TYPE + " field [" + name + "]");
}
});
private final Parameter<Script> script = new Parameter<>(
"script",
true,
() -> null,
Builder::parseScript,
mapper -> toType(mapper).script
).setValidator(script -> {
if (script == null) {
throw new IllegalArgumentException("script must be specified for " + CONTENT_TYPE + " field [" + name + "]");
}
});
private final Parameter<String> format = Parameter.stringParam(
"format",
true,
mapper -> ((AbstractScriptFieldType<?>) mapper.fieldType()).format(),
null
).setSerializer((b, n, v) -> {
if (v != null && false == v.equals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern())) {
b.field(n, v);
}
}, Object::toString).acceptsNull();
private final Parameter<Locale> locale = new Parameter<>(
"locale",
true,
() -> null,
(n, c, o) -> o == null ? null : LocaleUtils.parse(o.toString()),
mapper -> ((AbstractScriptFieldType<?>) mapper.fieldType()).formatLocale()
).setSerializer((b, n, v) -> {
if (v != null && false == v.equals(Locale.ROOT)) {
b.field(n, v.toString());
}
}, Object::toString).acceptsNull();
private final ScriptCompiler scriptCompiler;
protected Builder(String name, ScriptCompiler scriptCompiler) {
super(name);
this.scriptCompiler = scriptCompiler;
}
@Override
protected List<Parameter<?>> getParameters() {
return org.elasticsearch.common.collect.List.of(meta, runtimeType, script, format, locale);
}
@Override
public RuntimeFieldMapper build(BuilderContext context) {
BiFunction<Builder, BuilderContext, AbstractScriptFieldType<?>> fieldTypeResolver = Builder.FIELD_TYPE_RESOLVER.get(
runtimeType.getValue()
);
if (fieldTypeResolver == null) {
throw new IllegalArgumentException(
"runtime_type [" + runtimeType.getValue() + "] not supported for " + CONTENT_TYPE + " field [" + name + "]"
);
}
MultiFields multiFields = multiFieldsBuilder.build(this, context);
if (multiFields.iterator().hasNext()) {
throw new IllegalArgumentException(CONTENT_TYPE + " field [" + name + "] does not support [fields]");
}
CopyTo copyTo = this.copyTo.build();
if (copyTo.copyToFields().isEmpty() == false) {
throw new IllegalArgumentException(CONTENT_TYPE + " field [" + name + "] does not support [copy_to]");
}
return new RuntimeFieldMapper(
name,
fieldTypeResolver.apply(this, context),
MultiFields.empty(),
CopyTo.empty(),
runtimeType.getValue(),
script.getValue(),
scriptCompiler
);
}
static Script parseScript(String name, Mapper.TypeParser.ParserContext parserContext, Object scriptObject) {
Script script = Script.parse(scriptObject);
if (script.getType() == ScriptType.STORED) {
throw new IllegalArgumentException("stored scripts are not supported for " + CONTENT_TYPE + " field [" + name + "]");
}
return script;
}
private void formatAndLocaleNotSupported() {
if (format.getValue() != null) {
throw new IllegalArgumentException(
"format can not be specified for ["
+ CONTENT_TYPE
+ "] field ["
+ name
+ "] of "
+ runtimeType.name
+ " ["
+ runtimeType.getValue()
+ "]"
);
}
if (locale.getValue() != null) {
throw new IllegalArgumentException(
"locale can not be specified for ["
+ CONTENT_TYPE
+ "] field ["
+ name
+ "] of "
+ runtimeType.name
+ " ["
+ runtimeType.getValue()
+ "]"
);
}
}
}
@FunctionalInterface
private interface ScriptCompiler {
<FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context);
}
}
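
FIELD_TYPE_RESOLVER above is a dispatch-by-map: each supported runtime_type name maps to a factory function, and anything not in the map fails fast in Builder#build. A stripped-down sketch of that pattern, with all class and field names hypothetical:

import java.util.Map;
import java.util.function.Function;

class RuntimeTypeDispatchDemo {
    interface FieldType { String describe(); }

    // Each supported runtime_type maps to a factory; unknown types are rejected.
    static final Map<String, Function<String, FieldType>> RESOLVER = Map.of(
        "keyword", name -> () -> "keyword field [" + name + "]",
        "long", name -> () -> "long field [" + name + "]"
    );

    static FieldType resolve(String runtimeType, String name) {
        Function<String, FieldType> factory = RESOLVER.get(runtimeType);
        if (factory == null) {
            throw new IllegalArgumentException("runtime_type [" + runtimeType + "] not supported for field [" + name + "]");
        }
        return factory.apply(name);
    }

    public static void main(String[] args) {
        System.out.println(resolve("keyword", "my_field").describe());
        // resolve("geo_point", "location") would throw: not in the resolver map
    }
}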

View File

@ -1,34 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.elasticsearch.painless.spi.PainlessExtension;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ScriptContext;
import java.util.List;
import java.util.Map;
public class RuntimeFieldsPainlessExtension implements PainlessExtension {
@Override
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
return org.elasticsearch.common.collect.Map.of(
BooleanFieldScript.CONTEXT,
BooleanFieldScript.whitelist(),
DateFieldScript.CONTEXT,
DateFieldScript.whitelist(),
DoubleFieldScript.CONTEXT,
DoubleFieldScript.whitelist(),
IpFieldScript.CONTEXT,
IpFieldScript.whitelist(),
LongFieldScript.CONTEXT,
LongFieldScript.whitelist(),
StringFieldScript.CONTEXT,
StringFieldScript.whitelist()
);
}
}

View File

@ -1,96 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptFactory;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
public abstract class StringFieldScript extends AbstractFieldScript {
/**
* The maximum number of chars a script should be allowed to emit.
*/
public static final long MAX_CHARS = 1024 * 1024;
public static final ScriptContext<Factory> CONTEXT = newContext("string_script_field", Factory.class);
static List<Whitelist> whitelist() {
return Collections.singletonList(
WhitelistLoader.loadFromResourceFiles(RuntimeFieldsPainlessExtension.class, "string_whitelist.txt")
);
}
@SuppressWarnings("unused")
public static final String[] PARAMETERS = {};
public interface Factory extends ScriptFactory {
LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup);
}
public interface LeafFactory {
StringFieldScript newInstance(LeafReaderContext ctx);
}
private final List<String> results = new ArrayList<>();
private long chars;
public StringFieldScript(String fieldName, Map<String, Object> params, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, params, searchLookup, ctx);
}
/**
* Execute the script for the provided {@code docId}.
* <p>
* @return a mutable {@link List} that contains the results of the script
* and will be modified the next time you call {@linkplain #resultsForDoc}.
*/
public final List<String> resultsForDoc(int docId) {
results.clear();
chars = 0;
setDocument(docId);
execute();
return results;
}
protected final void emit(String v) {
checkMaxSize(results.size());
chars += v.length();
if (chars > MAX_CHARS) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Runtime field [%s] is emitting [%s] characters while the maximum number of values allowed is [%s]",
fieldName,
chars,
MAX_CHARS
)
);
}
results.add(v);
}
public static class Emit {
private final StringFieldScript script;
public Emit(StringFieldScript script) {
this.script = script;
}
public void emit(String v) {
script.emit(v);
}
}
}
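The emit implementation above enforces a per-document character budget across all emitted values. A minimal standalone sketch of that bookkeeping in plain Java, with illustrative names and values (no Elasticsearch types):

import java.util.ArrayList;
import java.util.List;

public class CharBudgetSketch {
    // Mirrors StringFieldScript.MAX_CHARS: 1 MiB of characters per document.
    static final long MAX_CHARS = 1024 * 1024;

    public static void main(String[] args) {
        List<String> results = new ArrayList<>();
        long chars = 0;
        for (String value : new String[] { "foo", "bar", "baz" }) {
            chars += value.length();
            if (chars > MAX_CHARS) {
                throw new IllegalArgumentException("emitted [" + chars + "] characters, limit is [" + MAX_CHARS + "]");
            }
            results.add(value);
        }
        System.out.println(results + " uses " + chars + " characters");
    }
}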

View File

@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.search.QueryVisitor;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
/**
 * Abstract base class for building queries based on {@link BooleanFieldScript}.
*/
abstract class AbstractBooleanScriptFieldQuery extends AbstractScriptFieldQuery<BooleanFieldScript> {
AbstractBooleanScriptFieldQuery(Script script, BooleanFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, fieldName, leafFactory::newInstance);
}
@Override
protected boolean matches(BooleanFieldScript scriptContext, int docId) {
scriptContext.runForDoc(docId);
return matches(scriptContext.trues(), scriptContext.falses());
}
/**
* Does the value match this query?
* @param trues the number of true values returned by the script
* @param falses the number of false values returned by the script
*/
protected abstract boolean matches(int trues, int falses);
@Override
public final void visit(QueryVisitor visitor) {
// No subclasses contain any Terms because those have to be strings.
if (visitor.acceptField(fieldName())) {
visitor.visitLeaf(this);
}
}
}

View File

@ -1,40 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.search.QueryVisitor;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
/**
* Abstract base class for building queries based on {@link DoubleFieldScript}.
*/
abstract class AbstractDoubleScriptFieldQuery extends AbstractScriptFieldQuery<DoubleFieldScript> {
AbstractDoubleScriptFieldQuery(Script script, DoubleFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, fieldName, leafFactory::newInstance);
}
@Override
protected boolean matches(DoubleFieldScript scriptContext, int docId) {
scriptContext.runForDoc(docId);
return matches(scriptContext.values(), scriptContext.count());
}
/**
* Does the value match this query?
*/
protected abstract boolean matches(double[] values, int count);
@Override
public final void visit(QueryVisitor visitor) {
// No subclasses contain any Terms because those have to be strings.
if (visitor.acceptField(fieldName())) {
visitor.visitLeaf(this);
}
}
}

View File

@ -1,42 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.net.InetAddress;
/**
 * Abstract base class for building queries based on {@link IpFieldScript}.
*/
abstract class AbstractIpScriptFieldQuery extends AbstractScriptFieldQuery<IpFieldScript> {
AbstractIpScriptFieldQuery(Script script, IpFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, fieldName, leafFactory::newInstance);
}
@Override
protected boolean matches(IpFieldScript scriptContext, int docId) {
scriptContext.runForDoc(docId);
return matches(scriptContext.values(), scriptContext.count());
}
/**
* Does the value match this query?
*/
protected abstract boolean matches(BytesRef[] values, int count);
protected static InetAddress decode(BytesRef ref) {
return InetAddressPoint.decode(BytesReference.toBytes(new BytesArray(ref)));
}
}

View File

@ -1,47 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.QueryVisitor;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.function.Function;
/**
* Abstract base class for building queries based on {@link AbstractLongFieldScript}.
*/
abstract class AbstractLongScriptFieldQuery extends AbstractScriptFieldQuery<AbstractLongFieldScript> {
AbstractLongScriptFieldQuery(
Script script,
Function<LeafReaderContext, AbstractLongFieldScript> scriptContextFunction,
String fieldName
) {
super(script, fieldName, scriptContextFunction);
}
@Override
protected boolean matches(AbstractLongFieldScript scriptContext, int docId) {
scriptContext.runForDoc(docId);
return AbstractLongScriptFieldQuery.this.matches(scriptContext.values(), scriptContext.count());
}
/**
* Does the value match this query?
*/
protected abstract boolean matches(long[] values, int count);
@Override
public final void visit(QueryVisitor visitor) {
// No subclasses contain any Terms because those have to be strings.
if (visitor.acceptField(fieldName())) {
visitor.visitLeaf(this);
}
}
}

View File

@ -1,101 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractFieldScript;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Function;
/**
* Abstract base class for building queries based on script fields.
*/
abstract class AbstractScriptFieldQuery<S extends AbstractFieldScript> extends Query {
/**
* We don't have the infrastructure to estimate the match cost of a script
* so we just use a big number.
*/
protected static final float MATCH_COST = 9000f;
private final Script script;
private final String fieldName;
private final Function<LeafReaderContext, S> scriptContextFunction;
AbstractScriptFieldQuery(Script script, String fieldName, Function<LeafReaderContext, S> scriptContextFunction) {
this.script = Objects.requireNonNull(script);
this.fieldName = Objects.requireNonNull(fieldName);
this.scriptContextFunction = scriptContextFunction;
}
final Function<LeafReaderContext, S> scriptContextFunction() {
return scriptContextFunction;
}
final Script script() {
return script;
}
final String fieldName() {
return fieldName;
}
@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
return new ConstantScoreWeight(this, boost) {
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return false; // scripts aren't really cacheable at this point
}
@Override
public Scorer scorer(LeafReaderContext ctx) {
S scriptContext = scriptContextFunction.apply(ctx);
DocIdSetIterator approximation = DocIdSetIterator.all(ctx.reader().maxDoc());
TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) {
@Override
public boolean matches() {
return AbstractScriptFieldQuery.this.matches(scriptContext, approximation.docID());
}
@Override
public float matchCost() {
return MATCH_COST;
}
};
return new ConstantScoreScorer(this, score(), scoreMode, twoPhase);
}
};
}
protected abstract boolean matches(S scriptContext, int docId);
@Override
public int hashCode() {
return Objects.hash(getClass(), script, fieldName);
}
@Override
public boolean equals(Object obj) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
AbstractScriptFieldQuery<?> other = (AbstractScriptFieldQuery<?>) obj;
return script.equals(other.script) && fieldName.equals(other.fieldName);
}
}
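The weight above wraps a match-all approximation in a TwoPhaseIterator so the script only runs during the confirmation phase, with a fixed, deliberately high match cost. A minimal standalone sketch of that pattern, assuming only lucene-core on the classpath and using a trivial predicate in place of the field script:

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.TwoPhaseIterator;

import java.io.IOException;

public class TwoPhaseSketch {
    public static void main(String[] args) throws IOException {
        DocIdSetIterator approximation = DocIdSetIterator.all(5); // docs 0..4
        TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) {
            @Override
            public boolean matches() {
                // Stand-in for running the field script against the current doc.
                return approximation.docID() % 2 == 0;
            }

            @Override
            public float matchCost() {
                return 9000f; // scripts are assumed to be expensive to run
            }
        };
        DocIdSetIterator disi = TwoPhaseIterator.asDocIdSetIterator(twoPhase);
        for (int doc = disi.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = disi.nextDoc()) {
            System.out.println("matched doc " + doc);
        }
    }
}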

View File

@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
public abstract class AbstractStringScriptFieldAutomatonQuery extends AbstractStringScriptFieldQuery {
private final BytesRefBuilder scratch = new BytesRefBuilder();
private final ByteRunAutomaton automaton;
public AbstractStringScriptFieldAutomatonQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
ByteRunAutomaton automaton
) {
super(script, leafFactory, fieldName);
this.automaton = automaton;
}
@Override
protected final boolean matches(List<String> values) {
for (String value : values) {
scratch.copyChars(value);
if (automaton.run(scratch.bytes(), 0, scratch.length())) {
return true;
}
}
return false;
}
@Override
public final void visit(QueryVisitor visitor) {
if (visitor.acceptField(fieldName())) {
visitor.consumeTermsMatching(this, fieldName(), () -> automaton);
}
}
}

View File

@ -1,32 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
/**
* Abstract base class for building queries based on {@link StringFieldScript}.
*/
abstract class AbstractStringScriptFieldQuery extends AbstractScriptFieldQuery<StringFieldScript> {
AbstractStringScriptFieldQuery(Script script, StringFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, fieldName, leafFactory::newInstance);
}
@Override
protected final boolean matches(StringFieldScript scriptContext, int docId) {
return matches(scriptContext.resultsForDoc(docId));
}
/**
* Does the value match this query?
*/
protected abstract boolean matches(List<String> values);
}

View File

@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
public class BooleanScriptFieldExistsQuery extends AbstractBooleanScriptFieldQuery {
public BooleanScriptFieldExistsQuery(Script script, BooleanFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, leafFactory, fieldName);
}
@Override
protected boolean matches(int trues, int falses) {
return (trues | falses) != 0;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return getClass().getSimpleName();
}
return fieldName() + ":" + getClass().getSimpleName();
}
// Superclass's equals and hashCode are great for this class
}

View File

@ -1,55 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
import java.util.Objects;
public class BooleanScriptFieldTermQuery extends AbstractBooleanScriptFieldQuery {
private final boolean term;
public BooleanScriptFieldTermQuery(Script script, BooleanFieldScript.LeafFactory leafFactory, String fieldName, boolean term) {
super(script, leafFactory, fieldName);
this.term = term;
}
@Override
protected boolean matches(int trues, int falses) {
if (term) {
return trues > 0;
}
return falses > 0;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return Boolean.toString(term);
}
return fieldName() + ":" + term;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
BooleanScriptFieldTermQuery other = (BooleanScriptFieldTermQuery) obj;
return term == other.term;
}
boolean term() {
return term;
}
}

View File

@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
public class DoubleScriptFieldExistsQuery extends AbstractDoubleScriptFieldQuery {
public DoubleScriptFieldExistsQuery(Script script, DoubleFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, leafFactory, fieldName);
}
@Override
protected boolean matches(double[] values, int count) {
return count > 0;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return getClass().getSimpleName();
}
return fieldName() + ":" + getClass().getSimpleName();
}
// Superclass's equals and hashCode are great for this class
}

View File

@ -1,72 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
import java.util.Objects;
public class DoubleScriptFieldRangeQuery extends AbstractDoubleScriptFieldQuery {
private final double lowerValue;
private final double upperValue;
public DoubleScriptFieldRangeQuery(
Script script,
DoubleFieldScript.LeafFactory leafFactory,
String fieldName,
double lowerValue,
double upperValue
) {
super(script, leafFactory, fieldName);
this.lowerValue = lowerValue;
this.upperValue = upperValue;
assert lowerValue <= upperValue;
}
@Override
protected boolean matches(double[] values, int count) {
for (int i = 0; i < count; i++) {
if (lowerValue <= values[i] && values[i] <= upperValue) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().contentEquals(field)) {
b.append(fieldName()).append(':');
}
b.append('[').append(lowerValue).append(" TO ").append(upperValue).append(']');
return b.toString();
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), lowerValue, upperValue);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
DoubleScriptFieldRangeQuery other = (DoubleScriptFieldRangeQuery) obj;
return lowerValue == other.lowerValue && upperValue == other.upperValue;
}
double lowerValue() {
return lowerValue;
}
double upperValue() {
return upperValue;
}
}

View File

@ -1,57 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
import java.util.Objects;
public class DoubleScriptFieldTermQuery extends AbstractDoubleScriptFieldQuery {
private final double term;
public DoubleScriptFieldTermQuery(Script script, DoubleFieldScript.LeafFactory leafFactory, String fieldName, double term) {
super(script, leafFactory, fieldName);
this.term = term;
}
@Override
protected boolean matches(double[] values, int count) {
for (int i = 0; i < count; i++) {
if (term == values[i]) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return Double.toString(term);
}
return fieldName() + ":" + term;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
DoubleScriptFieldTermQuery other = (DoubleScriptFieldTermQuery) obj;
return term == other.term;
}
double term() {
return term;
}
}

View File

@ -1,72 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.hppc.cursors.LongCursor;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
import java.util.Arrays;
import java.util.Objects;
public class DoubleScriptFieldTermsQuery extends AbstractDoubleScriptFieldQuery {
private final LongSet terms;
/**
* Build the query.
* @param terms The terms converted to a long with {@link Double#doubleToLongBits(double)}.
*/
public DoubleScriptFieldTermsQuery(Script script, DoubleFieldScript.LeafFactory leafFactory, String fieldName, LongSet terms) {
super(script, leafFactory, fieldName);
this.terms = terms;
}
@Override
protected boolean matches(double[] values, int count) {
for (int i = 0; i < count; i++) {
if (terms.contains(Double.doubleToLongBits(values[i]))) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
double[] termsArray = terms();
Arrays.sort(termsArray);
if (fieldName().equals(field)) {
return Arrays.toString(termsArray);
}
return fieldName() + ":" + Arrays.toString(termsArray);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), terms);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
DoubleScriptFieldTermsQuery other = (DoubleScriptFieldTermsQuery) obj;
return terms.equals(other.terms);
}
double[] terms() {
double[] result = new double[terms.size()];
int i = 0;
for (LongCursor lc : terms) {
result[i++] = Double.longBitsToDouble(lc.value);
}
return result;
}
}
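The terms set above stores doubles as their raw bit patterns. A small plain-Java illustration of that round trip, with illustrative values, showing why Double.doubleToLongBits gives exact set membership:

import java.util.HashSet;
import java.util.Set;

public class DoubleTermsSketch {
    public static void main(String[] args) {
        Set<Long> terms = new HashSet<>();
        terms.add(Double.doubleToLongBits(1.5));
        terms.add(Double.doubleToLongBits(2.75));

        double candidate = 1.5;
        // Exact bit-level membership check, as in matches(double[], int) above.
        boolean matches = terms.contains(Double.doubleToLongBits(candidate));
        System.out.println(candidate + " matches: " + matches); // true

        // Round-trip back to doubles, as terms() does above.
        for (long bits : terms) {
            System.out.println(Double.longBitsToDouble(bits));
        }
    }
}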

View File

@ -1,32 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
public class IpScriptFieldExistsQuery extends AbstractIpScriptFieldQuery {
public IpScriptFieldExistsQuery(Script script, IpFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, leafFactory, fieldName);
}
@Override
protected boolean matches(BytesRef[] values, int count) {
return count > 0;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return getClass().getSimpleName();
}
return fieldName() + ":" + getClass().getSimpleName();
}
// Superclass's equals and hashCode are great for this class
}

View File

@ -1,73 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.net.InetAddress;
import java.util.Objects;
public class IpScriptFieldRangeQuery extends AbstractIpScriptFieldQuery {
private final BytesRef lower;
private final BytesRef upper;
public IpScriptFieldRangeQuery(Script script, IpFieldScript.LeafFactory leafFactory, String fieldName, BytesRef lower, BytesRef upper) {
super(script, leafFactory, fieldName);
this.lower = lower;
this.upper = upper;
assert this.lower.compareTo(this.upper) <= 0;
}
@Override
protected boolean matches(BytesRef[] values, int count) {
for (int i = 0; i < count; i++) {
if (lower.compareTo(values[i]) <= 0 && upper.compareTo(values[i]) >= 0) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().contentEquals(field)) {
b.append(fieldName()).append(':');
}
b.append('[')
.append(InetAddresses.toAddrString(lowerAddress()))
.append(" TO ")
.append(InetAddresses.toAddrString(upperAddress()))
.append(']');
return b.toString();
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), lower, upper);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
IpScriptFieldRangeQuery other = (IpScriptFieldRangeQuery) obj;
return lower.bytesEquals(other.lower) && upper.bytesEquals(other.upper);
}
InetAddress lowerAddress() {
return decode(lower);
}
InetAddress upperAddress() {
return decode(upper);
}
}

View File

@ -1,60 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.net.InetAddress;
import java.util.Objects;
public class IpScriptFieldTermQuery extends AbstractIpScriptFieldQuery {
private final BytesRef term;
public IpScriptFieldTermQuery(Script script, IpFieldScript.LeafFactory leafFactory, String fieldName, BytesRef term) {
super(script, leafFactory, fieldName);
this.term = term;
}
@Override
protected boolean matches(BytesRef[] values, int count) {
for (int i = 0; i < count; i++) {
if (term.bytesEquals(values[i])) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return InetAddresses.toAddrString(address());
}
return fieldName() + ":" + InetAddresses.toAddrString(address());
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
IpScriptFieldTermQuery other = (IpScriptFieldTermQuery) obj;
return term.bytesEquals(other.term);
}
InetAddress address() {
return decode(term);
}
}

View File

@ -1,90 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.util.Objects;
public class IpScriptFieldTermsQuery extends AbstractIpScriptFieldQuery {
private final BytesRefHash terms;
public IpScriptFieldTermsQuery(Script script, IpFieldScript.LeafFactory leafFactory, String fieldName, BytesRefHash terms) {
super(script, leafFactory, fieldName);
this.terms = terms;
}
@Override
protected boolean matches(BytesRef[] values, int count) {
for (int i = 0; i < count; i++) {
if (terms.find(values[i]) >= 0) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().contentEquals(field)) {
b.append(fieldName()).append(":");
}
b.append("[");
BytesRef spare = new BytesRef();
long i = 0;
while (i < terms.size() && b.length() < 5000) {
if (i != 0) {
b.append(", ");
}
b.append(InetAddresses.toAddrString(decode(terms.get(i++, spare))));
}
if (i < terms.size()) {
b.append("...");
}
return b.append("]").toString();
}
@Override
public int hashCode() {
long hash = 0;
BytesRef spare = new BytesRef();
for (long i = 0; i < terms.size(); i++) {
hash = 31 * hash + terms.get(i, spare).hashCode();
}
return Objects.hash(super.hashCode(), hash);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
IpScriptFieldTermsQuery other = (IpScriptFieldTermsQuery) obj;
if (terms.size() != other.terms.size()) {
return false;
}
BytesRef mySpare = new BytesRef();
BytesRef otherSpare = new BytesRef();
for (long i = 0; i < terms.size(); i++) {
terms.get(i, mySpare);
other.terms.get(i, otherSpare);
if (false == mySpare.bytesEquals(otherSpare)) {
return false;
}
}
return true;
}
BytesRefHash terms() {
return terms;
}
}

View File

@ -1,221 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.io.IOException;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
public final class LongScriptFieldDistanceFeatureQuery extends AbstractScriptFieldQuery<AbstractLongFieldScript> {
private final long origin;
private final long pivot;
private final float boost;
public LongScriptFieldDistanceFeatureQuery(
Script script,
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory,
String fieldName,
long origin,
long pivot,
float boost
) {
super(script, fieldName, leafFactory);
this.origin = origin;
this.pivot = pivot;
this.boost = boost;
}
@Override
protected boolean matches(AbstractLongFieldScript scriptContext, int docId) {
scriptContext.runForDoc(docId);
return scriptContext.count() > 0;
}
@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
return new Weight(this) {
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return false;
}
@Override
public void extractTerms(Set<Term> terms) {}
@Override
public Scorer scorer(LeafReaderContext context) {
return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost);
}
@Override
public Explanation explain(LeafReaderContext context, int doc) {
AbstractLongFieldScript script = scriptContextFunction().apply(context);
script.runForDoc(doc);
long value = valueWithMinAbsoluteDistance(script);
float weight = LongScriptFieldDistanceFeatureQuery.this.boost * boost;
float score = score(weight, distanceFor(value));
return Explanation.match(
score,
"Distance score, computed as weight * pivot / (pivot + abs(value - origin)) from:",
Explanation.match(weight, "weight"),
Explanation.match(pivot, "pivot"),
Explanation.match(origin, "origin"),
Explanation.match(value, "current value")
);
}
};
}
private class DistanceScorer extends Scorer {
private final AbstractLongFieldScript script;
private final TwoPhaseIterator twoPhase;
private final DocIdSetIterator disi;
private final float weight;
protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) {
super(weight);
this.script = script;
twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) {
@Override
public boolean matches() {
return LongScriptFieldDistanceFeatureQuery.this.matches(script, approximation.docID());
}
@Override
public float matchCost() {
return MATCH_COST;
}
};
disi = TwoPhaseIterator.asDocIdSetIterator(twoPhase);
this.weight = LongScriptFieldDistanceFeatureQuery.this.boost * boost;
}
@Override
public int docID() {
return disi.docID();
}
@Override
public DocIdSetIterator iterator() {
return disi;
}
@Override
public TwoPhaseIterator twoPhaseIterator() {
return twoPhase;
}
@Override
public float getMaxScore(int upTo) {
return weight;
}
@Override
public float score() {
if (script.count() == 0) {
return 0;
}
return LongScriptFieldDistanceFeatureQuery.this.score(weight, (double) minAbsoluteDistance(script));
}
}
long minAbsoluteDistance(AbstractLongFieldScript script) {
long minDistance = Long.MAX_VALUE;
for (int i = 0; i < script.count(); i++) {
minDistance = Math.min(minDistance, distanceFor(script.values()[i]));
}
return minDistance;
}
long valueWithMinAbsoluteDistance(AbstractLongFieldScript script) {
long minDistance = Long.MAX_VALUE;
long minDistanceValue = Long.MAX_VALUE;
for (int i = 0; i < script.count(); i++) {
long distance = distanceFor(script.values()[i]);
if (distance < minDistance) {
minDistance = distance;
minDistanceValue = script.values()[i];
}
}
return minDistanceValue;
}
long distanceFor(long value) {
long distance = Math.max(value, origin) - Math.min(value, origin);
if (distance < 0) {
// The distance doesn't fit into signed long so clamp it to MAX_VALUE
return Long.MAX_VALUE;
}
return distance;
}
float score(float weight, double distance) {
return (float) (weight * (pivot / (pivot + distance)));
}
@Override
public String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().equals(field)) {
b.append(fieldName()).append(":");
}
b.append(getClass().getSimpleName());
b.append("(origin=").append(origin);
b.append(",pivot=").append(pivot);
b.append(",boost=").append(boost).append(")");
return b.toString();
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), origin, pivot, boost);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
LongScriptFieldDistanceFeatureQuery other = (LongScriptFieldDistanceFeatureQuery) obj;
return origin == other.origin && pivot == other.pivot && boost == other.boost;
}
@Override
public void visit(QueryVisitor visitor) {
// No subclasses contain any Terms because those have to be strings.
if (visitor.acceptField(fieldName())) {
visitor.visitLeaf(this);
}
}
long origin() {
return origin;
}
long pivot() {
return pivot;
}
float boost() {
return boost;
}
}
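A standalone sketch of the saturation-style scoring computed above, score = weight * pivot / (pivot + |value - origin|), in plain Java with illustrative numbers; the overflow clamp mirrors distanceFor:

public class DistanceScoreSketch {
    static long distanceFor(long value, long origin) {
        long distance = Math.max(value, origin) - Math.min(value, origin);
        return distance < 0 ? Long.MAX_VALUE : distance; // clamp on signed overflow
    }

    static float score(float weight, long pivot, double distance) {
        return (float) (weight * (pivot / (pivot + distance)));
    }

    public static void main(String[] args) {
        long origin = 0;
        long pivot = 10;
        System.out.println(score(1.0f, pivot, distanceFor(0, origin)));  // 1.0 at the origin
        System.out.println(score(1.0f, pivot, distanceFor(10, origin))); // 0.5 one pivot away
        System.out.println(score(1.0f, pivot, distanceFor(90, origin))); // 0.1 nine pivots away
    }
}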

View File

@ -1,34 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.function.Function;
public class LongScriptFieldExistsQuery extends AbstractLongScriptFieldQuery {
public LongScriptFieldExistsQuery(Script script, Function<LeafReaderContext, AbstractLongFieldScript> leafFactory, String fieldName) {
super(script, leafFactory, fieldName);
}
@Override
protected boolean matches(long[] values, int count) {
return count > 0;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return getClass().getSimpleName();
}
return fieldName() + ":" + getClass().getSimpleName();
}
// Superclass's equals and hashCode are great for this class
}

View File

@ -1,74 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.Objects;
import java.util.function.Function;
public class LongScriptFieldRangeQuery extends AbstractLongScriptFieldQuery {
private final long lowerValue;
private final long upperValue;
public LongScriptFieldRangeQuery(
Script script,
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory,
String fieldName,
long lowerValue,
long upperValue
) {
super(script, leafFactory, fieldName);
this.lowerValue = lowerValue;
this.upperValue = upperValue;
assert lowerValue <= upperValue;
}
@Override
protected boolean matches(long[] values, int count) {
for (int i = 0; i < count; i++) {
if (lowerValue <= values[i] && values[i] <= upperValue) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().contentEquals(field)) {
b.append(fieldName()).append(':');
}
b.append('[').append(lowerValue).append(" TO ").append(upperValue).append(']');
return b.toString();
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), lowerValue, upperValue);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
LongScriptFieldRangeQuery other = (LongScriptFieldRangeQuery) obj;
return lowerValue == other.lowerValue && upperValue == other.upperValue;
}
long lowerValue() {
return lowerValue;
}
long upperValue() {
return upperValue;
}
}

View File

@ -1,64 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.Objects;
import java.util.function.Function;
public class LongScriptFieldTermQuery extends AbstractLongScriptFieldQuery {
private final long term;
public LongScriptFieldTermQuery(
Script script,
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory,
String fieldName,
long term
) {
super(script, leafFactory, fieldName);
this.term = term;
}
@Override
protected boolean matches(long[] values, int count) {
for (int i = 0; i < count; i++) {
if (term == values[i]) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return Long.toString(term);
}
return fieldName() + ":" + term;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
LongScriptFieldTermQuery other = (LongScriptFieldTermQuery) obj;
return term == other.term;
}
long term() {
return term;
}
}

View File

@ -1,65 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import com.carrotsearch.hppc.LongSet;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.Objects;
import java.util.function.Function;
public class LongScriptFieldTermsQuery extends AbstractLongScriptFieldQuery {
private final LongSet terms;
public LongScriptFieldTermsQuery(
Script script,
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory,
String fieldName,
LongSet terms
) {
super(script, leafFactory, fieldName);
this.terms = terms;
}
@Override
protected boolean matches(long[] values, int count) {
for (int i = 0; i < count; i++) {
if (terms.contains(values[i])) {
return true;
}
}
return false;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return terms.toString();
}
return fieldName() + ":" + terms;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), terms);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
LongScriptFieldTermsQuery other = (LongScriptFieldTermsQuery) obj;
return terms.equals(other.terms);
}
LongSet terms() {
return terms;
}
}

View File

@ -1,33 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
public class StringScriptFieldExistsQuery extends AbstractStringScriptFieldQuery {
public StringScriptFieldExistsQuery(Script script, StringFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, leafFactory, fieldName);
}
@Override
protected boolean matches(List<String> values) {
return false == values.isEmpty();
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return getClass().getSimpleName();
}
return fieldName() + ":" + getClass().getSimpleName();
}
// Superclass's equals and hashCode are great for this class
}

View File

@ -1,68 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.Objects;
public class StringScriptFieldFuzzyQuery extends AbstractStringScriptFieldAutomatonQuery {
public static StringScriptFieldFuzzyQuery build(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
String term,
int maxEdits,
int prefixLength,
boolean transpositions
) {
int maxExpansions = 1; // We don't actually expand anything so the value here doesn't matter
FuzzyQuery delegate = new FuzzyQuery(new Term(fieldName, term), maxEdits, prefixLength, maxExpansions, transpositions);
ByteRunAutomaton automaton = delegate.getAutomata().runAutomaton;
return new StringScriptFieldFuzzyQuery(script, leafFactory, fieldName, automaton, delegate);
}
private final FuzzyQuery delegate;
private StringScriptFieldFuzzyQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
ByteRunAutomaton automaton,
FuzzyQuery delegate
) {
super(script, leafFactory, fieldName, automaton);
this.delegate = delegate;
}
@Override
public final String toString(String field) {
return delegate.toString(field);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), delegate);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldFuzzyQuery other = (StringScriptFieldFuzzyQuery) obj;
return delegate.equals(other.delegate);
}
FuzzyQuery delegate() {
return delegate;
}
}

View File

@ -1,112 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.common.lucene.search.AutomatonQueries;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
import java.util.Objects;
public class StringScriptFieldPrefixQuery extends AbstractStringScriptFieldQuery {
private final String prefix;
private final boolean caseInsensitive;
public StringScriptFieldPrefixQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
String prefix,
boolean caseInsensitive
) {
super(script, leafFactory, fieldName);
this.prefix = Objects.requireNonNull(prefix);
this.caseInsensitive = caseInsensitive;
}
@Override
protected boolean matches(List<String> values) {
for (String value : values) {
if (startsWith(value, prefix, caseInsensitive)) {
return true;
}
}
return false;
}
/**
* <p>Check if a String starts with a specified prefix (optionally case insensitive).</p>
*
* @see java.lang.String#startsWith(String)
* @param str the String to check, may be null
* @param prefix the prefix to find, may be null
* @param ignoreCase indicates whether the comparison should ignore case
* (case insensitive) or not.
* @return <code>true</code> if the String starts with the prefix or
* if both are <code>null</code>
*/
private static boolean startsWith(String str, String prefix, boolean ignoreCase) {
if (str == null || prefix == null) {
return (str == null && prefix == null);
}
if (prefix.length() > str.length()) {
return false;
}
return str.regionMatches(ignoreCase, 0, prefix, 0, prefix.length());
}
@Override
public void visit(QueryVisitor visitor) {
if (visitor.acceptField(fieldName())) {
visitor.consumeTermsMatching(this, fieldName(), () -> new ByteRunAutomaton(buildAutomaton(new BytesRef(prefix))));
}
}
Automaton buildAutomaton(BytesRef prefix) {
if (caseInsensitive) {
return AutomatonQueries.caseInsensitivePrefix(prefix.utf8ToString());
} else {
return PrefixQuery.toAutomaton(prefix);
}
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return prefix + "*";
}
return fieldName() + ":" + prefix + "*";
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), prefix, caseInsensitive);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldPrefixQuery other = (StringScriptFieldPrefixQuery) obj;
return prefix.equals(other.prefix) && caseInsensitive == other.caseInsensitive;
}
String prefix() {
return prefix;
}
boolean caseInsensitive() {
return caseInsensitive;
}
}
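The prefix check above relies on String#regionMatches, so the case-insensitive path avoids allocating lowercased copies. A tiny plain-Java demonstration with illustrative inputs:

public class PrefixCheckSketch {
    public static void main(String[] args) {
        String value = "FooBar";
        System.out.println(value.regionMatches(true, 0, "foo", 0, 3));  // true (case-insensitive)
        System.out.println(value.regionMatches(false, 0, "foo", 0, 3)); // false (case-sensitive)
    }
}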

View File

@ -1,115 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
import java.util.Objects;
public class StringScriptFieldRangeQuery extends AbstractStringScriptFieldQuery {
private final String lowerValue;
private final String upperValue;
private final boolean includeLower;
private final boolean includeUpper;
public StringScriptFieldRangeQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
String lowerValue,
String upperValue,
boolean includeLower,
boolean includeUpper
) {
super(script, leafFactory, fieldName);
this.lowerValue = Objects.requireNonNull(lowerValue);
this.upperValue = Objects.requireNonNull(upperValue);
this.includeLower = includeLower;
this.includeUpper = includeUpper;
assert lowerValue.compareTo(upperValue) <= 0;
}
@Override
protected boolean matches(List<String> values) {
for (String value : values) {
int lct = lowerValue.compareTo(value);
boolean lowerOk = includeLower ? lct <= 0 : lct < 0;
if (lowerOk) {
int uct = upperValue.compareTo(value);
boolean upperOk = includeUpper ? uct >= 0 : uct > 0;
if (upperOk) {
return true;
}
}
}
return false;
}
@Override
public void visit(QueryVisitor visitor) {
if (visitor.acceptField(fieldName())) {
visitor.consumeTermsMatching(
this,
fieldName(),
() -> new ByteRunAutomaton(
Automata.makeBinaryInterval(new BytesRef(lowerValue), includeLower, new BytesRef(upperValue), includeUpper)
)
);
}
}
@Override
public final String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().contentEquals(field)) {
b.append(fieldName()).append(':');
}
b.append(includeLower ? '[' : '{');
b.append(lowerValue).append(" TO ").append(upperValue);
b.append(includeUpper ? ']' : '}');
return b.toString();
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), lowerValue, upperValue, includeLower, includeUpper);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldRangeQuery other = (StringScriptFieldRangeQuery) obj;
return lowerValue.equals(other.lowerValue)
&& upperValue.equals(other.upperValue)
&& includeLower == other.includeLower
&& includeUpper == other.includeUpper;
}
String lowerValue() {
return lowerValue;
}
String upperValue() {
return upperValue;
}
boolean includeLower() {
return includeLower;
}
boolean includeUpper() {
return includeUpper;
}
}

View File

@ -1,75 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.Objects;
public class StringScriptFieldRegexpQuery extends AbstractStringScriptFieldAutomatonQuery {
private final String pattern;
private final int syntaxFlags;
private final int matchFlags;
public StringScriptFieldRegexpQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
String pattern,
int syntaxFlags,
int matchFlags,
int maxDeterminizedStates
) {
super(
script,
leafFactory,
fieldName,
new ByteRunAutomaton(new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates))
);
this.pattern = pattern;
this.syntaxFlags = syntaxFlags;
this.matchFlags = matchFlags;
}
@Override
public final String toString(String field) {
StringBuilder b = new StringBuilder();
if (false == fieldName().contentEquals(field)) {
b.append(fieldName()).append(':');
}
return b.append('/').append(pattern).append('/').toString();
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), pattern, syntaxFlags, matchFlags);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldRegexpQuery other = (StringScriptFieldRegexpQuery) obj;
return pattern.equals(other.pattern) && syntaxFlags == other.syntaxFlags && matchFlags == other.matchFlags;
}
String pattern() {
return pattern;
}
int syntaxFlags() {
return syntaxFlags;
}
int matchFlags() {
return matchFlags;
}
}

View File

@ -1,81 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.QueryVisitor;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
import java.util.Objects;
public class StringScriptFieldTermQuery extends AbstractStringScriptFieldQuery {
private final String term;
private final boolean caseInsensitive;
public StringScriptFieldTermQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
String term,
boolean caseInsensitive
) {
super(script, leafFactory, fieldName);
this.term = Objects.requireNonNull(term);
this.caseInsensitive = caseInsensitive;
}
@Override
protected boolean matches(List<String> values) {
for (String value : values) {
if (caseInsensitive) {
if (term.equalsIgnoreCase(value)) {
return true;
}
} else if (term.equals(value)) {
return true;
}
}
return false;
}
@Override
public void visit(QueryVisitor visitor) {
visitor.consumeTerms(this, new Term(fieldName(), term));
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return term;
}
return fieldName() + ":" + term;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term, caseInsensitive);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldTermQuery other = (StringScriptFieldTermQuery) obj;
return term.equals(other.term) && caseInsensitive == other.caseInsensitive;
}
String term() {
return term;
}
boolean caseInsensitive() {
return caseInsensitive;
}
}

View File

@ -1,70 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.QueryVisitor;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.List;
import java.util.Objects;
import java.util.Set;
public class StringScriptFieldTermsQuery extends AbstractStringScriptFieldQuery {
private final Set<String> terms;
public StringScriptFieldTermsQuery(Script script, StringFieldScript.LeafFactory leafFactory, String fieldName, Set<String> terms) {
super(script, leafFactory, fieldName);
this.terms = terms;
}
@Override
protected boolean matches(List<String> values) {
for (String value : values) {
if (terms.contains(value)) {
return true;
}
}
return false;
}
@Override
public void visit(QueryVisitor visitor) {
if (visitor.acceptField(fieldName())) {
for (String term : terms) {
visitor.consumeTerms(this, new Term(fieldName(), term));
}
}
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return terms.toString();
}
return fieldName() + ":" + terms;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), terms);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldTermsQuery other = (StringScriptFieldTermsQuery) obj;
return terms.equals(other.terms);
}
Set<String> terms() {
return terms;
}
}

View File

@ -1,76 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.common.lucene.search.AutomatonQueries;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.Objects;
public class StringScriptFieldWildcardQuery extends AbstractStringScriptFieldAutomatonQuery {
private final String pattern;
private final boolean caseInsensitive;
public StringScriptFieldWildcardQuery(
Script script,
StringFieldScript.LeafFactory leafFactory,
String fieldName,
String pattern,
boolean caseInsensitive
) {
super(
script,
leafFactory,
fieldName,
new ByteRunAutomaton(buildAutomaton(new Term(fieldName, Objects.requireNonNull(pattern)), caseInsensitive))
);
this.pattern = pattern;
this.caseInsensitive = caseInsensitive;
}
private static Automaton buildAutomaton(Term term, boolean caseInsensitive) {
if (caseInsensitive) {
return AutomatonQueries.toCaseInsensitiveWildcardAutomaton(term, Integer.MAX_VALUE);
}
return WildcardQuery.toAutomaton(term);
}
@Override
public final String toString(String field) {
if (fieldName().equals(field)) {
return pattern;
}
return fieldName() + ":" + pattern;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), pattern, caseInsensitive);
}
@Override
public boolean equals(Object obj) {
if (false == super.equals(obj)) {
return false;
}
StringScriptFieldWildcardQuery other = (StringScriptFieldWildcardQuery) obj;
return pattern.equals(other.pattern) && caseInsensitive == other.caseInsensitive;
}
String pattern() {
return pattern;
}
boolean caseInsensitive() {
return caseInsensitive;
}
}
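The query above compiles the wildcard pattern into an automaton exactly once (case-insensitively when requested) and then runs every script-emitted value through it as UTF-8 bytes. A small sketch of that path using the same Lucene utilities; it assumes Lucene on the classpath, and the field name and pattern are made up:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.ByteRunAutomaton;

public class WildcardAutomatonSketch {
    public static void main(String[] args) {
        // Compile the pattern once, as the query's constructor does above.
        Automaton automaton = WildcardQuery.toAutomaton(new Term("field", "b*r"));
        ByteRunAutomaton run = new ByteRunAutomaton(automaton);
        // Run a candidate emitted value through the automaton as UTF-8 bytes.
        BytesRef value = new BytesRef("bar");
        System.out.println(run.run(value.bytes, value.offset, value.length)); // true
    }
}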

View File

@ -1 +0,0 @@
org.elasticsearch.xpack.runtimefields.mapper.RuntimeFieldsPainlessExtension

View File

@ -1,21 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
# The whitelist for boolean-valued runtime fields
# These two whitelist entries are required for painless to find the classes
class org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript @no_import {
}
class org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript$Factory @no_import {
}
static_import {
# The `emit` callback to collect values for the field
void emit(org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript, boolean) bound_to org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript$Emit
# Parse a value from the source to a boolean
boolean parse(def) from_class org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript
}
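As a rough illustration of what these bindings point at, here is a hedged Java sketch of a BooleanFieldScript factory in the style of the test factories further down in this commit. The "flag" source field is hypothetical, and the fragment only compiles against the removed runtime-fields module:

BooleanFieldScript.Factory factory =
    (fieldName, params, lookup) -> ctx -> new BooleanFieldScript(fieldName, params, lookup, ctx) {
        @Override
        public void execute() {
            // Walk a _source field and emit one boolean per value, coercing with the whitelisted parse(def).
            for (Object value : (List<?>) getSource().get("flag")) {
                emit(parse(value));
            }
        }
    };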

View File

@ -1,25 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
# The whitelist for date-valued runtime fields
# These two whitelist entries are required for painless to find the classes
class org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript @no_import {
}
class org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript$Factory @no_import {
}
static_import {
# The `emit` callback to collect values for the field
void emit(org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript, long) bound_to org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript$Emit
# Parse a value from the source to millis since epoch
long parse(org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript, def) bound_to org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript$Parse
# Add an easy method to convert temporalAccessors to millis since epoch.
long toEpochMilli(java.time.temporal.TemporalAccessor) from_class org.elasticsearch.xpack.runtimefields.mapper.DateFieldScript
}
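For the date bindings, a similarly hedged sketch modeled on the "add_days" factory in DateScriptFieldTypeTests below: read epoch millis from _source, shift them by a day, and feed the result back through the whitelisted toEpochMilli helper before emitting. Again, this only compiles against the removed module:

DateFieldScript.Factory factory =
    (fieldName, params, lookup, formatter) -> ctx -> new DateFieldScript(fieldName, params, lookup, formatter, ctx) {
        @Override
        public void execute() {
            for (Object timestamp : (List<?>) getSource().get("timestamp")) {
                // "timestamp" mirrors the source field used by the tests below.
                ZonedDateTime dt = ZonedDateTime.ofInstant(Instant.ofEpochMilli((Long) timestamp), ZoneId.of("UTC"));
                emit(toEpochMilli(dt.plus(1, ChronoUnit.DAYS)));
            }
        }
    };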

View File

@ -1,19 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
# The whitelist for double-valued runtime fields
# These two whitelist entries are required for painless to find the classes
class org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript @no_import {
}
class org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript$Factory @no_import {
}
static_import {
# The `emit` callback to collect values for the field
void emit(org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript, double) bound_to org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript$Emit
}

View File

@ -1,18 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
# The whitelist for ip-valued runtime fields
# These two whitelist entries are required for painless to find the classes
class org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript @no_import {
}
class org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript$Factory @no_import {
}
static_import {
# The `emit` callback to collect values for the field
void emit(org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript, String) bound_to org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript$Emit
}

View File

@ -1,18 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
# The whitelist for long-valued runtime fields
# These two whitelist entries are required for painless to find the classes
class org.elasticsearch.xpack.runtimefields.mapper.LongFieldScript @no_import {
}
class org.elasticsearch.xpack.runtimefields.mapper.LongFieldScript$Factory @no_import {
}
static_import {
# The `emit` callback to collect values for the field
void emit(org.elasticsearch.xpack.runtimefields.mapper.LongFieldScript, long) bound_to org.elasticsearch.xpack.runtimefields.mapper.LongFieldScript$Emit
}

View File

@ -1,18 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
# The whitelist for string-valued runtime fields
# These two whitelist entries are required for painless to find the classes
class org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript @no_import {
}
class org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript$Factory @no_import {
}
static_import {
# The `emit` callback to collect values for the field
void emit(org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript, String) bound_to org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript$Emit
}

View File

@ -1,66 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.unit.Fuzziness;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
abstract class AbstractNonTextScriptFieldTypeTestCase extends AbstractScriptFieldTypeTestCase {
public void testFuzzyQueryIsError() throws IOException {
assertQueryOnlyOnTextAndKeyword(
"fuzzy",
() -> simpleMappedFieldType().fuzzyQuery("cat", Fuzziness.AUTO, 0, 1, true, mockContext())
);
}
public void testPrefixQueryIsError() throws IOException {
assertQueryOnlyOnTextKeywordAndWildcard("prefix", () -> simpleMappedFieldType().prefixQuery("cat", null, mockContext()));
}
public void testRegexpQueryIsError() throws IOException {
assertQueryOnlyOnTextAndKeyword(
"regexp",
() -> simpleMappedFieldType().regexpQuery("cat", 0, 0, Operations.DEFAULT_MAX_DETERMINIZED_STATES, null, mockContext())
);
}
public void testWildcardQueryIsError() throws IOException {
assertQueryOnlyOnTextKeywordAndWildcard("wildcard", () -> simpleMappedFieldType().wildcardQuery("cat", null, mockContext()));
}
private void assertQueryOnlyOnTextAndKeyword(String queryName, ThrowingRunnable buildQuery) {
Exception e = expectThrows(IllegalArgumentException.class, buildQuery);
assertThat(
e.getMessage(),
equalTo(
"Can only use "
+ queryName
+ " queries on keyword and text fields - not on [test] which is of type [runtime] with runtime_type ["
+ runtimeType()
+ "]"
)
);
}
private void assertQueryOnlyOnTextKeywordAndWildcard(String queryName, ThrowingRunnable buildQuery) {
Exception e = expectThrows(IllegalArgumentException.class, buildQuery);
assertThat(
e.getMessage(),
equalTo(
"Can only use "
+ queryName
+ " queries on keyword, text and wildcard fields - not on [test] which is of type [runtime] with runtime_type ["
+ runtimeType()
+ "]"
)
);
}
}

View File

@ -1,178 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.function.BiConsumer;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
abstract class AbstractScriptFieldTypeTestCase extends ESTestCase {
protected abstract MappedFieldType simpleMappedFieldType() throws IOException;
protected abstract MappedFieldType loopFieldType() throws IOException;
protected abstract String runtimeType();
@SuppressWarnings("unused")
public abstract void testDocValues() throws IOException;
@SuppressWarnings("unused")
public abstract void testSort() throws IOException;
@SuppressWarnings("unused")
public abstract void testUsedInScript() throws IOException;
@SuppressWarnings("unused")
public abstract void testExistsQuery() throws IOException;
@SuppressWarnings("unused")
public abstract void testRangeQuery() throws IOException;
protected abstract Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx);
@SuppressWarnings("unused")
public abstract void testTermQuery() throws IOException;
protected abstract Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx);
@SuppressWarnings("unused")
public abstract void testTermsQuery() throws IOException;
protected abstract Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx);
protected static QueryShardContext mockContext() {
return mockContext(true);
}
protected static QueryShardContext mockContext(boolean allowExpensiveQueries) {
return mockContext(allowExpensiveQueries, null);
}
protected static QueryShardContext mockContext(boolean allowExpensiveQueries, MappedFieldType mappedFieldType) {
MapperService mapperService = mock(MapperService.class);
when(mapperService.fieldType(anyString())).thenReturn(mappedFieldType);
QueryShardContext context = mock(QueryShardContext.class);
when(context.getMapperService()).thenReturn(mapperService);
if (mappedFieldType != null) {
when(context.fieldMapper(anyString())).thenReturn(mappedFieldType);
when(context.getSearchAnalyzer(any())).thenReturn(mappedFieldType.getTextSearchInfo().getSearchAnalyzer());
}
when(context.allowExpensiveQueries()).thenReturn(allowExpensiveQueries);
SearchLookup lookup = new SearchLookup(
mapperService,
(mft, lookupSupplier) -> mft.fielddataBuilder("test", lookupSupplier).build(null, null),
null
);
when(context.lookup()).thenReturn(lookup);
return context;
}
public void testExistsQueryIsExpensive() {
checkExpensiveQuery(MappedFieldType::existsQuery);
}
public void testExistsQueryInLoop() {
checkLoop(MappedFieldType::existsQuery);
}
public void testRangeQueryWithShapeRelationIsError() {
Exception e = expectThrows(
IllegalArgumentException.class,
() -> simpleMappedFieldType().rangeQuery(1, 2, true, true, ShapeRelation.DISJOINT, null, null, null)
);
assertThat(
e.getMessage(),
equalTo("Field [test] of type [runtime] with runtime type [" + runtimeType() + "] does not support DISJOINT ranges")
);
}
public void testRangeQueryIsExpensive() {
checkExpensiveQuery(this::randomRangeQuery);
}
public void testRangeQueryInLoop() {
checkLoop(this::randomRangeQuery);
}
public void testTermQueryIsExpensive() {
checkExpensiveQuery(this::randomTermQuery);
}
public void testTermQueryInLoop() {
checkLoop(this::randomTermQuery);
}
public void testTermsQueryIsExpensive() {
checkExpensiveQuery(this::randomTermsQuery);
}
public void testTermsQueryInLoop() {
checkLoop(this::randomTermsQuery);
}
public void testPhraseQueryIsError() {
assertQueryOnlyOnText("phrase", () -> simpleMappedFieldType().phraseQuery(null, 1, false));
}
public void testPhrasePrefixQueryIsError() {
assertQueryOnlyOnText("phrase prefix", () -> simpleMappedFieldType().phrasePrefixQuery(null, 1, 1));
}
public void testMultiPhraseQueryIsError() {
assertQueryOnlyOnText("phrase", () -> simpleMappedFieldType().multiPhraseQuery(null, 1, false));
}
public void testSpanPrefixQueryIsError() {
assertQueryOnlyOnText("span prefix", () -> simpleMappedFieldType().spanPrefixQuery(null, null, null));
}
private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery) {
Exception e = expectThrows(IllegalArgumentException.class, buildQuery);
assertThat(
e.getMessage(),
equalTo(
"Can only use "
+ queryName
+ " queries on text fields - not on [test] which is of type [runtime] with runtime_type ["
+ runtimeType()
+ "]"
)
);
}
protected String readSource(IndexReader reader, int docId) throws IOException {
return reader.document(docId).getBinaryValue("_source").utf8ToString();
}
protected final void checkExpensiveQuery(BiConsumer<MappedFieldType, QueryShardContext> queryBuilder) {
Exception e = expectThrows(ElasticsearchException.class, () -> queryBuilder.accept(simpleMappedFieldType(), mockContext(false)));
assertThat(
e.getMessage(),
equalTo("queries cannot be executed against [runtime] fields while [search.allow_expensive_queries] is set to [false].")
);
}
protected final void checkLoop(BiConsumer<MappedFieldType, QueryShardContext> queryBuilder) {
Exception e = expectThrows(IllegalArgumentException.class, () -> queryBuilder.accept(loopFieldType(), mockContext()));
assertThat(e.getMessage(), equalTo("Cyclic dependency detected while resolving runtime fields: test -> test"));
}
}

View File

@ -1,67 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import static org.mockito.Mockito.mock;
public class BooleanFieldScriptTests extends FieldScriptTestCase<BooleanFieldScript.Factory> {
public static final BooleanFieldScript.Factory DUMMY = (fieldName, params, lookup) -> ctx -> new BooleanFieldScript(
fieldName,
params,
lookup,
ctx
) {
@Override
public void execute() {
emit(false);
}
};
@Override
protected ScriptContext<BooleanFieldScript.Factory> context() {
return BooleanFieldScript.CONTEXT;
}
@Override
protected BooleanFieldScript.Factory dummyScript() {
return DUMMY;
}
public void testTooManyValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
BooleanFieldScript script = new BooleanFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
reader.leaves().get(0)
) {
@Override
public void execute() {
for (int i = 0; i <= AbstractFieldScript.MAX_VALUES * 1000; i++) {
emit(i % 2 == 0);
}
}
};
// There isn't a limit to the number of values so this won't throw
script.execute();
}
}
}
}

View File

@ -1,525 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.fielddata.BooleanScriptFieldData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase {
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"true\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}"))));
List<Long> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
BooleanScriptFieldType ft = simpleMappedFieldType();
BooleanScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) {
SortedNumericDocValues dv = ifd.load(context).getLongValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(dv.nextValue());
}
}
}
};
}
});
assertThat(results, equalTo(org.elasticsearch.common.collect.List.of(1L, 0L, 1L)));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}"));
assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}"));
}
}
}
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext qsc = mockContext(true, simpleMappedFieldType());
assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public ScoreScript newInstance(LeafReaderContext ctx) {
return new ScoreScript(org.elasticsearch.common.collect.Map.of(), qsc.lookup(), ctx) {
@Override
public double execute(ExplanationHolder explanation) {
ScriptDocValues.Booleans booleans = (ScriptDocValues.Booleans) getDoc().get("test");
return booleans.get(0) ? 3 : 0;
}
};
}
}, 2.5f, "test", 0, Version.CURRENT)), equalTo(1));
}
}
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(3));
}
}
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, false, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(0));
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, false, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(0));
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(2));
assertThat(searcher.count(ft.rangeQuery(false, false, false, false, null, null, null, mockContext())), equalTo(0));
assertThat(searcher.count(ft.rangeQuery(true, true, false, false, null, null, null, mockContext())), equalTo(0));
}
}
}
public void testRangeQueryDegeneratesIntoNotExpensive() throws IOException {
assertThat(
simpleMappedFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
assertThat(
simpleMappedFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
// Even if running the field would blow up because it loops, the query *still* just returns none.
assertThat(
loopFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
assertThat(
loopFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx) {
// Builds a random range query that doesn't degenerate into match none
switch (randomInt(2)) {
case 0:
return ft.rangeQuery(true, true, true, true, null, null, null, ctx);
case 1:
return ft.rangeQuery(false, true, true, true, null, null, null, ctx);
case 2:
return ft.rangeQuery(false, true, false, true, null, null, null, ctx);
default:
throw new UnsupportedOperationException();
}
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery("true", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(0));
assertThat(
searcher.count(
build("xor_param", org.elasticsearch.common.collect.Map.of("param", false)).termQuery(true, mockContext())
),
equalTo(1)
);
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery("false", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(null, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(0));
assertThat(
searcher.count(
build("xor_param", org.elasticsearch.common.collect.Map.of("param", false)).termQuery(false, mockContext())
),
equalTo(1)
);
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termQuery(randomBoolean(), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(true, true), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of("true", "true"), mockContext())
),
equalTo(1)
);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(false, false), mockContext())
),
equalTo(0)
);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(true, false), mockContext())
),
equalTo(1)
);
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(false, false), mockContext())
),
equalTo(1)
);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of("false", "false"), mockContext())
),
equalTo(1)
);
assertThat(searcher.count(simpleMappedFieldType().termsQuery(singletonList(null), mockContext())), equalTo(1));
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(true, true), mockContext())),
equalTo(0)
);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(true, false), mockContext())
),
equalTo(1)
);
}
}
}
public void testEmptyTermsQueryDegeneratesIntoMatchNone() throws IOException {
assertThat(
simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(), mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx) {
switch (randomInt(2)) {
case 0:
return ft.termsQuery(org.elasticsearch.common.collect.List.of(true), ctx);
case 1:
return ft.termsQuery(org.elasticsearch.common.collect.List.of(false), ctx);
case 2:
return ft.termsQuery(org.elasticsearch.common.collect.List.of(false, true), ctx);
default:
throw new UnsupportedOperationException();
}
}
public void testDualingQueries() throws IOException {
BooleanFieldMapper ootb = new BooleanFieldMapper.Builder("foo").build(new BuilderContext(Settings.EMPTY, new ContentPath()));
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
List<Boolean> values = randomList(0, 2, ESTestCase::randomBoolean);
String source = "{\"foo\": " + values + "}";
ParseContext ctx = mock(ParseContext.class);
when(ctx.parser()).thenReturn(createParser(JsonXContent.jsonXContent, source));
ParseContext.Document doc = new ParseContext.Document();
when(ctx.doc()).thenReturn(doc);
when(ctx.sourceToParse()).thenReturn(new SourceToParse("test", "test", "test", new BytesArray(source), XContentType.JSON));
doc.add(new StoredField("_source", new BytesRef(source)));
ctx.parser().nextToken();
ctx.parser().nextToken();
ctx.parser().nextToken();
while (ctx.parser().nextToken() != Token.END_ARRAY) {
ootb.parse(ctx);
}
iw.addDocument(doc);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertSameCount(
searcher,
source,
"*",
simpleMappedFieldType().existsQuery(mockContext()),
ootb.fieldType().existsQuery(mockContext())
);
boolean term = randomBoolean();
assertSameCount(
searcher,
source,
term,
simpleMappedFieldType().termQuery(term, mockContext()),
ootb.fieldType().termQuery(term, mockContext())
);
List<Boolean> terms = randomList(0, 3, ESTestCase::randomBoolean);
assertSameCount(
searcher,
source,
terms,
simpleMappedFieldType().termsQuery(terms, mockContext()),
ootb.fieldType().termsQuery(terms, mockContext())
);
boolean low;
boolean high;
if (randomBoolean()) {
low = high = randomBoolean();
} else {
low = false;
high = true;
}
boolean includeLow = randomBoolean();
boolean includeHigh = randomBoolean();
assertSameCount(
searcher,
source,
(includeLow ? "[" : "(") + low + "," + high + (includeHigh ? "]" : ")"),
simpleMappedFieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()),
ootb.fieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext())
);
}
}
}
private void assertSameCount(IndexSearcher searcher, String source, Object queryDescription, Query scriptedQuery, Query ootbQuery)
throws IOException {
assertThat(
"source=" + source + ",query=" + queryDescription + ",scripted=" + scriptedQuery + ",ootb=" + ootbQuery,
searcher.count(scriptedQuery),
equalTo(searcher.count(ootbQuery))
);
}
@Override
protected BooleanScriptFieldType simpleMappedFieldType() throws IOException {
return build("read_foo", org.elasticsearch.common.collect.Map.of());
}
@Override
protected MappedFieldType loopFieldType() throws IOException {
return build("loop", org.elasticsearch.common.collect.Map.of());
}
@Override
protected String runtimeType() {
return "boolean";
}
private static BooleanScriptFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, "test", code, params));
}
private static BooleanScriptFieldType build(Script script) throws IOException {
ScriptPlugin scriptPlugin = new ScriptPlugin() {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return org.elasticsearch.common.collect.Set.of(DoubleFieldScript.CONTEXT);
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType factory = (FactoryType) factory(code);
return factory;
}
private BooleanFieldScript.Factory factory(String code) {
switch (code) {
case "read_foo":
return (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(parse(foo));
}
}
};
case "xor_param":
return (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit((Boolean) foo ^ ((Boolean) getParams().get("param")));
}
}
};
case "loop":
return (fieldName, params, lookup) -> {
// Indicate that this script wants the field called "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
default:
throw new IllegalArgumentException("unsupported script [" + code + "]");
}
}
};
}
};
ScriptModule scriptModule = new ScriptModule(
Settings.EMPTY,
org.elasticsearch.common.collect.List.of(scriptPlugin, new RuntimeFields())
);
try (ScriptService scriptService = new ScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts)) {
BooleanFieldScript.Factory factory = scriptService.compile(script, BooleanFieldScript.CONTEXT);
return new BooleanScriptFieldType("test", script, factory, emptyMap());
}
}
}

View File

@ -1,74 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class DateFieldScriptTests extends FieldScriptTestCase<DateFieldScript.Factory> {
public static final DateFieldScript.Factory DUMMY = (fieldName, params, lookup, formatter) -> ctx -> new DateFieldScript(
fieldName,
params,
lookup,
formatter,
ctx
) {
@Override
public void execute() {
emit(1595431354874L);
}
};
@Override
protected ScriptContext<DateFieldScript.Factory> context() {
return DateFieldScript.CONTEXT;
}
@Override
protected DateFieldScript.Factory dummyScript() {
return DUMMY;
}
public void testTooManyValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
DateFieldScript script = new DateFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(randomLocale(random())),
reader.leaves().get(0)
) {
@Override
public void execute() {
for (int i = 0; i <= AbstractFieldScript.MAX_VALUES; i++) {
emit(0);
}
}
};
Exception e = expectThrows(IllegalArgumentException.class, script::execute);
assertThat(
e.getMessage(),
equalTo("Runtime field [test] is emitting [101] values while the maximum number of values allowed is [100]")
);
}
}
}
}

View File

@ -1,581 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.fielddata.DateScriptFieldData;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class DateScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase {
public void testFormat() throws IOException {
assertThat(simpleMappedFieldType().docValueFormat("date", null).format(1595432181354L), equalTo("2020-07-22"));
assertThat(
simpleMappedFieldType().docValueFormat("strict_date_optional_time", null).format(1595432181354L),
equalTo("2020-07-22T15:36:21.354Z")
);
assertThat(
simpleMappedFieldType().docValueFormat("strict_date_optional_time", ZoneId.of("America/New_York")).format(1595432181354L),
equalTo("2020-07-22T11:36:21.354-04:00")
);
assertThat(
simpleMappedFieldType().docValueFormat(null, ZoneId.of("America/New_York")).format(1595432181354L),
equalTo("2020-07-22T11:36:21.354-04:00")
);
assertThat(coolFormattedFieldType().docValueFormat(null, null).format(1595432181354L), equalTo("2020-07-22(-■_■)15:36:21.354Z"));
}
public void testFormatDuel() throws IOException {
DateFormatter formatter = DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(randomLocale(random()));
DateScriptFieldType scripted = build(
new Script(ScriptType.INLINE, "test", "read_timestamp", org.elasticsearch.common.collect.Map.of()),
formatter
);
DateFieldMapper.DateFieldType indexed = new DateFieldMapper.DateFieldType("test", formatter);
for (int i = 0; i < 100; i++) {
long date = randomDate();
assertThat(indexed.docValueFormat(null, null).format(date), equalTo(scripted.docValueFormat(null, null).format(date)));
String format = randomDateFormatterPattern();
assertThat(indexed.docValueFormat(format, null).format(date), equalTo(scripted.docValueFormat(format, null).format(date)));
ZoneId zone = randomZone();
assertThat(indexed.docValueFormat(null, zone).format(date), equalTo(scripted.docValueFormat(null, zone).format(date)));
assertThat(indexed.docValueFormat(format, zone).format(date), equalTo(scripted.docValueFormat(format, zone).format(date)));
}
}
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(
new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1595432181351]}"))
)
);
List<Long> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
DateScriptFieldType ft = build("add_days", org.elasticsearch.common.collect.Map.of("days", 1));
DateScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
SortedNumericDocValues dv = ifd.load(context).getLongValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) throws IOException {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(dv.nextValue());
}
}
}
};
}
});
assertThat(results, equalTo(org.elasticsearch.common.collect.List.of(1595518581354L, 1595518581351L, 1595518581356L)));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
DateScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181351]}"));
assertThat(readSource(reader, docs.scoreDocs[1].doc), equalTo("{\"timestamp\": [1595432181354]}"));
assertThat(readSource(reader, docs.scoreDocs[2].doc), equalTo("{\"timestamp\": [1595432181356]}"));
assertThat((Long) (((FieldDoc) docs.scoreDocs[0]).fields[0]), equalTo(1595432181351L));
assertThat((Long) (((FieldDoc) docs.scoreDocs[1]).fields[0]), equalTo(1595432181354L));
assertThat((Long) (((FieldDoc) docs.scoreDocs[2]).fields[0]), equalTo(1595432181356L));
}
}
}
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext qsc = mockContext(true, simpleMappedFieldType());
assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public ScoreScript newInstance(LeafReaderContext ctx) throws IOException {
return new ScoreScript(org.elasticsearch.common.collect.Map.of(), qsc.lookup(), ctx) {
@Override
public double execute(ExplanationHolder explanation) {
ScriptDocValues.Dates dates = (ScriptDocValues.Dates) getDoc().get("test");
return dates.get(0).toInstant().toEpochMilli() % 1000;
}
};
}
}, 354.5f, "test", 0, Version.CURRENT)), equalTo(1));
}
}
}
public void testDistanceFeatureQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocuments(
org.elasticsearch.common.collect.List.of(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}"))),
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}"))),
org.elasticsearch.common.collect.List.of(
new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1]}"))
),
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}")))
)
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
Query query = simpleMappedFieldType().distanceFeatureQuery(1595432181354L, "1ms", 1, mockContext());
TopDocs docs = searcher.search(query, 4);
assertThat(docs.scoreDocs, arrayWithSize(3));
assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181354]}"));
assertThat(docs.scoreDocs[0].score, equalTo(1.0F));
assertThat(readSource(reader, docs.scoreDocs[1].doc), equalTo("{\"timestamp\": [1595432181356, 1]}"));
assertThat((double) docs.scoreDocs[1].score, closeTo(.333, .001));
assertThat(readSource(reader, docs.scoreDocs[2].doc), equalTo("{\"timestamp\": [1595432181351]}"));
assertThat((double) docs.scoreDocs[2].score, closeTo(.250, .001));
Explanation explanation = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0F)
.explain(reader.leaves().get(0), docs.scoreDocs[0].doc);
assertThat(explanation.toString(), containsString("1.0 = Distance score, computed as weight * pivot / (pivot"));
assertThat(explanation.toString(), containsString("1.0 = weight"));
assertThat(explanation.toString(), containsString("1 = pivot"));
assertThat(explanation.toString(), containsString("1595432181354 = origin"));
assertThat(explanation.toString(), containsString("1595432181354 = current value"));
}
}
}
public void testDistanceFeatureQueryIsExpensive() throws IOException {
checkExpensiveQuery(this::randomDistanceFeatureQuery);
}
public void testDistanceFeatureQueryInLoop() throws IOException {
checkLoop(this::randomDistanceFeatureQuery);
}
private Query randomDistanceFeatureQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.distanceFeatureQuery(randomDate(), randomTimeValue(), randomFloat(), ctx);
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))
);
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1));
}
}
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(
searcher.count(
ft.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", true, true, null, null, null, mockContext())
),
equalTo(1)
);
assertThat(
searcher.count(
ft.rangeQuery("2020-07-22T00:00:00.00Z", "2020-07-22T15:36:21.354Z", true, true, null, null, null, mockContext())
),
equalTo(2)
);
assertThat(
searcher.count(ft.rangeQuery(1595432181351L, 1595432181356L, true, true, null, null, null, mockContext())),
equalTo(3)
);
assertThat(
searcher.count(
ft.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", true, false, null, null, null, mockContext())
),
equalTo(1)
);
assertThat(
searcher.count(
ft.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", false, false, null, null, null, mockContext())
),
equalTo(0)
);
checkBadDate(
() -> searcher.count(
ft.rangeQuery(
"2020-07-22(-■_■)00:00:00.000Z",
"2020-07-23(-■_■)00:00:00.000Z",
false,
false,
null,
null,
null,
mockContext()
)
)
);
assertThat(
searcher.count(
coolFormattedFieldType().rangeQuery(
"2020-07-22(-■_■)00:00:00.000Z",
"2020-07-23(-■_■)00:00:00.000Z",
false,
false,
null,
null,
null,
mockContext()
)
),
equalTo(3)
);
}
}
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx) {
long d1 = randomDate();
long d2 = randomValueOtherThan(d1, DateScriptFieldTypeTests::randomDate);
if (d1 > d2) {
long backup = d2;
d2 = d1;
d1 = backup;
}
return ft.rangeQuery(d1, d2, randomBoolean(), randomBoolean(), null, null, null, ctx);
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery("2020-07-22T15:36:21.354Z", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery("1595432181355", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(1595432181354L, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(2595432181354L, mockContext())), equalTo(0));
assertThat(
searcher.count(
build("add_days", org.elasticsearch.common.collect.Map.of("days", 1)).termQuery(
"2020-07-23T15:36:21.354Z",
mockContext()
)
),
equalTo(1)
);
checkBadDate(() -> searcher.count(simpleMappedFieldType().termQuery("2020-07-22(-■_■)15:36:21.354Z", mockContext())));
assertThat(searcher.count(coolFormattedFieldType().termQuery("2020-07-22(-■_■)15:36:21.354Z", mockContext())), equalTo(1));
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termQuery(randomDate(), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))
);
try (DirectoryReader reader = iw.getReader()) {
MappedFieldType ft = simpleMappedFieldType();
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of("2020-07-22T15:36:21.354Z"), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of("1595432181354"), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of(1595432181354L), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of(2595432181354L), mockContext())),
equalTo(0)
);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of(1595432181354L, 2595432181354L), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of(2595432181354L, 1595432181354L), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(ft.termsQuery(org.elasticsearch.common.collect.List.of(1595432181355L, 1595432181354L), mockContext())),
equalTo(2)
);
checkBadDate(
() -> searcher.count(
simpleMappedFieldType().termsQuery(
org.elasticsearch.common.collect.List.of("2020-07-22T15:36:21.354Z", "2020-07-22(-■_■)15:36:21.354Z"),
mockContext()
)
)
);
assertThat(
searcher.count(
coolFormattedFieldType().termsQuery(
org.elasticsearch.common.collect.List.of("2020-07-22(-■_■)15:36:21.354Z", "2020-07-22(-■_■)15:36:21.355Z"),
mockContext()
)
),
equalTo(2)
);
}
}
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termsQuery(randomList(1, 100, DateScriptFieldTypeTests::randomDate), ctx);
}
@Override
protected DateScriptFieldType simpleMappedFieldType() throws IOException {
return build("read_timestamp");
}
@Override
protected MappedFieldType loopFieldType() throws IOException {
return build("loop");
}
private DateScriptFieldType coolFormattedFieldType() throws IOException {
return build(simpleMappedFieldType().script, DateFormatter.forPattern("yyyy-MM-dd(-■_■)HH:mm:ss.SSSz||epoch_millis"));
}
@Override
protected String runtimeType() {
return "date";
}
private static DateScriptFieldType build(String code) throws IOException {
return build(code, org.elasticsearch.common.collect.Map.of());
}
private static DateScriptFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, "test", code, params), DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
}
private static DateScriptFieldType build(Script script, DateFormatter dateTimeFormatter) throws IOException {
ScriptPlugin scriptPlugin = new ScriptPlugin() {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return org.elasticsearch.common.collect.Set.of(DateFieldScript.CONTEXT);
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType factory = (FactoryType) factory(code);
return factory;
}
private DateFieldScript.Factory factory(String code) {
switch (code) {
case "read_timestamp":
return (fieldName, params, lookup, formatter) -> ctx -> new DateFieldScript(
fieldName,
params,
lookup,
formatter,
ctx
) {
@Override
public void execute() {
for (Object timestamp : (List<?>) getSource().get("timestamp")) {
DateFieldScript.Parse parse = new DateFieldScript.Parse(this);
emit(parse.parse(timestamp));
}
}
};
case "add_days":
return (fieldName, params, lookup, formatter) -> ctx -> new DateFieldScript(
fieldName,
params,
lookup,
formatter,
ctx
) {
@Override
public void execute() {
for (Object timestamp : (List<?>) getSource().get("timestamp")) {
long epoch = (Long) timestamp;
ZonedDateTime dt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(epoch), ZoneId.of("UTC"));
dt = dt.plus(((Number) params.get("days")).longValue(), ChronoUnit.DAYS);
emit(toEpochMilli(dt));
}
}
};
case "loop":
return (fieldName, params, lookup, formatter) -> {
// Indicate that this script wants the field called "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
default:
throw new IllegalArgumentException("unsupported script [" + code + "]");
}
}
};
}
};
ScriptModule scriptModule = new ScriptModule(
Settings.EMPTY,
org.elasticsearch.common.collect.List.of(scriptPlugin, new RuntimeFields())
);
try (ScriptService scriptService = new ScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts)) {
DateFieldScript.Factory factory = scriptService.compile(script, DateFieldScript.CONTEXT);
return new DateScriptFieldType("test", script, factory, dateTimeFormatter, emptyMap());
}
}
private static long randomDate() {
return Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
}
private void checkBadDate(ThrowingRunnable queryBuilder) {
Exception e = expectThrows(ElasticsearchParseException.class, queryBuilder);
assertThat(e.getMessage(), containsString("failed to parse date field"));
}
}

View File

@ -1,71 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class DoubleFieldScriptTests extends FieldScriptTestCase<DoubleFieldScript.Factory> {
public static final DoubleFieldScript.Factory DUMMY = (fieldName, params, lookup) -> ctx -> new DoubleFieldScript(
fieldName,
params,
lookup,
ctx
) {
@Override
public void execute() {
emit(1.0);
}
};
@Override
protected ScriptContext<DoubleFieldScript.Factory> context() {
return DoubleFieldScript.CONTEXT;
}
@Override
protected DoubleFieldScript.Factory dummyScript() {
return DUMMY;
}
public void testTooManyValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
DoubleFieldScript script = new DoubleFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
reader.leaves().get(0)
) {
@Override
public void execute() {
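// Emit one more value than AbstractFieldScript.MAX_VALUES allows to trigger the limit error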
for (int i = 0; i <= AbstractFieldScript.MAX_VALUES; i++) {
emit(1.0);
}
}
};
Exception e = expectThrows(IllegalArgumentException.class, script::execute);
assertThat(
e.getMessage(),
equalTo("Runtime field [test] is emitting [101] values while the maximum number of values allowed is [100]")
);
}
}
}
}

View File

@ -1,335 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.fielddata.DoubleScriptFieldData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;
public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase {
public void testFormat() throws IOException {
assertThat(simpleMappedFieldType().docValueFormat("#.0", null).format(1), equalTo("1.0"));
assertThat(simpleMappedFieldType().docValueFormat("#.0", null).format(1.2), equalTo("1.2"));
assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(11), equalTo("11"));
assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(1123), equalTo("1,123"));
assertThat(simpleMappedFieldType().docValueFormat("#,##0.00", null).format(1123), equalTo("1,123.00"));
assertThat(simpleMappedFieldType().docValueFormat("#,##0.00", null).format(1123.1), equalTo("1,123.10"));
}
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.0]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [3.14, 1.4]}"))));
List<Double> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
DoubleScriptFieldType ft = build("add_param", org.elasticsearch.common.collect.Map.of("param", 1));
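// add_param adds 1 to each source value; doc values come back sorted per document, hence [2.0, 2.4, 4.14...]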
DoubleScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) {
SortedNumericDoubleValues dv = ifd.load(context).getDoubleValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(dv.nextValue());
}
}
}
};
}
});
assertThat(results, equalTo(org.elasticsearch.common.collect.List.of(2.0, 2.4, 4.140000000000001)));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1.1]}"));
assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2.1]}"));
assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4.2]}"));
}
}
}
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext qsc = mockContext(true, simpleMappedFieldType());
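// The score script uses the runtime field's first value as the score; with min_score 2.5 only the foo=[4.2] document matches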
assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public ScoreScript newInstance(LeafReaderContext ctx) {
return new ScoreScript(org.elasticsearch.common.collect.Map.of(), qsc.lookup(), ctx) {
@Override
public double execute(ExplanationHolder explanation) {
ScriptDocValues.Doubles doubles = (ScriptDocValues.Doubles) getDoc().get("test");
return doubles.get(0);
}
};
}
}, 2.5f, "test", 0, Version.CURRENT)), equalTo(1));
}
}
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1));
}
}
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.5]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery("2", "3", true, true, null, null, null, mockContext())), equalTo(2));
assertThat(searcher.count(ft.rangeQuery(2, 3, true, true, null, null, null, mockContext())), equalTo(2));
assertThat(searcher.count(ft.rangeQuery(1.1, 3, true, true, null, null, null, mockContext())), equalTo(2));
assertThat(searcher.count(ft.rangeQuery(1.1, 3, false, true, null, null, null, mockContext())), equalTo(2));
assertThat(searcher.count(ft.rangeQuery(2, 3, false, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(2.5, 3, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(2.5, 3, false, true, null, null, null, mockContext())), equalTo(0));
}
}
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.rangeQuery(randomLong(), randomLong(), randomBoolean(), randomBoolean(), null, null, null, ctx);
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(1, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(1.1, mockContext())), equalTo(0));
assertThat(
searcher.count(build("add_param", org.elasticsearch.common.collect.Map.of("param", 1)).termQuery(2, mockContext())),
equalTo(1)
);
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termQuery(randomLong(), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of("1"), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(1), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(1.1), mockContext())),
equalTo(0)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(1.1, 2.1), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(2.1, 1), mockContext())),
equalTo(2)
);
}
}
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termsQuery(org.elasticsearch.common.collect.List.of(randomLong()), ctx);
}
@Override
protected DoubleScriptFieldType simpleMappedFieldType() throws IOException {
return build("read_foo", org.elasticsearch.common.collect.Map.of());
}
@Override
protected MappedFieldType loopFieldType() throws IOException {
return build("loop", org.elasticsearch.common.collect.Map.of());
}
@Override
protected String runtimeType() {
return "double";
}
private static DoubleScriptFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, "test", code, params));
}
private static DoubleScriptFieldType build(Script script) throws IOException {
ScriptPlugin scriptPlugin = new ScriptPlugin() {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return org.elasticsearch.common.collect.Set.of(DoubleFieldScript.CONTEXT);
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType factory = (FactoryType) factory(code);
return factory;
}
private DoubleFieldScript.Factory factory(String code) {
switch (code) {
case "read_foo":
return (fieldName, params, lookup) -> (ctx) -> new DoubleFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(((Number) foo).doubleValue());
}
}
};
case "add_param":
return (fieldName, params, lookup) -> (ctx) -> new DoubleFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(((Number) foo).doubleValue() + ((Number) getParams().get("param")).doubleValue());
}
}
};
case "loop":
return (fieldName, params, lookup) -> {
// Indicate that this script wants the field called "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
default:
throw new IllegalArgumentException("unsupported script [" + code + "]");
}
}
};
}
};
ScriptModule scriptModule = new ScriptModule(
Settings.EMPTY,
org.elasticsearch.common.collect.List.of(scriptPlugin, new RuntimeFields())
);
try (ScriptService scriptService = new ScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts)) {
DoubleFieldScript.Factory factory = scriptService.compile(script, DoubleFieldScript.CONTEXT);
return new DoubleScriptFieldType("test", script, factory, emptyMap());
}
}
}

View File

@ -1,30 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
public abstract class FieldScriptTestCase<T> extends ESTestCase {
protected abstract ScriptContext<T> context();
protected abstract T dummyScript();
public final void testRateLimitingDisabled() throws IOException {
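// Compiles many distinct script sources, which would trip the default compilation rate limit; this verifies the limit is disabled for these contexts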
try (ScriptService scriptService = TestScriptEngine.scriptService(context(), dummyScript())) {
for (int i = 0; i < 1000; i++) {
scriptService.compile(new Script(ScriptType.INLINE, "test", "test_" + i, Collections.emptyMap()), context());
}
}
}
}

View File

@ -1,71 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factory> {
public static final IpFieldScript.Factory DUMMY = (fieldName, params, lookup) -> ctx -> new IpFieldScript(
fieldName,
params,
lookup,
ctx
) {
@Override
public void execute() {
emit("192.168.0.1");
}
};
@Override
protected ScriptContext<IpFieldScript.Factory> context() {
return IpFieldScript.CONTEXT;
}
@Override
protected IpFieldScript.Factory dummyScript() {
return DUMMY;
}
public void testTooManyValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
IpFieldScript script = new IpFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
reader.leaves().get(0)
) {
@Override
public void execute() {
for (int i = 0; i <= AbstractFieldScript.MAX_VALUES; i++) {
emit("192.168.0.1");
}
}
};
Exception e = expectThrows(IllegalArgumentException.class, script::execute);
assertThat(
e.getMessage(),
equalTo("Runtime field [test] is emitting [101] values while the maximum number of values allowed is [100]")
);
}
}
}
}

View File

@ -1,378 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.fielddata.BinaryScriptFieldData;
import org.elasticsearch.xpack.runtimefields.fielddata.IpScriptFieldData;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;
public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase {
public void testFormat() throws IOException {
assertThat(simpleMappedFieldType().docValueFormat(null, null), sameInstance(DocValueFormat.IP));
Exception e = expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().docValueFormat("ASDFA", null));
assertThat(e.getMessage(), equalTo("Field [test] of type [runtime] with runtime type [ip] does not support custom formats"));
e = expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().docValueFormat(null, ZoneId.of("America/New_York")));
assertThat(e.getMessage(), equalTo("Field [test] of type [runtime] with runtime type [ip] does not support custom time zones"));
}
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(
new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.2\", \"192.168.1\"]}"))
)
);
List<Object> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
IpScriptFieldType ft = build("append_param", org.elasticsearch.common.collect.Map.of("param", ".1"));
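// append_param completes the partial addresses by appending ".1"; doc values come back sorted, hence the expected order below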
BinaryScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null);
DocValueFormat format = ft.docValueFormat(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) {
SortedBinaryDocValues dv = ifd.load(context).getBytesValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(format.format(dv.nextValue()));
}
}
}
};
}
});
assertThat(results, equalTo(org.elasticsearch.common.collect.List.of("192.168.0.1", "192.168.1.1", "192.168.2.1")));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(
reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(),
equalTo("{\"foo\": [\"192.168.0.1\"]}")
);
assertThat(
reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(),
equalTo("{\"foo\": [\"192.168.0.2\"]}")
);
assertThat(
reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(),
equalTo("{\"foo\": [\"192.168.0.4\"]}")
);
}
}
}
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext qsc = mockContext(true, simpleMappedFieldType());
assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public ScoreScript newInstance(LeafReaderContext ctx) {
return new ScoreScript(org.elasticsearch.common.collect.Map.of(), qsc.lookup(), ctx) {
@Override
public double execute(ExplanationHolder explanation) {
IpScriptFieldData.IpScriptDocValues bytes = (IpScriptFieldData.IpScriptDocValues) getDoc().get("test");
return Integer.parseInt(bytes.getValue().substring(bytes.getValue().lastIndexOf(".") + 1));
}
};
}
}, 2.5f, "test", 0, Version.CURRENT)), equalTo(1));
}
}
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))
);
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1));
}
}
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))
);
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(
simpleMappedFieldType().rangeQuery("192.0.0.0", "200.0.0.0", false, false, null, null, null, mockContext())
),
equalTo(1)
);
}
}
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.rangeQuery("192.0.0.0", "200.0.0.0", randomBoolean(), randomBoolean(), null, null, null, ctx);
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1\"]}")))
);
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
IpScriptFieldType fieldType = build("append_param", org.elasticsearch.common.collect.Map.of("param", ".1"));
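// ip term queries also accept CIDR notation, so "192.168.0.0/16" matches both 192.168.* documents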
assertThat(searcher.count(fieldType.termQuery("192.168.0.1", mockContext())), equalTo(1));
assertThat(searcher.count(fieldType.termQuery("192.168.0.7", mockContext())), equalTo(0));
assertThat(searcher.count(fieldType.termQuery("192.168.0.0/16", mockContext())), equalTo(2));
assertThat(searcher.count(fieldType.termQuery("10.168.0.0/16", mockContext())), equalTo(0));
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termQuery(randomIp(randomBoolean()), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1.1\"]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))
);
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(
org.elasticsearch.common.collect.List.of("192.168.0.1", "1.1.1.1"),
mockContext()
)
),
equalTo(2)
);
assertThat(
searcher.count(
simpleMappedFieldType().termsQuery(
org.elasticsearch.common.collect.List.of("192.168.0.0/16", "1.1.1.1"),
mockContext()
)
),
equalTo(3)
);
}
}
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termsQuery(randomList(100, () -> randomIp(randomBoolean())), ctx);
}
@Override
protected IpScriptFieldType simpleMappedFieldType() throws IOException {
return build("read_foo", org.elasticsearch.common.collect.Map.of());
}
@Override
protected MappedFieldType loopFieldType() throws IOException {
return build("loop", org.elasticsearch.common.collect.Map.of());
}
@Override
protected String runtimeType() {
return "ip";
}
private static IpScriptFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, "test", code, params));
}
private static IpScriptFieldType build(Script script) throws IOException {
ScriptPlugin scriptPlugin = new ScriptPlugin() {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return org.elasticsearch.common.collect.Set.of(IpFieldScript.CONTEXT);
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType factory = (FactoryType) factory(code);
return factory;
}
private IpFieldScript.Factory factory(String code) {
switch (code) {
case "read_foo":
return (fieldName, params, lookup) -> (ctx) -> new IpFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(foo.toString());
}
}
};
case "append_param":
return (fieldName, params, lookup) -> (ctx) -> new IpFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(foo.toString() + getParams().get("param"));
}
}
};
case "loop":
return (fieldName, params, lookup) -> {
// Indicate that this script wants the field called "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
default:
throw new IllegalArgumentException("unsupported script [" + code + "]");
}
}
};
}
};
ScriptModule scriptModule = new ScriptModule(
Settings.EMPTY,
org.elasticsearch.common.collect.List.of(scriptPlugin, new RuntimeFields())
);
try (ScriptService scriptService = new ScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts)) {
IpFieldScript.Factory factory = scriptService.compile(script, IpFieldScript.CONTEXT);
return new IpScriptFieldType("test", script, factory, emptyMap());
}
}
}

View File

@ -1,443 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.fielddata.BinaryScriptFieldData;
import org.elasticsearch.xpack.runtimefields.fielddata.StringScriptFieldData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;
public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase {
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}"))));
List<String> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
KeywordScriptFieldType ft = build("append_param", org.elasticsearch.common.collect.Map.of("param", "-suffix"));
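// append_param appends "-suffix" to every source value; doc values are sorted per document, hence ["1-suffix", "1-suffix", "2-suffix"]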
StringScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) {
SortedBinaryDocValues dv = ifd.load(context).getBytesValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(dv.nextValue().utf8ToString());
}
}
}
};
}
});
assertThat(results, equalTo(org.elasticsearch.common.collect.List.of("1-suffix", "1-suffix", "2-suffix")));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"d\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}"));
assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"b\"]}"));
assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"d\"]}"));
}
}
}
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aaa\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aa\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext qsc = mockContext(true, simpleMappedFieldType());
assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public ScoreScript newInstance(LeafReaderContext ctx) {
return new ScoreScript(org.elasticsearch.common.collect.Map.of(), qsc.lookup(), ctx) {
@Override
public double execute(ExplanationHolder explanation) {
ScriptDocValues.Strings bytes = (ScriptDocValues.Strings) getDoc().get("test");
return bytes.get(0).length();
}
};
}
}, 2.5f, "test", 0, Version.CURRENT)), equalTo(1));
}
}
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1));
}
}
}
public void testFuzzyQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
// No edits, matches
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}"))));
// Single insertion, matches
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caat\"]}"))));
// Single transposition, matches
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cta\"]}"))));
// Two insertions, no match
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caaat\"]}"))));
// Totally wrong, no match
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(simpleMappedFieldType().fuzzyQuery("cat", Fuzziness.AUTO, 0, 1, true, mockContext())),
equalTo(3)
);
}
}
}
public void testFuzzyQueryIsExpensive() throws IOException {
checkExpensiveQuery(this::randomFuzzyQuery);
}
public void testFuzzyQueryInLoop() throws IOException {
checkLoop(this::randomFuzzyQuery);
}
private Query randomFuzzyQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.fuzzyQuery(
randomAlphaOfLengthBetween(1, 1000),
randomFrom(Fuzziness.AUTO, Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO),
randomInt(),
randomInt(),
randomBoolean(),
ctx
);
}
public void testPrefixQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().prefixQuery("cat", null, mockContext())), equalTo(2));
}
}
}
public void testPrefixQueryIsExpensive() throws IOException {
checkExpensiveQuery(this::randomPrefixQuery);
}
public void testPrefixQueryInLoop() throws IOException {
checkLoop(this::randomPrefixQuery);
}
private Query randomPrefixQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.prefixQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx);
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(simpleMappedFieldType().rangeQuery("cat", "d", false, false, null, null, null, mockContext())),
equalTo(1)
);
}
}
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.rangeQuery(
randomAlphaOfLengthBetween(0, 1000),
randomAlphaOfLengthBetween(0, 1000),
randomBoolean(),
randomBoolean(),
null,
null,
null,
ctx
);
}
public void testRegexpQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(
simpleMappedFieldType().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_MAX_DETERMINIZED_STATES, null, mockContext())
),
equalTo(2)
);
}
}
}
public void testRegexpQueryInLoop() throws IOException {
checkLoop(this::randomRegexpQuery);
}
private Query randomRegexpQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.regexpQuery(randomAlphaOfLengthBetween(1, 1000), randomInt(0xFF), 0, Integer.MAX_VALUE, null, ctx);
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
KeywordScriptFieldType fieldType = build("append_param", org.elasticsearch.common.collect.Map.of("param", "-suffix"));
assertThat(searcher.count(fieldType.termQuery("1-suffix", mockContext())), equalTo(1));
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termQuery(randomAlphaOfLengthBetween(1, 1000), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [3]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of("1", "2"), mockContext())),
equalTo(2)
);
}
}
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termsQuery(randomList(100, () -> randomAlphaOfLengthBetween(1, 1000)), ctx);
}
public void testWildcardQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().wildcardQuery("a*b", null, mockContext())), equalTo(1));
}
}
}
public void testWildcardQueryIsExpensive() throws IOException {
checkExpensiveQuery(this::randomWildcardQuery);
}
public void testWildcardQueryInLoop() throws IOException {
checkLoop(this::randomWildcardQuery);
}
private Query randomWildcardQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.wildcardQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx);
}
public void testMatchQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
KeywordScriptFieldType fieldType = build("append_param", org.elasticsearch.common.collect.Map.of("param", "-Suffix"));
QueryShardContext queryShardContext = mockContext(true, fieldType);
Query query = new MatchQueryBuilder("test", "1-Suffix").toQuery(queryShardContext);
assertThat(searcher.count(query), equalTo(1));
}
}
}
@Override
protected KeywordScriptFieldType simpleMappedFieldType() throws IOException {
return build("read_foo", org.elasticsearch.common.collect.Map.of());
}
@Override
protected KeywordScriptFieldType loopFieldType() throws IOException {
return build("loop", org.elasticsearch.common.collect.Map.of());
}
@Override
protected String runtimeType() {
return "keyword";
}
private static KeywordScriptFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, "test", code, params));
}
private static KeywordScriptFieldType build(Script script) throws IOException {
ScriptPlugin scriptPlugin = new ScriptPlugin() {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return org.elasticsearch.common.collect.Set.of(StringFieldScript.CONTEXT);
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType factory = (FactoryType) factory(code);
return factory;
}
private StringFieldScript.Factory factory(String code) {
switch (code) {
case "read_foo":
return (fieldName, params, lookup) -> ctx -> new StringFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(foo.toString());
}
}
};
case "append_param":
return (fieldName, params, lookup) -> ctx -> new StringFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(foo.toString() + getParams().get("param").toString());
}
}
};
case "loop":
return (fieldName, params, lookup) -> {
// Indicate that this script wants the field called "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
default:
throw new IllegalArgumentException("unsupported script [" + code + "]");
}
}
};
}
};
ScriptModule scriptModule = new ScriptModule(
Settings.EMPTY,
org.elasticsearch.common.collect.List.of(scriptPlugin, new RuntimeFields())
);
try (ScriptService scriptService = new ScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts)) {
StringFieldScript.Factory factory = scriptService.compile(script, StringFieldScript.CONTEXT);
return new KeywordScriptFieldType("test", script, factory, emptyMap());
}
}
}

View File

@ -1,71 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class LongFieldScriptTests extends FieldScriptTestCase<LongFieldScript.Factory> {
public static final LongFieldScript.Factory DUMMY = (fieldName, params, lookup) -> ctx -> new LongFieldScript(
fieldName,
params,
lookup,
ctx
) {
@Override
public void execute() {
emit(1);
}
};
@Override
protected ScriptContext<LongFieldScript.Factory> context() {
return LongFieldScript.CONTEXT;
}
@Override
protected LongFieldScript.Factory dummyScript() {
return DUMMY;
}
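// A script that emits more than AbstractFieldScript.MAX_VALUES values for a single document must be rejected with a clear error.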
public void testTooManyValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
LongFieldScript script = new LongFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
reader.leaves().get(0)
) {
@Override
public void execute() {
for (int i = 0; i <= AbstractFieldScript.MAX_VALUES; i++) {
emit(0);
}
}
};
Exception e = expectThrows(IllegalArgumentException.class, script::execute);
assertThat(
e.getMessage(),
equalTo("Runtime field [test] is emitting [101] values while the maximum number of values allowed is [100]")
);
}
}
}
}

View File

@ -1,373 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.fielddata.LongScriptFieldData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase {
public void testFormat() throws IOException {
assertThat(simpleMappedFieldType().docValueFormat("#.0", null).format(1), equalTo("1.0"));
assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(11), equalTo("11"));
assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(1123), equalTo("1,123"));
}
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}"))));
List<Long> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
LongScriptFieldType ft = build("add_param", org.elasticsearch.common.collect.Map.of("param", 1));
LongScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) {
SortedNumericDocValues dv = ifd.load(context).getLongValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(dv.nextValue());
}
}
}
};
}
});
assertThat(results, equalTo(org.elasticsearch.common.collect.List.of(2L, 2L, 3L)));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}"));
assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2]}"));
assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4]}"));
}
}
}
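// The "millis_ago" script captures System.currentTimeMillis() once when the script is compiled,
// so sorting ascending on it returns the newest timestamps first.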
public void testNow() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))
);
iw.addDocument(
org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))
);
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
LongScriptFieldData ifd = build("millis_ago", Collections.emptyMap()).fielddataBuilder("test", mockContext()::lookup)
.build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181356]}"));
assertThat(readSource(reader, docs.scoreDocs[1].doc), equalTo("{\"timestamp\": [1595432181354]}"));
assertThat(readSource(reader, docs.scoreDocs[2].doc), equalTo("{\"timestamp\": [1595432181351]}"));
long t1 = (Long) (((FieldDoc) docs.scoreDocs[0]).fields[0]);
assertThat(t1, greaterThan(3638011399L));
long t2 = (Long) (((FieldDoc) docs.scoreDocs[1]).fields[0]);
long t3 = (Long) (((FieldDoc) docs.scoreDocs[2]).fields[0]);
assertThat(t2, equalTo(t1 + 2));
assertThat(t3, equalTo(t1 + 5));
}
}
}
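// The runtime field is exposed through doc values to a score script; with a min_score of 2.5 only the document whose value is 4 matches.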
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext qsc = mockContext(true, simpleMappedFieldType());
assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public ScoreScript newInstance(LeafReaderContext ctx) {
return new ScoreScript(Collections.emptyMap(), qsc.lookup(), ctx) {
@Override
public double execute(ExplanationHolder explanation) {
ScriptDocValues.Longs longs = (ScriptDocValues.Longs) getDoc().get("test");
return longs.get(0);
}
};
}
}, 2.5f, "test", 0, Version.CURRENT)), equalTo(1));
}
}
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1));
}
}
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery("2", "3", true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(2, 3, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(1.1, 3, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(1.1, 3, false, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(2, 3, false, true, null, null, null, mockContext())), equalTo(0));
}
}
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.rangeQuery(randomLong(), randomLong(), randomBoolean(), randomBoolean(), null, null, null, ctx);
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(1, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(1.1, mockContext())), equalTo(0));
assertThat(
searcher.count(build("add_param", org.elasticsearch.common.collect.Map.of("param", 1)).termQuery(2, mockContext())),
equalTo(1)
);
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termQuery(randomLong(), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of("1"), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(1), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(1.1), mockContext())),
equalTo(0)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(1.1, 2), mockContext())),
equalTo(1)
);
assertThat(
searcher.count(simpleMappedFieldType().termsQuery(org.elasticsearch.common.collect.List.of(2, 1), mockContext())),
equalTo(2)
);
}
}
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, QueryShardContext ctx) {
return ft.termsQuery(org.elasticsearch.common.collect.List.of(randomLong()), ctx);
}
@Override
protected LongScriptFieldType simpleMappedFieldType() throws IOException {
return build("read_foo", Collections.emptyMap());
}
@Override
protected LongScriptFieldType loopFieldType() throws IOException {
return build("loop", org.elasticsearch.common.collect.Map.of());
}
@Override
protected String runtimeType() {
return "long";
}
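// Builds a LongScriptFieldType named "test" by compiling one of the inline scripts below
// ("read_foo", "add_param", "millis_ago" or "loop") through the fake "test" script engine.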
private static LongScriptFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, "test", code, params));
}
private static LongScriptFieldType build(Script script) throws IOException {
ScriptPlugin scriptPlugin = new ScriptPlugin() {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return org.elasticsearch.common.collect.Set.of(LongFieldScript.CONTEXT);
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType factory = (FactoryType) factory(code);
return factory;
}
private LongFieldScript.Factory factory(String code) {
switch (code) {
case "read_foo":
return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(((Number) foo).longValue());
}
}
};
case "add_param":
return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object foo : (List<?>) getSource().get("foo")) {
emit(((Number) foo).longValue() + ((Number) getParams().get("param")).longValue());
}
}
};
case "millis_ago":
// Painless actually calls System.currentTimeMillis. We could mock the time but this works fine too.
long now = System.currentTimeMillis();
return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) {
@Override
public void execute() {
for (Object timestamp : (List<?>) getSource().get("timestamp")) {
emit(now - ((Number) timestamp).longValue());
}
}
};
case "loop":
return (fieldName, params, lookup) -> {
// Indicate that this script wants the field called "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
default:
throw new IllegalArgumentException("unsupported script [" + code + "]");
}
}
};
}
};
ScriptModule scriptModule = new ScriptModule(
Settings.EMPTY,
org.elasticsearch.common.collect.List.of(scriptPlugin, new RuntimeFields())
);
try (ScriptService scriptService = new ScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts)) {
LongFieldScript.Factory factory = scriptService.compile(script, LongFieldScript.CONTEXT);
return new LongScriptFieldType("test", script, factory, emptyMap());
}
}
}

View File

@ -1,409 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexSortConfig;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperTestCase;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.xpack.runtimefields.RuntimeFields;
import org.elasticsearch.xpack.runtimefields.query.StringScriptFieldExistsQuery;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.mock;
public class RuntimeFieldMapperTests extends MapperTestCase {
private final String[] runtimeTypes;
public RuntimeFieldMapperTests() {
this.runtimeTypes = RuntimeFieldMapper.Builder.FIELD_TYPE_RESOLVER.keySet().toArray(new String[0]);
Arrays.sort(runtimeTypes);
}
@Override
protected void writeField(XContentBuilder builder) {
// do nothing
}
@Override
protected void writeFieldValue(XContentBuilder builder) {
throw new UnsupportedOperationException();
}
@Override
protected void assertExistsQuery(MappedFieldType fieldType, Query query, ParseContext.Document fields) {
assertThat(query, instanceOf(StringScriptFieldExistsQuery.class));
assertNoFieldNamesField(fields);
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "runtime").field("runtime_type", "keyword");
b.startObject("script").field("source", "dummy_source").field("lang", "test").endObject();
}
@Override
protected void registerParameters(ParameterChecker checker) {
// TODO need to be able to pass a completely new config rather than updating minimal mapping
}
public void testRuntimeTypeIsRequired() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "runtime")
.field("script", "keyword('test')")
.endObject()
.endObject()
.endObject()
.endObject();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> createMapperService(mapping));
assertEquals("Failed to parse mapping [_doc]: runtime_type must be specified for runtime field [my_field]", exception.getMessage());
}
public void testScriptIsRequired() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "runtime")
.field("runtime_type", randomFrom(runtimeTypes))
.endObject()
.endObject()
.endObject()
.endObject();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> createMapperService(mapping));
assertEquals("Failed to parse mapping [_doc]: script must be specified for runtime field [my_field]", exception.getMessage());
}
public void testCopyToIsNotSupported() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "runtime")
.field("runtime_type", randomFrom(runtimeTypes))
.field("script", "keyword('test')")
.field("copy_to", "field")
.endObject()
.endObject()
.endObject()
.endObject();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> createMapperService(mapping));
assertEquals("Failed to parse mapping [_doc]: runtime field [my_field] does not support [copy_to]", exception.getMessage());
}
public void testMultiFieldsIsNotSupported() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "runtime")
.field("runtime_type", randomFrom(runtimeTypes))
.field("script", "keyword('test')")
.startObject("fields")
.startObject("test")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> createMapperService(mapping));
assertEquals("Failed to parse mapping [_doc]: runtime field [my_field] does not support [fields]", exception.getMessage());
}
public void testStoredScriptsAreNotSupported() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "runtime")
.field("runtime_type", randomFrom(runtimeTypes))
.startObject("script")
.field("id", "test")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> createMapperService(mapping));
assertEquals(
"Failed to parse mapping [_doc]: stored scripts are not supported for runtime field [my_field]",
exception.getMessage()
);
}
public void testUnsupportedRuntimeType() {
MapperParsingException exc = expectThrows(MapperParsingException.class, () -> createMapperService(mapping("unsupported")));
assertEquals(
"Failed to parse mapping [_doc]: runtime_type [unsupported] not supported for runtime field [field]",
exc.getMessage()
);
}
public void testBoolean() throws IOException {
MapperService mapperService = createMapperService(mapping("boolean"));
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping("boolean")), Strings.toString(mapperService.documentMapper()));
}
public void testDouble() throws IOException {
MapperService mapperService = createMapperService(mapping("double"));
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping("double")), Strings.toString(mapperService.documentMapper()));
}
public void testIp() throws IOException {
MapperService mapperService = createMapperService(mapping("ip"));
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping("ip")), Strings.toString(mapperService.documentMapper()));
}
public void testKeyword() throws IOException {
MapperService mapperService = createMapperService(mapping("keyword"));
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping("keyword")), Strings.toString(mapperService.documentMapper()));
}
public void testLong() throws IOException {
MapperService mapperService = createMapperService(mapping("long"));
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping("long")), Strings.toString(mapperService.documentMapper()));
}
public void testDate() throws IOException {
MapperService mapperService = createMapperService(mapping("date"));
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping("date")), Strings.toString(mapperService.documentMapper()));
}
public void testDateWithFormat() throws IOException {
CheckedSupplier<XContentBuilder, IOException> mapping = () -> mapping("date", b -> b.field("format", "yyyy-MM-dd"));
MapperService mapperService = createMapperService(mapping.get());
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping.get()), Strings.toString(mapperService.documentMapper()));
}
public void testDateWithLocale() throws IOException {
CheckedSupplier<XContentBuilder, IOException> mapping = () -> mapping("date", b -> b.field("locale", "en_GB"));
MapperService mapperService = createMapperService(mapping.get());
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping.get()), Strings.toString(mapperService.documentMapper()));
}
public void testDateWithLocaleAndFormat() throws IOException {
CheckedSupplier<XContentBuilder, IOException> mapping = () -> mapping(
"date",
b -> b.field("format", "yyyy-MM-dd").field("locale", "en_GB")
);
MapperService mapperService = createMapperService(mapping.get());
FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
assertThat(mapper, instanceOf(RuntimeFieldMapper.class));
assertEquals(Strings.toString(mapping.get()), Strings.toString(mapperService.documentMapper()));
}
public void testNonDateWithFormat() {
String runtimeType = randomValueOtherThan("date", () -> randomFrom(runtimeTypes));
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(mapping(runtimeType, b -> b.field("format", "yyyy-MM-dd")))
);
assertThat(
e.getMessage(),
equalTo(
"Failed to parse mapping [_doc]: format can not be specified for [runtime] field [field] "
+ "of runtime_type ["
+ runtimeType
+ "]"
)
);
}
public void testNonDateWithLocale() {
String runtimeType = randomValueOtherThan("date", () -> randomFrom(runtimeTypes));
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(mapping(runtimeType, b -> b.field("locale", "en_GB")))
);
assertThat(
e.getMessage(),
equalTo(
"Failed to parse mapping [_doc]: locale can not be specified for [runtime] field [field] of "
+ "runtime_type ["
+ runtimeType
+ "]"
)
);
}
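// A runtime field should expose the same field_caps characteristics (family type name, searchable, aggregatable)
// as a concrete field of the same type.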
public void testFieldCaps() throws Exception {
for (String runtimeType : runtimeTypes) {
MapperService scriptIndexMapping = createMapperService(mapping(runtimeType));
MapperService concreteIndexMapping;
{
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("field")
.field("type", runtimeType)
.endObject()
.endObject()
.endObject()
.endObject();
concreteIndexMapping = createMapperService(mapping);
}
MappedFieldType scriptFieldType = scriptIndexMapping.fieldType("field");
MappedFieldType concreteIndexType = concreteIndexMapping.fieldType("field");
assertEquals(concreteIndexType.familyTypeName(), scriptFieldType.familyTypeName());
assertEquals(concreteIndexType.isSearchable(), scriptFieldType.isSearchable());
assertEquals(concreteIndexType.isAggregatable(), scriptFieldType.isAggregatable());
}
}
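// Runtime fields have no doc values, so configuring index.sort.field on one must fail when the index sort is resolved.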
public void testIndexSorting() {
Settings build = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put("index.sort.field", "runtime")
.build();
IndexSettings indexSettings = new IndexSettings(IndexMetadata.builder("index").settings(build).build(), Settings.EMPTY);
IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null);
NoneCircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
final IndexFieldDataService indexFieldDataService = new IndexFieldDataService(indexSettings, cache, circuitBreakerService, null);
IndexSortConfig config = indexSettings.getIndexSortConfig();
assertTrue(config.hasIndexSort());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> config.buildIndexSort(
field -> new KeywordScriptFieldType(field, new Script(""), mock(StringFieldScript.Factory.class), Collections.emptyMap()),
(fieldType, searchLookupSupplier) -> indexFieldDataService.getForField(fieldType, "index", searchLookupSupplier)
)
);
assertEquals("docvalues not found for index sort field:[runtime]", iae.getMessage());
assertThat(iae.getCause(), instanceOf(UnsupportedOperationException.class));
assertEquals("index sorting not supported on runtime field [runtime]", iae.getCause().getMessage());
}
private static XContentBuilder mapping(String type) throws IOException {
return mapping(type, builder -> {});
}
private static XContentBuilder mapping(String type, CheckedConsumer<XContentBuilder, IOException> extra) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject();
{
mapping.startObject("_doc");
{
mapping.startObject("properties");
{
mapping.startObject("field");
{
mapping.field("type", "runtime").field("runtime_type", type);
mapping.startObject("script");
{
mapping.field("source", "dummy_source").field("lang", "test");
}
mapping.endObject();
extra.accept(mapping);
}
mapping.endObject();
}
mapping.endObject();
}
mapping.endObject();
}
return mapping.endObject();
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new RuntimeFields(), new TestScriptPlugin());
}
private static class TestScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new TestScriptEngine() {
@Override
protected Object buildScriptFactory(ScriptContext<?> context) {
if (context == BooleanFieldScript.CONTEXT) {
return BooleanFieldScriptTests.DUMMY;
}
if (context == DateFieldScript.CONTEXT) {
return DateFieldScriptTests.DUMMY;
}
if (context == DoubleFieldScript.CONTEXT) {
return DoubleFieldScriptTests.DUMMY;
}
if (context == IpFieldScript.CONTEXT) {
return IpFieldScriptTests.DUMMY;
}
if (context == LongFieldScript.CONTEXT) {
return LongFieldScriptTests.DUMMY;
}
if (context == StringFieldScript.CONTEXT) {
return StringFieldScriptTests.DUMMY;
}
throw new IllegalArgumentException("Unsupported context: " + context);
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return new HashSet<>(new RuntimeFields().getContexts());
}
};
}
}
}

View File

@ -1,102 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScript.Factory> {
public static final StringFieldScript.Factory DUMMY = (fieldName, params, lookup) -> ctx -> new StringFieldScript(
fieldName,
params,
lookup,
ctx
) {
@Override
public void execute() {
emit("foo");
}
};
@Override
protected ScriptContext<StringFieldScript.Factory> context() {
return StringFieldScript.CONTEXT;
}
@Override
protected StringFieldScript.Factory dummyScript() {
return DUMMY;
}
public void testTooManyValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
StringFieldScript script = new StringFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
reader.leaves().get(0)
) {
@Override
public void execute() {
for (int i = 0; i <= AbstractFieldScript.MAX_VALUES; i++) {
emit("test");
}
}
};
Exception e = expectThrows(IllegalArgumentException.class, script::execute);
assertThat(
e.getMessage(),
equalTo("Runtime field [test] is emitting [101] values while the maximum number of values allowed is [100]")
);
}
}
}
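// Independently of the value-count limit, a script that emits more than StringFieldScript.MAX_CHARS total characters must also be rejected.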
public void testTooManyChars() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(org.elasticsearch.common.collect.List.of(new StoredField("_source", new BytesRef("{}"))));
try (DirectoryReader reader = iw.getReader()) {
StringFieldScript script = new StringFieldScript(
"test",
org.elasticsearch.common.collect.Map.of(),
new SearchLookup(mock(MapperService.class), (ft, lookup) -> null, null),
reader.leaves().get(0)
) {
@Override
public void execute() {
StringBuilder big = new StringBuilder();
while (big.length() < StringFieldScript.MAX_CHARS / 4) {
big.append("test");
}
String bigString = big.toString();
for (int i = 0; i <= 4; i++) {
emit(bigString);
}
}
};
Exception e = expectThrows(IllegalArgumentException.class, script::execute);
assertThat(
e.getMessage(),
equalTo("Runtime field [test] is emitting [1310720] characters while the maximum number of values allowed is [1048576]")
);
}
}
}
}

View File

@ -1,51 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.mapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptService;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
public abstract class TestScriptEngine implements ScriptEngine {
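// Builds a ScriptService whose only engine is a TestScriptEngine that always compiles to the given factory for the given context.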
public static <F> ScriptService scriptService(ScriptContext<F> context, F factory) {
return new ScriptService(Settings.EMPTY, Collections.singletonMap("test", new TestScriptEngine() {
@Override
protected Object buildScriptFactory(ScriptContext<?> context) {
return factory;
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return Collections.singleton(context);
}
}), Collections.singletonMap(context.name, context));
}
@Override
public final String getType() {
return "test";
}
@Override
public final <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
@SuppressWarnings("unchecked")
FactoryType castFactory = (FactoryType) buildScriptFactory(context);
return castFactory;
}
protected abstract Object buildScriptFactory(ScriptContext<?> context);
}

View File

@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.xpack.runtimefields.mapper.BooleanFieldScript;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public abstract class AbstractBooleanScriptFieldQueryTestCase<T extends AbstractBooleanScriptFieldQuery> extends
AbstractScriptFieldQueryTestCase<T> {
protected final BooleanFieldScript.LeafFactory leafFactory = mock(BooleanFieldScript.LeafFactory.class);
@Override
public final void testVisit() {
T query = createTestInstance();
List<Query> leavesVisited = new ArrayList<>();
query.visit(new QueryVisitor() {
@Override
public void consumeTerms(Query query, Term... terms) {
fail();
}
@Override
public void consumeTermsMatching(Query query, String field, Supplier<ByteRunAutomaton> automaton) {
fail();
}
@Override
public void visitLeaf(Query query) {
leavesVisited.add(query);
}
});
assertThat(leavesVisited, equalTo(org.elasticsearch.common.collect.List.of(query)));
}
}

View File

@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.xpack.runtimefields.mapper.DoubleFieldScript;
import static org.mockito.Mockito.mock;
public abstract class AbstractDoubleScriptFieldQueryTestCase<T extends AbstractDoubleScriptFieldQuery> extends
AbstractScriptFieldQueryTestCase<T> {
protected final DoubleFieldScript.LeafFactory leafFactory = mock(DoubleFieldScript.LeafFactory.class);
@Override
public final void testVisit() {
assertEmptyVisit();
}
}

View File

@ -1,29 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.xpack.runtimefields.mapper.IpFieldScript;
import java.net.InetAddress;
import static org.mockito.Mockito.mock;
public abstract class AbstractIpScriptFieldQueryTestCase<T extends AbstractIpScriptFieldQuery> extends AbstractScriptFieldQueryTestCase<T> {
protected final IpFieldScript.LeafFactory leafFactory = mock(IpFieldScript.LeafFactory.class);
@Override
public final void testVisit() {
assertEmptyVisit();
}
protected static BytesRef encode(InetAddress addr) {
return new BytesRef(InetAddressPoint.encode(addr));
}
}

View File

@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.xpack.runtimefields.mapper.AbstractLongFieldScript;
import java.util.function.Function;
public abstract class AbstractLongScriptFieldQueryTestCase<T extends AbstractLongScriptFieldQuery> extends AbstractScriptFieldQueryTestCase<
T> {
protected final Function<LeafReaderContext, AbstractLongFieldScript> leafFactory = ctx -> null;
@Override
public final void testVisit() {
assertEmptyVisit();
}
}

View File

@ -1,72 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
public abstract class AbstractScriptFieldQueryTestCase<T extends AbstractScriptFieldQuery> extends ESTestCase {
protected abstract T createTestInstance();
protected abstract T copy(T orig);
protected abstract T mutate(T orig);
protected final Script randomScript() {
return new Script(randomAlphaOfLength(10));
}
public final void testEqualsAndHashCode() {
EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copy, this::mutate);
}
public abstract void testMatches() throws IOException;
public final void testToString() {
T query = createTestInstance();
assertThat(query.toString(), equalTo(query.fieldName() + ":" + query.toString(query.fieldName())));
assertToString(query);
}
protected abstract void assertToString(T query);
public abstract void testVisit();
protected final void assertEmptyVisit() {
T query = createTestInstance();
List<Query> leavesVisited = new ArrayList<>();
query.visit(new QueryVisitor() {
@Override
public void consumeTerms(Query query, Term... terms) {
fail();
}
@Override
public void consumeTermsMatching(Query query, String field, Supplier<ByteRunAutomaton> automaton) {
fail();
}
@Override
public void visitLeaf(Query query) {
leavesVisited.add(query);
}
});
assertThat(leavesVisited, equalTo(org.elasticsearch.common.collect.List.of(query)));
}
}

View File

@ -1,50 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
public abstract class AbstractStringScriptFieldQueryTestCase<T extends AbstractStringScriptFieldQuery> extends
AbstractScriptFieldQueryTestCase<T> {
protected final StringFieldScript.LeafFactory leafFactory = mock(StringFieldScript.LeafFactory.class);
/**
* {@link Query#visit Visit} a query, collecting {@link ByteRunAutomaton automata},
* failing if there are any terms or if there is more than one automaton.
*/
protected final ByteRunAutomaton visitForSingleAutomata(T testQuery) {
List<ByteRunAutomaton> automata = new ArrayList<>();
testQuery.visit(new QueryVisitor() {
@Override
public void consumeTerms(Query query, Term... terms) {
fail();
}
@Override
public void consumeTermsMatching(Query query, String field, Supplier<ByteRunAutomaton> automaton) {
assertThat(query, sameInstance(testQuery));
assertThat(field, equalTo(testQuery.fieldName()));
automata.add(automaton.get());
}
});
assertThat(automata, hasSize(1));
return automata.get(0);
}
}

View File

@ -1,42 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import static org.hamcrest.Matchers.equalTo;
public class BooleanScriptFieldExistsQueryTests extends AbstractBooleanScriptFieldQueryTestCase<BooleanScriptFieldExistsQuery> {
@Override
protected BooleanScriptFieldExistsQuery createTestInstance() {
return new BooleanScriptFieldExistsQuery(randomScript(), leafFactory, randomAlphaOfLength(5));
}
@Override
protected BooleanScriptFieldExistsQuery copy(BooleanScriptFieldExistsQuery orig) {
return new BooleanScriptFieldExistsQuery(orig.script(), leafFactory, orig.fieldName());
}
@Override
protected BooleanScriptFieldExistsQuery mutate(BooleanScriptFieldExistsQuery orig) {
if (randomBoolean()) {
return new BooleanScriptFieldExistsQuery(randomValueOtherThan(orig.script(), this::randomScript), leafFactory, orig.fieldName());
}
return new BooleanScriptFieldExistsQuery(orig.script(), leafFactory, orig.fieldName() + "modified");
}
@Override
public void testMatches() {
assertTrue(createTestInstance().matches(between(1, Integer.MAX_VALUE), 0));
assertTrue(createTestInstance().matches(0, between(1, Integer.MAX_VALUE)));
assertTrue(createTestInstance().matches(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE)));
assertFalse(createTestInstance().matches(0, 0));
}
@Override
protected void assertToString(BooleanScriptFieldExistsQuery query) {
assertThat(query.toString(query.fieldName()), equalTo("BooleanScriptFieldExistsQuery"));
}
}

View File

@ -1,66 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.runtimefields.query;
import org.elasticsearch.script.Script;
import static org.hamcrest.Matchers.equalTo;
public class BooleanScriptFieldTermQueryTests extends AbstractBooleanScriptFieldQueryTestCase<BooleanScriptFieldTermQuery> {
@Override
protected BooleanScriptFieldTermQuery createTestInstance() {
return createTestInstance(randomBoolean());
}
private BooleanScriptFieldTermQuery createTestInstance(boolean term) {
return new BooleanScriptFieldTermQuery(randomScript(), leafFactory, randomAlphaOfLength(5), term);
}
@Override
protected BooleanScriptFieldTermQuery copy(BooleanScriptFieldTermQuery orig) {
return new BooleanScriptFieldTermQuery(orig.script(), leafFactory, orig.fieldName(), orig.term());
}
@Override
protected BooleanScriptFieldTermQuery mutate(BooleanScriptFieldTermQuery orig) {
Script script = orig.script();
String fieldName = orig.fieldName();
boolean term = orig.term();
switch (randomInt(2)) {
case 0:
script = randomValueOtherThan(script, this::randomScript);
break;
case 1:
fieldName += "modified";
break;
case 2:
term = !term;
break;
default:
fail();
}
return new BooleanScriptFieldTermQuery(script, leafFactory, fieldName, term);
}
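// The assertions below exercise matches(trueCount, falseCount): a term query for true needs at least one true value,
// and a term query for false needs at least one false value.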
@Override
public void testMatches() {
assertTrue(createTestInstance(true).matches(between(1, Integer.MAX_VALUE), 0));
assertFalse(createTestInstance(true).matches(0, between(1, Integer.MAX_VALUE)));
assertTrue(createTestInstance(true).matches(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE)));
assertFalse(createTestInstance(false).matches(between(1, Integer.MAX_VALUE), 0));
assertTrue(createTestInstance(false).matches(0, between(1, Integer.MAX_VALUE)));
assertTrue(createTestInstance(false).matches(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE)));
assertFalse(createTestInstance().matches(0, 0));
}
@Override
protected void assertToString(BooleanScriptFieldTermQuery query) {
assertThat(query.toString(query.fieldName()), equalTo(Boolean.toString(query.term())));
}
}

Some files were not shown because too many files have changed in this diff.