Checkstyle and license headers in plugin
Original commit: elastic/x-pack-elasticsearch@e7855b949f
Parent: b504b0d2d9
Commit: 933a51edef
@@ -40,7 +40,7 @@ subprojects {
    }

    tasks.withType(LicenseHeadersTask.class) {
-       approvedLicenses = ['Elasticsearch Confidential']
+       approvedLicenses = ['Elasticsearch Confidential', 'Generated']
        additionalLicense 'ESCON', 'Elasticsearch Confidential', 'ELASTICSEARCH CONFIDENTIAL'
    }
    ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-api:${version}": ':x-pack-elasticsearch:plugin']

@@ -10,6 +10,7 @@
  <!-- NOCOMMIT Temporary-->
  <suppress files="plugin[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]sql[/\\].*.java" checks="LineLength" />
  <suppress files="plugin[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]sql[/\\]expression[/\\].*.java" checks="EqualsHashCode" />
  <suppress files="sql-clients[/\\].*.java" checks="LineLength" />

  <suppress files="plugin[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]common[/\\]action[/\\]XPackDeleteByQueryAction.java" checks="LineLength" />

@@ -969,7 +969,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
        }
    }

-   static abstract class AnalyzeRule<SubPlan extends LogicalPlan> extends Rule<SubPlan, LogicalPlan> {
+   abstract static class AnalyzeRule<SubPlan extends LogicalPlan> extends Rule<SubPlan, LogicalPlan> {

        // transformUp (post-order) - that is first children and then the node
        // but with a twist; only if the tree is not resolved or analyzed

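For illustration only (not part of the commit): the Analyzer change above merely reorders modifiers. Checkstyle's ModifierOrder check expects the JLS canonical order, in which abstract comes before static. A minimal, hypothetical sketch of the pattern (class names are invented):

    class Rules {
        // JLS/Checkstyle canonical order: visibility, then abstract, then static, then final, ...
        abstract static class Rule<T> {
            abstract T apply(T input);
        }
        // "static abstract class Rule<T>" compiles identically but trips ModifierOrder.
    }
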
@@ -5,8 +5,6 @@
 */
package org.elasticsearch.xpack.sql.analysis.analyzer;

-import java.util.*;
-
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.AttributeSet;
import org.elasticsearch.xpack.sql.expression.Expressions;

@@ -20,6 +18,14 @@ import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.Project;
import org.elasticsearch.xpack.sql.tree.Node;

+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Objects;
+import java.util.Set;
+
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;

@@ -29,7 +35,7 @@ abstract class Verifier {
    private final Node<?> source;
    private final String message;

-   public Failure(Node<?> source, String message) {
+   Failure(Node<?> source, String message) {
        this.source = source;
        this.message = message;
    }

@@ -11,7 +11,7 @@ import org.elasticsearch.search.SearchHitField;
class DocValueExtractor implements HitExtractor {
    private final String fieldName;

-   public DocValueExtractor(String name) {
+   DocValueExtractor(String name) {
        this.fieldName = name;
    }

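Aside (illustrative, not from the commit): this and the following extractor hunks drop public from constructors of package-private classes. The class itself is not visible outside its package, so the wider constructor modifier grants no additional access and the commit narrows it to match the class's own visibility. A hypothetical example of the pattern (ValueHolder is an invented name):

    // Package-private class: a public constructor adds no reachable visibility.
    class ValueHolder {
        private final String value;

        ValueHolder(String value) { // package-private is enough
            this.value = value;
        }

        String value() {
            return value;
        }
    }
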
@@ -17,7 +17,7 @@ class InnerHitExtractor implements HitExtractor {
    private final boolean useDocValue;
    private final String[] tree;

-   public InnerHitExtractor(String hitName, String name, boolean useDocValue) {
+   InnerHitExtractor(String hitName, String name, boolean useDocValue) {
        this.hitName = hitName;
        this.fieldName = name;
        this.useDocValue = useDocValue;

@ -87,265 +87,268 @@ public class Scroller {
|
|||
ScrollerActionListener l = new SessionScrollActionListener(listener, previous.client, previous.keepAlive, previous.schema, ext, previous.limit, previous.docsRead);
|
||||
previous.client.searchScroll(new SearchScrollRequest(scrollId).scroll(previous.keepAlive), l);
|
||||
}
|
||||
}
|
||||
|
||||
// dedicated scroll used for aggs-only/group-by results
|
||||
class AggsScrollActionListener extends ScrollerActionListener {
|
||||
/**
|
||||
* Dedicated scroll used for aggs-only/group-by results.
|
||||
*/
|
||||
static class AggsScrollActionListener extends ScrollerActionListener {
|
||||
private final QueryContainer query;
|
||||
|
||||
private final QueryContainer query;
|
||||
AggsScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
|
||||
super(listener, client, keepAlive, schema);
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
AggsScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
|
||||
super(listener, client, keepAlive, schema);
|
||||
this.query = query;
|
||||
@Override
|
||||
protected RowSetCursor handleResponse(SearchResponse response) {
|
||||
Aggregations aggs = response.getAggregations();
|
||||
|
||||
List<Object[]> columns = new ArrayList<>();
|
||||
|
||||
// this method assumes the nested aggregation are all part of the same tree (the SQL group-by)
|
||||
int maxDepth = -1;
|
||||
|
||||
for (Reference ref : query.refs()) {
|
||||
Object[] arr = null;
|
||||
|
||||
ColumnsProcessor processor = null;
|
||||
|
||||
if (ref instanceof ProcessingRef) {
|
||||
ProcessingRef pRef = (ProcessingRef) ref;
|
||||
processor = pRef.processor();
|
||||
ref = pRef.ref();
|
||||
}
|
||||
|
||||
if (ref == TotalCountRef.INSTANCE) {
|
||||
arr = new Object[] { processIfNeeded(processor, Long.valueOf(response.getHits().getTotalHits())) };
|
||||
columns.add(arr);
|
||||
}
|
||||
else if (ref instanceof AggRef) {
|
||||
// workaround for elastic/elasticsearch/issues/23056
|
||||
String path = ((AggRef) ref).path();
|
||||
boolean formattedKey = path.endsWith(Agg.PATH_BUCKET_VALUE_FORMATTED);
|
||||
if (formattedKey) {
|
||||
path = path.substring(0, path.length() - Agg.PATH_BUCKET_VALUE_FORMATTED.length());
|
||||
}
|
||||
Object value = getAggProperty(aggs, path);
|
||||
if (formattedKey) {
|
||||
List<? extends Bucket> buckets = ((MultiBucketsAggregation) value).getBuckets();
|
||||
arr = new Object[buckets.size()];
|
||||
for (int i = 0; i < buckets.size(); i++) {
|
||||
arr[i] = buckets.get(i).getKeyAsString();
|
||||
}
|
||||
}
|
||||
else {
|
||||
arr = value instanceof Object[] ? (Object[]) value : new Object[] { value };
|
||||
}
|
||||
// process if needed
|
||||
for (int i = 0; i < arr.length; i++) {
|
||||
arr[i] = processIfNeeded(processor, arr[i]);
|
||||
}
|
||||
columns.add(arr);
|
||||
}
|
||||
// aggs without any grouping
|
||||
else {
|
||||
throw new SqlIllegalArgumentException("Unexpected non-agg/grouped column specified; %s", ref.getClass());
|
||||
}
|
||||
|
||||
if (ref.depth() > maxDepth) {
|
||||
maxDepth = ref.depth();
|
||||
}
|
||||
}
|
||||
|
||||
clearScroll(response.getScrollId());
|
||||
return new AggsRowSetCursor(schema, columns, maxDepth, query.limit());
|
||||
}
|
||||
|
||||
private static Object getAggProperty(Aggregations aggs, String path) {
|
||||
List<String> list = AggregationPath.parse(path).getPathElementsAsStringList();
|
||||
String aggName = list.get(0);
|
||||
InternalAggregation agg = aggs.get(aggName);
|
||||
if (agg == null) {
|
||||
throw new ExecutionException("Cannot find an aggregation named %s", aggName);
|
||||
}
|
||||
return agg.getProperty(list.subList(1, list.size()));
|
||||
}
|
||||
|
||||
private Object processIfNeeded(ColumnsProcessor processor, Object value) {
|
||||
return processor != null ? processor.apply(value) : value;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RowSetCursor handleResponse(SearchResponse response) {
|
||||
Aggregations aggs = response.getAggregations();
|
||||
/**
|
||||
* Initial scroll used for parsing search hits (handles possible aggs).
|
||||
*/
|
||||
static class HandshakeScrollActionListener extends SearchHitsActionListener {
|
||||
private final QueryContainer query;
|
||||
|
||||
List<Object[]> columns = new ArrayList<>();
|
||||
HandshakeScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
|
||||
super(listener, client, keepAlive, schema, query.limit(), 0);
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
// this method assumes the nested aggregation are all part of the same tree (the SQL group-by)
|
||||
int maxDepth = -1;
|
||||
@Override
|
||||
public void onResponse(SearchResponse response) {
|
||||
super.onResponse(response);
|
||||
}
|
||||
|
||||
for (Reference ref : query.refs()) {
|
||||
Object[] arr = null;
|
||||
@Override
|
||||
protected List<HitExtractor> getExtractors() {
|
||||
// create response extractors for the first time
|
||||
List<Reference> refs = query.refs();
|
||||
|
||||
ColumnsProcessor processor = null;
|
||||
List<HitExtractor> exts = new ArrayList<>(refs.size());
|
||||
|
||||
for (Reference ref : refs) {
|
||||
exts.add(createExtractor(ref));
|
||||
}
|
||||
return exts;
|
||||
}
|
||||
|
||||
private HitExtractor createExtractor(Reference ref) {
|
||||
if (ref instanceof SearchHitFieldRef) {
|
||||
SearchHitFieldRef f = (SearchHitFieldRef) ref;
|
||||
return f.useDocValue() ? new DocValueExtractor(f.name()) : new SourceExtractor(f.name());
|
||||
}
|
||||
|
||||
if (ref instanceof NestedFieldRef) {
|
||||
NestedFieldRef f = (NestedFieldRef) ref;
|
||||
return new InnerHitExtractor(f.parent(), f.name(), f.useDocValue());
|
||||
}
|
||||
|
||||
if (ref instanceof ScriptFieldRef) {
|
||||
ScriptFieldRef f = (ScriptFieldRef) ref;
|
||||
return new DocValueExtractor(f.name());
|
||||
}
|
||||
|
||||
if (ref instanceof ProcessingRef) {
|
||||
ProcessingRef pRef = (ProcessingRef) ref;
|
||||
processor = pRef.processor();
|
||||
ref = pRef.ref();
|
||||
return new ProcessingHitExtractor(createExtractor(pRef.ref()), pRef.processor());
|
||||
}
|
||||
|
||||
if (ref == TotalCountRef.INSTANCE) {
|
||||
arr = new Object[] { processIfNeeded(processor, Long.valueOf(response.getHits().getTotalHits())) };
|
||||
columns.add(arr);
|
||||
}
|
||||
else if (ref instanceof AggRef) {
|
||||
// workaround for elastic/elasticsearch/issues/23056
|
||||
String path = ((AggRef) ref).path();
|
||||
boolean formattedKey = path.endsWith(Agg.PATH_BUCKET_VALUE_FORMATTED);
|
||||
if (formattedKey) {
|
||||
path = path.substring(0, path.length() - Agg.PATH_BUCKET_VALUE_FORMATTED.length());
|
||||
}
|
||||
Object value = getAggProperty(aggs, path);
|
||||
if (formattedKey) {
|
||||
List<? extends Bucket> buckets = ((MultiBucketsAggregation) value).getBuckets();
|
||||
arr = new Object[buckets.size()];
|
||||
for (int i = 0; i < buckets.size(); i++) {
|
||||
arr[i] = buckets.get(i).getKeyAsString();
|
||||
throw new SqlIllegalArgumentException("Unexpected ValueReference %s", ref.getClass());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Listener used for streaming the rest of the results after the handshake has been used.
|
||||
*/
|
||||
static class SessionScrollActionListener extends SearchHitsActionListener {
|
||||
|
||||
private List<HitExtractor> exts;
|
||||
|
||||
SessionScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, List<HitExtractor> ext, int limit, int docCount) {
|
||||
super(listener, client, keepAlive, schema, limit, docCount);
|
||||
this.exts = ext;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<HitExtractor> getExtractors() {
|
||||
return exts;
|
||||
}
|
||||
}
|
||||
|
||||
abstract static class SearchHitsActionListener extends ScrollerActionListener {
|
||||
final int limit;
|
||||
int docsRead;
|
||||
|
||||
SearchHitsActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, int limit,
|
||||
int docsRead) {
|
||||
super(listener, client, keepAlive, schema);
|
||||
this.limit = limit;
|
||||
this.docsRead = docsRead;
|
||||
}
|
||||
|
||||
protected RowSetCursor handleResponse(SearchResponse response) {
|
||||
SearchHit[] hits = response.getHits().getHits();
|
||||
List<HitExtractor> exts = getExtractors();
|
||||
|
||||
// there are some results
|
||||
if (hits.length > 0) {
|
||||
String scrollId = response.getScrollId();
|
||||
Consumer<ActionListener<RowSetCursor>> next = null;
|
||||
|
||||
docsRead += hits.length;
|
||||
|
||||
// if there's an id, try to setup next scroll
|
||||
if (scrollId != null) {
|
||||
// is all the content already retrieved?
|
||||
if (Boolean.TRUE.equals(response.isTerminatedEarly()) || response.getHits().getTotalHits() == hits.length
|
||||
// or maybe the limit has been reached
|
||||
|| docsRead >= limit) {
|
||||
// if so, clear the scroll
|
||||
clearScroll(scrollId);
|
||||
// and remove it to indicate no more data is expected
|
||||
scrollId = null;
|
||||
} else {
|
||||
next = l -> Scroller.from(l, this, response.getScrollId(), exts);
|
||||
}
|
||||
}
|
||||
else {
|
||||
arr = value instanceof Object[] ? (Object[]) value : new Object[] { value };
|
||||
}
|
||||
// process if needed
|
||||
for (int i = 0; i < arr.length; i++) {
|
||||
arr[i] = processIfNeeded(processor, arr[i]);
|
||||
}
|
||||
columns.add(arr);
|
||||
int limitHits = limit > 0 && docsRead >= limit ? limit : -1;
|
||||
return new SearchHitRowSetCursor(schema, exts, hits, limitHits, scrollId, next);
|
||||
}
|
||||
// aggs without any grouping
|
||||
// no hits
|
||||
else {
|
||||
throw new SqlIllegalArgumentException("Unexpected non-agg/grouped column specified; %s", ref.getClass());
|
||||
}
|
||||
|
||||
if (ref.depth() > maxDepth) {
|
||||
maxDepth = ref.depth();
|
||||
clearScroll(response.getScrollId());
|
||||
// typically means last page but might be an aggs only query
|
||||
return needsHit(exts) ? Rows.empty(schema) : new SearchHitRowSetCursor(schema, exts);
|
||||
}
|
||||
}
|
||||
|
||||
clearScroll(response.getScrollId());
|
||||
return new AggsRowSetCursor(schema, columns, maxDepth, query.limit());
|
||||
}
|
||||
|
||||
private static Object getAggProperty(Aggregations aggs, String path) {
|
||||
List<String> list = AggregationPath.parse(path).getPathElementsAsStringList();
|
||||
String aggName = list.get(0);
|
||||
InternalAggregation agg = aggs.get(aggName);
|
||||
if (agg == null) {
|
||||
throw new ExecutionException("Cannot find an aggregation named %s", aggName);
|
||||
}
|
||||
return agg.getProperty(list.subList(1, list.size()));
|
||||
}
|
||||
|
||||
private Object processIfNeeded(ColumnsProcessor processor, Object value) {
|
||||
return processor != null ? processor.apply(value) : value;
|
||||
}
|
||||
}
|
||||
|
||||
// initial scroll used for parsing search hits (handles possible aggs)
|
||||
class HandshakeScrollActionListener extends SearchHitsActionListener {
|
||||
|
||||
private final QueryContainer query;
|
||||
|
||||
HandshakeScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
|
||||
super(listener, client, keepAlive, schema, query.limit(), 0);
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onResponse(SearchResponse response) {
|
||||
super.onResponse(response);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<HitExtractor> getExtractors() {
|
||||
// create response extractors for the first time
|
||||
List<Reference> refs = query.refs();
|
||||
|
||||
List<HitExtractor> exts = new ArrayList<>(refs.size());
|
||||
|
||||
for (Reference ref : refs) {
|
||||
exts.add(createExtractor(ref));
|
||||
}
|
||||
return exts;
|
||||
}
|
||||
|
||||
private HitExtractor createExtractor(Reference ref) {
|
||||
if (ref instanceof SearchHitFieldRef) {
|
||||
SearchHitFieldRef f = (SearchHitFieldRef) ref;
|
||||
return f.useDocValue() ? new DocValueExtractor(f.name()) : new SourceExtractor(f.name());
|
||||
private static boolean needsHit(List<HitExtractor> exts) {
|
||||
for (HitExtractor ext : exts) {
|
||||
if (ext instanceof DocValueExtractor || ext instanceof ProcessingHitExtractor) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if (ref instanceof NestedFieldRef) {
|
||||
NestedFieldRef f = (NestedFieldRef) ref;
|
||||
return new InnerHitExtractor(f.parent(), f.name(), f.useDocValue());
|
||||
protected abstract List<HitExtractor> getExtractors();
|
||||
}
|
||||
|
||||
abstract static class ScrollerActionListener implements ActionListener<SearchResponse> {
|
||||
|
||||
final ActionListener<RowSetCursor> listener;
|
||||
|
||||
final Client client;
|
||||
final TimeValue keepAlive;
|
||||
final Schema schema;
|
||||
|
||||
ScrollerActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema) {
|
||||
this.listener = listener;
|
||||
|
||||
this.client = client;
|
||||
this.keepAlive = keepAlive;
|
||||
this.schema = schema;
|
||||
}
|
||||
|
||||
if (ref instanceof ScriptFieldRef) {
|
||||
ScriptFieldRef f = (ScriptFieldRef) ref;
|
||||
return new DocValueExtractor(f.name());
|
||||
// TODO: need to handle rejections plus check failures (shard size, etc...)
|
||||
@Override
|
||||
public void onResponse(final SearchResponse response) {
|
||||
try {
|
||||
ShardSearchFailure[] failure = response.getShardFailures();
|
||||
if (!ObjectUtils.isEmpty(failure)) {
|
||||
onFailure(new ExecutionException(failure[0].reason(), failure[0].getCause()));
|
||||
}
|
||||
listener.onResponse(handleResponse(response));
|
||||
} catch (Exception ex) {
|
||||
onFailure(ex);
|
||||
}
|
||||
}
|
||||
|
||||
if (ref instanceof ProcessingRef) {
|
||||
ProcessingRef pRef = (ProcessingRef) ref;
|
||||
return new ProcessingHitExtractor(createExtractor(pRef.ref()), pRef.processor());
|
||||
}
|
||||
protected abstract RowSetCursor handleResponse(SearchResponse response);
|
||||
|
||||
throw new SqlIllegalArgumentException("Unexpected ValueReference %s", ref.getClass());
|
||||
}
|
||||
}
|
||||
|
||||
// listener used for streaming the rest of the results after the handshake has been used
|
||||
class SessionScrollActionListener extends SearchHitsActionListener {
|
||||
|
||||
private List<HitExtractor> exts;
|
||||
|
||||
SessionScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, List<HitExtractor> ext, int limit, int docCount) {
|
||||
super(listener, client, keepAlive, schema, limit, docCount);
|
||||
this.exts = ext;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<HitExtractor> getExtractors() {
|
||||
return exts;
|
||||
}
|
||||
}
|
||||
|
||||
abstract class SearchHitsActionListener extends ScrollerActionListener {
|
||||
|
||||
final int limit;
|
||||
int docsRead;
|
||||
|
||||
SearchHitsActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, int limit, int docsRead) {
|
||||
super(listener, client, keepAlive, schema);
|
||||
this.limit = limit;
|
||||
this.docsRead = docsRead;
|
||||
}
|
||||
|
||||
protected RowSetCursor handleResponse(SearchResponse response) {
|
||||
SearchHit[] hits = response.getHits().getHits();
|
||||
List<HitExtractor> exts = getExtractors();
|
||||
|
||||
// there are some results
|
||||
if (hits.length > 0) {
|
||||
String scrollId = response.getScrollId();
|
||||
Consumer<ActionListener<RowSetCursor>> next = null;
|
||||
|
||||
docsRead += hits.length;
|
||||
|
||||
// if there's an id, try to setup next scroll
|
||||
protected final void clearScroll(String scrollId) {
|
||||
if (scrollId != null) {
|
||||
// is all the content already retrieved?
|
||||
if (Boolean.TRUE.equals(response.isTerminatedEarly()) || response.getHits().getTotalHits() == hits.length
|
||||
// or maybe the limit has been reached
|
||||
|| docsRead >= limit) {
|
||||
// if so, clear the scroll
|
||||
clearScroll(scrollId);
|
||||
// and remove it to indicate no more data is expected
|
||||
scrollId = null;
|
||||
}
|
||||
else {
|
||||
next = l -> Scroller.from(l, this, response.getScrollId(), exts);
|
||||
}
|
||||
}
|
||||
int limitHits = limit > 0 && docsRead >= limit ? limit : -1;
|
||||
return new SearchHitRowSetCursor(schema, exts, hits, limitHits, scrollId, next);
|
||||
}
|
||||
// no hits
|
||||
else {
|
||||
clearScroll(response.getScrollId());
|
||||
// typically means last page but might be an aggs only query
|
||||
return needsHit(exts) ? Rows.empty(schema) : new SearchHitRowSetCursor(schema, exts);
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean needsHit(List<HitExtractor> exts) {
|
||||
for (HitExtractor ext : exts) {
|
||||
if (ext instanceof DocValueExtractor || ext instanceof ProcessingHitExtractor) {
|
||||
return true;
|
||||
// fire and forget
|
||||
client.prepareClearScroll().addScrollId(scrollId).execute();
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
protected abstract List<HitExtractor> getExtractors();
|
||||
@Override
|
||||
public final void onFailure(Exception ex) {
|
||||
listener.onFailure(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
abstract class ScrollerActionListener implements ActionListener<SearchResponse> {
|
||||
|
||||
final ActionListener<RowSetCursor> listener;
|
||||
|
||||
final Client client;
|
||||
final TimeValue keepAlive;
|
||||
final Schema schema;
|
||||
|
||||
ScrollerActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema) {
|
||||
this.listener = listener;
|
||||
|
||||
this.client = client;
|
||||
this.keepAlive = keepAlive;
|
||||
this.schema = schema;
|
||||
}
|
||||
|
||||
// TODO: need to handle rejections plus check failures (shard size, etc...)
|
||||
@Override
|
||||
public void onResponse(final SearchResponse response) {
|
||||
try {
|
||||
ShardSearchFailure[] failure = response.getShardFailures();
|
||||
if (!ObjectUtils.isEmpty(failure)) {
|
||||
onFailure(new ExecutionException(failure[0].reason(), failure[0].getCause()));
|
||||
}
|
||||
listener.onResponse(handleResponse(response));
|
||||
} catch (Exception ex) {
|
||||
onFailure(ex);
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract RowSetCursor handleResponse(SearchResponse response);
|
||||
|
||||
protected final void clearScroll(String scrollId) {
|
||||
if (scrollId != null) {
|
||||
// fire and forget
|
||||
client.prepareClearScroll().addScrollId(scrollId).execute();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void onFailure(Exception ex) {
|
||||
listener.onFailure(ex);
|
||||
}
|
||||
}
|
|
@@ -12,7 +12,7 @@ import org.elasticsearch.search.SearchHit;
class SourceExtractor implements HitExtractor {
    private final String fieldName;

-   public SourceExtractor(String name) {
+   SourceExtractor(String name) {
        this.fieldName = name;
    }

@@ -43,6 +43,7 @@ public abstract class NamedExpression extends Expression {

    @Override
    public final int hashCode() {
+       // NOCOMMIT making this final upsets checkstyle.
        return id.hashCode();
    }

@@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.type.DataType;

abstract class UnresolvedNamedExpression extends NamedExpression implements Unresolvable {

-   public UnresolvedNamedExpression(Location location, List<Expression> children) {
+   UnresolvedNamedExpression(Location location, List<Expression> children) {
        super(location, "<unresolved>", children, ExpressionIdGenerator.EMPTY);
    }

@@ -5,16 +5,53 @@
 */
package org.elasticsearch.xpack.sql.expression.function;

+import org.elasticsearch.xpack.sql.SqlException;
+import org.elasticsearch.xpack.sql.expression.function.aggregate.Avg;
+import org.elasticsearch.xpack.sql.expression.function.aggregate.Count;
+import org.elasticsearch.xpack.sql.expression.function.aggregate.Max;
+import org.elasticsearch.xpack.sql.expression.function.aggregate.Min;
+import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum;
+import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfYear;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.HourOfDay;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MinuteOfDay;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MinuteOfHour;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.SecondOfMinute;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.ATan;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Abs;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Cbrt;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Ceil;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Cos;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Cosh;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Degrees;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.E;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Exp;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Expm1;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Floor;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Log;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Log10;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Pi;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Radians;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Round;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sin;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sinh;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sqrt;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.Tan;
+
import java.io.IOException;
import java.lang.reflect.Modifier;
import java.net.URL;
-import java.util.*;
-
-import org.elasticsearch.xpack.sql.SqlException;
-import org.elasticsearch.xpack.sql.expression.function.aggregate.*;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
-import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.*;
-import org.elasticsearch.xpack.sql.expression.function.scalar.math.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.Map;

import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine;
import static org.elasticsearch.xpack.sql.util.CollectionUtils.of;

@@ -16,7 +16,7 @@ public enum FunctionType {

    private final Class<? extends Function> baseClass;

-   private FunctionType(Class<? extends Function> base) {
+   FunctionType(Class<? extends Function> base) {
        this.baseClass = base;
    }

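Aside (illustrative, not from the commit): enum constructors are implicitly private in Java, so the explicit private on the FunctionType constructor is redundant whether or not a style checker flags it. A hypothetical sketch (Color is an invented enum):

    enum Color {
        RED(0xFF0000),
        GREEN(0x00FF00);

        private final int rgb;

        Color(int rgb) { // implicitly private; "private Color(int rgb)" says nothing extra
            this.rgb = rgb;
        }

        int rgb() {
            return rgb;
        }
    }
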
@@ -16,7 +16,7 @@ import org.elasticsearch.xpack.sql.type.DataTypes;

public class FullTextPredicate extends Expression {

-   public static enum Operator {
+   public enum Operator {
        AND,
        OR;

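Aside (illustrative): an enum nested inside a class is implicitly static, so "public static enum Operator" carries a redundant static keyword; the JoinType change further down makes the same simplification. A hypothetical sketch (Predicate is an invented class name):

    public class Predicate {
        public enum Operator { // implicitly static; "public static enum" is redundant
            AND,
            OR
        }
    }
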
(File diff suppressed because it is too large.)
@@ -13,9 +13,10 @@ import org.antlr.v4.runtime.IntStream;
// extension of ANTLR that does the uppercasing once for the whole stream
// the ugly part is that it has to duplicate LA method
class CaseInsensitiveStream extends ANTLRInputStream {
+   // NOCOMMIT maybe we can fix this in the lexer or on the way in so we don't need the LA override
    protected char[] uppedChars;

-   public CaseInsensitiveStream(String input) {
+   CaseInsensitiveStream(String input) {
        super(input);
        this.uppedChars = input.toUpperCase(Locale.ROOT).toCharArray();
    }

@ -1,204 +0,0 @@
|
|||
T__0=1
|
||||
T__1=2
|
||||
T__2=3
|
||||
T__3=4
|
||||
T__4=5
|
||||
SELECT=6
|
||||
FROM=7
|
||||
AS=8
|
||||
ALL=9
|
||||
WHEN=10
|
||||
THEN=11
|
||||
ANY=12
|
||||
DISTINCT=13
|
||||
WHERE=14
|
||||
GROUP=15
|
||||
BY=16
|
||||
GROUPING=17
|
||||
SETS=18
|
||||
ORDER=19
|
||||
HAVING=20
|
||||
LIMIT=21
|
||||
OR=22
|
||||
AND=23
|
||||
IN=24
|
||||
NOT=25
|
||||
NO=26
|
||||
EXISTS=27
|
||||
BETWEEN=28
|
||||
LIKE=29
|
||||
RLIKE=30
|
||||
IS=31
|
||||
NULL=32
|
||||
TRUE=33
|
||||
FALSE=34
|
||||
LAST=35
|
||||
ASC=36
|
||||
DESC=37
|
||||
FOR=38
|
||||
INTEGER=39
|
||||
JOIN=40
|
||||
CROSS=41
|
||||
OUTER=42
|
||||
INNER=43
|
||||
LEFT=44
|
||||
RIGHT=45
|
||||
FULL=46
|
||||
NATURAL=47
|
||||
USING=48
|
||||
ON=49
|
||||
WITH=50
|
||||
TABLE=51
|
||||
INTO=52
|
||||
DESCRIBE=53
|
||||
OPTION=54
|
||||
EXPLAIN=55
|
||||
ANALYZE=56
|
||||
FORMAT=57
|
||||
TYPE=58
|
||||
TEXT=59
|
||||
VERIFY=60
|
||||
GRAPHVIZ=61
|
||||
LOGICAL=62
|
||||
PHYSICAL=63
|
||||
SHOW=64
|
||||
TABLES=65
|
||||
COLUMNS=66
|
||||
COLUMN=67
|
||||
FUNCTIONS=68
|
||||
TO=69
|
||||
DEBUG=70
|
||||
PLAN=71
|
||||
PARSED=72
|
||||
ANALYZED=73
|
||||
OPTIMIZED=74
|
||||
MAPPED=75
|
||||
EXECUTABLE=76
|
||||
USE=77
|
||||
SET=78
|
||||
RESET=79
|
||||
SESSION=80
|
||||
SCHEMAS=81
|
||||
EXTRACT=82
|
||||
QUERY=83
|
||||
MATCH=84
|
||||
CAST=85
|
||||
EQ=86
|
||||
NEQ=87
|
||||
LT=88
|
||||
LTE=89
|
||||
GT=90
|
||||
GTE=91
|
||||
PLUS=92
|
||||
MINUS=93
|
||||
ASTERISK=94
|
||||
SLASH=95
|
||||
PERCENT=96
|
||||
CONCAT=97
|
||||
STRING=98
|
||||
INTEGER_VALUE=99
|
||||
DECIMAL_VALUE=100
|
||||
IDENTIFIER=101
|
||||
DIGIT_IDENTIFIER=102
|
||||
QUOTED_IDENTIFIER=103
|
||||
BACKQUOTED_IDENTIFIER=104
|
||||
SIMPLE_COMMENT=105
|
||||
BRACKETED_COMMENT=106
|
||||
WS=107
|
||||
UNRECOGNIZED=108
|
||||
'('=1
|
||||
')'=2
|
||||
','=3
|
||||
'.'=4
|
||||
'"'=5
|
||||
'SELECT'=6
|
||||
'FROM'=7
|
||||
'AS'=8
|
||||
'ALL'=9
|
||||
'WHEN'=10
|
||||
'THEN'=11
|
||||
'ANY'=12
|
||||
'DISTINCT'=13
|
||||
'WHERE'=14
|
||||
'GROUP'=15
|
||||
'BY'=16
|
||||
'GROUPING'=17
|
||||
'SETS'=18
|
||||
'ORDER'=19
|
||||
'HAVING'=20
|
||||
'LIMIT'=21
|
||||
'OR'=22
|
||||
'AND'=23
|
||||
'IN'=24
|
||||
'NOT'=25
|
||||
'NO'=26
|
||||
'EXISTS'=27
|
||||
'BETWEEN'=28
|
||||
'LIKE'=29
|
||||
'RLIKE'=30
|
||||
'IS'=31
|
||||
'NULL'=32
|
||||
'TRUE'=33
|
||||
'FALSE'=34
|
||||
'LAST'=35
|
||||
'ASC'=36
|
||||
'DESC'=37
|
||||
'FOR'=38
|
||||
'INTEGER'=39
|
||||
'JOIN'=40
|
||||
'CROSS'=41
|
||||
'OUTER'=42
|
||||
'INNER'=43
|
||||
'LEFT'=44
|
||||
'RIGHT'=45
|
||||
'FULL'=46
|
||||
'NATURAL'=47
|
||||
'USING'=48
|
||||
'ON'=49
|
||||
'WITH'=50
|
||||
'TABLE'=51
|
||||
'INTO'=52
|
||||
'DESCRIBE'=53
|
||||
'OPTION'=54
|
||||
'EXPLAIN'=55
|
||||
'ANALYZE'=56
|
||||
'FORMAT'=57
|
||||
'TYPE'=58
|
||||
'TEXT'=59
|
||||
'VERIFY'=60
|
||||
'GRAPHVIZ'=61
|
||||
'LOGICAL'=62
|
||||
'PHYSICAL'=63
|
||||
'SHOW'=64
|
||||
'TABLES'=65
|
||||
'COLUMNS'=66
|
||||
'COLUMN'=67
|
||||
'FUNCTIONS'=68
|
||||
'TO'=69
|
||||
'DEBUG'=70
|
||||
'PLAN'=71
|
||||
'PARSED'=72
|
||||
'ANALYZED'=73
|
||||
'OPTIMIZED'=74
|
||||
'MAPPED'=75
|
||||
'EXECUTABLE'=76
|
||||
'USE'=77
|
||||
'SET'=78
|
||||
'RESET'=79
|
||||
'SESSION'=80
|
||||
'SCHEMAS'=81
|
||||
'EXTRACT'=82
|
||||
'QUERY'=83
|
||||
'MATCH'=84
|
||||
'CAST'=85
|
||||
'='=86
|
||||
'<'=88
|
||||
'<='=89
|
||||
'>'=90
|
||||
'>='=91
|
||||
'+'=92
|
||||
'-'=93
|
||||
'*'=94
|
||||
'/'=95
|
||||
'%'=96
|
||||
'||'=97
|
|
@@ -42,4 +42,10 @@ abstract class BinaryPlan extends LogicalPlan {
        return Objects.equals(left(), other.left())
            && Objects.equals(right(), other.right());
    }
+
+   @Override
+   public int hashCode() {
+       return Objects.hash(left, right);
+   }
+
}

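Aside (illustrative, not from the commit): BinaryPlan gains a hashCode() to pair with its existing equals(), which is what Checkstyle's EqualsHashCode check enforces (and why that check is temporarily suppressed for the sql expression package in the hunk near the top). A minimal, hypothetical sketch of the contract (Pair is an invented class):

    import java.util.Objects;

    final class Pair {
        private final Object left;
        private final Object right;

        Pair(Object left, Object right) {
            this.left = left;
            this.right = right;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            Pair other = (Pair) obj;
            return Objects.equals(left, other.left) && Objects.equals(right, other.right);
        }

        @Override
        public int hashCode() {
            // must agree with equals: equal Pairs produce equal hash codes
            return Objects.hash(left, right);
        }
    }
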
@@ -22,11 +22,12 @@ public class Join extends BinaryPlan {
    private final JoinType type;
    private final Expression condition;

-   public static enum JoinType {
-       INNER, LEFT // OUTER
-       , RIGHT // OUTER
-       , FULL // OUTER
-       , IMPLICIT
+   public enum JoinType {
+       INNER,
+       LEFT, // OUTER
+       RIGHT, // OUTER
+       FULL, // OUTER
+       IMPLICIT,
    }

    public Join(Location location, LogicalPlan left, LogicalPlan right, JoinType type, Expression condition) {

@@ -10,8 +10,7 @@ import java.util.Collections;
import org.elasticsearch.xpack.sql.tree.Location;

abstract class LeafExec extends PhysicalPlan {
-
-   public LeafExec(Location location) {
+   LeafExec(Location location) {
        super(location, Collections.emptyList());
    }
}

@@ -124,26 +124,26 @@ class Mapper extends RuleExecutor<PhysicalPlan> {
            throw new UnsupportedOperationException("Don't know how to handle join " + join.nodeString());
        }
    }
}

abstract class MapExecRule<SubPlan extends LogicalPlan> extends Rule<UnplannedExec, PhysicalPlan> {
abstract static class MapExecRule<SubPlan extends LogicalPlan> extends Rule<UnplannedExec, PhysicalPlan> {

    private final Class<SubPlan> subPlanToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());
    private final Class<SubPlan> subPlanToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());

    @Override
    public final PhysicalPlan apply(PhysicalPlan plan) {
        return plan.transformUp(this::rule, UnplannedExec.class);
    }

    @SuppressWarnings("unchecked")
    @Override
    protected final PhysicalPlan rule(UnplannedExec plan) {
        LogicalPlan subPlan = plan.plan();
        if (subPlanToken.isInstance(subPlan)) {
            return map((SubPlan) subPlan);
    @Override
    public final PhysicalPlan apply(PhysicalPlan plan) {
        return plan.transformUp(this::rule, UnplannedExec.class);
    }
        return plan;
    }

    protected abstract PhysicalPlan map(SubPlan plan);
    @SuppressWarnings("unchecked")
    @Override
    protected final PhysicalPlan rule(UnplannedExec plan) {
        LogicalPlan subPlan = plan.plan();
        if (subPlanToken.isInstance(subPlan)) {
            return map((SubPlan) subPlan);
        }
        return plan;
    }

    protected abstract PhysicalPlan map(SubPlan plan);
}
}

@@ -470,16 +470,19 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
            return exec.with(qContainer);
        }
    }

    /**
     * Rule for folding physical plans together.
     */
    abstract static class FoldingRule<SubPlan extends PhysicalPlan> extends Rule<SubPlan, PhysicalPlan> {

        @Override
        public final PhysicalPlan apply(PhysicalPlan plan) {
            return plan.transformUp(this::rule, typeToken());
        }

        @Override
        protected abstract PhysicalPlan rule(SubPlan plan);
    }
}

// rule for folding physical plans together
abstract class FoldingRule<SubPlan extends PhysicalPlan> extends Rule<SubPlan, PhysicalPlan> {

    @Override
    public final PhysicalPlan apply(PhysicalPlan plan) {
        return plan.transformUp(this::rule, typeToken());
    }

    @Override
    protected abstract PhysicalPlan rule(SubPlan plan);
}

@@ -5,14 +5,6 @@
 */
package org.elasticsearch.xpack.sql.planner;

-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Map.Entry;
-
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.BinaryExpression;

@@ -51,7 +43,6 @@ import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQuery
import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate;
import org.elasticsearch.xpack.sql.expression.regex.Like;
import org.elasticsearch.xpack.sql.expression.regex.RLike;
import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation;
import org.elasticsearch.xpack.sql.querydsl.agg.Agg;
import org.elasticsearch.xpack.sql.querydsl.agg.AggFilter;
import org.elasticsearch.xpack.sql.querydsl.agg.AndAggFilter;

@@ -83,12 +74,18 @@ import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.util.Assert;
import org.elasticsearch.xpack.sql.util.ReflectionUtils;

+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;

import static java.lang.String.format;
import static java.util.stream.Collectors.toList;

import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
import static org.elasticsearch.xpack.sql.planner.QueryTranslator.*;

abstract class QueryTranslator {

@ -399,397 +396,396 @@ abstract class QueryTranslator {
|
|||
}
|
||||
throw new SqlIllegalArgumentException("Does not know how to convert argument %s for functon %s", arg.nodeString(), af.nodeString());
|
||||
}
|
||||
|
||||
// TODO: need to optimize on ngram
|
||||
// TODO: see whether escaping is needed
|
||||
static class Likes extends ExppressionTranslator<BinaryExpression> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
|
||||
Query q = null;
|
||||
boolean analyzed = true;
|
||||
String target = null;
|
||||
|
||||
if (e.left() instanceof FieldAttribute) {
|
||||
FieldAttribute fa = (FieldAttribute) e.left();
|
||||
analyzed = fa.isAnalyzed();
|
||||
target = nameOf(analyzed ? fa : fa.notAnalyzedAttribute());
|
||||
}
|
||||
|
||||
String pattern = sqlToEsPatternMatching(valueOf(e.right()));
|
||||
if (e instanceof Like) {
|
||||
if (analyzed) {
|
||||
q = new QueryStringQuery(e.location(), pattern, target);
|
||||
}
|
||||
else {
|
||||
q = new WildcardQuery(e.location(), nameOf(e.left()), pattern);
|
||||
}
|
||||
}
|
||||
|
||||
if (e instanceof RLike) {
|
||||
if (analyzed) {
|
||||
q = new QueryStringQuery(e.location(), "/" + pattern + "/", target);
|
||||
}
|
||||
else {
|
||||
q = new RegexQuery(e.location(), nameOf(e.left()), sqlToEsPatternMatching(valueOf(e.right())));
|
||||
}
|
||||
}
|
||||
|
||||
return q != null ? new QueryTranslation(wrapIfNested(q, e.left())) : null;
|
||||
}
|
||||
|
||||
private static String sqlToEsPatternMatching(String pattern) {
|
||||
return pattern.replace("%", "*").replace("_", "?");
|
||||
}
|
||||
|
||||
private static boolean hasMatchingPattern(String pattern) {
|
||||
return pattern.contains("*") || pattern.contains("?");
|
||||
}
|
||||
}
|
||||
|
||||
static class StringQueries extends ExppressionTranslator<StringQueryPredicate> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(StringQueryPredicate q, boolean onAggs) {
|
||||
return new QueryTranslation(new QueryStringQuery(q.location(), q.query(), q.fields(), q));
|
||||
}
|
||||
}
|
||||
|
||||
static class Matches extends ExppressionTranslator<MatchQueryPredicate> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(MatchQueryPredicate q, boolean onAggs) {
|
||||
return new QueryTranslation(wrapIfNested(new MatchQuery(q.location(), nameOf(q.field()), q.query(), q), q.field()));
|
||||
}
|
||||
}
|
||||
|
||||
static class MultiMatches extends ExppressionTranslator<MultiMatchQueryPredicate> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(MultiMatchQueryPredicate q, boolean onAggs) {
|
||||
return new QueryTranslation(new MultiMatchQuery(q.location(), q.query(), q.fields(), q));
|
||||
}
|
||||
}
|
||||
|
||||
static class BinaryLogic extends ExppressionTranslator<BinaryExpression> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
|
||||
if (e instanceof And) {
|
||||
return and(e.location(), toQuery(e.left(), onAggs), toQuery(e.right(), onAggs));
|
||||
}
|
||||
if (e instanceof Or) {
|
||||
return or(e.location(), toQuery(e.left(), onAggs), toQuery(e.right(), onAggs));
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
static class Nots extends ExppressionTranslator<Not> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(Not not, boolean onAggs) {
|
||||
QueryTranslation translation = toQuery(not.child(), onAggs);
|
||||
return new QueryTranslation(not(translation.query), translation.aggFilter);
|
||||
}
|
||||
}
|
||||
|
||||
// assume the Optimizer properly orders the predicates to ease the translation
|
||||
static class BinaryComparisons extends ExppressionTranslator<BinaryComparison> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(BinaryComparison bc, boolean onAggs) {
|
||||
Assert.isTrue(bc.right() instanceof Literal, "don't know how to translate right %s in %s", bc.right().nodeString(), bc);
|
||||
|
||||
if (bc.left() instanceof NamedExpression) {
|
||||
NamedExpression ne = (NamedExpression) bc.left();
|
||||
|
||||
Query query = null;
|
||||
AggFilter aggFilter = null;
|
||||
|
||||
Attribute at = ne.toAttribute();
|
||||
|
||||
// scalar function can appear in both WHERE and HAVING so handle it first
|
||||
// in both cases the function script is used - script-query/query for the former, bucket-selector/aggFilter for the latter
|
||||
|
||||
if (at instanceof ScalarFunctionAttribute) {
|
||||
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at;
|
||||
ScriptTemplate scriptTemplate = sfa.script();
|
||||
|
||||
String template = formatTemplate("%s %s {}", scriptTemplate.template(), bc.symbol());
|
||||
// no need to bind the wrapped/target agg - it is already available through the nested script (needed to create the script itself)
|
||||
Params params = paramsBuilder().script(scriptTemplate.params()).variable(valueOf(bc.right())).build();
|
||||
ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);
|
||||
if (onAggs) {
|
||||
aggFilter = new AggFilter(at.id().toString(), script);
|
||||
}
|
||||
else {
|
||||
query = new ScriptQuery(at.location(), script);
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Agg context means HAVING -> PipelineAggs
|
||||
//
|
||||
else if (onAggs) {
|
||||
String template = null;
|
||||
Params params = null;
|
||||
|
||||
// agg function
|
||||
if (at instanceof AggregateFunctionAttribute) {
|
||||
AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at;
|
||||
|
||||
// TODO: handle case where both sides of the comparison are functions
|
||||
template = formatTemplate("{} %s {}", bc.symbol());
|
||||
|
||||
// bind the agg and the variable to the script
|
||||
params = paramsBuilder().agg(fa.functionId(), fa.propertyPath()).variable(valueOf(bc.right())).build();
|
||||
}
|
||||
|
||||
aggFilter = new AggFilter(at.id().toString(), new ScriptTemplate(template, params, DataTypes.BOOLEAN));
|
||||
}
|
||||
|
||||
//
|
||||
// No Agg context means WHERE clause
|
||||
//
|
||||
else {
|
||||
if (at instanceof FieldAttribute) {
|
||||
query = wrapIfNested(translateQuery(bc), ne);
|
||||
}
|
||||
}
|
||||
|
||||
return new QueryTranslation(query, aggFilter);
|
||||
}
|
||||
|
||||
else {
|
||||
throw new UnsupportedOperationException("No idea how to translate " + bc.left());
|
||||
}
|
||||
}
|
||||
|
||||
private static Query translateQuery(BinaryComparison bc) {
|
||||
Location loc = bc.location();
|
||||
String name = nameOf(bc.left());
|
||||
Object value = valueOf(bc.right());
|
||||
String format = dateFormat(bc.left());
|
||||
|
||||
if (bc instanceof GreaterThan) {
|
||||
return new RangeQuery(loc, name, value, false, null, false, format);
|
||||
}
|
||||
if (bc instanceof GreaterThanOrEqual) {
|
||||
return new RangeQuery(loc, name, value, true, null, false, format);
|
||||
}
|
||||
if (bc instanceof LessThan) {
|
||||
return new RangeQuery(loc, name, null, false, value, false, format);
|
||||
}
|
||||
if (bc instanceof LessThanOrEqual) {
|
||||
return new RangeQuery(loc, name, null, false, value, true, format);
|
||||
}
|
||||
if (bc instanceof Equals) {
|
||||
if (bc.left() instanceof FieldAttribute) {
|
||||
FieldAttribute fa = (FieldAttribute) bc.left();
|
||||
if (fa.isAnalyzed()) {
|
||||
return new MatchQuery(loc, name, value);
|
||||
}
|
||||
}
|
||||
return new TermQuery(loc, name, value);
|
||||
}
|
||||
|
||||
Assert.isTrue(false, "don't know how to translate binary comparison %s in %s", bc.right().nodeString(), bc);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
static class Ranges extends ExppressionTranslator<Range> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(Range r, boolean onAggs) {
|
||||
Object lower = valueOf(r.lower());
|
||||
Object upper = valueOf(r.upper());
|
||||
|
||||
Expression e = r.value();
|
||||
|
||||
|
||||
if (e instanceof NamedExpression) {
|
||||
NamedExpression ne = (NamedExpression) e;
|
||||
|
||||
Query query = null;
|
||||
AggFilter aggFilter = null;
|
||||
|
||||
Attribute at = ne.toAttribute();
|
||||
|
||||
// scalar function can appear in both WHERE and HAVING so handle it first
|
||||
// in both cases the function script is used - script-query/query for the former, bucket-selector/aggFilter for the latter
|
||||
|
||||
if (at instanceof ScalarFunctionAttribute) {
|
||||
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at;
|
||||
ScriptTemplate scriptTemplate = sfa.script();
|
||||
|
||||
String template = formatTemplate("({} %s %s) && (%s %s {})",
|
||||
r.includeLower() ? "<=" : "<",
|
||||
scriptTemplate.template(),
|
||||
scriptTemplate.template(),
|
||||
r.includeUpper() ? "<=" : "<");
|
||||
|
||||
// no need to bind the wrapped/target - it is already available through the nested script (needed to create the script itself)
|
||||
Params params = paramsBuilder().variable(lower)
|
||||
.script(scriptTemplate.params())
|
||||
.script(scriptTemplate.params())
|
||||
.variable(upper)
|
||||
.build();
|
||||
|
||||
ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);
|
||||
|
||||
if (onAggs) {
|
||||
aggFilter = new AggFilter(at.id().toString(), script);
|
||||
}
|
||||
else {
|
||||
query = new ScriptQuery(at.location(), script);
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// HAVING
|
||||
//
|
||||
else if (onAggs) {
|
||||
String template = null;
|
||||
Params params = null;
|
||||
|
||||
// agg function
|
||||
if (at instanceof AggregateFunctionAttribute) {
|
||||
AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at;
|
||||
|
||||
template = formatTemplate("{} %s {} && {} %s {}",
|
||||
r.includeLower() ? "<=" : "<",
|
||||
r.includeUpper() ? "<=" : "<");
|
||||
|
||||
params = paramsBuilder().variable(lower)
|
||||
.agg(fa.functionId(), fa.propertyPath())
|
||||
.agg(fa.functionId(), fa.propertyPath())
|
||||
.variable(upper)
|
||||
.build();
|
||||
|
||||
}
|
||||
aggFilter = new AggFilter(((NamedExpression) r.value()).id().toString(), new ScriptTemplate(template, params, DataTypes.BOOLEAN));
|
||||
}
|
||||
//
|
||||
// WHERE
|
||||
//
|
||||
else {
|
||||
// typical range
|
||||
if (at instanceof FieldAttribute) {
|
||||
RangeQuery rangeQuery = new RangeQuery(r.location(), nameOf(r.value()),
|
||||
valueOf(r.lower()), r.includeLower(), valueOf(r.upper()), r.includeUpper(), dateFormat(r.value()));
|
||||
query = wrapIfNested(rangeQuery, r.value());
|
||||
}
|
||||
}
|
||||
|
||||
return new QueryTranslation(query, aggFilter);
|
||||
}
|
||||
else {
|
||||
throw new UnsupportedOperationException("No idea how to translate " + e);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//
|
||||
// Agg translators
|
||||
//
|
||||
|
||||
static class DistinctCounts extends SingleValueAggTranslator<Count> {
|
||||
|
||||
@Override
|
||||
protected LeafAgg toAgg(String id, String path, Count c) {
|
||||
if (!c.distinct()) {
|
||||
return null;
|
||||
}
|
||||
return new CardinalityAgg(id, path, field(c));
|
||||
}
|
||||
}
|
||||
|
||||
static class Sums extends SingleValueAggTranslator<Sum> {
|
||||
|
||||
@Override
|
||||
protected LeafAgg toAgg(String id, String path, Sum s) {
|
||||
return new SumAgg(id, path, field(s));
|
||||
}
|
||||
}
|
||||
|
||||
static class Avgs extends SingleValueAggTranslator<Avg> {
|
||||
|
||||
@Override
|
||||
protected LeafAgg toAgg(String id, String path, Avg a) {
|
||||
return new AvgAgg(id, path, field(a));
|
||||
}
|
||||
}
|
||||
|
||||
static class Maxes extends SingleValueAggTranslator<Max> {
|
||||
|
||||
@Override
|
||||
protected LeafAgg toAgg(String id, String path, Max m) {
|
||||
return new MaxAgg(id, path, field(m));
|
||||
}
|
||||
}
|
||||
|
||||
static class Mins extends SingleValueAggTranslator<Min> {
|
||||
|
||||
@Override
|
||||
protected LeafAgg toAgg(String id, String path, Min m) {
|
||||
return new MinAgg(id, path, field(m));
|
||||
}
|
||||
}
|
||||
|
||||
static class DateTimes extends SingleValueAggTranslator<Min> {
|
||||
|
||||
@Override
|
||||
protected LeafAgg toAgg(String id, String path, Min m) {
|
||||
return new MinAgg(id, path, field(m));
|
||||
}
|
||||
}
|
||||
|
||||
abstract static class SingleValueAggTranslator<F extends Function> extends AggTranslator<F> {
|
||||
|
||||
@Override
|
||||
protected final LeafAgg asAgg(String id, String parent, F f) {
|
||||
String path = parent == null ? id : parent + Agg.PATH_DELIMITER + id;
|
||||
return toAgg(id, path + Agg.PATH_VALUE, f);
|
||||
}
|
||||
|
||||
protected abstract LeafAgg toAgg(String id, String path, F f);
|
||||
}
|
||||
|
||||
abstract static class AggTranslator<F extends Function> {
|
||||
|
||||
private final Class<F> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public final LeafAgg apply(String id, String parent, Function f) {
|
||||
return (typeToken.isInstance(f) ? asAgg(id, parent, (F) f) : null);
|
||||
}
|
||||
|
||||
protected abstract LeafAgg asAgg(String id, String parent, F f);
|
||||
}
|
||||
|
||||
abstract static class ExppressionTranslator<E extends Expression> {
|
||||
|
||||
private final Class<E> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public QueryTranslation translate(Expression exp, boolean onAggs) {
|
||||
return (typeToken.isInstance(exp) ? asQuery((E) exp, onAggs) : null);
|
||||
}
|
||||
|
||||
protected abstract QueryTranslation asQuery(E e, boolean onAggs);
|
||||
|
||||
protected static Query wrapIfNested(Query query, Expression exp) {
|
||||
if (exp instanceof NestedFieldAttribute) {
|
||||
NestedFieldAttribute nfa = (NestedFieldAttribute) exp;
|
||||
return new NestedQuery(nfa.location(), nfa.parentPath(), query);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// TODO: need to optimize on ngram
|
||||
// TODO: see whether escaping is needed
|
||||
class Likes extends ExppressionTranslator<BinaryExpression> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
|
||||
Query q = null;
|
||||
boolean analyzed = true;
|
||||
String target = null;
|
||||
|
||||
if (e.left() instanceof FieldAttribute) {
|
||||
FieldAttribute fa = (FieldAttribute) e.left();
|
||||
analyzed = fa.isAnalyzed();
|
||||
target = nameOf(analyzed ? fa : fa.notAnalyzedAttribute());
|
||||
}
|
||||
|
||||
String pattern = sqlToEsPatternMatching(valueOf(e.right()));
|
||||
if (e instanceof Like) {
|
||||
if (analyzed) {
|
||||
q = new QueryStringQuery(e.location(), pattern, target);
|
||||
}
|
||||
else {
|
||||
q = new WildcardQuery(e.location(), nameOf(e.left()), pattern);
|
||||
}
|
||||
}
|
||||
|
||||
if (e instanceof RLike) {
|
||||
if (analyzed) {
|
||||
q = new QueryStringQuery(e.location(), "/" + pattern + "/", target);
|
||||
}
|
||||
else {
|
||||
q = new RegexQuery(e.location(), nameOf(e.left()), sqlToEsPatternMatching(valueOf(e.right())));
|
||||
}
|
||||
}
|
||||
|
||||
return q != null ? new QueryTranslation(wrapIfNested(q, e.left())) : null;
|
||||
}
|
||||
|
||||
private static String sqlToEsPatternMatching(String pattern) {
|
||||
return pattern.replace("%", "*").replace("_", "?");
|
||||
}
|
||||
|
||||
private static boolean hasMatchingPattern(String pattern) {
|
||||
return pattern.contains("*") || pattern.contains("?");
|
||||
}
|
||||
}
|
||||
|
||||
class StringQueries extends ExppressionTranslator<StringQueryPredicate> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(StringQueryPredicate q, boolean onAggs) {
|
||||
return new QueryTranslation(new QueryStringQuery(q.location(), q.query(), q.fields(), q));
|
||||
}
|
||||
}
|
||||
|
||||
class Matches extends ExppressionTranslator<MatchQueryPredicate> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(MatchQueryPredicate q, boolean onAggs) {
|
||||
return new QueryTranslation(wrapIfNested(new MatchQuery(q.location(), nameOf(q.field()), q.query(), q), q.field()));
|
||||
}
|
||||
}
|
||||
|
||||
class MultiMatches extends ExppressionTranslator<MultiMatchQueryPredicate> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(MultiMatchQueryPredicate q, boolean onAggs) {
|
||||
return new QueryTranslation(new MultiMatchQuery(q.location(), q.query(), q.fields(), q));
|
||||
}
|
||||
}
|
||||
|
||||
class BinaryLogic extends ExppressionTranslator<BinaryExpression> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
|
||||
if (e instanceof And) {
|
||||
return and(e.location(), toQuery(e.left(), onAggs), toQuery(e.right(), onAggs));
|
||||
}
|
||||
if (e instanceof Or) {
|
||||
return or(e.location(), toQuery(e.left(), onAggs), toQuery(e.right(), onAggs));
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
class Nots extends ExppressionTranslator<Not> {
|
||||
|
||||
@Override
|
||||
protected QueryTranslation asQuery(Not not, boolean onAggs) {
|
||||
QueryTranslation translation = toQuery(not.child(), onAggs);
|
||||
return new QueryTranslation(not(translation.query), translation.aggFilter);
|
||||
}
|
||||
}
|
||||
|
||||
// assume the Optimizer properly orders the predicates to ease the translation
class BinaryComparisons extends ExppressionTranslator<BinaryComparison> {

    @Override
    protected QueryTranslation asQuery(BinaryComparison bc, boolean onAggs) {
        Assert.isTrue(bc.right() instanceof Literal, "don't know how to translate right %s in %s", bc.right().nodeString(), bc);

        if (bc.left() instanceof NamedExpression) {
            NamedExpression ne = (NamedExpression) bc.left();

            Query query = null;
            AggFilter aggFilter = null;

            Attribute at = ne.toAttribute();

            // scalar function can appear in both WHERE and HAVING so handle it first
            // in both cases the function script is used - script-query/query for the former, bucket-selector/aggFilter for the latter

            if (at instanceof ScalarFunctionAttribute) {
                ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at;
                ScriptTemplate scriptTemplate = sfa.script();

                String template = formatTemplate("%s %s {}", scriptTemplate.template(), bc.symbol());
                // no need to bind the wrapped/target agg - it is already available through the nested script (needed to create the script itself)
                Params params = paramsBuilder().script(scriptTemplate.params()).variable(valueOf(bc.right())).build();
                ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);
                if (onAggs) {
                    aggFilter = new AggFilter(at.id().toString(), script);
                }
                else {
                    query = new ScriptQuery(at.location(), script);
                }
            }

            //
            // Agg context means HAVING -> PipelineAggs
            //
            else if (onAggs) {
                String template = null;
                Params params = null;

                // agg function
                if (at instanceof AggregateFunctionAttribute) {
                    AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at;

                    // TODO: handle case where both sides of the comparison are functions
                    template = formatTemplate("{} %s {}", bc.symbol());

                    // bind the agg and the variable to the script
                    params = paramsBuilder().agg(fa.functionId(), fa.propertyPath()).variable(valueOf(bc.right())).build();
                }

                aggFilter = new AggFilter(at.id().toString(), new ScriptTemplate(template, params, DataTypes.BOOLEAN));
            }

            //
            // No Agg context means WHERE clause
            //
            else {
                if (at instanceof FieldAttribute) {
                    query = wrapIfNested(translateQuery(bc), ne);
                }
            }

            return new QueryTranslation(query, aggFilter);
        }

        else {
            throw new UnsupportedOperationException("No idea how to translate " + bc.left());
        }
    }

    private static Query translateQuery(BinaryComparison bc) {
        Location loc = bc.location();
        String name = nameOf(bc.left());
        Object value = valueOf(bc.right());
        String format = dateFormat(bc.left());

        if (bc instanceof GreaterThan) {
            return new RangeQuery(loc, name, value, false, null, false, format);
        }
        if (bc instanceof GreaterThanOrEqual) {
            return new RangeQuery(loc, name, value, true, null, false, format);
        }
        if (bc instanceof LessThan) {
            return new RangeQuery(loc, name, null, false, value, false, format);
        }
        if (bc instanceof LessThanOrEqual) {
            return new RangeQuery(loc, name, null, false, value, true, format);
        }
        if (bc instanceof Equals) {
            if (bc.left() instanceof FieldAttribute) {
                FieldAttribute fa = (FieldAttribute) bc.left();
                if (fa.isAnalyzed()) {
                    return new MatchQuery(loc, name, value);
                }
            }
            return new TermQuery(loc, name, value);
        }

        Assert.isTrue(false, "don't know how to translate binary comparison %s in %s", bc.right().nodeString(), bc);
        return null;
    }
}

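translateQuery above maps each comparison operator onto the plugin's own RangeQuery, TermQuery or MatchQuery wrappers, choosing a match over a term when the field is analyzed. As a rough illustration of the equivalent shapes at the search layer, here is a sketch against the standard Elasticsearch query builders; the field names and literal values are invented for the example and the plugin does not use these builders directly:

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class ComparisonTranslationSketch {
    public static void main(String[] args) {
        // WHERE page_count > 100   -> range query with an exclusive lower bound
        QueryBuilder gt = QueryBuilders.rangeQuery("page_count").gt(100);

        // WHERE page_count <= 100  -> range query with an inclusive upper bound
        QueryBuilder lte = QueryBuilders.rangeQuery("page_count").lte(100);

        // WHERE author = 'Dickens' on a not-analyzed field -> term query
        QueryBuilder term = QueryBuilders.termQuery("author", "Dickens");

        // WHERE title = 'Oliver Twist' on an analyzed field -> match query
        QueryBuilder match = QueryBuilders.matchQuery("title", "Oliver Twist");

        System.out.println(gt);
        System.out.println(lte);
        System.out.println(term);
        System.out.println(match);
    }
}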
class Ranges extends ExppressionTranslator<Range> {

    @Override
    protected QueryTranslation asQuery(Range r, boolean onAggs) {
        Object lower = valueOf(r.lower());
        Object upper = valueOf(r.upper());

        Expression e = r.value();

        if (e instanceof NamedExpression) {
            NamedExpression ne = (NamedExpression) e;

            Query query = null;
            AggFilter aggFilter = null;

            Attribute at = ne.toAttribute();

            // scalar function can appear in both WHERE and HAVING so handle it first
            // in both cases the function script is used - script-query/query for the former, bucket-selector/aggFilter for the latter

            if (at instanceof ScalarFunctionAttribute) {
                ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at;
                ScriptTemplate scriptTemplate = sfa.script();

                String template = formatTemplate("({} %s %s) && (%s %s {})",
                        r.includeLower() ? "<=" : "<",
                        scriptTemplate.template(),
                        scriptTemplate.template(),
                        r.includeUpper() ? "<=" : "<");

                // no need to bind the wrapped/target - it is already available through the nested script (needed to create the script itself)
                Params params = paramsBuilder().variable(lower)
                        .script(scriptTemplate.params())
                        .script(scriptTemplate.params())
                        .variable(upper)
                        .build();

                ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);

                if (onAggs) {
                    aggFilter = new AggFilter(at.id().toString(), script);
                }
                else {
                    query = new ScriptQuery(at.location(), script);
                }
            }

            //
            // HAVING
            //
            else if (onAggs) {
                String template = null;
                Params params = null;

                // agg function
                if (at instanceof AggregateFunctionAttribute) {
                    AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at;

                    template = formatTemplate("{} %s {} && {} %s {}",
                            r.includeLower() ? "<=" : "<",
                            r.includeUpper() ? "<=" : "<");

                    params = paramsBuilder().variable(lower)
                            .agg(fa.functionId(), fa.propertyPath())
                            .agg(fa.functionId(), fa.propertyPath())
                            .variable(upper)
                            .build();

                }
                aggFilter = new AggFilter(((NamedExpression) r.value()).id().toString(), new ScriptTemplate(template, params, DataTypes.BOOLEAN));
            }
            //
            // WHERE
            //
            else {
                // typical range
                if (at instanceof FieldAttribute) {
                    RangeQuery rangeQuery = new RangeQuery(r.location(), nameOf(r.value()),
                            valueOf(r.lower()), r.includeLower(), valueOf(r.upper()), r.includeUpper(), dateFormat(r.value()));
                    query = wrapIfNested(rangeQuery, r.value());
                }
            }

            return new QueryTranslation(query, aggFilter);
        }
        else {
            throw new UnsupportedOperationException("No idea how to translate " + e);
        }

    }
}

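In the HAVING branches above the translation becomes an AggFilter, that is a boolean script evaluated per bucket rather than a query. At the aggregation layer that corresponds to a bucket_selector pipeline aggregation; the sketch below shows that shape with the stock aggregation builders, assuming invented aggregation names, field names and threshold (the plugin builds its own AggFilter objects, not these builders):

import java.util.Collections;

import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;

class HavingAsBucketSelectorSketch {
    public static void main(String[] args) {
        // SELECT author, AVG(price) FROM library GROUP BY author HAVING AVG(price) > 10  -- roughly:
        AggregationBuilder byAuthor = AggregationBuilders.terms("by_author").field("author")
                .subAggregation(AggregationBuilders.avg("avg_price").field("price"))
                .subAggregation(PipelineAggregatorBuilders.bucketSelector("having",
                        Collections.singletonMap("a", "avg_price"),
                        new Script("params.a > 10")));

        // the builder would then be attached to the search request next to the group-by aggs
        System.out.println(byAuthor.getName());
    }
}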
//
// Agg translators
//

class DistinctCounts extends SingleValueAggTranslator<Count> {

    @Override
    protected LeafAgg toAgg(String id, String path, Count c) {
        if (!c.distinct()) {
            return null;
        }
        return new CardinalityAgg(id, path, field(c));
    }
}

class Sums extends SingleValueAggTranslator<Sum> {

    @Override
    protected LeafAgg toAgg(String id, String path, Sum s) {
        return new SumAgg(id, path, field(s));
    }
}

class Avgs extends SingleValueAggTranslator<Avg> {

    @Override
    protected LeafAgg toAgg(String id, String path, Avg a) {
        return new AvgAgg(id, path, field(a));
    }
}

class Maxes extends SingleValueAggTranslator<Max> {

    @Override
    protected LeafAgg toAgg(String id, String path, Max m) {
        return new MaxAgg(id, path, field(m));
    }
}

class Mins extends SingleValueAggTranslator<Min> {

    @Override
    protected LeafAgg toAgg(String id, String path, Min m) {
        return new MinAgg(id, path, field(m));
    }
}

class DateTimes extends SingleValueAggTranslator<Min> {

    @Override
    protected LeafAgg toAgg(String id, String path, Min m) {
        return new MinAgg(id, path, field(m));
    }
}

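Each translator above yields a single leaf metric agg, with COUNT(DISTINCT x) mapped to cardinality and the remaining functions mapped one-to-one. For reference, a sketch of the equivalent stock aggregation builders, with illustrative aggregation names and fields:

import org.elasticsearch.search.aggregations.AggregationBuilders;

class LeafAggSketch {
    public static void main(String[] args) {
        // COUNT(DISTINCT x) -> cardinality; SUM/AVG/MAX/MIN map one-to-one
        System.out.println(AggregationBuilders.cardinality("distinct_x").field("x").getName());
        System.out.println(AggregationBuilders.sum("sum_x").field("x").getName());
        System.out.println(AggregationBuilders.avg("avg_x").field("x").getName());
        System.out.println(AggregationBuilders.max("max_x").field("x").getName());
        System.out.println(AggregationBuilders.min("min_x").field("x").getName());
    }
}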
abstract class SingleValueAggTranslator<F extends Function> extends AggTranslator<F> {

    @Override
    protected final LeafAgg asAgg(String id, String parent, F f) {
        String path = parent == null ? id : parent + Agg.PATH_DELIMITER + id;
        return toAgg(id, path + Agg.PATH_VALUE, f);
    }

    protected abstract LeafAgg toAgg(String id, String path, F f);
}

abstract class AggTranslator<F extends Function> {

    private final Class<F> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());

    @SuppressWarnings("unchecked")
    public final LeafAgg apply(String id, String parent, Function f) {
        return (typeToken.isInstance(f) ? asAgg(id, parent, (F) f) : null);
    }

    protected abstract LeafAgg asAgg(String id, String parent, F f);
}

abstract class ExppressionTranslator<E extends Expression> {

    private final Class<E> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());

    @SuppressWarnings("unchecked")
    public QueryTranslation translate(Expression exp, boolean onAggs) {
        return (typeToken.isInstance(exp) ? asQuery((E) exp, onAggs) : null);
    }

    protected abstract QueryTranslation asQuery(E e, boolean onAggs);

    protected static Query wrapIfNested(Query query, Expression exp) {
        if (exp instanceof NestedFieldAttribute) {
            NestedFieldAttribute nfa = (NestedFieldAttribute) exp;
            return new NestedQuery(nfa.location(), nfa.parentPath(), query);
        }
        return query;
    }
}

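AggTranslator and ExppressionTranslator share the same dispatch trick: ReflectionUtils.detectSuperTypeForRuleLike resolves the concrete type argument of the subclass, and apply/translate only fires when the incoming node is an instance of that type, returning null so the caller can try the next translator. The self-contained sketch below reproduces that mechanism with plain Java reflection; the class names are invented and it assumes only that the type parameter is recoverable from the direct generic superclass:

import java.lang.reflect.ParameterizedType;

class TypeTokenDispatchSketch {

    abstract static class Translator<T> {
        // resolve T from the direct generic superclass of the concrete subclass
        @SuppressWarnings("unchecked")
        private final Class<T> typeToken =
                (Class<T>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0];

        @SuppressWarnings("unchecked")
        final String translate(Object node) {
            // fire only for the declared type; null lets the caller fall through to the next translator
            return typeToken.isInstance(node) ? doTranslate((T) node) : null;
        }

        abstract String doTranslate(T node);
    }

    static class IntegerTranslator extends Translator<Integer> {
        @Override
        String doTranslate(Integer node) {
            return "int:" + node;
        }
    }

    public static void main(String[] args) {
        Translator<Integer> t = new IntegerTranslator();
        System.out.println(t.translate(42));      // int:42
        System.out.println(t.translate("text"));  // null -> the next translator would be tried
    }
}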
@@ -20,7 +20,7 @@ abstract class Verifier {
        private final Node<?> source;
        private final String message;

        public Failure(Node<?> source, String message) {
        Failure(Node<?> source, String message) {
            this.source = source;
            this.message = message + " " + source.nodeString();
        }

@@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.type.Schema;

class EmptyRowSetCursor extends AbstractRowSetCursor {

    public EmptyRowSetCursor(Schema schema) {
    EmptyRowSetCursor(Schema schema) {
        super(schema, null);
    }

@@ -20,7 +20,7 @@ import static java.util.Collections.emptyList;

public class Schema implements Iterable<Entry> {

    public static interface Entry {
    public interface Entry {
        String name();
        DataType type();
    }
