After some recent changes the ColumnReference interface is now empty, so it is removed and its remaining uses switched to FieldExtraction.

Original commit: elastic/x-pack-elasticsearch@4a44812f78
Nik Everett 2018-01-14 10:32:31 -05:00 committed by GitHub
parent 70cea58262
commit c4474f8574
9 changed files with 37 additions and 51 deletions
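
For orientation before the per-file diffs: every ColumnReference below becomes a FieldExtraction. The sketch that follows is a rough reconstruction of what FieldExtraction appears to declare, inferred only from the method references visible in this commit (supportedByAggsOnlyQuery and depth, used in QueryContainer); it is not copied from the repository and the real interface likely declares more members.

// Approximate shape only -- inferred from usages in this commit, not copied
// from the repository source file.
package org.elasticsearch.xpack.sql.execution.search;

public interface FieldExtraction {

    /** Whether this extraction can be served by an aggregations-only search
     *  (used via FieldExtraction::supportedByAggsOnlyQuery in QueryContainer). */
    boolean supportedByAggsOnlyQuery();

    /** Depth of the backing aggregation path
     *  (used via FieldExtraction::depth in QueryContainer to compute aggDepth). */
    int depth();
}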

File: Scroller.java

@@ -36,7 +36,6 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definiti
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
 import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;
 import org.elasticsearch.xpack.sql.querydsl.container.AggRef;
-import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
 import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef;
 import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
 import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
@@ -115,9 +114,9 @@ public class Scroller {
         // this method assumes the nested aggregation are all part of the same tree (the SQL group-by)
         int maxDepth = -1;
-        List<ColumnReference> cols = query.columns();
+        List<FieldExtraction> cols = query.columns();
         for (int index = 0; index < cols.size(); index++) {
-            ColumnReference col = cols.get(index);
+            FieldExtraction col = cols.get(index);
             Supplier<Object> supplier = null;
             if (col instanceof ComputedRef) {
@@ -157,7 +156,7 @@ public class Scroller {
                 listener::onFailure));
     }

-    private Object[] extractAggValue(ColumnReference col, SearchResponse response) {
+    private Object[] extractAggValue(FieldExtraction col, SearchResponse response) {
         if (col == TotalCountRef.INSTANCE) {
             return new Object[] { Long.valueOf(response.getHits().getTotalHits()) };
         }
@@ -254,17 +253,17 @@ public class Scroller {
     private List<HitExtractor> getExtractors() {
         // create response extractors for the first time
-        List<ColumnReference> refs = query.columns();
+        List<FieldExtraction> refs = query.columns();
         List<HitExtractor> exts = new ArrayList<>(refs.size());
-        for (ColumnReference ref : refs) {
+        for (FieldExtraction ref : refs) {
             exts.add(createExtractor(ref));
         }
         return exts;
     }

-    private HitExtractor createExtractor(ColumnReference ref) {
+    private HitExtractor createExtractor(FieldExtraction ref) {
         if (ref instanceof SearchHitFieldRef) {
             SearchHitFieldRef f = (SearchHitFieldRef) ref;
             return new FieldHitExtractor(f.name(), f.useDocValue(), f.hitName());

File: ProcessorDefinition.java

@@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
-import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
 import org.elasticsearch.xpack.sql.tree.Node;

 import java.util.List;
@@ -33,13 +32,13 @@ public abstract class ProcessorDefinition extends Node<ProcessorDefinition> impl
     /**
      * Resolve {@link Attribute}s which are unprocessable into
-     * {@link ColumnReference}s which are processable.
+     * {@link FieldExtraction}s which are processable.
      *
      * @return {@code this} if the resolution doesn't change the
      * definition, a new {@link ProcessorDefinition} otherwise
      */
     public abstract ProcessorDefinition resolveAttributes(AttributeResolver resolver);

     public interface AttributeResolver {
-        ColumnReference resolve(Attribute attribute);
+        FieldExtraction resolve(Attribute attribute);
     }
 }
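
A hedged usage sketch, not code from the repository: AttributeResolver has a single abstract method, so a lambda or method reference satisfies it; the QueryContainer hunk further down shows the real resolver, which turns an attribute into the container's reference for it. The class name Resolvers and the lookup map below are hypothetical.

import java.util.Map;

import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;

// Illustrative only: resolve each attribute from a pre-built map of extractions.
class Resolvers {
    static ProcessorDefinition resolveAll(ProcessorDefinition definition,
                                          Map<Attribute, FieldExtraction> lookup) {
        ProcessorDefinition.AttributeResolver resolver = lookup::get;
        return definition.resolveAttributes(resolver);
    }
}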

File: ReferenceInput.java

@@ -5,12 +5,12 @@
  */
 package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition;

+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;
 import org.elasticsearch.xpack.sql.expression.Expression;
-import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;

-public class ReferenceInput extends NonExecutableInput<ColumnReference> {
-    public ReferenceInput(Expression expression, ColumnReference context) {
+public class ReferenceInput extends NonExecutableInput<FieldExtraction> {
+    public ReferenceInput(Expression expression, FieldExtraction context) {
         super(expression, context);
     }

File: AggRef.java

@@ -5,10 +5,11 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.container;

+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;
 import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;

-public class AggRef implements ColumnReference {
+public class AggRef implements FieldExtraction {

     private final String path;
     private final int depth;

File: ColumnReference.java (deleted)

@@ -1,17 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.querydsl.container;
-
-import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
-
-/**
- * Entity representing a 'column' backed by one or multiple results from ES. A
- * column reference can also extract a field (meta or otherwise) from a result
- * set, so extends {@link FieldExtraction}.
- */
-public interface ColumnReference extends FieldExtraction {
-    // TODO remove this interface intirely in a followup
-}

File: ComputedRef.java

@@ -5,10 +5,11 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.container;

+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;

-public class ComputedRef implements ColumnReference {
+public class ComputedRef implements FieldExtraction {

     private final ProcessorDefinition processor;

File: FieldReference.java

@@ -5,7 +5,9 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.container;

-public abstract class FieldReference implements ColumnReference {
+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
+
+public abstract class FieldReference implements FieldExtraction {
     /**
      * Field name.
      *

File: QueryContainer.java

@@ -11,6 +11,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.execution.search.SourceGenerator;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
@@ -55,7 +56,7 @@ public class QueryContainer {
     // final output seen by the client (hence the list or ordering)
     // gets converted by the Scroller into Extractors for hits or actual results in case of aggregations
-    private final List<ColumnReference> columns;
+    private final List<FieldExtraction> columns;

     // aliases (maps an alias to its actual resolved attribute)
     private final Map<Attribute, Attribute> aliases;
@@ -78,7 +79,7 @@ public class QueryContainer {
         this(null, null, null, null, null, null, null, -1);
     }

-    public QueryContainer(Query query, Aggs aggs, List<ColumnReference> refs, Map<Attribute, Attribute> aliases,
+    public QueryContainer(Query query, Aggs aggs, List<FieldExtraction> refs, Map<Attribute, Attribute> aliases,
             Map<String, GroupingAgg> pseudoFunctions,
             Map<Attribute, ProcessorDefinition> scalarFunctions,
             Set<Sort> sort, int limit) {
@@ -90,8 +91,8 @@ public class QueryContainer {
         this.columns = refs == null || refs.isEmpty() ? emptyList() : refs;
         this.sort = sort == null || sort.isEmpty() ? emptySet() : sort;
         this.limit = limit;
-        aggsOnly = columns.stream().allMatch(ColumnReference::supportedByAggsOnlyQuery);
-        aggDepth = columns.stream().mapToInt(ColumnReference::depth).max().orElse(0);
+        aggsOnly = columns.stream().allMatch(FieldExtraction::supportedByAggsOnlyQuery);
+        aggDepth = columns.stream().mapToInt(FieldExtraction::depth).max().orElse(0);
     }

     public Query query() {
@@ -102,7 +103,7 @@ public class QueryContainer {
         return aggs;
     }

-    public List<ColumnReference> columns() {
+    public List<FieldExtraction> columns() {
         return columns;
     }
@@ -142,7 +143,7 @@ public class QueryContainer {
         return new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit);
     }

-    public QueryContainer with(List<ColumnReference> r) {
+    public QueryContainer with(List<FieldExtraction> r) {
         return new QueryContainer(query, aggs, r, aliases, pseudoFunctions, scalarFunctions, sort, limit);
     }
@@ -179,13 +180,13 @@ public class QueryContainer {
     //
     // reference methods
     //
-    private ColumnReference searchHitFieldRef(FieldAttribute fieldAttr) {
+    private FieldExtraction searchHitFieldRef(FieldAttribute fieldAttr) {
         return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.dataType().hasDocValues());
     }

-    private Tuple<QueryContainer, ColumnReference> nestedFieldRef(FieldAttribute attr) {
+    private Tuple<QueryContainer, FieldExtraction> nestedFieldRef(FieldAttribute attr) {
         // Find the nested query for this field. If there isn't one then create it
-        List<ColumnReference> nestedRefs = new ArrayList<>();
+        List<FieldExtraction> nestedRefs = new ArrayList<>();

         Query q = rewriteToContainNestedField(query, attr.location(),
                 attr.nestedParent().path(), aliasName(attr), attr.dataType().hasDocValues());
@@ -221,7 +222,7 @@ public class QueryContainer {
     }

     // replace function's input with references
-    private Tuple<QueryContainer, ColumnReference> computingRef(ScalarFunctionAttribute sfa) {
+    private Tuple<QueryContainer, FieldExtraction> computingRef(ScalarFunctionAttribute sfa) {
         Attribute name = aliases.getOrDefault(sfa, sfa);
         ProcessorDefinition proc = scalarFunctions.get(name);
@@ -243,9 +244,9 @@ public class QueryContainer {
         }

         @Override
-        public ColumnReference resolve(Attribute attribute) {
+        public FieldExtraction resolve(Attribute attribute) {
             Attribute attr = aliases.getOrDefault(attribute, attribute);
-            Tuple<QueryContainer, ColumnReference> ref = container.toReference(attr);
+            Tuple<QueryContainer, FieldExtraction> ref = container.toReference(attr);
             container = ref.v1();
             return ref.v2();
         }
@@ -262,11 +263,11 @@ public class QueryContainer {
     }

     public QueryContainer addColumn(Attribute attr) {
-        Tuple<QueryContainer, ColumnReference> tuple = toReference(attr);
+        Tuple<QueryContainer, FieldExtraction> tuple = toReference(attr);
         return tuple.v1().addColumn(tuple.v2());
     }

-    private Tuple<QueryContainer, ColumnReference> toReference(Attribute attr) {
+    private Tuple<QueryContainer, FieldExtraction> toReference(Attribute attr) {
         if (attr instanceof FieldAttribute) {
             FieldAttribute fa = (FieldAttribute) attr;
             if (fa.isNested()) {
@@ -288,7 +289,7 @@ public class QueryContainer {
         throw new SqlIllegalArgumentException("Unknown output attribute %s", attr);
     }

-    public QueryContainer addColumn(ColumnReference ref) {
+    public QueryContainer addColumn(FieldExtraction ref) {
         return with(combine(columns, ref));
     }
@@ -304,7 +305,7 @@ public class QueryContainer {
     }

     public QueryContainer addAggCount(GroupingAgg parentGroup, String functionId) {
-        ColumnReference ref = parentGroup == null ? TotalCountRef.INSTANCE : new AggRef(AggPath.bucketCount(parentGroup.asParentPath()));
+        FieldExtraction ref = parentGroup == null ? TotalCountRef.INSTANCE : new AggRef(AggPath.bucketCount(parentGroup.asParentPath()));
         Map<String, GroupingAgg> pseudoFunctions = new LinkedHashMap<>(this.pseudoFunctions);
         pseudoFunctions.put(functionId, parentGroup);
         return new QueryContainer(query, aggs, combine(columns, ref), aliases, pseudoFunctions, scalarFunctions, sort, limit);

File: AttributeInputTests.java

@@ -6,15 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition;

 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Expression;
-import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;

 import static org.mockito.Mockito.mock;

 public class AttributeInputTests extends ESTestCase {
     public void testResolveAttributes() {
-        ColumnReference column = mock(ColumnReference.class);
+        FieldExtraction column = mock(FieldExtraction.class);
         Expression expression = mock(Expression.class);
         Attribute attribute = mock(Attribute.class);