Remove some collection utils (elastic/x-pack-elasticsearch#2384)
Core doesn't go in for fancy collection utils in general; it just manipulates the required collections inline. In an effort to keep SQL "more like the rest of Elasticsearch", I'm starting to remove SQL's `CollectionUtils`. Original commit: elastic/x-pack-elasticsearch@878ee181cb
parent 833ff18181
commit fd13c54cdc
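The change is mechanical throughout: wherever a `CollectionUtils` helper built a map, build the map inline and, where it escapes, wrap it in an unmodifiable view. A minimal before/after sketch of the pattern (the class name here is illustrative, not from the commit):

import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.unmodifiableMap;

class InlineMapExample {
    // Before: ALIASES = CollectionUtils.of("DAY", "DAY_OF_MONTH", "DOM", "DAY_OF_MONTH");
    // After: build the map in plain sight and freeze it.
    static final Map<String, String> ALIASES;
    static {
        Map<String, String> aliases = new HashMap<>();
        aliases.put("DAY", "DAY_OF_MONTH");
        aliases.put("DOM", "DAY_OF_MONTH");
        ALIASES = unmodifiableMap(aliases);
    }
}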
DefaultFunctionRegistry.java

@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.xpack.sql.expression.function;
 
-import org.elasticsearch.xpack.sql.SqlException;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.Avg;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.Count;
@@ -53,23 +52,31 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sinh;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sqrt;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.Tan;
 
-import java.io.IOException;
-import java.lang.reflect.Modifier;
-import java.net.URL;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Enumeration;
 import java.util.Map;
+import java.util.TreeMap;
 
+import static java.util.Collections.unmodifiableMap;
 import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine;
-import static org.elasticsearch.xpack.sql.util.CollectionUtils.of;
 
 public class DefaultFunctionRegistry extends AbstractFunctionRegistry {
 
     private static final Collection<Class<? extends Function>> FUNCTIONS = combine(agg(), scalar());
 
-    private static final Map<String, String> ALIASES = combine(dateTimeAliases());
+    private static final Map<String, String> ALIASES;
+    static {
+        Map<String, String> aliases = new TreeMap<>();
+        aliases.put("DAY", "DAY_OF_MONTH");
+        aliases.put("DOM", "DAY_OF_MONTH");
+        aliases.put("DOW", "DAY_OF_WEEK");
+        aliases.put("DOY", "DAY_OF_YEAR");
+        aliases.put("HOUR", "HOUR_OF_DAY");
+        aliases.put("MINUTE", "MINUTE_OF_HOUR");
+        aliases.put("MONTH", "MONTH_OF_YEAR");
+        aliases.put("SECOND", "SECOND_OF_MINUTE");
+        ALIASES = unmodifiableMap(aliases);
+    }
 
     @Override
     protected Collection<Class<? extends Function>> functions() {
@@ -147,50 +154,4 @@ public class DefaultFunctionRegistry extends AbstractFunctionRegistry {
             Tan.class
         );
     }
-
-    @SuppressWarnings("unchecked")
-    private static Collection<Class<? extends ScalarFunction>> functions(Class<? extends ScalarFunction> type) {
-        String path = type.getPackage().getName().replace('.', '/');
-        ClassLoader cl = type.getClassLoader();
-        Enumeration<URL> classes;
-        try {
-            classes = cl.getResources(path);
-        } catch (IOException e1) {
-            throw new SqlException("Cannot determine functions in package %s", path);
-        }
-
-        Collection<Class<? extends ScalarFunction>> collection = new ArrayList<>();
-
-        while(classes.hasMoreElements()) {
-            String url = classes.nextElement().toString();
-            if (url.endsWith(".class")) {
-                Class<?> c;
-                try {
-                    c = Class.forName(url, false, cl);
-                } catch (ClassNotFoundException cnfe) {
-                    throw new SqlException(cnfe, "Cannot load class %s", url);
-                }
-                if (type.isAssignableFrom(c)) {
-                    int mod = c.getModifiers();
-                    if (Modifier.isPublic(mod) && !Modifier.isAbstract(mod)) {
-                        collection.add((Class<? extends ScalarFunction>) c);
-                    }
-                }
-            }
-        }
-
-        return collection;
-    }
-
-    private static Map<String, String> dateTimeAliases() {
-        return of("DAY", "DAY_OF_MONTH",
-                "DOM", "DAY_OF_MONTH",
-                "DOW", "DAY_OF_WEEK",
-                "DOY", "DAY_OF_YEAR",
-                "HOUR", "HOUR_OF_DAY",
-                "MINUTE", "MINUTE_OF_HOUR",
-                "MONTH", "MONTH_OF_YEAR",
-                "SECOND", "SECOND_OF_MINUTE");
-    }
 }
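Note that the new ALIASES block uses a TreeMap rather than a HashMap, presumably (my inference; the commit does not say) so that iteration over the aliases is sorted and deterministic. A standalone demo of the difference:

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class TreeMapOrderDemo {
    public static void main(String[] args) {
        Map<String, String> sorted = new TreeMap<>();
        Map<String, String> unsorted = new HashMap<>();
        for (String key : new String[] { "MONTH", "DAY", "HOUR" }) {
            sorted.put(key, key);
            unsorted.put(key, key);
        }
        System.out.println(sorted.keySet());   // always [DAY, HOUR, MONTH]
        System.out.println(unsorted.keySet()); // unspecified order, may vary
    }
}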
Explain.java

@@ -5,10 +5,6 @@
  */
 package org.elasticsearch.xpack.sql.plan.logical.command;
 
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
 import org.elasticsearch.xpack.sql.plan.QueryPlan;
@@ -20,10 +16,15 @@ import org.elasticsearch.xpack.sql.session.Rows;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataTypes;
-import org.elasticsearch.xpack.sql.util.CollectionUtils;
 import org.elasticsearch.xpack.sql.util.Graphviz;
 
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
 import static java.util.Collections.singletonList;
+import static java.util.Collections.unmodifiableMap;
 
 public class Explain extends Command {
 
@@ -120,10 +121,14 @@
             sb.append(executionPlan.toString());
 
             planString = sb.toString();
-        }
-        else {
-            Map<String, QueryPlan<?>> plans = CollectionUtils.of("Parsed", plan, "Analyzed", analyzedPlan, "Optimized", optimizedPlan, "Mapped", mappedPlan, "Execution", executionPlan);
-            planString = Graphviz.dot(plans, false);
+        } else {
+            Map<String, QueryPlan<?>> plans = new HashMap<>();
+            plans.put("Parsed", plan);
+            plans.put("Analyzed", analyzedPlan);
+            plans.put("Optimized", optimizedPlan);
+            plans.put("Mapped", mappedPlan);
+            plans.put("Execution", executionPlan);
+            planString = Graphviz.dot(unmodifiableMap(plans), false);
         }
     }
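Explain now hands Graphviz.dot an unmodifiableMap view instead of the raw HashMap. A short standalone demo of what the wrapper does (and does not) guarantee:

import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.unmodifiableMap;

public class UnmodifiableViewDemo {
    public static void main(String[] args) {
        Map<String, String> plans = new HashMap<>();
        plans.put("Parsed", "plan-1");
        Map<String, String> view = unmodifiableMap(plans);
        try {
            view.put("Analyzed", "plan-2");
        } catch (UnsupportedOperationException e) {
            System.out.println("the view rejects writes");
        }
        // It is a view, not a copy: later writes to the backing map show through.
        plans.put("Analyzed", "plan-2");
        System.out.println(view.size()); // 2
    }
}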
QueryFolder.java

@@ -324,7 +324,9 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
         }
 
         if (!aliases.isEmpty()) {
-            queryC = queryC.withAliases(combine(queryC.aliases(), aliases));
+            Map<Attribute, Attribute> newAliases = new LinkedHashMap<>(queryC.aliases());
+            newAliases.putAll(aliases);
+            queryC = queryC.withAliases(newAliases);
         }
         return new EsQueryExec(exec.location(), exec.index(), a.output(), queryC);
     }
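The inlined copy-then-putAll keeps the semantics of the removed two-argument combine: a fresh insertion-ordered map in which entries from the second argument override the first, with both inputs left untouched. A standalone check of that equivalence:

import java.util.LinkedHashMap;
import java.util.Map;

import static java.util.Collections.singletonMap;

public class CombineInlineDemo {
    public static void main(String[] args) {
        Map<String, Integer> existing = new LinkedHashMap<>();
        existing.put("a", 1);
        existing.put("b", 2);

        // Inline equivalent of CollectionUtils.combine(existing, singletonMap("b", 20)):
        Map<String, Integer> merged = new LinkedHashMap<>(existing);
        merged.putAll(singletonMap("b", 20));

        System.out.println(merged);   // {a=1, b=20}
        System.out.println(existing); // {a=1, b=2}, unchanged
    }
}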
GroupingAgg.java

@@ -5,18 +5,16 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.agg;
 
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
 
-import static java.util.Collections.emptyList;
-import static java.util.Collections.singletonMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
 import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine;
+import static java.util.Collections.emptyList;
 import static org.elasticsearch.xpack.sql.util.StringUtils.EMPTY;
 
 public abstract class GroupingAgg extends Agg {
@@ -92,7 +90,12 @@ public abstract class GroupingAgg extends Agg {
     }
 
     public GroupingAgg with(String leafAggId, Direction order) {
-        return Objects.equals(this.order.get(leafAggId), order) ? this : clone(id(), propertyPath(), fieldName(), subAggs, subPipelines, combine(this.order, singletonMap(leafAggId, order)));
+        if (Objects.equals(this.order.get(leafAggId), order)) {
+            return this;
+        }
+        Map<String, Direction> newOrder = new LinkedHashMap<>(this.order);
+        newOrder.put(leafAggId, order);
+        return clone(id(), propertyPath(), fieldName(), subAggs, subPipelines, newOrder);
    }
 
     // NOCOMMIT clone is a scary name.
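GroupingAgg.with follows the usual immutable "wither" shape: return this when nothing would change, otherwise return a modified copy. A stripped-down sketch of that shape (a hypothetical Ordering class, not the real Agg hierarchy):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.unmodifiableMap;

final class Ordering {
    private final Map<String, String> order;

    Ordering(Map<String, String> order) {
        this.order = unmodifiableMap(new LinkedHashMap<>(order));
    }

    // Return this when the entry is already present with the same value,
    // otherwise return a fresh copy with the entry applied.
    Ordering with(String aggId, String direction) {
        if (Objects.equals(order.get(aggId), direction)) {
            return this;
        }
        Map<String, String> newOrder = new LinkedHashMap<>(order);
        newOrder.put(aggId, direction);
        return new Ordering(newOrder);
    }
}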
QueryContainer.java

@@ -22,11 +22,11 @@ import org.elasticsearch.xpack.sql.querydsl.query.AndQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.MatchAll;
 import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.Query;
-import org.elasticsearch.xpack.sql.util.CollectionUtils;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
@@ -206,7 +206,7 @@ public class QueryContainer {
         String name = aliasName(attr);
 
         Query q = query;
-        Map<String, Boolean> field = singletonMap(name, Boolean.valueOf(shouldUseDocValue(attr)));
+        Map<String, Boolean> field = singletonMap(name, shouldUseDocValue(attr));
         if (q == null) {
             q = new NestedQuery(attr.location(), parent, field, new MatchAll(attr.location()));
         }
@@ -216,7 +216,9 @@ public class QueryContainer {
             if (parent.equals(n.path())) {
                 if (!n.fields().keySet().contains(name)) {
                     foundMatch.set(true);
-                    return new NestedQuery(n.location(), n.path(), combine(n.fields(), field), n.child());
+                    Map<String, Boolean> fields = new LinkedHashMap<>(n.fields());
+                    fields.putAll(field);
+                    return new NestedQuery(n.location(), n.path(), fields, n.child());
                 }
             }
             return n;
@@ -259,7 +261,9 @@ public class QueryContainer {
     public QueryContainer addAggCount(GroupingAgg parentGroup, String functionId, ColumnProcessor processor) {
         Reference ref = parentGroup == null ? TotalCountRef.INSTANCE : new AggRef(AggPath.bucketCount(parentGroup.asParentPath()));
         ref = processor != null ? new ProcessingRef(processor, ref) : ref;
-        return new QueryContainer(query, aggs, combine(refs, ref), aliases, processors, combine(pseudoFunctions, CollectionUtils.of(functionId, parentGroup)), sort, limit);
+        Map<String, GroupingAgg> pseudoFunctions = new LinkedHashMap<>(this.pseudoFunctions);
+        pseudoFunctions.put(functionId, parentGroup);
+        return new QueryContainer(query, aggs, combine(refs, ref), aliases, processors, pseudoFunctions, sort, limit);
     }
 
     public QueryContainer addAgg(String groupId, LeafAgg agg, ColumnProcessor processor) {
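One of the QueryContainer tweaks above simply drops an explicit Boolean.valueOf: autoboxing a boolean compiles to the very same Boolean.valueOf call, so both forms yield the cached Boolean.TRUE/FALSE instances and the shorter form is behavior-identical. A tiny standalone check:

public class AutoboxDemo {
    public static void main(String[] args) {
        boolean b = true;
        Boolean explicit = Boolean.valueOf(b);
        Boolean boxed = b; // javac emits Boolean.valueOf(b) here as well
        System.out.println(explicit == boxed); // true: same cached instance
    }
}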
CollectionUtils.java

@@ -8,72 +8,13 @@ package org.elasticsearch.xpack.sql.util;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
 import static java.util.Collections.emptyList;
-import static java.util.Collections.emptyMap;
 
 public abstract class CollectionUtils {
 
-    private static class Entry<K, V> {
-        private final K k;
-        private final V v;
-
-        private Entry(K k, V v) {
-            this.k = k;
-            this.v = v;
-        }
-    }
-
-    private static class ArrayIterator<T> implements Iterator<T> {
-
-        private final T[] array;
-        private int index = 0;
-
-        private ArrayIterator(T[] array) {
-            this.array = array;
-        }
-
-        @Override
-        public boolean hasNext() {
-            return index < array.length;
-        }
-
-        @Override
-        public T next() {
-            return array[index++];
-        }
-    }
-
-    @SafeVarargs
-    @SuppressWarnings("varargs")
-    public static <K, V> Map<K, V> combine(Map<? extends K, ? extends V>... maps) {
-        if (ObjectUtils.isEmpty(maps)) {
-            return emptyMap();
-        }
-
-        Map<K, V> map = new LinkedHashMap<>();
-
-        for (Map<? extends K, ? extends V> m : maps) {
-            map.putAll(m);
-        }
-        return map;
-    }
-
-    public static <K, V> Map<K, V> combine(Map<? extends K, ? extends V> left, Map<? extends K, ? extends V> right) {
-        Map<K, V> map = new LinkedHashMap<>(left.size() + right.size());
-        if (!left.isEmpty()) {
-            map.putAll(left);
-        }
-        if (!right.isEmpty()) {
-            map.putAll(right);
-        }
-        return map;
-    }
-
     @SuppressWarnings("unchecked")
     public static <T> List<T> combine(List<? extends T> left, List<? extends T> right) {
         if (right.isEmpty()) {
@@ -119,55 +60,4 @@
         }
         return list;
     }
-
-    public static <K, V> Map<K, V> of(K k1, V v1) {
-        return fromEntries(entryOf(k1, v1));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2), entryOf(k3, v3));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2), entryOf(k3, v3), entryOf(k4, v4));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2), entryOf(k3, v3), entryOf(k4, v4), entryOf(k5, v5));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2), entryOf(k3, v3), entryOf(k4, v4), entryOf(k5, v5), entryOf(k6, v6));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2), entryOf(k3, v3), entryOf(k4, v4), entryOf(k5, v5), entryOf(k6, v6), entryOf(k7, v7));
-    }
-
-    public static <K, V> Map<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8) {
-        return fromEntries(entryOf(k1, v1), entryOf(k2, v2), entryOf(k3, v3), entryOf(k4, v4), entryOf(k5, v5), entryOf(k6, v6), entryOf(k7, v7), entryOf(k8, v8));
-    }
-
-    @SafeVarargs
-    private static <K, V> Map<K, V> fromEntries(Entry<K, V>... entries) {
-        Map<K, V> map = new LinkedHashMap<K, V>();
-        for (Entry<K, V> entry : entries) {
-            if (entry.k != null) {
-                map.put(entry.k, entry.v);
-            }
-        }
-        return map;
-    }
-
-    private static <K, V> Entry<K, V> entryOf(K k, V v) {
-        return new Entry<>(k, v);
-    }
-
-    public static <T> Iterator<T> iterator(final T[] array) {
-        return new ArrayIterator<>(array);
-    }
 }
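After this commit, CollectionUtils is left with essentially the List/Collection combine helpers. The removed of(k, v, ...) ladder predates Java 9; on a modern JDK the same call sites could use the standard Map.of directly (shown purely as a comparison, since this codebase targeted Java 8 at the time):

import java.util.Map;

public class MapOfComparison {
    public static void main(String[] args) {
        // Java 9+ standard replacement for the removed CollectionUtils.of(...):
        Map<String, String> aliases = Map.of(
                "DAY", "DAY_OF_MONTH",
                "DOM", "DAY_OF_MONTH");
        System.out.println(aliases.get("DAY"));
        // Caveat: unlike the removed helper, which silently skipped null keys,
        // Map.of throws NullPointerException on null keys or values, and the
        // returned map is already immutable.
    }
}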
Graphviz.java

@@ -67,7 +67,9 @@ public abstract class Graphviz {
         sb.append(quoteGraphviz(entry.getKey()));
         sb.append(";\n\n");
 
-        // to help align the clusters, add an invisible node (that could otherwise be used for labeling but it consumes too much space) used for alignment
+        /* to help align the clusters, add an invisible node (that could
+         * otherwise be used for labeling but it consumes too much space)
+         * used for alignment */
         indent(sb, CLUSTER_INDENT);
         sb.append("c" + clusterId);
         sb.append("[style=invis]\n");