First commit for Calcite SQLHandler integration

Kevin Risden 2016-04-28 10:34:13 -05:00
parent 3e6de6059f
commit b08a463639
16 changed files with 1382 additions and 1797 deletions

View File

@ -131,10 +131,13 @@
<!-- StatsComponents percentiles Dependencies-->
<dependency org="com.tdunning" name="t-digest" rev="${/com.tdunning/t-digest}" conf="compile->*"/>
<!-- SQL Parser -->
<dependency org="com.facebook.presto" name="presto-parser" rev="${/com.facebook.presto/presto-parser}"/>
<dependency org="io.airlift" name="slice" rev="${/io.airlift/slice}"/>
<!-- SQL Parser -->
<dependency org="org.apache.calcite" name="calcite-core" rev="1.6.0"/>
<dependency org="org.apache.calcite" name="calcite-linq4j" rev="1.6.0"/>
<dependency org="net.hydromatic" name="eigenbase-properties" rev="1.1.5"/>
<dependency org="org.codehaus.janino" name="janino" rev="2.7.6"/>
<dependency org="org.codehaus.janino" name="commons-compiler" rev="2.7.6"/>
<exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
</dependencies>

File diff suppressed because it is too large.

View File

@ -0,0 +1,90 @@
package org.apache.solr.handler.sql;
import java.io.IOException;
import java.util.List;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.TupleStream;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Enumerator that reads from a Solr collection. */
class SolrEnumerator implements Enumerator<Object> {
private final TupleStream tupleStream;
private final List<String> fields;
private Tuple current;
/** Creates a SolrEnumerator.
*
* @param tupleStream Solr TupleStream
* @param fields Fields to get from each Tuple
*/
SolrEnumerator(TupleStream tupleStream, List<String> fields) {
this.tupleStream = tupleStream;
this.fields = fields;
this.current = null;
}
/** Returns the current row of the results.
*
* @return The current row, either a single value or an Object array of the projected fields
*/
public Object current() {
if (fields.size() == 1) {
return current.get(fields.get(0));
} else {
// Build an array with all fields in this row
Object[] row = new Object[fields.size()];
for (int i = 0; i < fields.size(); i++) {
row[i] = current.get(fields.get(i));
}
return row;
}
}
public boolean moveNext() {
try {
Tuple tuple = this.tupleStream.read();
if (tuple.EOF) {
return false;
} else {
current = tuple;
return true;
}
} catch (IOException e) {
e.printStackTrace();
return false;
}
}
public void reset() {
throw new UnsupportedOperationException();
}
public void close() {
if(this.tupleStream != null) {
try {
this.tupleStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
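For orientation, a linq4j Enumerator such as this one is consumed with a moveNext/current loop. The sketch below is illustrative only; tupleStream is a hypothetical, already-constructed TupleStream.
// Illustrative consumption of the enumerator (hypothetical stream and fields):
List<String> fields = Arrays.asList("id", "field_i");
Enumerator<Object> rows = new SolrEnumerator(tupleStream, fields);
while (rows.moveNext()) {
  Object[] row = (Object[]) rows.current(); // one element per projected field when more than one is requested
  // process row[0] (id), row[1] (field_i)
}
rows.close();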

View File

@ -0,0 +1,170 @@
package org.apache.solr.handler.sql;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Implementation of a {@link org.apache.calcite.rel.core.Filter} relational expression in Solr.
*/
public class SolrFilter extends Filter implements SolrRel {
public SolrFilter(
RelOptCluster cluster,
RelTraitSet traitSet,
RelNode child,
RexNode condition) {
super(cluster, traitSet, child, condition);
assert getConvention() == SolrRel.CONVENTION;
assert getConvention() == child.getConvention();
}
@Override public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
return super.computeSelfCost(planner, mq).multiplyBy(0.1);
}
public SolrFilter copy(RelTraitSet traitSet, RelNode input, RexNode condition) {
return new SolrFilter(getCluster(), traitSet, input, condition);
}
public void implement(Implementor implementor) {
implementor.visitChild(0, getInput());
Translator translator = new Translator(SolrRules.solrFieldNames(getRowType()));
List<String> fqs = translator.translateMatch(condition);
implementor.add(null, fqs);
}
/** Translates {@link RexNode} expressions into Solr fq strings. */
private static class Translator {
private final List<String> fieldNames;
Translator(List<String> fieldNames) {
this.fieldNames = fieldNames;
}
private List<String> translateMatch(RexNode condition) {
return translateOr(condition);
}
private List<String> translateOr(RexNode condition) {
List<String> list = new ArrayList<>();
for (RexNode node : RelOptUtil.disjunctions(condition)) {
list.add(translateAnd(node));
}
return list;
}
/** Translates a condition that may be an AND of other conditions. Gathers
* together conditions that apply to the same field. */
private String translateAnd(RexNode node0) {
List<String> ands = new ArrayList<>();
for (RexNode node : RelOptUtil.conjunctions(node0)) {
ands.add(translateMatch2(node));
}
return String.join(" AND ", ands);
}
private String translateMatch2(RexNode node) {
switch (node.getKind()) {
case EQUALS:
return translateBinary(null, null, (RexCall) node);
// case LESS_THAN:
// return translateBinary("$lt", "$gt", (RexCall) node);
// case LESS_THAN_OR_EQUAL:
// return translateBinary("$lte", "$gte", (RexCall) node);
// case NOT_EQUALS:
// return translateBinary("$ne", "$ne", (RexCall) node);
// case GREATER_THAN:
// return translateBinary("$gt", "$lt", (RexCall) node);
// case GREATER_THAN_OR_EQUAL:
// return translateBinary("$gte", "$lte", (RexCall) node);
default:
throw new AssertionError("cannot translate " + node);
}
}
/** Translates a call to a binary operator, reversing arguments if necessary. */
private String translateBinary(String op, String rop, RexCall call) {
final RexNode left = call.operands.get(0);
final RexNode right = call.operands.get(1);
String b = translateBinary2(op, left, right);
if (b != null) {
return b;
}
b = translateBinary2(rop, right, left);
if (b != null) {
return b;
}
throw new AssertionError("cannot translate op " + op + " call " + call);
}
/** Translates a call to a binary operator. Returns whether successful. */
private String translateBinary2(String op, RexNode left, RexNode right) {
switch (right.getKind()) {
case LITERAL:
break;
default:
return null;
}
final RexLiteral rightLiteral = (RexLiteral) right;
switch (left.getKind()) {
case INPUT_REF:
final RexInputRef left1 = (RexInputRef) left;
String name = fieldNames.get(left1.getIndex());
return translateOp2(op, name, rightLiteral);
case CAST:
return translateBinary2(op, ((RexCall) left).operands.get(0), right);
case OTHER_FUNCTION:
// String itemName = SolrRules.isItem((RexCall) left);
// if (itemName != null) {
// return translateOp2(op, itemName, rightLiteral);
// }
// fall through
default:
return null;
}
}
private String translateOp2(String op, String name, RexLiteral right) {
if (op == null) {
// E.g.: deptno:100
return name + ":" + right.getValue2();
} else {
// // E.g. {deptno: {$lt: 100}}
// // which may later be combined with other conditions:
// // E.g. {deptno: [$lt: 100, $gt: 50]}
// multimap.put(name, Pair.of(op, right));
return null;
}
}
}
}
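As a rough illustration of the translation above (not part of the commit): EQUALS predicates become field:value pairs, conjunctions are joined with " AND " into a single fq string, and each disjunct becomes its own entry in the returned list.
// Expected shape of translateMatch output (illustrative):
//   WHERE fielda = 'x' AND fieldb = 'y'   ->  ["fielda:x AND fieldb:y"]
//   WHERE fielda = 'x' OR  fieldb = 'y'   ->  ["fielda:x", "fieldb:y"]
// Comparison operators other than EQUALS are not translated yet and hit the AssertionError above.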

View File

@ -0,0 +1,47 @@
package org.apache.solr.handler.sql;
import java.lang.reflect.Method;
import java.util.List;
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.linq4j.tree.Types;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Builtin methods in the Solr adapter.
*/
public enum SolrMethod {
SOLR_QUERYABLE_QUERY(SolrTable.SolrQueryable.class, "query", List.class, List.class, List.class, String.class);
public final Method method;
public static final ImmutableMap<Method, SolrMethod> MAP;
static {
final ImmutableMap.Builder<Method, SolrMethod> builder = ImmutableMap.builder();
for (SolrMethod value : SolrMethod.values()) {
builder.put(value.method, value);
}
MAP = builder.build();
}
SolrMethod(Class clazz, String methodName, Class... argumentTypes) {
this.method = Types.lookupMethod(clazz, methodName, argumentTypes);
}
}
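The reflected Method handle is what gets bound into generated linq4j code; a minimal sketch of that call site, mirroring SolrToEnumerableConverter later in this commit (table, fields, filterQueries, order and limit are Expression values built by the converter):
// Sketch: binding the queryable's query(...) method into generated code.
Expression call = Expressions.call(table, SolrMethod.SOLR_QUERYABLE_QUERY.method,
    fields, filterQueries, order, limit);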

View File

@ -0,0 +1,69 @@
package org.apache.solr.handler.sql;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.util.Pair;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Implementation of {@link org.apache.calcite.rel.core.Project} relational expression in Solr.
*/
public class SolrProject extends Project implements SolrRel {
public SolrProject(RelOptCluster cluster, RelTraitSet traitSet,
RelNode input, List<? extends RexNode> projects, RelDataType rowType) {
super(cluster, traitSet, input, projects, rowType);
assert getConvention() == SolrRel.CONVENTION;
assert getConvention() == input.getConvention();
}
@Override
public Project copy(RelTraitSet traitSet, RelNode input, List<RexNode> projects, RelDataType rowType) {
return new SolrProject(getCluster(), traitSet, input, projects, rowType);
}
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
return super.computeSelfCost(planner, mq).multiplyBy(0.1);
}
public void implement(Implementor implementor) {
implementor.visitChild(0, getInput());
final SolrRules.RexToSolrTranslator translator = new SolrRules.RexToSolrTranslator(
(JavaTypeFactory) getCluster().getTypeFactory(), SolrRules.solrFieldNames(getInput().getRowType()));
final Map<String, String> fieldMappings = new HashMap<>();
for (Pair<RexNode, String> pair : getNamedProjects()) {
final String name = pair.right;
final String expr = pair.left.accept(translator);
fieldMappings.put(name, expr);
}
implementor.add(fieldMappings, null);
}
}

View File

@ -0,0 +1,75 @@
package org.apache.solr.handler.sql;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Relational expression that uses Solr calling convention.
*/
public interface SolrRel extends RelNode {
void implement(Implementor implementor);
/** Calling convention for relational operations that occur in Solr. */
Convention CONVENTION = new Convention.Impl("SOLR", SolrRel.class);
/** Callback for the implementation process that converts a tree of {@link SolrRel} nodes into a Solr query. */
class Implementor {
final Map<String, String> fieldMappings = new HashMap<>();
final List<String> filterQueries = new ArrayList<>();
String limitValue = null;
final List<String> order = new ArrayList<>();
RelOptTable table;
SolrTable solrTable;
/** Adds newly projected fields and restricted filterQueries.
*
* @param fieldMappings New field mappings to be projected from the query
* @param filterQueries New filterQueries to be applied to the query
*/
public void add(Map<String, String> fieldMappings, List<String> filterQueries) {
if (fieldMappings != null) {
this.fieldMappings.putAll(fieldMappings);
}
if (filterQueries != null) {
this.filterQueries.addAll(filterQueries);
}
}
public void addOrder(List<String> newOrder) {
order.addAll(newOrder);
}
public void setLimit(String limit) {
limitValue = limit;
}
public void visitChild(int ordinal, RelNode input) {
assert ordinal == 0;
((SolrRel) input).implement(this);
}
}
}
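A minimal sketch of how the converter drives an Implementor over a SolrRel tree; solrRelRoot is a hypothetical root node, and the fields the Implementor accumulates are read back when the Solr request is built.
// Sketch: walking a SolrRel tree and collecting query state.
SolrRel.Implementor implementor = new SolrRel.Implementor();
implementor.visitChild(0, solrRelRoot); // each node calls add(...), addOrder(...) or setLimit(...)
// implementor.fieldMappings, implementor.filterQueries, implementor.order and implementor.limitValue
// now describe the projection, filters, sort and limit for the Solr request.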

View File

@ -0,0 +1,246 @@
package org.apache.solr.handler.sql;
import java.util.AbstractList;
import java.util.List;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterRule;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexVisitorImpl;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Rules and relational operators for
* {@link SolrRel#CONVENTION}
* calling convention.
*/
public class SolrRules {
private SolrRules() {}
static final RelOptRule[] RULES = {
SolrFilterRule.INSTANCE,
SolrProjectRule.INSTANCE,
// SolrSortRule.INSTANCE
};
static List<String> solrFieldNames(final RelDataType rowType) {
return SqlValidatorUtil.uniquify(
new AbstractList<String>() {
@Override
public String get(int index) {
return rowType.getFieldList().get(index).getName();
}
@Override
public int size() {
return rowType.getFieldCount();
}
});
}
/** Translator from {@link RexNode} to strings in Solr's expression language. */
static class RexToSolrTranslator extends RexVisitorImpl<String> {
private final JavaTypeFactory typeFactory;
private final List<String> inFields;
RexToSolrTranslator(JavaTypeFactory typeFactory, List<String> inFields) {
super(true);
this.typeFactory = typeFactory;
this.inFields = inFields;
}
@Override
public String visitInputRef(RexInputRef inputRef) {
return inFields.get(inputRef.getIndex());
}
}
/** Base class for planner rules that convert a relational expression to Solr calling convention. */
abstract static class SolrConverterRule extends ConverterRule {
final Convention out;
public SolrConverterRule(Class<? extends RelNode> clazz, String description) {
this(clazz, Predicates.<RelNode>alwaysTrue(), description);
}
public <R extends RelNode> SolrConverterRule(Class<R> clazz, Predicate<? super R> predicate, String description) {
super(clazz, predicate, Convention.NONE, SolrRel.CONVENTION, description);
this.out = SolrRel.CONVENTION;
}
}
/**
* Rule to convert a {@link LogicalFilter} to a {@link SolrFilter}.
*/
private static class SolrFilterRule extends SolrConverterRule {
private static final SolrFilterRule INSTANCE = new SolrFilterRule();
private SolrFilterRule() {
super(LogicalFilter.class, "SolrFilterRule");
}
public RelNode convert(RelNode rel) {
final LogicalFilter filter = (LogicalFilter) rel;
final RelTraitSet traitSet = filter.getTraitSet().replace(out);
return new SolrFilter(
rel.getCluster(),
traitSet,
convert(filter.getInput(), out),
filter.getCondition());
}
}
/**
* Rule to convert a {@link org.apache.calcite.rel.logical.LogicalProject} to a {@link SolrProject}.
*/
private static class SolrProjectRule extends SolrConverterRule {
private static final SolrProjectRule INSTANCE = new SolrProjectRule();
private SolrProjectRule() {
super(LogicalProject.class, "SolrProjectRule");
}
public RelNode convert(RelNode rel) {
final LogicalProject project = (LogicalProject) rel;
final RelTraitSet traitSet = project.getTraitSet().replace(out);
return new SolrProject(project.getCluster(), traitSet,
convert(project.getInput(), out), project.getProjects(), project.getRowType());
}
}
/**
* Rule to convert a {@link org.apache.calcite.rel.core.Sort} to a {@link SolrSort}.
*/
// private static class SolrSortRule extends RelOptRule {
// private static final com.google.common.base.Predicate<Sort> SORT_PREDICATE =
// input -> {
// // CQL has no support for offsets
// return input.offset == null;
// };
// private static final com.google.common.base.Predicate<SolrFilter> FILTER_PREDICATE =
// input -> {
// // We can only use implicit sorting within a single partition
// return input.isSinglePartition();
// };
// private static final RelOptRuleOperand SOLR_OP =
// operand(SolrToEnumerableConverter.class,
// operand(SolrFilter.class, null, FILTER_PREDICATE, any()));
//
// private static final SolrSortRule INSTANCE = new SolrSortRule();
//
// private SolrSortRule() {
// super(operand(Sort.class, null, SORT_PREDICATE, SOLR_OP), "SolrSortRule");
// }
//
// public RelNode convert(Sort sort, SolrFilter filter) {
// final RelTraitSet traitSet =
// sort.getTraitSet().replace(SolrRel.CONVENTION)
// .replace(sort.getCollation());
// return new SolrSort(sort.getCluster(), traitSet,
// convert(sort.getInput(), traitSet.replace(RelCollations.EMPTY)),
// sort.getCollation(), filter.getImplicitCollation(), sort.fetch);
// }
//
// public boolean matches(RelOptRuleCall call) {
// final Sort sort = call.rel(0);
// final SolrFilter filter = call.rel(2);
// return collationsCompatible(sort.getCollation(), filter.getImplicitCollation());
// }
//
// /** Check if it is possible to exploit native CQL sorting for a given collation.
// *
// * @return True if it is possible to achieve this sort in Solr
// */
// private boolean collationsCompatible(RelCollation sortCollation, RelCollation implicitCollation) {
// List<RelFieldCollation> sortFieldCollations = sortCollation.getFieldCollations();
// List<RelFieldCollation> implicitFieldCollations = implicitCollation.getFieldCollations();
//
// if (sortFieldCollations.size() > implicitFieldCollations.size()) {
// return false;
// }
// if (sortFieldCollations.size() == 0) {
// return true;
// }
//
// // Check if we need to reverse the order of the implicit collation
// boolean reversed = reverseDirection(sortFieldCollations.get(0).getDirection())
// == implicitFieldCollations.get(0).getDirection();
//
// for (int i = 0; i < sortFieldCollations.size(); i++) {
// RelFieldCollation sorted = sortFieldCollations.get(i);
// RelFieldCollation implied = implicitFieldCollations.get(i);
//
// // Check that the fields being sorted match
// if (sorted.getFieldIndex() != implied.getFieldIndex()) {
// return false;
// }
//
// // Either all fields must be sorted in the same direction
// // or the opposite direction based on whether we decided
// // if the sort direction should be reversed above
// RelFieldCollation.Direction sortDirection = sorted.getDirection();
// RelFieldCollation.Direction implicitDirection = implied.getDirection();
// if ((!reversed && sortDirection != implicitDirection)
// || (reversed && reverseDirection(sortDirection) != implicitDirection)) {
// return false;
// }
// }
//
// return true;
// }
//
// /** Find the reverse of a given collation direction.
// *
// * @return Reverse of the input direction
// */
// private RelFieldCollation.Direction reverseDirection(RelFieldCollation.Direction direction) {
// switch(direction) {
// case ASCENDING:
// case STRICTLY_ASCENDING:
// return RelFieldCollation.Direction.DESCENDING;
// case DESCENDING:
// case STRICTLY_DESCENDING:
// return RelFieldCollation.Direction.ASCENDING;
// default:
// return null;
// }
// }
//
// /** @see org.apache.calcite.rel.convert.ConverterRule */
// public void onMatch(RelOptRuleCall call) {
// final Sort sort = call.rel(0);
// SolrFilter filter = call.rel(2);
// final RelNode converted = convert(sort, filter);
// if (converted != null) {
// call.transformTo(converted);
// }
// }
// }
}

View File

@ -0,0 +1,105 @@
package org.apache.solr.handler.sql;
import java.io.IOException;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelProtoDataType;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.impl.AbstractSchema;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.common.luke.FieldFlag;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
class SolrSchema extends AbstractSchema {
final CloudSolrClient cloudSolrClient;
SolrSchema(String zk) {
super();
this.cloudSolrClient = new CloudSolrClient(zk);
this.cloudSolrClient.connect();
}
@Override
protected Map<String, Table> getTableMap() {
this.cloudSolrClient.connect();
Set<String> collections = this.cloudSolrClient.getZkStateReader().getClusterState().getCollections();
final ImmutableMap.Builder<String, Table> builder = ImmutableMap.builder();
for (String collection : collections) {
builder.put(collection, new SolrTable(this, collection));
}
return builder.build();
}
private Map<String, LukeResponse.FieldInfo> getFieldInfo(String collection) {
LukeRequest lukeRequest = new LukeRequest();
lukeRequest.setNumTerms(0);
LukeResponse lukeResponse;
try {
lukeResponse = lukeRequest.process(cloudSolrClient, collection);
} catch (SolrServerException | IOException e) {
throw new RuntimeException(e);
}
return lukeResponse.getFieldInfo();
}
RelProtoDataType getRelDataType(String collection) {
// Temporary type factory, just for the duration of this method. Allowable
// because we're creating a proto-type, not a type; before being used, the
// proto-type will be copied into a real type factory.
final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
final RelDataTypeFactory.FieldInfoBuilder fieldInfo = typeFactory.builder();
Map<String, LukeResponse.FieldInfo> luceneFieldInfoMap = getFieldInfo(collection);
for(Map.Entry<String, LukeResponse.FieldInfo> entry : luceneFieldInfoMap.entrySet()) {
LukeResponse.FieldInfo luceneFieldInfo = entry.getValue();
RelDataType type;
switch (luceneFieldInfo.getType()) {
case "string":
type = typeFactory.createJavaType(String.class);
break;
case "int":
case "long":
type = typeFactory.createJavaType(Long.class);
break;
default:
type = typeFactory.createJavaType(String.class);
}
EnumSet<FieldFlag> flags = luceneFieldInfo.getFlags();
if(flags != null && flags.contains(FieldFlag.MULTI_VALUED)) {
type = typeFactory.createArrayType(type, -1);
}
fieldInfo.add(entry.getKey(), type).nullable(true);
}
return RelDataTypeImpl.proto(fieldInfo.build());
}
}
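For example (illustrative, based on the switch above), a Luke response describing an id string field, an int or long field_i, and a multiValued string field (here called str_ss) maps to a nullable row type roughly as follows:
// id      (string)              -> Java String (VARCHAR), nullable
// field_i (int or long)         -> Java Long (BIGINT), nullable
// str_ss  (string, multiValued) -> ARRAY of the element type, nullable
// Any other Luke type currently falls back to String.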

View File

@ -0,0 +1,35 @@
package org.apache.solr.handler.sql;
import java.util.Map;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaFactory;
import org.apache.calcite.schema.SchemaPlus;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@SuppressWarnings("UnusedDeclaration")
public class SolrSchemaFactory implements SchemaFactory {
public SolrSchemaFactory() {
}
public Schema create(SchemaPlus parentSchema, String name, Map<String, Object> operand) {
final String zk = (String) operand.get("zk");
return new SolrSchema(zk);
}
}
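A minimal usage sketch (not part of this commit), assuming ZooKeeper is reachable at localhost:9983; the factory only needs the "zk" operand.
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.tools.Frameworks;
import org.apache.solr.handler.sql.SolrSchemaFactory;

public class SolrSchemaFactoryUsage {
  public static void main(String[] args) {
    // Hypothetical ZooKeeper address; adjust for the target SolrCloud cluster.
    SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    Schema solr = new SolrSchemaFactory().create(rootSchema, "solr",
        ImmutableMap.<String, Object>of("zk", "localhost:9983"));
    rootSchema.add("solr", solr);
    // SQL can then be planned against "solr".<collection> through a Calcite connection or planner.
  }
}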

View File

@ -0,0 +1,85 @@
package org.apache.solr.handler.sql;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Implementation of {@link org.apache.calcite.rel.core.Sort} relational expression in Solr.
*/
public class SolrSort extends Sort implements SolrRel {
private final RelCollation implicitCollation;
public SolrSort(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, RelCollation collation,
RelCollation implicitCollation, RexNode fetch) {
super(cluster, traitSet, child, collation, null, fetch);
this.implicitCollation = implicitCollation;
assert getConvention() == SolrRel.CONVENTION;
assert getConvention() == child.getConvention();
}
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
return super.computeSelfCost(planner, mq).multiplyBy(0.05);
}
@Override
public Sort copy(RelTraitSet traitSet, RelNode input, RelCollation newCollation, RexNode offset, RexNode fetch) {
return new SolrSort(getCluster(), traitSet, input, collation, implicitCollation, fetch);
}
public void implement(Implementor implementor) {
implementor.visitChild(0, getInput());
List<RelFieldCollation> sortCollations = collation.getFieldCollations();
List<String> fieldOrder = new ArrayList<>();
if (!sortCollations.isEmpty()) {
// Construct a series of order clauses from the desired collation
final List<RelDataTypeField> fields = getRowType().getFieldList();
for (RelFieldCollation fieldCollation : sortCollations) {
final String name = fields.get(fieldCollation.getFieldIndex()).getName();
String direction = "ASC";
if (fieldCollation.getDirection().equals(RelFieldCollation.Direction.DESCENDING)) {
direction = "DESC";
}
fieldOrder.add(name + " " + direction);
}
implementor.addOrder(fieldOrder);
}
if (fetch != null) {
implementor.setLimit(((RexLiteral) fetch).getValue().toString());
}
}
}

View File

@ -0,0 +1,171 @@
package org.apache.solr.handler.sql;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.calcite.adapter.java.AbstractQueryableTable;
import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.linq4j.QueryProvider;
import org.apache.calcite.linq4j.Queryable;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelProtoDataType;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.impl.AbstractTableQueryable;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.common.params.CommonParams;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Table based on a Solr collection
*/
public class SolrTable extends AbstractQueryableTable implements TranslatableTable {
private final String collection;
private final SolrSchema schema;
private RelProtoDataType protoRowType;
public SolrTable(SolrSchema schema, String collection) {
super(Object[].class);
this.schema = schema;
this.collection = collection;
}
public String toString() {
return "SolrTable {" + collection + "}";
}
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
if (protoRowType == null) {
protoRowType = schema.getRelDataType(collection);
}
return protoRowType.apply(typeFactory);
}
public Enumerable<Object> query(final CloudSolrClient cloudSolrClient) {
return query(cloudSolrClient, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), null);
}
/** Executes a Solr query on the underlying table.
*
* @param cloudSolrClient Solr CloudSolrClient
* @param fields List of fields to project
* @param filterQueries A list of filterQueries which should be used in the query
* @param order A list of "field direction" clauses to sort by
* @param limit Maximum number of rows to return, or null for no limit
* @return Enumerable of results
*/
public Enumerable<Object> query(final CloudSolrClient cloudSolrClient, List<String> fields,
List<String> filterQueries, List<String> order, String limit) {
Map<String, String> solrParams = new HashMap<>();
solrParams.put(CommonParams.Q, "*:*");
//solrParams.put(CommonParams.QT, "/export");
if (fields.isEmpty()) {
solrParams.put(CommonParams.FL, "*");
} else {
solrParams.put(CommonParams.FL, String.join(",", fields));
}
if (filterQueries.isEmpty()) {
solrParams.put(CommonParams.FQ, "*:*");
} else {
// solrParams should be a ModifiableSolrParams instead of a Map so that multiple FQ values could be added
solrParams.put(CommonParams.FQ, String.join(" OR ", filterQueries));
}
// Build and issue the query and return an Enumerator over the results
if (order.isEmpty()) {
String DEFAULT_SORT_FIELD = "_version_";
solrParams.put(CommonParams.SORT, DEFAULT_SORT_FIELD + " desc");
// Make sure the default sort field is in the field list
String fl = solrParams.get(CommonParams.FL);
if(!fl.contains(DEFAULT_SORT_FIELD)) {
solrParams.put(CommonParams.FL, String.join(",", fl, DEFAULT_SORT_FIELD));
}
} else {
solrParams.put(CommonParams.SORT, String.join(",", order));
}
// if (limit != null) {
// queryBuilder.append(" LIMIT ").append(limit);
// }
return new AbstractEnumerable<Object>() {
public Enumerator<Object> enumerator() {
TupleStream cloudSolrStream;
try {
cloudSolrStream = new CloudSolrStream(cloudSolrClient.getZkHost(), collection, solrParams);
cloudSolrStream.open();
} catch (IOException e) {
throw new RuntimeException(e);
}
return new SolrEnumerator(cloudSolrStream, fields);
}
};
}
public <T> Queryable<T> asQueryable(QueryProvider queryProvider, SchemaPlus schema, String tableName) {
return new SolrQueryable<>(queryProvider, schema, this, tableName);
}
public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable relOptTable) {
final RelOptCluster cluster = context.getCluster();
return new SolrTableScan(cluster, cluster.traitSetOf(SolrRel.CONVENTION), relOptTable, this, null);
}
public static class SolrQueryable<T> extends AbstractTableQueryable<T> {
SolrQueryable(QueryProvider queryProvider, SchemaPlus schema, SolrTable table, String tableName) {
super(queryProvider, schema, table, tableName);
}
public Enumerator<T> enumerator() {
//noinspection unchecked
final Enumerable<T> enumerable = (Enumerable<T>) getTable().query(getCloudSolrClient());
return enumerable.enumerator();
}
private SolrTable getTable() {
return (SolrTable) table;
}
private CloudSolrClient getCloudSolrClient() {
return schema.unwrap(SolrSchema.class).cloudSolrClient;
}
/** Called via code-generation.
*
* @see SolrMethod#SOLR_QUERYABLE_QUERY
*/
@SuppressWarnings("UnusedDeclaration")
public Enumerable<Object> query(List<String> fields, List<String> filterQueries, List<String> order, String limit) {
return getTable().query(getCloudSolrClient(), fields, filterQueries, order, limit);
}
}
}
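Concretely (an illustrative example, not output from the commit), a call such as query(client, [id, field_i], [field_i:[50 TO *]], [field_i desc], null) opens a CloudSolrStream with roughly these params:
// q=*:*
// fl=id,field_i
// fq=field_i:[50 TO *]
// sort=field_i desc
// With no order clauses, sort defaults to "_version_ desc" and _version_ is appended to fl;
// with no filters, fq defaults to *:*. The limit is accepted but not yet pushed down to Solr.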

View File

@ -0,0 +1,80 @@
package org.apache.solr.handler.sql;
import java.util.List;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.type.RelDataType;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Relational expression representing a scan of a Solr collection.
*/
class SolrTableScan extends TableScan implements SolrRel {
final SolrTable solrTable;
final RelDataType projectRowType;
/**
* Creates a SolrTableScan.
*
* @param cluster Cluster
* @param traitSet Traits
* @param table Table
* @param solrTable Solr table
* @param projectRowType Fields and types to project; null to project raw row
*/
SolrTableScan(RelOptCluster cluster, RelTraitSet traitSet, RelOptTable table, SolrTable solrTable,
RelDataType projectRowType) {
super(cluster, traitSet, table);
this.solrTable = solrTable;
this.projectRowType = projectRowType;
assert solrTable != null;
assert getConvention() == SolrRel.CONVENTION;
}
@Override
public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
assert inputs.isEmpty();
return this;
}
@Override
public RelDataType deriveRowType() {
return projectRowType != null ? projectRowType : super.deriveRowType();
}
@Override
public void register(RelOptPlanner planner) {
planner.addRule(SolrToEnumerableConverterRule.INSTANCE);
for (RelOptRule rule : SolrRules.RULES) {
planner.addRule(rule);
}
}
public void implement(Implementor implementor) {
implementor.solrTable = solrTable;
implementor.table = table;
}
}

View File

@ -0,0 +1,116 @@
package org.apache.solr.handler.sql;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import org.apache.calcite.adapter.enumerable.EnumerableRel;
import org.apache.calcite.adapter.enumerable.EnumerableRelImplementor;
import org.apache.calcite.adapter.enumerable.JavaRowFormat;
import org.apache.calcite.adapter.enumerable.PhysType;
import org.apache.calcite.adapter.enumerable.PhysTypeImpl;
import org.apache.calcite.linq4j.tree.BlockBuilder;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.MethodCallExpression;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.prepare.CalcitePrepareImpl;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterImpl;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.util.BuiltInMethod;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Relational expression that converts from Solr calling convention to enumerable calling convention.
*/
public class SolrToEnumerableConverter extends ConverterImpl implements EnumerableRel {
protected SolrToEnumerableConverter(RelOptCluster cluster, RelTraitSet traits, RelNode input) {
super(cluster, ConventionTraitDef.INSTANCE, traits, input);
}
@Override
public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
return new SolrToEnumerableConverter(getCluster(), traitSet, sole(inputs));
}
@Override
public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
return super.computeSelfCost(planner, mq).multiplyBy(.1);
}
public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
// Generates a call to "query" with the appropriate fields and filterQueries
final BlockBuilder list = new BlockBuilder();
final SolrRel.Implementor solrImplementor = new SolrRel.Implementor();
solrImplementor.visitChild(0, getInput());
final RelDataType rowType = getRowType();
final PhysType physType = PhysTypeImpl.of(implementor.getTypeFactory(), rowType, pref.prefer(JavaRowFormat.ARRAY));
final Expression table = list.append("table", solrImplementor.table.getExpression(SolrTable.SolrQueryable.class));
final Expression fields = list.append("fields",
constantArrayList(generateFields(SolrRules.solrFieldNames(rowType), solrImplementor.fieldMappings), String.class));
final Expression filterQueries = list.append("filterQueries", constantArrayList(solrImplementor.filterQueries, String.class));
final Expression order = list.append("order", constantArrayList(solrImplementor.order, String.class));
final Expression limit = list.append("limit", Expressions.constant(solrImplementor.limitValue));
Expression enumerable = list.append("enumerable", Expressions.call(table, SolrMethod.SOLR_QUERYABLE_QUERY.method,
fields, filterQueries, order, limit));
if (CalcitePrepareImpl.DEBUG) {
System.out.println("Solr: " + filterQueries);
}
Hook.QUERY_PLAN.run(filterQueries);
list.add(Expressions.return_(null, enumerable));
return implementor.result(physType, list.toBlock());
}
private List<String> generateFields(List<String> queryFields, Map<String, String> fieldMappings) {
if(fieldMappings.isEmpty()) {
return queryFields;
} else {
List<String> fields = new ArrayList<>();
for(String field : queryFields) {
fields.add(fieldMappings.getOrDefault(field, field));
}
return fields;
}
}
/**
* E.g. {@code constantArrayList("x", "y")} returns
* "Arrays.asList('x', 'y')".
*/
private static <T> MethodCallExpression constantArrayList(List<T> values, Class clazz) {
return Expressions.call(BuiltInMethod.ARRAYS_AS_LIST.method,
Expressions.newArrayInit(clazz, constantList(values)));
}
/**
* E.g. {@code constantList("x", "y")} returns
* {@code {ConstantExpression("x"), ConstantExpression("y")}}.
*/
private static <T> List<Expression> constantList(List<T> values) {
return Lists.transform(values, Expressions::constant);
}
}
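The generated enumerable body therefore reduces to a single call on the queryable; schematically (illustrative shape and values, not literal generated source):
// final SolrTable.SolrQueryable table = ...;           // bound from the RelOptTable expression
// final List fields = Arrays.asList("id", "field_i");  // after applying fieldMappings
// final List filterQueries = Arrays.asList("field_i:[50 TO *]");
// final List order = Arrays.asList("field_i desc");
// return table.query(fields, filterQueries, order, null);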

View File

@ -0,0 +1,40 @@
package org.apache.solr.handler.sql;
import org.apache.calcite.adapter.enumerable.EnumerableConvention;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterRule;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Rule to convert a relational expression from {@link SolrRel#CONVENTION} to {@link EnumerableConvention}.
*/
class SolrToEnumerableConverterRule extends ConverterRule {
static final ConverterRule INSTANCE = new SolrToEnumerableConverterRule();
private SolrToEnumerableConverterRule() {
super(RelNode.class, SolrRel.CONVENTION, EnumerableConvention.INSTANCE, "SolrToEnumerableConverterRule");
}
@Override
public RelNode convert(RelNode rel) {
RelTraitSet newTraitSet = rel.getTraitSet().replace(getOutConvention());
return new SolrToEnumerableConverter(rel.getCluster(), newTraitSet, rel);
}
}

View File

@ -18,23 +18,20 @@ package org.apache.solr.handler;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.Statement;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.ExceptionStream;
import org.apache.solr.client.solrj.io.stream.SolrStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.params.CommonParams;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestSQLHandler extends AbstractFullDistribZkTestBase {
@ -86,134 +83,19 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
@Test
public void doTest() throws Exception {
waitForRecoveriesToFinish(false);
testPredicate();
testBasicSelect();
testMixedCaseFields();
testBasicGrouping();
testBasicGroupingFacets();
testSelectDistinct();
testSelectDistinctFacets();
testAggregatesWithoutGrouping();
testSQLException();
testTimeSeriesGrouping();
testTimeSeriesGroupingFacet();
testParallelBasicGrouping();
testParallelSelectDistinct();
testParallelTimeSeriesGrouping();
testCatalogStream();
testSchemasStream();
testTablesStream();
}
private void testPredicate() throws Exception {
SqlParser parser = new SqlParser();
String sql = "select a from b where c = 'd'";
Statement statement = parser.createStatement(sql);
SQLHandler.SQLVisitor sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("(c:\"d\")"));
//Add parens
parser = new SqlParser();
sql = "select a from b where (c = 'd')";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("(c:\"d\")"));
//Upper case
parser = new SqlParser();
sql = "select a from b where ('CcC' = 'D')";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("(CcC:\"D\")"));
//Phrase
parser = new SqlParser();
sql = "select a from b where (c = 'd d')";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("(c:\"d d\")"));
// AND
parser = new SqlParser();
sql = "select a from b where ((c = 'd') AND (l = 'z'))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:\"d\") AND (l:\"z\"))"));
// OR
parser = new SqlParser();
sql = "select a from b where ((c = 'd') OR (l = 'z'))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:\"d\") OR (l:\"z\"))"));
// AND NOT
parser = new SqlParser();
sql = "select a from b where ((c = 'd') AND NOT (l = 'z'))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:\"d\") AND -(l:\"z\"))"));
// NESTED
parser = new SqlParser();
sql = "select a from b where ((c = 'd') OR ((l = 'z') AND (m = 'j')))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:\"d\") OR ((l:\"z\") AND (m:\"j\")))"));
// NESTED NOT
parser = new SqlParser();
sql = "select a from b where ((c = 'd') OR ((l = 'z') AND NOT (m = 'j')))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:\"d\") OR ((l:\"z\") AND -(m:\"j\")))"));
// RANGE - Will have to do until SQL BETWEEN is supported.
// NESTED
parser = new SqlParser();
sql = "select a from b where ((c = '[0 TO 100]') OR ((l = '(z)') AND (m = 'j')))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:[0 TO 100]) OR ((l:(z)) AND (m:\"j\")))"));
// Wildcard
parser = new SqlParser();
sql = "select a from b where ((c = '[0 TO 100]') OR ((l = '(z*)') AND (m = 'j')))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:[0 TO 100]) OR ((l:(z*)) AND (m:\"j\")))"));
// Complex Lucene/Solr Query
parser = new SqlParser();
sql = "select a from b where (('c' = '[0 TO 100]') OR ((l = '(z*)') AND ('M' = '(j OR (k NOT s))')))";
statement = parser.createStatement(sql);
sqlVistor = new SQLHandler.SQLVisitor(new StringBuilder());
sqlVistor.process(statement, new Integer(0));
assert(sqlVistor.query.equals("((c:[0 TO 100]) OR ((l:(z*)) AND (M:(j OR (k NOT s)))))"));
// testBasicGrouping();
// testBasicGroupingFacets();
// testSelectDistinct();
// testSelectDistinctFacets();
// testAggregatesWithoutGrouping();
// testSQLException();
// testTimeSeriesGrouping();
// testTimeSeriesGroupingFacet();
// testParallelBasicGrouping();
// testParallelSelectDistinct();
// testParallelTimeSeriesGrouping();
}
private void testBasicSelect() throws Exception {
@ -234,9 +116,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
indexDoc(sdoc("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50"));
indexDoc(sdoc("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"));
commit();
Map params = new HashMap();
params.put(CommonParams.QT, "/sql");
params.put("stmt", "select 'id', field_i, str_s from collection1 where 'text'='XXXX' order by field_i desc");
params.put("stmt", "select id, field_i, str_s from collection1 where text='XXXX' order by field_i desc");
SolrStream solrStream = new SolrStream(jetty.url, params);
List<Tuple> tuples = getTuples(solrStream);
@ -290,7 +173,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
params.put(CommonParams.QT, "/sql");
//Test unlimited unsorted result. Should sort on _version_ desc
params.put("stmt", "select 'id', field_i, str_s from collection1 where 'text'='XXXX'");
params.put("stmt", "select id, field_i, str_s from collection1 where text='XXXX'");
solrStream = new SolrStream(jetty.url, params);
tuples = getTuples(solrStream);
@ -424,14 +307,11 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
assert(tuple.getLong("myId") == 1);
assert(tuple.getLong("myInt") == 7);
assert(tuple.get("myString").equals("a"));
} finally {
delete();
}
}
private void testMixedCaseFields() throws Exception {
try {
@ -452,7 +332,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
commit();
Map params = new HashMap();
params.put(CommonParams.QT, "/sql");
params.put("stmt", "select id, Field_i, Str_s from Collection1 where Text_t='XXXX' order by Field_i desc");
params.put("stmt", "select id, Field_i, Str_s from collection1 where Text_t='XXXX' order by Field_i desc");
SolrStream solrStream = new SolrStream(jetty.url, params);
List<Tuple> tuples = getTuples(solrStream);
@ -503,7 +383,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
params = new HashMap();
params.put(CommonParams.QT, "/sql");
params.put("stmt", "select Str_s, sum(Field_i) from Collection1 where 'id'='(1 8)' group by Str_s having (sum(Field_i) = 7 OR 'sum(Field_i)' = 60) order by 'sum(Field_i)' desc");
params.put("stmt", "select Str_s, sum(Field_i) as `sum(Field_i)` from collection1 where id='(1 8)' group by Str_s having (sum(Field_i) = 7 OR sum(Field_i) = 60) order by sum(Field_i) desc");
solrStream = new SolrStream(jetty.url, params);
tuples = getTuples(solrStream);
@ -520,7 +400,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
params = new HashMap();
params.put(CommonParams.QT, "/sql");
params.put("stmt", "select Str_s, sum(Field_i) from Collection1 where 'id'='(1 8)' group by 'Str_s' having (sum(Field_i) = 7 OR 'sum(Field_i)' = 60) order by 'sum(Field_i)' desc");
params.put("stmt", "select Str_s, sum(Field_i) as `sum(Field_i)` from collection1 where id='(1 8)' group by Str_s having (sum(Field_i) = 7 OR sum(Field_i) = 60) order by sum(Field_i) desc");
solrStream = new SolrStream(jetty.url, params);
tuples = getTuples(solrStream);
@ -2422,74 +2302,6 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
}
}
private void testCatalogStream() throws Exception {
CloudJettyRunner jetty = this.cloudJettys.get(0);
Map<String, Object> params = new HashMap<>();
params.put(CommonParams.QT, "/sql");
params.put("numWorkers", 2);
params.put("stmt", "select TABLE_CAT from _CATALOGS_");
SolrStream solrStream = new SolrStream(jetty.url, params);
List<Tuple> tuples = getTuples(solrStream);
assertEquals(tuples.size(), 1);
assertEquals(tuples.get(0).getString("TABLE_CAT"), zkServer.getZkAddress());
}
private void testSchemasStream() throws Exception {
CloudJettyRunner jetty = this.cloudJettys.get(0);
Map<String, Object> params = new HashMap<>();
params.put(CommonParams.QT, "/sql");
params.put("numWorkers", 2);
params.put("stmt", "select TABLE_SCHEM, TABLE_CATALOG from _SCHEMAS_");
SolrStream solrStream = new SolrStream(jetty.url, params);
List<Tuple> tuples = getTuples(solrStream);
assertEquals(tuples.size(), 0);
}
private void testTablesStream() throws Exception {
CloudJettyRunner jetty = this.cloudJettys.get(0);
Map<String, Object> params = new HashMap<>();
params.put(CommonParams.QT, "/sql");
params.put("numWorkers", 2);
params.put("stmt", "select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS from _TABLES_");
SolrStream solrStream = new SolrStream(jetty.url, params);
List<Tuple> tuples = getTuples(solrStream);
assertEquals(2, tuples.size());
List<String> collections = new ArrayList<>();
collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollections());
Collections.sort(collections);
for (Tuple tuple : tuples) {
assertEquals(zkServer.getZkAddress(), tuple.getString("TABLE_CAT"));
assertNull(tuple.get("TABLE_SCHEM"));
assertTrue(collections.contains(tuple.getString("TABLE_NAME")));
assertEquals("TABLE", tuple.getString("TABLE_TYPE"));
assertNull(tuple.get("REMARKS"));
}
tuples = getTuples(solrStream);
assertEquals(2, tuples.size());
collections = new ArrayList<>();
collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollections());
Collections.sort(collections);
for (Tuple tuple : tuples) {
assertEquals(zkServer.getZkAddress(), tuple.getString("TABLE_CAT"));
assertNull(tuple.get("TABLE_SCHEM"));
assertTrue(collections.contains(tuple.getString("TABLE_NAME")));
assertEquals("TABLE", tuple.getString("TABLE_TYPE"));
assertNull(tuple.get("REMARKS"));
}
}
protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {
tupleStream.open();
List<Tuple> tuples = new ArrayList();