SOLR-7986: JDBC Driver for SQL Interface

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1703867 13f79535-47bb-0310-9956-ffa450edef68
Joel Bernstein 2015-09-18 14:17:39 +00:00
parent 66883c7ac5
commit 48a16205fa
10 changed files with 2887 additions and 0 deletions
solr
CHANGES.txt
solrj/src
java/org/apache/solr/client/solrj/io/sql
resources/META-INF/services
test-files/solrj/solr/collection1/conf
test/org/apache/solr/client/solrj/io/sql


@ -67,6 +67,8 @@ New Features
* SOLR-7903: Add the FacetStream to the Streaming API and wire it into the SQLHandler (Joel Bernstein)
* SOLR-7986: JDBC Driver for SQL Interface (Uwe Schindler, Joel Bernstein)
Optimizations
----------------------
* SOLR-7876: Speed up queries and operations that use many terms when timeAllowed has not been


@ -0,0 +1,333 @@
package org.apache.solr.client.solrj.io.sql;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
class ConnectionImpl implements Connection {
private SolrClientCache sqlSolrClientCache = new SolrClientCache();
private CloudSolrClient client;
private String collection;
Properties props;
private boolean closed;
ConnectionImpl(String zkHost, String collection, Properties props) {
this.client = sqlSolrClientCache.getCloudSolrClient(zkHost);
this.collection = collection;
this.props = props;
}
@Override
public Statement createStatement() throws SQLException {
return new StatementImpl(client, this.collection, props, sqlSolrClientCache);
}
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
return null;
}
@Override
public CallableStatement prepareCall(String sql) throws SQLException {
return null;
}
@Override
public String nativeSQL(String sql) throws SQLException {
return null;
}
@Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
}
@Override
public boolean getAutoCommit() throws SQLException {
return false;
}
@Override
public void commit() throws SQLException {
}
@Override
public void rollback() throws SQLException {
}
@Override
public void close() throws SQLException {
if(closed) {
return;
}
try {
this.sqlSolrClientCache.close();
this.closed = true;
} catch (Exception e) {
throw new SQLException(e);
}
}
@Override
public boolean isClosed() throws SQLException {
return closed;
}
@Override
public DatabaseMetaData getMetaData() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setReadOnly(boolean readOnly) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isReadOnly() throws SQLException {
return true;
}
@Override
public void setCatalog(String catalog) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public String getCatalog() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setTransactionIsolation(int level) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getTransactionIsolation() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public SQLWarning getWarnings() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void clearWarnings() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Map<String, Class<?>> getTypeMap() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setHoldability(int holdability) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getHoldability() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Savepoint setSavepoint() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Savepoint setSavepoint(String name) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void rollback(Savepoint savepoint) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Clob createClob() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Blob createBlob() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public NClob createNClob() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public SQLXML createSQLXML() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isValid(int timeout) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
throw new UnsupportedOperationException();
}
@Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
throw new UnsupportedOperationException();
}
@Override
public String getClientInfo(String name) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Properties getClientInfo() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setSchema(String schema) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public String getSchema() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void abort(Executor executor) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getNetworkTimeout() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException();
}
}


@ -0,0 +1,148 @@
package org.apache.solr.client.solrj.io.sql;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.URLDecoder;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import org.apache.solr.common.util.SuppressForbidden;
/**
* Get a Connection with a URL and properties.
*
* jdbc:solr://zkhost:port?collection=collection&amp;aggregationMode=map_reduce
**/
public class DriverImpl implements Driver {
static {
try {
DriverManager.registerDriver(new DriverImpl());
} catch (SQLException e) {
throw new RuntimeException("Can't register driver!", e);
}
}
public Connection connect(String url, Properties props) throws SQLException {
if(!acceptsURL(url)) {
return null;
}
StringBuilder buf = new StringBuilder(url);
boolean needsAmp = true;
if(!url.contains("?")) {
buf.append("?");
needsAmp = false;
}
for(Object key : props.keySet()) {
Object value = props.get(key);
if(needsAmp) {
buf.append("&");
}
buf.append(key.toString()).append("=").append(value);
needsAmp = true;
}
return connect(buf.toString());
}
public Connection connect(String url) throws SQLException {
if(!acceptsURL(url)) {
return null;
}
String[] parts = url.split("://", 0);
if(parts.length < 2) {
throw new SQLException("The zkHost must start with ://");
}
String zkUrl = parts[1];
String[] zkUrlParts = zkUrl.split("\\?");
if(zkUrlParts.length < 2) {
throw new SQLException("The connection url has no connection properties. At a mininum the collection must be specified.");
}
String connectionProps = zkUrlParts[1];
String zkHost = zkUrlParts[0];
Properties props = new Properties();
loadParams(connectionProps, props);
String collection = (String)props.remove("collection");
if(collection == null) {
throw new SQLException("The connection url must specify a collection parameter.");
}
if(!props.containsKey("aggregationMode")) {
props.setProperty("aggregationMode","facet");
}
return new ConnectionImpl(zkHost, collection, props);
}
public int getMajorVersion() {
return 1;
}
public int getMinorVersion() {
return 0;
}
public boolean acceptsURL(String url) {
return url.startsWith("jdbc:solr");
}
public boolean jdbcCompliant() {
return false;
}
@SuppressForbidden(reason="Required by jdbc")
public Logger getParentLogger() {
return null;
}
public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) {
return null;
}
private void loadParams(String params, Properties props) throws SQLException {
try {
String[] pairs = params.split("&");
for (String pair : pairs) {
String[] keyValue = pair.split("=");
String key = URLDecoder.decode(keyValue[0], "UTF-8");
String value = URLDecoder.decode(keyValue[1], "UTF-8");
props.put(key, value);
}
} catch(Exception e) {
throw new SQLException(e);
}
}
}
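A quick usage sketch for the driver above, assuming a running SolrCloud cluster. The zkHost ("localhost:9983") and collection name are illustrative placeholders, not values from this patch:

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;

import org.apache.solr.client.solrj.io.sql.DriverImpl;

public class DriverImplSketch {
  public static void main(String[] args) throws SQLException {
    DriverImpl driver = new DriverImpl();

    // acceptsURL() only checks for the jdbc:solr prefix.
    System.out.println(driver.acceptsURL("jdbc:solr://localhost:9983?collection=collection1")); // true
    System.out.println(driver.acceptsURL("jdbc:mysql://localhost:3306/db")); // false

    // Properties passed to connect(url, props) are appended to the URL as query
    // parameters before parsing; aggregationMode defaults to "facet" when absent.
    Properties props = new Properties();
    props.setProperty("numWorkers", "2");
    try (Connection con = driver.connect("jdbc:solr://localhost:9983?collection=collection1", props)) {
      // use the connection...
    }
  }
}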

File diff suppressed because it is too large


@ -0,0 +1,339 @@
package org.apache.solr.client.solrj.io.sql;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Properties;
import java.util.Random;
import org.apache.solr.client.solrj.io.stream.SolrStream;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;
class StatementImpl implements Statement {
private CloudSolrClient client;
private SolrClientCache sqlSolrClientCache;
private String collection;
private Properties properties;
private SolrStream solrStream;
private boolean closed;
StatementImpl(CloudSolrClient client, String collection, Properties properties, SolrClientCache sqlSolrClientCache) {
this.client = client;
this.collection = collection;
this.properties = properties;
this.sqlSolrClientCache = sqlSolrClientCache;
}
@Override
public ResultSet executeQuery(String sql) throws SQLException {
try {
closed = false; // if the Statement was closed, reopen it so it can be reused
this.solrStream = constructStream(sql);
StreamContext context = new StreamContext();
context.setSolrClientCache(sqlSolrClientCache);
this.solrStream.setStreamContext(context);
this.solrStream.open();
return new ResultSetImpl(this.solrStream);
} catch(Exception e) {
throw new SQLException(e);
}
}
protected SolrStream constructStream(String sql) throws IOException {
try {
ZkStateReader zkStateReader = client.getZkStateReader();
ClusterState clusterState = zkStateReader.getClusterState();
Collection<Slice> slices = clusterState.getActiveSlices(this.collection);
if(slices == null) {
throw new Exception("Collection not found:"+this.collection);
}
Map params = new HashMap();
List<Replica> shuffler = new ArrayList();
for(Slice slice : slices) {
Collection<Replica> replicas = slice.getReplicas();
for (Replica replica : replicas) {
shuffler.add(replica);
}
}
Collections.shuffle(shuffler, new Random());
params.put(CommonParams.QT, "/sql");
params.put("sql", sql);
params.putAll(properties);
Replica rep = shuffler.get(0);
ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
String url = zkProps.getCoreUrl();
return new SolrStream(url, params);
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public int executeUpdate(String sql) throws SQLException {
return 0;
}
@Override
public void close() throws SQLException {
if(closed) {
return;
}
try {
if(this.solrStream != null) {
this.solrStream.close();
}
this.closed = true;
} catch (Exception e) {
throw new SQLException(e);
}
}
@Override
public int getMaxFieldSize() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setMaxFieldSize(int max) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getMaxRows() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setMaxRows(int max) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getQueryTimeout() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setQueryTimeout(int seconds) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void cancel() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public SQLWarning getWarnings() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void clearWarnings() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setCursorName(String name) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean execute(String sql) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public ResultSet getResultSet() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getUpdateCount() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean getMoreResults() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setFetchDirection(int direction) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getFetchDirection() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void setFetchSize(int rows) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getFetchSize() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getResultSetConcurrency() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getResultSetType() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void addBatch(String sql) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public void clearBatch() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int[] executeBatch() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Connection getConnection() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean getMoreResults(int current) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public int getResultSetHoldability() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isClosed() throws SQLException {
return closed;
}
@Override
public void setPoolable(boolean poolable) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isPoolable() throws SQLException {
return true;
}
@Override
public void closeOnCompletion() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isCloseOnCompletion() throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException();
}
}
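For reference, a sketch of what executeQuery() does under the hood, written directly against the Streaming API classes used above. The core URL is an illustrative placeholder; the real code picks one from a shuffled list of the collection's replicas, and ResultSetImpl, returned by executeQuery(), iterates the same tuples behind next():

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.SolrStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;

public class SqlStreamSketch {
  public static void main(String[] args) throws IOException {
    // StatementImpl shuffles the collection's replicas; here we hardcode one core URL.
    String coreUrl = "http://localhost:8983/solr/collection1_shard1_replica1";

    Map params = new HashMap(); // raw Map, matching the SolrStream constructor used above
    params.put("qt", "/sql"); // CommonParams.QT: route the request to the SQLHandler
    params.put("sql", "select a_s, sum(a_f) from collection1 group by a_s");

    SolrStream stream = new SolrStream(coreUrl, params);
    StreamContext context = new StreamContext();
    context.setSolrClientCache(new SolrClientCache());
    stream.setStreamContext(context);
    try {
      stream.open();
      // Read tuples until the EOF marker arrives.
      for (Tuple tuple = stream.read(); !tuple.EOF; tuple = stream.read()) {
        System.out.println(tuple.getString("a_s") + " " + tuple.getDouble("sum(a_f)"));
      }
    } finally {
      stream.close();
    }
  }
}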


@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* JDBC Driver Package
*
* Sample usage
* <pre>
* Connection con = null;
* Statement stmt = null;
* ResultSet rs = null;
*
* try {
* con = DriverManager.getConnection("jdbc:solr://zkHost:port?collection=collection&amp;aggregationMode=map_reduce");
* stmt = con.createStatement();
* rs = stmt.executeQuery("select a, sum(b) from tablex group by a");
* while(rs.next()) {
* String a = rs.getString("a");
* double sumB = rs.getDouble("sum(b)");
* }
* } finally {
* rs.close();
* stmt.close();
* con.close();
* }
* </pre>
*
* Connection properties can also be passed in using a Properties object.
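* For example (the zkHost:port and collection values are placeholders, as in the sample above):
*
* <pre>
* Properties props = new Properties();
* props.setProperty("aggregationMode", "map_reduce");
* props.setProperty("numWorkers", "2");
* Connection con = DriverManager.getConnection("jdbc:solr://zkHost:port?collection=collection1", props);
* </pre>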
*
* The <b>collection</b> parameter is mandatory and should point to a SolrCloud collection that is configured with the /sql
* request handler.
*
* The aggregationMode parameter is optional. It can be used to switch between Map/Reduce (map_reduce) and the JSON Facet API (facet) for
* group by aggregations. The default is "facet".
**/
package org.apache.solr.client.solrj.io.sql;


@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.solr.client.solrj.io.sql.DriverImpl
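Because the driver is listed in META-INF/services/java.sql.Driver, JDBC 4 service discovery loads it automatically; no explicit Class.forName(...) call is needed on Java 6+. A minimal sketch (the zkHost and collection are placeholders):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class SpiSketch {
  public static void main(String[] args) throws SQLException {
    // DriverManager scans META-INF/services/java.sql.Driver on first use,
    // which loads DriverImpl and runs its static registration block.
    try (Connection con = DriverManager.getConnection(
        "jdbc:solr://localhost:9983?collection=collection1")) {
      System.out.println("connected: " + !con.isClosed());
    }
  }
}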


@ -0,0 +1,599 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- The Solr schema file. This file should be named "schema.xml" and
should be located where the classloader for the Solr webapp can find it.
This schema is used for testing, and as such has everything and the
kitchen sink thrown in. See example/solr/conf/schema.xml for a
more concise example.
-->
<schema name="test" version="1.5">
<types>
<!-- field type definitions... note that the "name" attribute is
just a label to be used by field definitions. The "class"
attribute and any other attributes determine the real type and
behavior of the fieldtype.
-->
<!-- numeric field types that store and index the text
value verbatim (and hence don't sort correctly or support range queries.)
These are provided more for backward compatibility, allowing one
to create a schema that matches an existing lucene index.
-->
<fieldType name="int" docValues="true" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="float" docValues="true" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
<!-- numeric field types that manipulate the value into
a string value that isn't human readable in its internal form,
but sorts correctly and supports range queries.
If sortMissingLast="true" then a sort on this field will cause documents
without the field to come after documents with the field,
regardless of the requested sort order.
If sortMissingFirst="true" then a sort on this field will cause documents
without the field to come before documents with the field,
regardless of the requested sort order.
If sortMissingLast="false" and sortMissingFirst="false" (the default),
then default lucene sorting will be used which places docs without the field
first in an ascending sort and last in a descending sort.
-->
<!-- Field type demonstrating an Analyzer failure -->
<fieldtype name="failtype1" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="0" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- Demonstrating ignoreCaseChange -->
<fieldtype name="wdf_nocase" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="0" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="0" preserveOriginal="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="wdf_preserve" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="0" preserveOriginal="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="boolean" class="solr.BoolField" sortMissingLast="true"/>
<fieldtype name="string" class="solr.StrField" sortMissingLast="true" docValues="true"/>
<!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
seconds part (.999) is optional.
-->
<fieldtype name="date" class="solr.TrieDateField" precisionStep="0"/>
<fieldtype name="tdate" class="solr.TrieDateField" precisionStep="6"/>
<!-- solr.TextField allows the specification of custom
text analyzers specified as a tokenizer and a list
of token filters.
-->
<fieldtype name="text" class="solr.TextField">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="nametext" class="solr.TextField">
<analyzer class="org.apache.lucene.analysis.core.WhitespaceAnalyzer"/>
</fieldtype>
<fieldtype name="teststop" class="solr.TextField">
<analyzer>
<tokenizer class="solr.LowerCaseTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
</analyzer>
</fieldtype>
<!-- fieldtypes in this section isolate tokenizers and tokenfilters for testing -->
<fieldtype name="lowertok" class="solr.TextField">
<analyzer><tokenizer class="solr.LowerCaseTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="keywordtok" class="solr.TextField">
<analyzer><tokenizer class="solr.MockTokenizerFactory" pattern="keyword"/></analyzer>
</fieldtype>
<fieldtype name="standardtok" class="solr.TextField">
<analyzer><tokenizer class="solr.StandardTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="lettertok" class="solr.TextField">
<analyzer><tokenizer class="solr.LetterTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="whitetok" class="solr.TextField">
<analyzer><tokenizer class="solr.MockTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="HTMLstandardtok" class="solr.TextField">
<analyzer>
<charFilter class="solr.HTMLStripCharFilterFactory"/>
<tokenizer class="solr.StandardTokenizerFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="HTMLwhitetok" class="solr.TextField">
<analyzer>
<charFilter class="solr.HTMLStripCharFilterFactory"/>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="standardtokfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="standardfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="lowerfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="lowerpunctfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="1" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="patternreplacefilt" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory" pattern="keyword"/>
<filter class="solr.PatternReplaceFilterFactory"
pattern="([^a-zA-Z])" replacement="_" replace="all"
/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory" pattern="keyword"/>
</analyzer>
</fieldtype>
<fieldtype name="patterntok" class="solr.TextField">
<analyzer>
<tokenizer class="solr.PatternTokenizerFactory" pattern=","/>
</analyzer>
</fieldtype>
<fieldtype name="porterfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<!-- fieldtype name="snowballfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.SnowballPorterFilterFactory"/>
</analyzer>
</fieldtype -->
<fieldtype name="engporterfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="custengporterfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="stopfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true"/>
</analyzer>
</fieldtype>
<fieldtype name="custstopfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="lengthfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LengthFilterFactory" min="2" max="5"/>
</analyzer>
</fieldtype>
<fieldType name="charfilthtmlmap" class="solr.TextField">
<analyzer>
<charFilter class="solr.HTMLStripCharFilterFactory"/>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
</fieldType>
<fieldtype name="subword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="numericsubword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.WordDelimiterFilterFactory" splitOnNumerics="0" splitOnCaseChange="0" generateWordParts="1" generateNumberParts="0" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.WordDelimiterFilterFactory" splitOnNumerics="0" splitOnCaseChange="0" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="protectedsubword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.WordDelimiterFilterFactory" splitOnNumerics="0" splitOnCaseChange="0" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- more flexible in matching skus, but more chance of a false match -->
<fieldtype name="skutype1" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- less flexible in matching skus, but less chance of a false match -->
<fieldtype name="skutype2" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- less flexible in matching skus, but less chance of a false match -->
<fieldtype name="syn" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="unstored" class="solr.StrField" indexed="true" stored="false"/>
<fieldtype name="textgap" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldType name="uuid" class="solr.UUIDField" />
<!-- Try out some point types -->
<fieldType name="xy" class="solr.PointType" dimension="2" subFieldType="double"/>
<fieldType name="x" class="solr.PointType" dimension="1" subFieldType="double"/>
<fieldType name="tenD" class="solr.PointType" dimension="10" subFieldType="double"/>
<!-- Use the sub field suffix -->
<fieldType name="xyd" class="solr.PointType" dimension="2" subFieldSuffix="_d1"/>
<fieldtype name="geohash" class="solr.GeoHashField"/>
<fieldType name="latLon" class="solr.LatLonType" subFieldType="double"/>
<!-- some per-field similarity examples -->
<!-- specify a Similarity classname directly -->
<!--
<fieldType name="sim1" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
<similarity class="org.apache.lucene.misc.SweetSpotSimilarity"/>
</fieldType>
-->
<!-- specify a Similarity factory -->
<!--
<fieldType name="sim2" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
<similarity class="org.apache.solr.search.similarities.CustomSimilarityFactory">
<str name="echo">is there an echo?</str>
</similarity>
</fieldType>
-->
<!-- don't specify any sim at all: get the default -->
<!--
<fieldType name="sim3" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
</fieldType>
-->
</types>
<fields>
<field name="id" type="int" indexed="true" stored="true" multiValued="false" required="false"/>
<field name="signatureField" type="string" indexed="true" stored="false"/>
<field name="s_multi" type="string" indexed="true" stored="true" docValues="true" multiValued="true"/>
<field name="i_multi" type="int" indexed="true" stored="true" docValues="true" multiValued="true"/>
<field name="f_multi" type="float" indexed="true" stored="true" docValues="true" multiValued="true"/>
<field name="l_multi" type="long" indexed="true" stored="true" docValues="true" multiValued="true"/>
<field name="d_multi" type="double" indexed="true" stored="true" docValues="true" multiValued="true"/>
<field name="uuid" type="uuid" stored="true" />
<field name="name" type="nametext" indexed="true" stored="true"/>
<field name="text" type="text" indexed="true" stored="false"/>
<field name="subject" type="text" indexed="true" stored="true"/>
<field name="title" type="nametext" indexed="true" stored="true"/>
<field name="weight" type="float" indexed="true" stored="true" multiValued="false"/>
<field name="bday" type="date" indexed="true" stored="true" multiValued="false"/>
<field name="title_stemmed" type="text" indexed="true" stored="false"/>
<field name="title_lettertok" type="lettertok" indexed="true" stored="false"/>
<field name="syn" type="syn" indexed="true" stored="true"/>
<!-- to test property inheritance and overriding -->
<field name="shouldbeunstored" type="unstored" />
<field name="shouldbestored" type="unstored" stored="true"/>
<field name="shouldbeunindexed" type="unstored" indexed="false" stored="true"/>
<!-- Test points -->
<!-- Test points -->
<field name="home" type="xy" indexed="true" stored="true" multiValued="false"/>
<field name="x" type="x" indexed="true" stored="true" multiValued="false"/>
<field name="homed" type="xyd" indexed="true" stored="true" multiValued="false"/>
<field name="home_ns" type="xy" indexed="true" stored="false" multiValued="false"/>
<field name="work" type="xy" indexed="true" stored="true" multiValued="false"/>
<field name="home_ll" type="latLon" indexed="true" stored="true" multiValued="false"/>
<field name="home_gh" type="geohash" indexed="true" stored="true" multiValued="false"/>
<field name="point10" type="tenD" indexed="true" stored="true" multiValued="false"/>
<!-- test different combinations of indexed and stored -->
<field name="bind" type="boolean" indexed="true" stored="false"/>
<field name="bsto" type="boolean" indexed="false" stored="true"/>
<field name="bindsto" type="boolean" indexed="true" stored="true"/>
<field name="isto" type="int" indexed="false" stored="true"/>
<field name="iind" type="int" indexed="true" stored="false"/>
<field name="ssto" type="string" indexed="false" stored="true"/>
<field name="sind" type="string" indexed="true" stored="false"/>
<field name="sindsto" type="string" indexed="true" stored="true"/>
<!-- test combinations of term vector settings -->
<field name="test_basictv" type="text" termVectors="true"/>
<field name="test_notv" type="text" termVectors="false"/>
<field name="test_postv" type="text" termVectors="true" termPositions="true"/>
<field name="test_offtv" type="text" termVectors="true" termOffsets="true"/>
<field name="test_posofftv" type="text" termVectors="true"
termPositions="true" termOffsets="true"/>
<!-- fields to test individual tokenizers and tokenfilters -->
<field name="teststop" type="teststop" indexed="true" stored="true"/>
<field name="lowertok" type="lowertok" indexed="true" stored="true"/>
<field name="keywordtok" type="keywordtok" indexed="true" stored="true"/>
<field name="standardtok" type="standardtok" indexed="true" stored="true"/>
<field name="HTMLstandardtok" type="HTMLstandardtok" indexed="true" stored="true"/>
<field name="lettertok" type="lettertok" indexed="true" stored="true"/>
<field name="whitetok" type="whitetok" indexed="true" stored="true"/>
<field name="HTMLwhitetok" type="HTMLwhitetok" indexed="true" stored="true"/>
<field name="standardtokfilt" type="standardtokfilt" indexed="true" stored="true"/>
<field name="standardfilt" type="standardfilt" indexed="true" stored="true"/>
<field name="lowerfilt" type="lowerfilt" indexed="true" stored="true"/>
<field name="lowerfilt1" type="lowerfilt" indexed="true" stored="true"/>
<field name="lowerfilt1and2" type="lowerfilt" indexed="true" stored="true"/>
<field name="patterntok" type="patterntok" indexed="true" stored="true"/>
<field name="patternreplacefilt" type="patternreplacefilt" indexed="true" stored="true"/>
<field name="porterfilt" type="porterfilt" indexed="true" stored="true"/>
<field name="engporterfilt" type="engporterfilt" indexed="true" stored="true"/>
<field name="custengporterfilt" type="custengporterfilt" indexed="true" stored="true"/>
<field name="stopfilt" type="stopfilt" indexed="true" stored="true"/>
<field name="custstopfilt" type="custstopfilt" indexed="true" stored="true"/>
<field name="lengthfilt" type="lengthfilt" indexed="true" stored="true"/>
<field name="wdf_nocase" type="wdf_nocase" indexed="true" stored="true"/>
<field name="wdf_preserve" type="wdf_preserve" indexed="true" stored="true"/>
<field name="numberpartfail" type="failtype1" indexed="true" stored="true"/>
<field name="nullfirst" type="string" indexed="true" stored="true" sortMissingFirst="true" multiValued="false"/>
<field name="subword" type="subword" indexed="true" stored="true"/>
<field name="subword_offsets" type="subword" indexed="true" stored="true" termOffsets="true"/>
<field name="numericsubword" type="numericsubword" indexed="true" stored="true"/>
<field name="protectedsubword" type="protectedsubword" indexed="true" stored="true"/>
<field name="sku1" type="skutype1" indexed="true" stored="true"/>
<field name="sku2" type="skutype2" indexed="true" stored="true"/>
<field name="textgap" type="textgap" indexed="true" stored="true"/>
<field name="timestamp" type="date" indexed="true" stored="true" default="NOW" multiValued="false"/>
<field name="multiDefault" type="string" indexed="true" stored="true" default="muLti-Default" multiValued="true"/>
<field name="intDefault" type="int" indexed="true" stored="true" default="42" multiValued="false"/>
<!--
<field name="sim1text" type="sim1" indexed="true" stored="true"/>
<field name="sim2text" type="sim2" indexed="true" stored="true"/>
<field name="sim3text" type="sim3" indexed="true" stored="true"/>
-->
<field name="tlong" type="tlong" indexed="true" stored="true" />
<field name="_version_" type="long" indexed="true" stored="true"/>
<!-- Dynamic field definitions. If a field name is not found, dynamicFields
will be used if the name matches any of the patterns.
RESTRICTION: the glob-like pattern in the name attribute must have
a "*" only at the start or the end.
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
Longer patterns will be matched first. If equal size patterns
both match, the first appearing in the schema will be used.
-->
<dynamicField name="*_i" type="int" indexed="true" stored="true"/>
<dynamicField name="*_i1" type="int" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_s" type="string" indexed="true" stored="true"/>
<dynamicField name="*_s1" type="string" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_l" type="long" indexed="true" stored="true"/>
<dynamicField name="*_l1" type="long" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_t" type="text" indexed="true" stored="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_f" type="float" indexed="true" stored="true"/>
<dynamicField name="*_f1" type="float" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_d" type="double" indexed="true" stored="true"/>
<dynamicField name="*_d1" type="double" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
<dynamicField name="*_dt1" type="date" indexed="true" stored="true" multiValued="false"/>
<!-- some trie-coded dynamic fields for faster range queries -->
<dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
<dynamicField name="*_ti1" type="tint" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
<dynamicField name="*_tl1" type="tlong" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
<dynamicField name="*_tf1" type="tfloat" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
<dynamicField name="*_td1" type="tdouble" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tds" type="tdouble" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
<dynamicField name="*_tdt1" type="tdate" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_sI" type="string" indexed="true" stored="false"/>
<dynamicField name="*_sS" type="string" indexed="false" stored="true"/>
<dynamicField name="t_*" type="text" indexed="true" stored="true"/>
<dynamicField name="tv_*" type="text" indexed="true" stored="true"
termVectors="true" termPositions="true" termOffsets="true"/>
<dynamicField name="tv_mv_*" type="text" indexed="true" stored="true" multiValued="true"
termVectors="true" termPositions="true" termOffsets="true"/>
<dynamicField name="*_p" type="xyd" indexed="true" stored="true" multiValued="false"/>
<!-- special fields for dynamic copyField test -->
<dynamicField name="dynamic_*" type="string" indexed="true" stored="true"/>
<dynamicField name="*_dynamic" type="string" indexed="true" stored="true"/>
<!-- for testing to ensure that longer patterns are matched first -->
<dynamicField name="*aa" type="string" indexed="true" stored="true"/>
<!-- ignored becuase not stored or indexed -->
<dynamicField name="*_ignored" type="text" indexed="false" stored="false"/>
<dynamicField name="*_mfacet" type="string" indexed="true" stored="false" multiValued="true" />
<!-- make sure custom sims work with dynamic fields -->
<!--
<dynamicField name="*_sim1" type="sim1" indexed="true" stored="true"/>
<dynamicField name="*_sim2" type="sim2" indexed="true" stored="true"/>
<dynamicField name="*_sim3" type="sim3" indexed="true" stored="true"/>
-->
</fields>
<defaultSearchField>text</defaultSearchField>
<uniqueKey>id</uniqueKey>
<!-- copyField commands copy one field to another at the time a document
is added to the index. It's used either to index the same field different
ways, or to add multiple fields to the same field for easier/faster searching.
-->
<copyField source="title" dest="title_stemmed"/>
<copyField source="title" dest="title_lettertok"/>
<copyField source="title" dest="text"/>
<copyField source="subject" dest="text"/>
<copyField source="lowerfilt1" dest="lowerfilt1and2"/>
<copyField source="lowerfilt" dest="lowerfilt1and2"/>
<copyField source="*_t" dest="text"/>
<!-- dynamic destination -->
<copyField source="*_dynamic" dest="dynamic_*"/>
</schema>


@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
This is a stripped down config file used for a simple example...
It is *not* a good example to work from.
-->
<config>
<luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
<indexConfig>
<useCompoundFile>${useCompoundFile:false}</useCompoundFile>
</indexConfig>
<dataDir>${solr.data.dir:}</dataDir>
<directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.StandardDirectoryFactory}"/>
<updateHandler class="solr.DirectUpdateHandler2">
<updateLog>
<str name="dir">${solr.data.dir:}</str>
</updateLog>
</updateHandler>
<!-- realtime get handler, guaranteed to return the latest stored fields
of any document, without the need to commit or open a new searcher. The current
implementation relies on the updateLog feature being enabled. -->
<requestHandler name="/get" class="solr.RealTimeGetHandler">
<lst name="defaults">
<str name="omitHeader">true</str>
</lst>
</requestHandler>
<requestHandler name="/export" class="solr.SearchHandler">
<lst name="invariants">
<str name="rq">{!xport}</str>
<str name="wt">xsort</str>
<str name="distrib">false</str>
</lst>
<arr name="components">
<str>query</str>
</arr>
</requestHandler>
<!--
Distributed Stream processing.
-->
<requestHandler name="/stream" class="solr.StreamHandler">
<lst name="invariants">
<str name="wt">json</str>
<str name="distrib">false</str>
</lst>
</requestHandler>
<requestHandler name="/sql" class="solr.SQLHandler">
<lst name="invariants">
<str name="wt">json</str>
<str name="distrib">false</str>
</lst>
</requestHandler>
<requestDispatcher handleSelect="true" >
<requestParsers enableRemoteStreaming="false" multipartUploadLimitInKB="2048" />
</requestDispatcher>
<requestHandler name="/replication" class="solr.ReplicationHandler" startup="lazy" />
<requestHandler name="standard" class="solr.StandardRequestHandler" default="true" />
<requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" />
<requestHandler name="/admin/ping" class="solr.PingRequestHandler">
<lst name="invariants">
<str name="q">*:*</str>
</lst>
<lst name="defaults">
<str name="echoParams">all</str>
</lst>
<str name="healthcheckFile">server-enabled.txt</str>
</requestHandler>
<!-- config for the admin interface -->
<admin>
<defaultQuery>solr</defaultQuery>
</admin>
</config>


@ -0,0 +1,270 @@
package org.apache.solr.client.solrj.io.sql;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.ResultSet;
import java.sql.DriverManager;
import java.util.Properties;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Tests the JDBC driver against a SolrCloud collection: driver registration, connection and
* statement reuse, URL parameters, and the facet and map_reduce aggregation modes.
**/
@Slow
@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
public class JdbcTest extends AbstractFullDistribZkTestBase {
private static final String SOLR_HOME = getFile("solrj" + File.separator + "solr").getAbsolutePath();
private StreamFactory streamFactory;
static {
schemaString = "schema-sql.xml";
}
@BeforeClass
public static void beforeSuperClass() {
AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME());
}
@AfterClass
public static void afterSuperClass() {
}
protected String getCloudSolrConfig() {
return "solrconfig-sql.xml";
}
@Override
public String getSolrHome() {
return SOLR_HOME;
}
public static String SOLR_HOME() {
return SOLR_HOME;
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
// we expect this type of exception as shards go up and down...
//ignoreException(".*");
//System.setProperty("export.test", "true");
System.setProperty("numShards", Integer.toString(sliceCount));
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
resetExceptionIgnores();
}
public JdbcTest() {
super();
sliceCount = 2;
}
@Test
public void doTest() throws Exception {
indexr(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1");
indexr(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2");
indexr(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3");
indexr(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4");
indexr(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5");
indexr(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6");
indexr(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7");
indexr(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8");
indexr(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9");
indexr(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10");
commit();
String zkHost = zkServer.getZkAddress();
Properties props = new Properties();
Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props);
Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery("select id, a_i, a_s, a_f from collection1 order by a_i desc limit 2");
assert(rs.next());
assert(rs.getLong("a_i") == 14);
assert(rs.getString("a_s").equals("hello0"));
assert(rs.getDouble("a_f") == 10);
assert(rs.next());
assert(rs.getLong("a_i") == 13);
assert(rs.getString("a_s").equals("hello3"));
assert(rs.getDouble("a_f") == 9);
assert(!rs.next());
stmt.close();
//Test statement reuse
rs = stmt.executeQuery("select id, a_i, a_s, a_f from collection1 order by a_i asc limit 2");
assert(rs.next());
assert(rs.getLong("a_i") == 0);
assert(rs.getString("a_s").equals("hello0"));
assert(rs.getDouble("a_f") == 1);
assert(rs.next());
assert(rs.getLong("a_i") == 1);
assert(rs.getString("a_s").equals("hello0"));
assert(rs.getDouble("a_f") == 5);
assert(!rs.next());
stmt.close();
//Test connection reuse
stmt = con.createStatement();
rs = stmt.executeQuery("select id, a_i, a_s, a_f from collection1 order by a_i desc limit 2");
assert(rs.next());
assert(rs.getLong("a_i") == 14);
assert(rs.next());
assert(rs.getLong("a_i") == 13);
stmt.close();
//Test statement reuse
rs = stmt.executeQuery("select id, a_i, a_s, a_f from collection1 order by a_i asc limit 2");
assert(rs.next());
assert(rs.getLong("a_i") == 0);
assert(rs.next());
assert(rs.getLong("a_i") == 1);
assert(!rs.next());
stmt.close();
//Test simple loop
rs = stmt.executeQuery("select id, a_i, a_s, a_f from collection1 order by a_i asc limit 100");
int count = 0;
while(rs.next()) {
++count;
}
assert(count == 10);
stmt.close();
con.close();
//Test facet aggregation
props = new Properties();
props.put("aggregationMode", "facet");
con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props);
stmt = con.createStatement();
rs = stmt.executeQuery("select a_s, sum(a_f) from collection1 group by a_s order by sum(a_f) desc");
assert(rs.next());
assert(rs.getString("a_s").equals("hello3"));
assert(rs.getDouble("sum(a_f)") == 26);
assert(rs.next());
assert(rs.getString("a_s").equals("hello0"));
assert(rs.getDouble("sum(a_f)") == 18);
assert(rs.next());
assert(rs.getString("a_s").equals("hello4"));
assert(rs.getDouble("sum(a_f)") == 11);
stmt.close();
con.close();
//Test map / reduce aggregation
props = new Properties();
props.put("aggregationMode", "map_reduce");
props.put("numWorkers", "2");
con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props);
stmt = con.createStatement();
rs = stmt.executeQuery("select a_s, sum(a_f) from collection1 group by a_s order by sum(a_f) desc");
assert(rs.next());
assert(rs.getString("a_s").equals("hello3"));
assert(rs.getDouble("sum(a_f)") == 26);
assert(rs.next());
assert(rs.getString("a_s").equals("hello0"));
assert(rs.getDouble("sum(a_f)") == 18);
assert(rs.next());
assert(rs.getString("a_s").equals("hello4"));
assert(rs.getDouble("sum(a_f)") == 11);
stmt.close();
con.close();
//Test params on the url
con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1&aggregationMode=map_reduce&numWorkers=2");
Properties p = ((ConnectionImpl)con).props;
assert(p.getProperty("aggregationMode").equals("map_reduce"));
assert(p.getProperty("numWorkers").equals("2"));
stmt = con.createStatement();
rs = stmt.executeQuery("select a_s, sum(a_f) from collection1 group by a_s order by sum(a_f) desc");
assert(rs.next());
assert(rs.getString("a_s").equals("hello3"));
assert(rs.getDouble("sum(a_f)") == 26);
assert(rs.next());
assert(rs.getString("a_s").equals("hello0"));
assert(rs.getDouble("sum(a_f)") == 18);
assert(rs.next());
assert(rs.getString("a_s").equals("hello4"));
assert(rs.getDouble("sum(a_f)") == 11);
stmt.close();
con.close();
del("*:*");
commit();
}
@Override
protected void indexr(Object... fields) throws Exception {
SolrInputDocument doc = getDoc(fields);
indexDoc(doc);
}
}