From 133f1f403118b94e2be503a20bbdbe39354d9b67 Mon Sep 17 00:00:00 2001
From: Shalin Shekhar Mangar
Date: Sat, 13 Dec 2008 17:38:00 +0000
Subject: [PATCH] SOLR-812 -- Configurable JDBC settings in JdbcDataSource including optimized defaults for read only mode

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@726241 13f79535-47bb-0310-9956-ffa450edef68
---
 contrib/dataimporthandler/CHANGES.txt          |  3 +
 .../solr/handler/dataimport/DataImporter.java  | 26 ++++----
 .../handler/dataimport/JdbcDataSource.java     | 59 ++++++++++++++-----
 3 files changed, 63 insertions(+), 25 deletions(-)

diff --git a/contrib/dataimporthandler/CHANGES.txt b/contrib/dataimporthandler/CHANGES.txt
index 80f67ee0c2e..798ce5ed983 100644
--- a/contrib/dataimporthandler/CHANGES.txt
+++ b/contrib/dataimporthandler/CHANGES.txt
@@ -41,6 +41,9 @@ New Features
 7. SOLR-891: A Transformer to read strings from Clob type.
    (Noble Paul via shalin)
 
+8. SOLR-812: Configurable JDBC settings in JdbcDataSource including optimized defaults for read only mode.
+   (David Smiley, Glen Newton, shalin)
+
 Optimizations
 ----------------------
 1. SOLR-846: Reduce memory consumption during delta import by removing keys when used
diff --git a/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java b/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java
index 55631887201..74c6900da39 100644
--- a/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java
+++ b/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java
@@ -19,7 +19,6 @@ package org.apache.solr.handler.dataimport;
 
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.slf4j.Logger;
@@ -37,9 +36,7 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 
 /**
- * <p>
- * Stores all configuration information for pulling and indexing data.
- * </p>
+ * <p> Stores all configuration information for pulling and indexing data. </p>
  * <p>
  * This API is experimental and subject to change
  * </p>
@@ -129,7 +126,7 @@ public class DataImporter {
         SchemaField field = schema.getFieldOrNull(fld.getName());
         if (field == null) {
           field = config.lowerNameVsSchemaField.get(fld.getName().toLowerCase());
-          if (field == null) {
+          if (field == null) {
             errors.add("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
           }
         }
@@ -147,13 +144,14 @@ public class DataImporter {
 
   }
 
-  /**Used by tests
+  /**
+   * Used by tests
    */
-  void loadAndInit(String configStr){
+  void loadAndInit(String configStr) {
     loadDataConfig(configStr);
     Map<String, DataConfig.Field> fields = new HashMap<String, DataConfig.Field>();
     DataConfig.Entity e = getConfig().documents.get(0).entities.get(0);
-    initEntity(e, fields, false);
+    initEntity(e, fields, false);
   }
 
   void loadDataConfig(String configFile) {
@@ -191,9 +189,9 @@ public class DataImporter {
     for (DataConfig.Field f : e.fields) {
       if (schema != null) {
         SchemaField schemaField = schema.getFieldOrNull(f.getName());
-        if (schemaField == null) {
+        if (schemaField == null) {
           schemaField = config.lowerNameVsSchemaField.get(f.getName().toLowerCase());
-          if(schemaField != null) f.name = schemaField.getName();
+          if (schemaField != null) f.name = schemaField.getName();
         }
         if (schemaField != null) {
           f.multiValued = schemaField.multiValued();
@@ -282,6 +280,14 @@ public class DataImporter {
     try {
       Properties copyProps = new Properties();
       copyProps.putAll(p);
+      Map<String, Object> map = ctx.getRequestParameters();
+      if (map.containsKey("rows")) {
+        int rows = Integer.parseInt((String) map.get("rows"));
+        if (map.containsKey("start")) {
+          rows += Integer.parseInt((String) map.get("start"));
+        }
+        copyProps.setProperty("maxRows", String.valueOf(rows));
+      }
       dataSrc.init(ctx, copyProps);
     } catch (Exception e) {
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
diff --git a/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
index 3e2db7af574..b22fc413630 100644
--- a/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
+++ b/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
@@ -17,23 +17,17 @@ package org.apache.solr.handler.dataimport;
 
 import org.apache.solr.common.SolrException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.sql.*;
 import java.util.*;
 import java.util.concurrent.Callable;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
- * <p>
- * A DataSource implementation which can fetch data using JDBC.
- * </p>
- * <p>
- * </p>
- * <p>
- * Refer to http://wiki.apache.org/solr/DataImportHandler
- * for more details.
- * </p>
+ * <p> A DataSource implementation which can fetch data using JDBC. </p>
+ * <p> Refer to http://wiki.apache.org/solr/DataImportHandler for more
+ * details. </p>
  * <p>
  * This API is experimental and may change in the future.
  * </p>
@@ -56,6 +50,8 @@ public class JdbcDataSource extends
 
   private int batchSize = FETCH_SIZE;
 
+  private int maxRows = 0;
+
   public void init(Context context, Properties initProps) {
     Object o = initProps.get(CONVERT_TYPE);
     if (o != null)
@@ -70,7 +66,7 @@
       if (batchSize == -1)
         batchSize = Integer.MIN_VALUE;
     } catch (NumberFormatException e) {
-      LOG.warn( "Invalid batch size: " + bsz);
+      LOG.warn("Invalid batch size: " + bsz);
     }
   }
 
@@ -114,6 +110,11 @@
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Driver must be specified");
     }
+    String s = initProps.getProperty("maxRows");
+    if (s != null) {
+      maxRows = Integer.parseInt(s);
+    }
+
     factory = new Callable<Connection>() {
       public Connection call() throws Exception {
         LOG.info("Creating a connection for entity "
@@ -123,6 +124,34 @@
         Connection c = null;
         try {
           c = DriverManager.getConnection(url, initProps);
+          if (Boolean.parseBoolean(initProps.getProperty("readOnly"))) {
+            c.setReadOnly(true);
+            // Add other sane defaults
+            c.setAutoCommit(true);
+            c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
+            c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
+          }
+          if (!Boolean.parseBoolean(initProps.getProperty("autoCommit"))) {
+            c.setAutoCommit(false);
+          }
+          String transactionIsolation = initProps.getProperty("transactionIsolation");
+          if ("TRANSACTION_READ_UNCOMMITTED".equals(transactionIsolation)) {
+            c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
+          } else if ("TRANSACTION_READ_COMMITTED".equals(transactionIsolation)) {
+            c.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
+          } else if ("TRANSACTION_REPEATABLE_READ".equals(transactionIsolation)) {
+            c.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
+          } else if ("TRANSACTION_SERIALIZABLE".equals(transactionIsolation)) {
+            c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
+          } else if ("TRANSACTION_NONE".equals(transactionIsolation)) {
+            c.setTransactionIsolation(Connection.TRANSACTION_NONE);
+          }
+          String holdability = initProps.getProperty("holdability");
+          if ("CLOSE_CURSORS_AT_COMMIT".equals(holdability)) {
+            c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
+          } else if ("HOLD_CURSORS_OVER_COMMIT".equals(holdability)) {
+            c.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+          }
         } catch (SQLException e) {
           // DriverManager does not allow you to use a driver which is not loaded through
           // the class loader of the class which is trying to make the connection.
@@ -144,7 +173,7 @@
   }
 
   private void logError(String msg, Exception e) {
-    LOG.warn( msg, e);
+    LOG.warn(msg, e);
   }
 
   private List<String> readFieldNames(ResultSetMetaData metaData)
@@ -170,9 +199,9 @@
 
     try {
       Connection c = getConnection();
-      stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY,
-          ResultSet.CONCUR_READ_ONLY);
+      stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
       stmt.setFetchSize(batchSize);
+      stmt.setMaxRows(maxRows);
       LOG.debug("Executing SQL: " + query);
       long start = System.currentTimeMillis();
       if (stmt.execute(query)) {
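
Note for reviewers (illustrative, not part of the commit): the new knobs are plain attributes on the <dataSource> element (readOnly, autoCommit, transactionIsolation, holdability, maxRows, alongside the existing driver/url/batchSize), and DataImporter now also derives maxRows from the "rows"/"start" request parameters, as its last hunk above shows. The minimal Java sketch below condenses what the patched connection factory and getData() do with those properties. The class name, the in-memory HSQLDB URL, and the literal values are placeholder assumptions chosen only so the snippet is self-contained; the property names are the ones actually read in init() and the factory.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class ReadOnlyJdbcSketch {
  public static void main(String[] args) throws Exception {
    // Properties as they would arrive from a <dataSource> element, e.g.
    // <dataSource driver="..." url="..." readOnly="true" maxRows="500"/>
    Properties initProps = new Properties();
    initProps.setProperty("url", "jdbc:hsqldb:mem:example"); // placeholder URL
    initProps.setProperty("readOnly", "true");
    initProps.setProperty("maxRows", "500");

    Connection c = DriverManager.getConnection(initProps.getProperty("url"), initProps);
    if (Boolean.parseBoolean(initProps.getProperty("readOnly"))) {
      // The read-only defaults from the patch: an import never writes,
      // so the cheapest transactional settings are safe.
      c.setReadOnly(true);
      c.setAutoCommit(true);
      c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
      c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
    }

    // Forward-only, read-only cursor with a row cap, mirroring getData();
    // maxRows == 0 means unlimited, matching Statement.setMaxRows semantics.
    Statement stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    stmt.setFetchSize(500); // batchSize property; -1 maps to Integer.MIN_VALUE for streaming drivers
    stmt.setMaxRows(Integer.parseInt(initProps.getProperty("maxRows")));

    stmt.close();
    c.close();
  }
}

Defaulting a readOnly connection to autoCommit=true plus TRANSACTION_READ_UNCOMMITTED avoids transaction-log and locking overhead on databases where a dirty read during import is acceptable; an explicit transactionIsolation or holdability attribute still overrides those defaults, since the factory applies them after the read-only block.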