SOLR-6855: bin/solr -e dih launches, but has some path cruft issues preventing some of the imports from working

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1647825 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Timothy Potter 2014-12-24 18:32:21 +00:00
parent d0a7f224e9
commit 1a494d6384
13 changed files with 57 additions and 103 deletions

View File

@ -554,6 +554,9 @@ Other Changes
* SOLR-6885: Add core name to RecoveryThread name. (Christine Poerschke via shalin)
* SOLR-6855: bin/solr -e dih launches, but has some path cruft issues preventing some of the
imports from working (Hossman, Timothy Potter)
================== 4.10.3 ==================
Bug Fixes

View File

@ -185,6 +185,18 @@ public abstract class ManagedResource {
"Failed to load stored data for "+resourceId+" due to: "+ioExc, ioExc);
}
Object managedData = processStoredData(data);
if (managedInitArgs == null)
managedInitArgs = new NamedList<>();
onManagedDataLoadedFromStorage(managedInitArgs, managedData);
}
/**
* Processes the stored data.
*/
protected Object processStoredData(Object data) throws SolrException {
Object managedData = null;
if (data != null) {
if (!(data instanceof Map)) {
@ -221,12 +233,7 @@ public abstract class ManagedResource {
managedData = jsonObj;
}
}
if (managedInitArgs == null) {
managedInitArgs = new NamedList<>();
}
onManagedDataLoadedFromStorage(managedInitArgs, managedData);
return managedData;
}
/**

View File

@ -16,6 +16,7 @@ package org.apache.solr.rest;
* limitations under the License.
*/
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
@ -449,6 +450,31 @@ public class RestManager {
this.restManager = restManager;
}
/**
 * Overrides the parent impl to handle FileNotFoundException better:
 * a missing storage file is treated as "no managed components yet"
 * instead of an error, and the data-loaded callback is only invoked
 * when stored data was actually found.
 */
@Override
protected synchronized void reloadFromStorage() throws SolrException {
String resourceId = getResourceId();
Object data = null;
try {
data = storage.load(resourceId);
} catch (FileNotFoundException fnf) {
// this is ok - simply means there are no managed components added yet
} catch (IOException ioExc) {
// any other I/O failure is a real server-side problem; preserve the cause
throw new SolrException(ErrorCode.SERVER_ERROR,
"Failed to load stored data for "+resourceId+" due to: "+ioExc, ioExc);
}
Object managedData = processStoredData(data);
// lazily initialize init args so the callback always receives a non-null NamedList
if (managedInitArgs == null)
managedInitArgs = new NamedList<>();
// skip the callback entirely when nothing was stored (unlike the parent impl)
if (managedData != null)
onManagedDataLoadedFromStorage(managedInitArgs, managedData);
}
/**
* Loads and initializes any ManagedResources that have been created but
* are not associated with any Solr components.
@ -459,10 +485,7 @@ public class RestManager {
throws SolrException {
if (managedData == null) {
// this is OK, just means there are no stored registrations
// storing an empty list is safe and avoid future warnings about
// the data not existing
storeManagedData(new ArrayList<Map<String,String>>(0));
// this is ok - just means no managed components have been added yet
return;
}

View File

@ -16,12 +16,11 @@
Solr DataImportHandler example configuration
--------------------------------------------
Change to the parent (example) directory. Start solr by executing the following command
To run this example, use the "-e" option of the bin/solr script:
> cd ..
> java -Dsolr.solr.home="./example-DIH/solr/" -jar start.jar
> bin/solr -e dih
in this directory, and when Solr is started connect to:
When Solr is started connect to:
http://localhost:8983/solr/

View File

@ -1,16 +0,0 @@
/*C2*/SET SCHEMA PUBLIC
CONNECT USER SA
SET AUTOCOMMIT FALSE
/*C3*/SET SCHEMA PUBLIC
CONNECT USER SA
SET AUTOCOMMIT FALSE
/*C4*/SET SCHEMA PUBLIC
CONNECT USER SA
SET AUTOCOMMIT FALSE
/*C5*/SET SCHEMA PUBLIC
CONNECT USER SA
SET AUTOCOMMIT FALSE
/*C2*/DISCONNECT
/*C3*/DISCONNECT
/*C4*/DISCONNECT
/*C5*/DISCONNECT

View File

@ -1,17 +0,0 @@
#HSQL Database Engine 1.8.0.10
#Tue Aug 19 10:31:19 EDT 2014
hsqldb.script_format=0
runtime.gc_interval=0
sql.enforce_strict_size=false
hsqldb.cache_size_scale=8
readonly=false
hsqldb.nio_data_file=true
hsqldb.cache_scale=14
version=1.8.0
hsqldb.default_table_type=memory
hsqldb.cache_file_scale=1
hsqldb.log_size=200
modified=yes
hsqldb.cache_version=1.7.0
hsqldb.original_version=1.8.0
hsqldb.compatible_version=1.8.0

View File

@ -1,5 +1,5 @@
<dataConfig>
<dataSource driver="org.hsqldb.jdbcDriver" url="jdbc:hsqldb:./example-DIH/hsqldb/ex" user="sa" />
<dataSource driver="org.hsqldb.jdbcDriver" url="jdbc:hsqldb:${solr.install.dir}/example/example-DIH/hsqldb/ex" user="sa" />
<document>
<entity name="item" query="select * from item"
deltaQuery="select id from item where last_modified > '${dataimporter.last_index_time}'">

View File

@ -432,15 +432,6 @@
</analyzer>
</fieldType>
<!-- A text type for English text where stopwords and synonyms are managed using the REST API -->
<fieldType name="managed_en" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.ManagedStopFilterFactory" managed="english" />
<filter class="solr.ManagedSynonymFilterFactory" managed="english" />
</analyzer>
</fieldType>
<!-- A general text field that has reasonable, generic
cross-language defaults: it tokenizes with StandardTokenizer,
removes stop words from case-insensitive "stopwords.txt"

View File

@ -351,15 +351,6 @@
</analyzer>
</fieldType>
<!-- A text type for English text where stopwords and synonyms are managed using the REST API -->
<fieldType name="managed_en" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.ManagedStopFilterFactory" managed="english" />
<filter class="solr.ManagedSynonymFilterFactory" managed="english" />
</analyzer>
</fieldType>
<!-- A general text field that has reasonable, generic
cross-language defaults: it tokenizes with StandardTokenizer,
removes stop words from case-insensitive "stopwords.txt"

View File

@ -382,15 +382,6 @@
</analyzer>
</fieldType>
<!-- A text type for English text where stopwords and synonyms are managed using the REST API -->
<fieldType name="managed_en" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.ManagedStopFilterFactory" managed="english" />
<filter class="solr.ManagedSynonymFilterFactory" managed="english" />
</analyzer>
</fieldType>
<!-- A general text field that has reasonable, generic
cross-language defaults: it tokenizes with StandardTokenizer,
removes stop words from case-insensitive "stopwords.txt"

View File

@ -432,15 +432,6 @@
</analyzer>
</fieldType>
<!-- A text type for English text where stopwords and synonyms are managed using the REST API -->
<fieldType name="managed_en" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.ManagedStopFilterFactory" managed="english" />
<filter class="solr.ManagedSynonymFilterFactory" managed="english" />
</analyzer>
</fieldType>
<!-- A general text field that has reasonable, generic
cross-language defaults: it tokenizes with StandardTokenizer,
removes stop words from case-insensitive "stopwords.txt"

View File

@ -314,15 +314,6 @@
</analyzer>
</fieldType>
<!-- A text type for English text where stopwords and synonyms are managed using the REST API -->
<fieldType name="managed_en" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.ManagedStopFilterFactory" managed="english" />
<filter class="solr.ManagedSynonymFilterFactory" managed="english" />
</analyzer>
</fieldType>
<!-- A general text field that has reasonable, generic
cross-language defaults: it tokenizes with StandardTokenizer,
and down cases. -->

View File

@ -2,7 +2,7 @@
<dataSource type="BinFileDataSource" />
<document>
<entity name="tika-test" processor="TikaEntityProcessor"
url="exampledocs/solr-word.pdf" format="text">
url="${solr.install.dir}/example/exampledocs/solr-word.pdf" format="text">
<field column="Author" name="author" meta="true"/>
<field column="title" name="title" meta="true"/>
<field column="text" name="text"/>