SOLR-414 -- Plugin initialization now supports SolrCore and ResourceLoader "Aware" plugins. Plugins that implement SolrCoreAware or ResourceLoaderAware are informed about the SolrCore/ResourceLoader.

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@597847 13f79535-47bb-0310-9956-ffa450edef68
Ryan McKinley 2007-11-24 13:51:46 +00:00
parent de35ba93f1
commit e76c594d89
46 changed files with 812 additions and 473 deletions
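
Before the per-file diffs, a hedged sketch of what a plugin looks like under the new contract may help: factories keep the plain init(Map) call, and anything that needs to read a resource file implements ResourceLoaderAware and receives the loader through inform(ResourceLoader). The class name MyStopWordsFilterFactory and the "words"/"stopwords.txt" argument below are made up for illustration; the interfaces, BaseTokenFilterFactory, and the loader.getLines(...) call mirror the factories touched by this commit.

package org.apache.solr.analysis;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.util.plugin.ResourceLoaderAware;

// Hypothetical factory illustrating the new contract: plain init(Map) for args,
// inform(ResourceLoader) for anything that has to read a resource.
public class MyStopWordsFilterFactory extends BaseTokenFilterFactory
    implements ResourceLoaderAware {

  private Set stopWords;

  @Override
  public void init(Map<String,String> args) {
    super.init(args);  // BaseTokenFilterFactory simply stores the args
  }

  public void inform(ResourceLoader loader) {
    String wordFile = args.get("words");  // e.g. "stopwords.txt" (illustrative)
    if (wordFile != null) {
      try {
        List<String> wlist = loader.getLines(wordFile);
        stopWords = StopFilter.makeStopSet((String[])wlist.toArray(new String[0]));
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }

  public TokenStream create(TokenStream input) {
    // a real factory would guard against a missing word file
    return new StopFilter(input, stopWords);
  }
}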

View File

@ -160,6 +160,9 @@ New Features
31. SOLR-176: Add detailed timing data to query response output. The SearchHandler
interface now returns how long each section takes. (klaas)
32. SOLR-414: Plugin initialization now supports SolrCore and ResourceLoader "Aware"
plugins. Plugins that implement SolrCoreAware or ResourceLoaderAware are
informed about the SolrCore/ResourceLoader. (Henri Biestro, ryan)
Changes in runtime behavior
@ -229,14 +232,9 @@ Other Changes
3. Upgraded to Lucene 2.2.0; June 18, 2007.
4. SOLR-215: In a push to support multiple SolrCores, the TokenizerFactory
and TokenFilterFactory initialization interface has changed to accept the
solrConfig. Initialization should happen in:
init(SolrConfig solrConfig, Map<String,String> args)
rather than:
init(Map<String,String> args)
Existing classes should continue to work, but it is encouraged to update
the initialization code. (Henri Biestro via ryan)
4. SOLR-215: Static access to SolrCore.getSolrCore() and SolrConfig.config
have been deprecated in order to support multiple loaded cores.
(Henri Biestro via ryan)
5. SOLR-367: The create method in all TokenFilter and Tokenizer Factories
provided by Solr now declare their specific return types instead of just

View File

@ -21,33 +21,22 @@ package org.apache.solr.analysis;
import java.util.Map;
import java.util.logging.Logger;
import org.apache.solr.core.SolrConfig;
/**
* Simple abstract implementation that handles init arg processing.
*
* @version $Id$
*/
public abstract class BaseTokenFilterFactory implements TokenFilterFactory, SolrConfig.Initializable {
public abstract class BaseTokenFilterFactory implements TokenFilterFactory {
final static Logger log = Logger.getLogger(BaseTokenFilterFactory.class.getName());
/** The init args */
protected Map<String,String> args;
@Deprecated
public void init(Map<String,String> args) {
this.args=args;
}
/**
* @since solr 1.3
*/
public void init(SolrConfig solrConfig, Map<String,String> args) {
this.init( args ); // maintain backwards compatibility
this.args=args;
}
public Map<String,String> getArgs() {
return args;
}

View File

@ -20,29 +20,22 @@ package org.apache.solr.analysis;
import java.util.Map;
import java.util.logging.Logger;
import org.apache.solr.core.SolrConfig;
/**
* Simple abstract implementation that handles init arg processing.
*
* @version $Id$
*/
public abstract class BaseTokenizerFactory implements TokenizerFactory, SolrConfig.Initializable {
public abstract class BaseTokenizerFactory implements TokenizerFactory {
final static Logger log = Logger.getLogger(BaseTokenizerFactory.class.getName());
/** The init args */
protected Map<String,String> args;
@Deprecated
public void init(Map<String,String> args) {
this.args=args;
}
public void init(SolrConfig solrConfig, Map<String,String> args) {
this.init( args );
this.args=args;
}
public Map<String,String> getArgs() {
return args;
}

View File

@ -72,8 +72,8 @@ public class CapitalizationFilterFactory extends BaseTokenFilterFactory
boolean forceFirstLetter = true; // make sure the first letter is capital even if it is in the keep list
@Override
public void init(SolrConfig solrConfig, Map<String,String> args) {
super.init( solrConfig, args );
public void init(Map<String,String> args) {
super.init( args );
String k = args.get( KEEP );
if( k != null ) {

View File

@ -18,7 +18,6 @@ package org.apache.solr.analysis;
*/
import java.util.Map;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
@ -33,8 +32,8 @@ public class EdgeNGramFilterFactory extends BaseTokenFilterFactory {
private String side;
@Override
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
public void init(Map<String, String> args) {
super.init(args);
String maxArg = args.get("maxGramSize");
maxGramSize = (maxArg != null ? Integer.parseInt(maxArg)
: EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);

View File

@ -17,8 +17,6 @@ package org.apache.solr.analysis;
* limitations under the License.
*/
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
import java.io.Reader;
@ -34,8 +32,9 @@ public class EdgeNGramTokenizerFactory extends BaseTokenizerFactory {
private String side;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
String maxArg = args.get("maxGramSize");
maxGramSize = (maxArg != null ? Integer.parseInt(maxArg) : EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);

View File

@ -17,13 +17,13 @@
package org.apache.solr.analysis;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.Token;
import java.util.Map;
import java.util.List;
import java.util.Set;
import java.io.IOException;
@ -31,14 +31,13 @@ import java.io.IOException;
/**
* @version $Id$
*/
public class EnglishPorterFilterFactory extends BaseTokenFilterFactory {
@Override
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
public class EnglishPorterFilterFactory extends BaseTokenFilterFactory implements ResourceLoaderAware {
public void inform(ResourceLoader loader) {
String wordFile = args.get("protected");
if (wordFile != null) {
try {
List<String> wlist = solrConfig.getLines(wordFile);
List<String> wlist = loader.getLines(wordFile);
protectedWords = StopFilter.makeStopSet((String[])wlist.toArray(new String[0]));
} catch (IOException e) {
throw new RuntimeException(e);
@ -51,6 +50,7 @@ public class EnglishPorterFilterFactory extends BaseTokenFilterFactory {
public EnglishPorterFilter create(TokenStream input) {
return new EnglishPorterFilter(input,protectedWords);
}
}
@ -84,6 +84,7 @@ class EnglishPorterFilter extends TokenFilter {
}
**/
@Override
public Token next() throws IOException {
Token tok = input.next();
if (tok==null) return null;

View File

@ -18,17 +18,18 @@
package org.apache.solr.analysis;
import org.apache.lucene.analysis.el.*;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import java.util.Map;
import java.util.HashMap;
import org.apache.solr.core.SolrConfig;
import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.el.GreekCharsets;
import org.apache.lucene.analysis.el.GreekLowerCaseFilter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
public class GreekLowerCaseFilterFactory extends BaseTokenFilterFactory {
public class GreekLowerCaseFilterFactory extends BaseTokenFilterFactory
{
private static Map<String,char[]> CHARSETS = new HashMap<String,char[]>();
static {
CHARSETS.put("UnicodeGreek",GreekCharsets.UnicodeGreek);
@ -39,8 +40,9 @@ public class GreekLowerCaseFilterFactory extends BaseTokenFilterFactory {
private char[] charset = GreekCharsets.UnicodeGreek;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
String charsetName = args.get("charset");
if (null != charsetName) charset = CHARSETS.get(charsetName);
if (null == charset) {

View File

@ -17,7 +17,9 @@
package org.apache.solr.analysis;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
@ -30,21 +32,19 @@ import java.io.IOException;
* @version $Id$
* @since solr 1.3
*/
public class KeepWordFilterFactory extends BaseTokenFilterFactory {
public class KeepWordFilterFactory extends BaseTokenFilterFactory implements ResourceLoaderAware {
private Set<String> words;
private boolean ignoreCase;
@Override
@SuppressWarnings("unchecked")
public void init(SolrConfig config, Map<String, String> args) {
super.init(config, args);
public void inform(ResourceLoader loader) {
String wordFile = args.get("words");
ignoreCase = getBoolean("ignoreCase",false);
if (wordFile != null) {
try {
List<String> wlist = config.getLines(wordFile);
List<String> wlist = loader.getLines(wordFile);
words = StopFilter.makeStopSet(
(String[])wlist.toArray(new String[0]), ignoreCase);
}
@ -69,4 +69,5 @@ public class KeepWordFilterFactory extends BaseTokenFilterFactory {
public KeepWordFilter create(TokenStream input) {
return new KeepWordFilter(input,words,ignoreCase);
}
}

View File

@ -17,7 +17,6 @@
package org.apache.solr.analysis;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.LengthFilter;
@ -28,8 +27,10 @@ import java.util.Map;
*/
public class LengthFilterFactory extends BaseTokenFilterFactory {
int min,max;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
min=Integer.parseInt(args.get("min"));
max=Integer.parseInt(args.get("max"));
}

View File

@ -18,7 +18,6 @@ package org.apache.solr.analysis;
*/
import java.util.Map;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
@ -30,10 +29,10 @@ public class NGramFilterFactory extends BaseTokenFilterFactory {
private int minGramSize = 0;
/** Initializes the n-gram min and max sizes and the side from which one should start tokenizing. */
/** Initialize the n-gram min and max sizes and the side from which one should start tokenizing. */
@Override
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
public void init(Map<String, String> args) {
super.init(args);
String maxArg = args.get("maxGramSize");
maxGramSize = (maxArg != null ? Integer.parseInt(maxArg)
: NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);

View File

@ -17,7 +17,6 @@ package org.apache.solr.analysis;
* limitations under the License.
*/
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
@ -32,8 +31,9 @@ public class NGramTokenizerFactory extends BaseTokenizerFactory {
private int minGramSize = 0;
/** Initializes the n-gram min and max sizes and the side from which one should start tokenizing. */
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
String maxArg = args.get("maxGramSize");
maxGramSize = (maxArg != null ? Integer.parseInt(maxArg) : NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);

View File

@ -16,7 +16,6 @@
*/
package org.apache.solr.analysis;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import java.util.Map;
@ -32,8 +31,9 @@ public class PatternReplaceFilterFactory extends BaseTokenFilterFactory {
String replacement;
boolean all = true;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
try {
p = Pattern.compile(args.get("pattern"));
} catch (PatternSyntaxException e) {

View File

@ -70,7 +70,8 @@ public class PatternTokenizerFactory extends BaseTokenizerFactory
/**
* Require a configured pattern
*/
public void init(SolrConfig solrConfig, Map<String,String> args)
@Override
public void init(Map<String,String> args)
{
this.args = args;
String regex = args.get( PATTERN );
@ -92,13 +93,6 @@ public class PatternTokenizerFactory extends BaseTokenizerFactory
}
}
/**
* The arguments passed to init()
*/
public Map<String, String> getArgs() {
return this.args;
}
/**
* Split the input using configured pattern
*/

View File

@ -61,8 +61,8 @@ public class PhoneticFilterFactory extends BaseTokenFilterFactory
protected Encoder encoder = null;
@Override
public void init(SolrConfig solrConfig, Map<String,String> args) {
super.init( solrConfig, args );
public void init(Map<String,String> args) {
super.init( args );
if( args.get( "inject" ) != null ) {
inject = Boolean.getBoolean( args.get( INJECT ) );

View File

@ -1,4 +1,3 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -16,19 +15,20 @@
* limitations under the License.
*/
package org.apache.solr.analysis;
import org.apache.lucene.analysis.ru.*;
import java.io.Reader;
import org.apache.lucene.analysis.CharTokenizer;
import java.util.Map;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.ru.RussianLetterTokenizer;
public class RussianLetterTokenizerFactory extends BaseTokenizerFactory {
private char[] charset;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
charset = RussianCommon.getCharset(args.get("charset"));
}

View File

@ -1,4 +1,3 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -16,20 +15,20 @@
* limitations under the License.
*/
package org.apache.solr.analysis;
import org.apache.lucene.analysis.ru.*;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import java.util.Map;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ru.RussianLowerCaseFilter;
public class RussianLowerCaseFilterFactory extends BaseTokenFilterFactory {
private char[] charset;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
charset = RussianCommon.getCharset(args.get("charset"));
}

View File

@ -18,19 +18,18 @@
package org.apache.solr.analysis;
import org.apache.lucene.analysis.ru.*;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import java.io.IOException;
import java.util.Map;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ru.RussianStemFilter;
public class RussianStemFilterFactory extends BaseTokenFilterFactory {
private char[] charset;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
public void init(Map<String, String> args) {
super.init(args);
charset = RussianCommon.getCharset(args.get("charset"));
}

View File

@ -21,7 +21,6 @@ import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrConfig;
/**
* Factory for SnowballFilters, with configurable language
@ -31,12 +30,12 @@ import org.apache.solr.core.SolrConfig;
*
* @version $Id$
*/
public class SnowballPorterFilterFactory extends BaseTokenFilterFactory {
private String language = "English";
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
final String cfgLanguage = args.get("language");
if(cfgLanguage!=null) language = cfgLanguage;
SolrCore.log.fine("SnowballPorterFilterFactory: language=" + language);

View File

@ -17,12 +17,12 @@
package org.apache.solr.analysis;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.TokenStream;
import java.util.Map;
import java.util.List;
import java.util.Set;
import java.io.IOException;
@ -30,17 +30,15 @@ import java.io.IOException;
/**
* @version $Id$
*/
public class StopFilterFactory extends BaseTokenFilterFactory {
public class StopFilterFactory extends BaseTokenFilterFactory implements ResourceLoaderAware {
@Override
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
public void inform(ResourceLoader loader) {
String stopWordFile = args.get("words");
ignoreCase = getBoolean("ignoreCase",false);
if (stopWordFile != null) {
try {
List<String> wlist = solrConfig.getLines(stopWordFile);
List<String> wlist = loader.getLines(stopWordFile);
stopWords = StopFilter.makeStopSet((String[])wlist.toArray(new String[0]), ignoreCase);
} catch (IOException e) {
throw new RuntimeException(e);

View File

@ -18,21 +18,21 @@
package org.apache.solr.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @version $Id$
*/
public class SynonymFilterFactory extends BaseTokenFilterFactory {
@Override
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
public class SynonymFilterFactory extends BaseTokenFilterFactory implements ResourceLoaderAware {
public void inform(ResourceLoader loader) {
String synonyms = args.get("synonyms");
ignoreCase = getBoolean("ignoreCase",false);
@ -41,7 +41,7 @@ public class SynonymFilterFactory extends BaseTokenFilterFactory {
if (synonyms != null) {
List<String> wlist=null;
try {
wlist = solrConfig.getLines(synonyms);
wlist = loader.getLines(synonyms);
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@ -18,7 +18,6 @@
package org.apache.solr.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.solr.core.SolrConfig;
import java.util.Map;
/**
@ -53,11 +52,9 @@ public interface TokenFilterFactory {
* <p>The args are user-level initialization parameters that
may be specified when declaring the factory in the
* schema.xml
* This method is deprecated; the replacement is to have your class implement SolrConfig.Initializable
* and implement 'void init(SolrConfig solrConfig, Map<String,String> args);'
*/
@Deprecated
public void init(Map<String,String> args);
/**
* Accessor method for reporting the args used to initialize this factory.
* <p>
@ -66,6 +63,7 @@ public interface TokenFilterFactory {
* </p>
*/
public Map<String,String> getArgs();
/** Transform the specified input TokenStream */
public TokenStream create(TokenStream input);
}

View File

@ -52,11 +52,9 @@ public interface TokenizerFactory {
* <p>The args are user-level initialization parameters that
may be specified when declaring the factory in the
* schema.xml
* This method is deprecated; the replacement is to have your class implement SolrConfig.Initializable
* and implement 'void init(SolrConfig solrConfig, Map<String,String> args);'
*/
@Deprecated
public void init(Map<String,String> args);
/**
* Accessor method for reporting the args used to initialize this factory.
* <p>
@ -65,6 +63,7 @@ public interface TokenizerFactory {
* </p>
*/
public Map<String,String> getArgs();
/** Creates a TokenStream of the specified input */
public TokenStream create(Reader input);
}
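
A factory that needs neither init arguments nor resources only has to supply create(Reader); the init(Map) and getArgs() methods declared above are already handled by BaseTokenizerFactory earlier in this diff. The whitespace-based factory below is illustrative, not part of the commit.

package org.apache.solr.analysis;

import java.io.Reader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;

// Illustrative factory: no init args and no resources, so the inherited
// init(Map) and getArgs() from BaseTokenizerFactory are sufficient.
public class MyWhitespaceTokenizerFactory extends BaseTokenizerFactory {
  public TokenStream create(Reader input) {
    return new WhitespaceTokenizer(input);
  }
}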

View File

@ -19,7 +19,6 @@ package org.apache.solr.analysis;
import java.util.Map;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import org.apache.solr.common.SolrException;
@ -32,8 +31,8 @@ public class TrimFilterFactory extends BaseTokenFilterFactory {
protected boolean updateOffsets = false;
@Override
public void init(SolrConfig solrConfig, Map<String,String> args) {
super.init(solrConfig, args );
public void init(Map<String,String> args) {
super.init( args );
String v = args.get( "updateOffsets" );
if( v != null ) {

View File

@ -16,7 +16,6 @@
*/
package org.apache.solr.analysis;
import org.apache.solr.core.SolrConfig;
import org.apache.lucene.analysis.TokenStream;
import java.util.Map;
@ -32,8 +31,9 @@ public class WordDelimiterFilterFactory extends BaseTokenFilterFactory {
int catenateAll=0;
int splitOnCaseChange=0;
public void init(SolrConfig solrConfig, Map<String, String> args) {
super.init(solrConfig, args);
@Override
public void init(Map<String, String> args) {
super.init(args);
generateWordParts = getInt("generateWordParts", 1);
generateNumberParts = getInt("generateNumberParts", 1);
catenateWords = getInt("catenateWords", 0);

View File

@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.common;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
/**
* @since solr 1.3
*/
public interface ResourceLoader
{
public InputStream openResource(String resource) throws IOException;
/**
* Accesses a resource by name and returns the (non comment) lines
* containing data.
*
* <p>
* A comment line is any line that starts with the character "#"
* </p>
*
* @param resource
* @return a list of non-blank non-comment lines with whitespace trimmed
* from front and back.
* @throws IOException
*/
public List<String> getLines(String resource) throws IOException;
public Object newInstance(String cname, String ... subpackages);
}
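
The interface above is small enough that a throwaway implementation is convenient when unit-testing a ResourceLoaderAware plugin outside a running core. The classpath-backed helper below is a hypothetical sketch, not part of this commit; it only assumes the three methods declared above.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.solr.common.ResourceLoader;

// Hypothetical classpath-backed loader for unit tests; resolves resources
// against the test classpath instead of a solr home directory.
public class ClasspathResourceLoader implements ResourceLoader {

  public InputStream openResource(String resource) throws IOException {
    InputStream is = getClass().getClassLoader().getResourceAsStream(resource);
    if (is == null) {
      throw new IOException("Can't find resource: " + resource);
    }
    return is;
  }

  public List<String> getLines(String resource) throws IOException {
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(openResource(resource), "UTF-8"));
    List<String> lines = new ArrayList<String>();
    String line;
    while ((line = reader.readLine()) != null) {
      line = line.trim();
      if (line.length() == 0 || line.startsWith("#")) continue; // skip blanks and comments
      lines.add(line);
    }
    return lines;
  }

  public Object newInstance(String cname, String... subpackages) {
    try {
      return Class.forName(cname).newInstance(); // ignores the subpackage shortcuts in this sketch
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
}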

View File

@ -20,9 +20,11 @@ package org.apache.solr.core;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.solr.util.plugin.SolrCoreAware;
import javax.naming.Context;
import javax.naming.InitialContext;
@ -38,27 +40,24 @@ import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import java.net.URLClassLoader;
import java.net.URL;
import java.net.MalformedURLException;
/**
* @version $Id$
*/
public class Config {
public static final Logger log = Logger.getLogger(SolrCore.class.getName());
public static final Logger log = Logger.getLogger(Config.class.getName());
static final XPathFactory xpathFactory = XPathFactory.newInstance();
private final String instanceDir; // solr home directory
private final Document doc;
private final String prefix;
private final String name;
private final SolrResourceLoader loader;
@Deprecated
public Config(String name, InputStream is, String prefix) throws ParserConfigurationException, IOException, SAXException
{
this( Config.locateInstanceDir(), name, is, prefix );
this( null, name, is, prefix );
}
public Config(String instanceDir, String name) throws ParserConfigurationException, IOException, SAXException
@ -68,21 +67,15 @@ public class Config {
public Config(String instanceDir, String name, InputStream is, String prefix) throws ParserConfigurationException, IOException, SAXException
{
if( instanceDir == null ) {
instanceDir = Config.locateInstanceDir();
}
this.instanceDir = normalizeDir(instanceDir);
log.info("Solr home set to '" + instanceDir + "'");
classLoader = null;
this.loader = new SolrResourceLoader( instanceDir );
this.name = name;
this.prefix = prefix;
if (prefix!=null && !prefix.endsWith("/")) prefix += '/';
InputStream lis = is;
try {
if (lis == null)
lis = openResource(name);
lis = loader.openResource(name);
javax.xml.parsers.DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
doc = builder.parse(lis);
@ -98,6 +91,14 @@ public class Config {
}
}
/**
* @since solr 1.3
*/
public SolrResourceLoader getResourceLoader()
{
return loader;
}
public Document getDocument() {
return doc;
}
@ -211,215 +212,43 @@ public class Config {
return val!=null ? Float.parseFloat(val) : def;
}
// The directory where solr will look for config files by default.
// defaults to "./solr/conf/"
public String getConfigDir() {
return instanceDir + "conf/";
}
public InputStream openResource(String resource) {
InputStream is=null;
try {
File f = new File(resource);
if (!f.isAbsolute()) {
// try $CWD/solrconf/
f = new File(getConfigDir() + resource);
}
if (f.isFile() && f.canRead()) {
return new FileInputStream(f);
} else {
// try $CWD
f = new File(resource);
if (f.isFile() && f.canRead()) {
return new FileInputStream(f);
}
}
ClassLoader loader = getClassLoader();
is = loader.getResourceAsStream(resource);
} catch (Exception e) {
throw new RuntimeException("Error opening " + resource, e);
}
if (is==null) {
throw new RuntimeException("Can't find resource '" + resource + "' in classpath or '" + getConfigDir() + "', cwd="+System.getProperty("user.dir"));
}
return is;
}
/**
* Accesses a resource by name and returns the (non comment) lines
* containing data.
*
* <p>
* A comment line is any line that starts with the character "#"
* </p>
*
* @param resource
* @return a list of non-blank non-comment lines with whitespace trimmed
* from front and back.
* @throws IOException
*/
public List<String> getLines(String resource) throws IOException {
BufferedReader input = null;
try {
// todo - allow configurable charset?
input = new BufferedReader(new InputStreamReader(openResource(resource), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
ArrayList<String> lines = new ArrayList<String>();
for (String word=null; (word=input.readLine())!=null;) {
// skip comments
if (word.startsWith("#")) continue;
word=word.trim();
// skip blank lines
if (word.length()==0) continue;
lines.add(word);
}
return lines;
}
//
// classloader related functions
//
private static final String project = "solr";
private static final String base = "org.apache" + "." + project;
private static final String[] packages = {"","analysis.","schema.","handler.","search.","update.","core.","request.","update.processor.","util."};
public Class findClass(String cname, String... subpackages) {
ClassLoader loader = getClassLoader();
if (subpackages.length==0) subpackages = packages;
// first try cname == full name
try {
return Class.forName(cname, true, loader);
} catch (ClassNotFoundException e) {
String newName=cname;
if (newName.startsWith(project)) {
newName = cname.substring(project.length()+1);
}
for (String subpackage : subpackages) {
try {
String name = base + '.' + subpackage + newName;
log.finest("Trying class name " + name);
return Class.forName(name, true, loader);
} catch (ClassNotFoundException e1) {
// ignore... assume first exception is best.
}
}
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Error loading class '" + cname + "'", e, false);
}
}
public Object newInstance(String cname, String... subpackages) {
Class clazz = findClass(cname,subpackages);
if( clazz == null ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Can not find class: "+cname + " in " + getClassLoader(), false);
}
try {
return clazz.newInstance();
}
catch (Exception e) {
e.printStackTrace();
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Error instantiating class: '" + clazz.getName()+"'", e, false );
}
}
private static String normalizeDir(String path) {
if (path==null) return null;
if ( !(path.endsWith("/") || path.endsWith("\\")) ) {
path+='/';
}
return path;
}
public String getInstanceDir() {
return instanceDir;
}
public static String locateInstanceDir() {
String home = null;
// Try JNDI
try {
Context c = new InitialContext();
home = (String)c.lookup("java:comp/env/solr/home");
log.info("Using JNDI solr.home: "+home );
} catch (NoInitialContextException e) {
log.info("JNDI not configured for Solr (NoInitialContextEx)");
} catch (NamingException e) {
log.info("No /solr/home in JNDI");
} catch( RuntimeException ex ) {
log.warning("Odd RuntimeException while testing for JNDI: " + ex.getMessage());
}
// Now try system property
if( home == null ) {
String prop = project + ".solr.home";
home = normalizeDir(System.getProperty(prop));
if( home != null ) {
log.info("using system property solr.home: " + home );
}
}
// if all else fails, try
if( home == null ) {
home = project + '/';
log.info("Solr home defaulted to '" + home + "' (could not find system property or JNDI)");
}
return normalizeDir( home );
}
/**
* Classloader loading resources specified in any configs
* @see #getClassLoader()
*/
private ClassLoader classLoader = null;
/**
* Returns the classloader to be use when loading resources
* specified in this config
*
* <p>
* This loader will delegate to the context classloader when possible,
otherwise it will attempt to resolve resources using any jar files
* found in the "lib/" directory in the "Solr Home" directory.
* <p>
*/
private ClassLoader getClassLoader() {
if (null == classLoader) {
// NB5.5/win32/1.5_10: need to go thru local var or classLoader is not set!
ClassLoader loader = Thread.currentThread().getContextClassLoader();
File f = new File(instanceDir + "lib/");
if (f.canRead() && f.isDirectory()) {
File[] jarFiles = f.listFiles();
URL[] jars = new URL[jarFiles.length];
try {
for (int j = 0; j < jarFiles.length; j++) {
jars[j] = jarFiles[j].toURI().toURL();
log.info("Adding '" + jars[j].toString() + "' to Solr classloader");
}
loader = URLClassLoader.newInstance(jars, loader);
} catch (MalformedURLException e) {
SolrException.log(log,"Can't construct solr lib class loader", e);
}
}
classLoader = loader;
}
return classLoader;
}
/**
* @return the XML filename
*/
public String getName() {
return name;
}
// The following functions were moved to ResourceLoader
//-----------------------------------------------------------------------------
@Deprecated
public String getConfigDir() {
return loader.getConfigDir();
}
@Deprecated
public InputStream openResource(String resource) {
return loader.openResource(resource);
}
@Deprecated
public List<String> getLines(String resource) throws IOException {
return loader.getLines(resource);
}
@Deprecated
public Class findClass(String cname, String... subpackages) {
return loader.findClass(cname, subpackages);
}
@Deprecated
public Object newInstance(String cname, String ... subpackages) {
return loader.newInstance(cname, subpackages);
}
@Deprecated
public String getInstanceDir() {
return loader.getInstanceDir();
}
}

View File

@ -25,6 +25,7 @@ import java.util.logging.Logger;
import javax.xml.xpath.XPathConstants;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.common.util.NamedList;
@ -34,6 +35,8 @@ import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.util.plugin.AbstractPluginLoader;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
@ -133,7 +136,7 @@ final class RequestHandlers {
new AbstractPluginLoader<SolrRequestHandler>( "[solrconfig.xml] requestHandler", true, true )
{
@Override
protected SolrRequestHandler create( Config config, String name, String className, Node node ) throws Exception
protected SolrRequestHandler create( ResourceLoader config, String name, String className, Node node ) throws Exception
{
String startup = DOMUtil.getAttr( node, "startup" );
if( startup != null ) {
@ -163,7 +166,7 @@ final class RequestHandlers {
NodeList nodes = (NodeList)config.evaluate("requestHandler", XPathConstants.NODESET);
// Load the handlers and get the default one
SolrRequestHandler defaultHandler = loader.load( config, nodes );
SolrRequestHandler defaultHandler = loader.load( config.getResourceLoader(), nodes );
if( defaultHandler == null ) {
defaultHandler = get(RequestHandlers.DEFAULT_HANDLER_NAME);
if( defaultHandler == null ) {
@ -231,6 +234,14 @@ final class RequestHandlers {
try {
_handler = (SolrRequestHandler)core.createRequestHandler(_className);
_handler.init( _args );
if( _handler instanceof ResourceLoaderAware ) {
((ResourceLoaderAware)_handler).inform( core.getSolrConfig().getResourceLoader() );
}
if( _handler instanceof SolrCoreAware ) {
((SolrCoreAware)_handler).inform( core );
}
}
catch( Exception ex ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "lazy loading error", ex );

View File

@ -30,7 +30,6 @@ import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.util.Map;
import java.util.Collection;
import java.util.HashSet;
import java.util.StringTokenizer;
@ -60,15 +59,6 @@ public class SolrConfig extends Config {
@Deprecated
public static SolrConfig config = null;
/** An interface to denote objects that need a SolrConfig to be initialized.
* These are mainly TokenFilterFactory and TokenizerFactory subclasses.
*/
public interface Initializable {
/** <code>init</code> will be called just once, immediately after creation.
*/
void init(SolrConfig solrConfig, Map<String,String> args);
}
public final String configFile;
/**

View File

@ -35,6 +35,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;
@ -55,8 +56,6 @@ import org.apache.solr.request.XMLResponseWriter;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.LuceneQParserPlugin;
import org.apache.solr.search.OldLuceneQParserPlugin;
import org.apache.solr.update.DirectUpdateHandler;
import org.apache.solr.update.SolrIndexWriter;
import org.apache.solr.update.UpdateHandler;
@ -110,6 +109,13 @@ public final class SolrCore {
return solrConfig;
}
/**
* @since solr 1.3
*/
public SolrResourceLoader getResourceLoader() {
return solrConfig.getResourceLoader();
}
public String getConfigFile() {
return solrConfig.configFile;
}
@ -206,7 +212,7 @@ public final class SolrCore {
if (msg == null) msg = "SolrCore Object";
try {
try {
clazz = solrConfig.findClass(className);
clazz = solrConfig.getResourceLoader().findClass(className);
if (cast != null && !cast.isAssignableFrom(clazz))
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,"Error Instantiating "+msg+", "+className+ " is not a " +cast.getName());
@ -269,11 +275,12 @@ public final class SolrCore {
// the sync block is needed)
instance = this; // set singleton
SolrResourceLoader loader = config.getResourceLoader();
if (dataDir ==null) {
dataDir = config.get("dataDir",config.getInstanceDir()+"data");
dataDir = config.get("dataDir",loader.getInstanceDir()+"data");
}
log.info("Opening new SolrCore at " + config.getInstanceDir() + ", dataDir="+dataDir);
log.info("Opening new SolrCore at " + loader.getInstanceDir() + ", dataDir="+dataDir);
if (schema==null) {
schema = new IndexSchema(config, "schema.xml");
@ -316,6 +323,10 @@ public final class SolrCore {
catch (IOException e) {
throw new RuntimeException(e);
}
// Finally tell anyone who wants to know
loader.inform( loader );
loader.inform( this );
}
}
@ -342,7 +353,7 @@ public final class SolrCore {
};
NodeList nodes = (NodeList)solrConfig.evaluate("updateRequestProcessor/factory", XPathConstants.NODESET);
UpdateRequestProcessorFactory def = loader.load( solrConfig, nodes );
UpdateRequestProcessorFactory def = loader.load( solrConfig.getResourceLoader(), nodes );
if( def == null ) {
def = new ChainedUpdateProcessorFactory(); // the default
def.init( thiscore, null );
@ -877,7 +888,7 @@ public final class SolrCore {
NamedListPluginLoader<QueryResponseWriter> loader =
new NamedListPluginLoader<QueryResponseWriter>( "[solrconfig.xml] "+xpath, responseWriters );
defaultResponseWriter = loader.load( solrConfig, nodes );
defaultResponseWriter = loader.load( solrConfig.getResourceLoader(), nodes );
// configure the default response writer; this one should never be null
if (defaultResponseWriter == null) {
@ -928,7 +939,7 @@ public final class SolrCore {
NamedListPluginLoader<QParserPlugin> loader =
new NamedListPluginLoader<QParserPlugin>( "[solrconfig.xml] "+xpath, qParserPlugins);
loader.load( solrConfig, nodes );
loader.load( solrConfig.getResourceLoader(), nodes );
// default parsers
for (int i=0; i<QParserPlugin.standardPlugins.length; i+=2) {

View File

@ -0,0 +1,344 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.naming.NoInitialContextException;
import org.apache.solr.analysis.TokenFilterFactory;
import org.apache.solr.analysis.TokenizerFactory;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.QueryResponseWriter;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.schema.FieldType;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.solr.util.plugin.SolrCoreAware;
/**
* @since solr 1.3
*/
public class SolrResourceLoader implements ResourceLoader
{
public static final Logger log = Logger.getLogger(SolrResourceLoader.class.getName());
static final String project = "solr";
static final String base = "org.apache" + "." + project;
static final String[] packages = {"","analysis.","schema.","handler.","search.","update.","core.","request.","update.processor.","util."};
private final ClassLoader classLoader;
private final String instanceDir;
private final List<SolrCoreAware> waitingForCore = new ArrayList<SolrCoreAware>();
private final List<ResourceLoaderAware> waitingForResources = new ArrayList<ResourceLoaderAware>();
/**
* <p>
* This loader will delegate to the context classloader when possible,
* otherwise it will attempt to resolve resources using any jar files
* found in the "lib/" directory in the "Solr Home" directory.
* <p>
*/
public SolrResourceLoader( String instanceDir, ClassLoader loader )
{
if( instanceDir == null ) {
instanceDir = SolrResourceLoader.locateInstanceDir();
}
this.instanceDir = normalizeDir(instanceDir);
log.info("Solr home set to '" + this.instanceDir + "'");
if( loader == null ) {
// NB5.5/win32/1.5_10: need to go thru local var or classLoader is not set!
loader = Thread.currentThread().getContextClassLoader();
}
File f = new File(instanceDir + "lib/");
if (f.canRead() && f.isDirectory()) {
File[] jarFiles = f.listFiles();
URL[] jars = new URL[jarFiles.length];
try {
for (int j = 0; j < jarFiles.length; j++) {
jars[j] = jarFiles[j].toURI().toURL();
log.info("Adding '" + jars[j].toString() + "' to Solr classloader");
}
loader = URLClassLoader.newInstance(jars, loader);
} catch (MalformedURLException e) {
SolrException.log(log,"Can't construct solr lib class loader", e);
}
}
this.classLoader = loader;
}
public SolrResourceLoader( String instanceDir )
{
this( instanceDir, null );
}
protected static String normalizeDir(String path) {
if (path==null) return null;
if ( !(path.endsWith("/") || path.endsWith("\\")) ) {
path+='/';
}
return path;
}
public String getConfigDir() {
return instanceDir + "conf/";
}
public InputStream openResource(String resource) {
InputStream is=null;
try {
File f = new File(resource);
if (!f.isAbsolute()) {
// try $CWD/solrconf/
f = new File(getConfigDir() + resource);
}
if (f.isFile() && f.canRead()) {
return new FileInputStream(f);
} else {
// try $CWD
f = new File(resource);
if (f.isFile() && f.canRead()) {
return new FileInputStream(f);
}
}
is = classLoader.getResourceAsStream(resource);
} catch (Exception e) {
throw new RuntimeException("Error opening " + resource, e);
}
if (is==null) {
throw new RuntimeException("Can't find resource '" + resource + "' in classpath or '" + getConfigDir() + "', cwd="+System.getProperty("user.dir"));
}
return is;
}
/**
* Accesses a resource by name and returns the (non comment) lines
* containing data.
*
* <p>
* A comment line is any line that starts with the character "#"
* </p>
*
* @param resource
* @return a list of non-blank non-comment lines with whitespace trimmed
* from front and back.
* @throws IOException
*/
public List<String> getLines(String resource) throws IOException {
BufferedReader input = null;
try {
// TODO - allow configurable charset?
input = new BufferedReader(new InputStreamReader(openResource(resource), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
ArrayList<String> lines = new ArrayList<String>();
for (String word=null; (word=input.readLine())!=null;) {
// skip comments
if (word.startsWith("#")) continue;
word=word.trim();
// skip blank lines
if (word.length()==0) continue;
lines.add(word);
}
return lines;
}
public Class findClass(String cname, String... subpackages) {
if (subpackages.length==0) subpackages = packages;
// first try cname == full name
try {
return Class.forName(cname, true, classLoader);
} catch (ClassNotFoundException e) {
String newName=cname;
if (newName.startsWith(project)) {
newName = cname.substring(project.length()+1);
}
for (String subpackage : subpackages) {
try {
String name = base + '.' + subpackage + newName;
log.finest("Trying class name " + name);
return Class.forName(name, true, classLoader);
} catch (ClassNotFoundException e1) {
// ignore... assume first exception is best.
}
}
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Error loading class '" + cname + "'", e, false);
}
}
public Object newInstance(String cname, String ... subpackages) {
Class clazz = findClass(cname,subpackages);
if( clazz == null ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Can not find class: "+cname + " in " + classLoader, false);
}
Object obj = null;
try {
obj = clazz.newInstance();
}
catch (Exception e) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Error instantiating class: '" + clazz.getName()+"'", e, false );
}
if( obj instanceof SolrCoreAware ) {
assertAwareCompatibility( SolrCoreAware.class, obj );
waitingForCore.add( (SolrCoreAware)obj );
}
if( obj instanceof ResourceLoaderAware ) {
assertAwareCompatibility( ResourceLoaderAware.class, obj );
waitingForResources.add( (ResourceLoaderAware)obj );
}
return obj;
}
/**
* Tell all {@link SolrCoreAware} instances about the SolrCore
*/
public void inform(SolrCore core)
{
for( SolrCoreAware aware : waitingForCore ) {
aware.inform( core );
}
waitingForCore.clear();
}
/**
* Tell all {@link ResourceLoaderAware} instances about the loader
*/
public void inform( ResourceLoader loader )
{
for( ResourceLoaderAware aware : waitingForResources ) {
aware.inform( loader );
}
waitingForResources.clear();
}
public static String locateInstanceDir() {
String home = null;
// Try JNDI
try {
Context c = new InitialContext();
home = (String)c.lookup("java:comp/env/solr/home");
log.info("Using JNDI solr.home: "+home );
} catch (NoInitialContextException e) {
log.info("JNDI not configured for Solr (NoInitialContextEx)");
} catch (NamingException e) {
log.info("No /solr/home in JNDI");
} catch( RuntimeException ex ) {
log.warning("Odd RuntimeException while testing for JNDI: " + ex.getMessage());
}
// Now try system property
if( home == null ) {
String prop = project + ".solr.home";
home = normalizeDir(System.getProperty(prop));
if( home != null ) {
log.info("using system property solr.home: " + home );
}
}
// if all else fails, try
if( home == null ) {
home = project + '/';
log.info("Solr home defaulted to '" + home + "' (could not find system property or JNDI)");
}
return normalizeDir( home );
}
public String getInstanceDir() {
return instanceDir;
}
/**
* Keep a list of classes that are allowed to implement each 'Aware' interface
*/
private static final Map<Class, Class[]> awareCompatibility;
static {
awareCompatibility = new HashMap<Class, Class[]>();
awareCompatibility.put(
SolrCoreAware.class, new Class[] {
SolrRequestHandler.class,
QueryResponseWriter.class,
SearchComponent.class
}
);
awareCompatibility.put(
ResourceLoaderAware.class, new Class[] {
TokenFilterFactory.class,
TokenizerFactory.class,
FieldType.class
}
);
}
/**
* Utility function to throw an exception if the class is invalid
*/
void assertAwareCompatibility( Class aware, Object obj )
{
Class[] valid = awareCompatibility.get( aware );
if( valid == null ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Unknown Aware interface: "+aware );
}
for( Class v : valid ) {
if( v.isInstance( obj ) ) {
return;
}
}
StringBuilder builder = new StringBuilder();
builder.append( "Invalid 'Aware' object: " ).append( obj );
builder.append( " -- ").append( aware.getName() );
builder.append( " must be an instance of: " );
for( Class v : valid ) {
builder.append( "[" ).append( v.getName() ).append( "] ") ;
}
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, builder.toString() );
}
}
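
To make the life cycle above concrete, a hedged usage sketch follows: newInstance() checks awareCompatibility and queues any "Aware" instance, and nothing is informed until the owner flushes the queues with inform(). The solr-home path and the factory name (the hypothetical MyStopWordsFilterFactory from the sketch near the top) are made up; in the real flow SolrCore performs the inform calls at the end of its constructor, as the SolrCore diff above shows.

import java.util.HashMap;
import java.util.Map;

import org.apache.solr.analysis.TokenFilterFactory;
import org.apache.solr.core.SolrResourceLoader;

public class AwareWiringSketch {
  public static void main(String[] args) {
    // Hypothetical solr home; the loader also builds its classloader from <home>/lib/.
    SolrResourceLoader loader = new SolrResourceLoader("/path/to/solr/home");

    // newInstance() resolves "solr." names against the default packages and, because
    // the factory implements ResourceLoaderAware, parks it in waitingForResources.
    TokenFilterFactory factory =
        (TokenFilterFactory) loader.newInstance("solr.MyStopWordsFilterFactory");

    Map<String,String> initArgs = new HashMap<String,String>();
    initArgs.put("words", "stopwords.txt");
    factory.init(initArgs);        // plain init; no SolrConfig parameter any more

    // Once configuration is complete, the owner fires the callbacks:
    loader.inform(loader);         // ResourceLoaderAware.inform(ResourceLoader)
    // loader.inform(core);        // SolrCoreAware.inform(SolrCore) -- done by SolrCore itself
  }
}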

View File

@ -36,6 +36,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.XML;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.RequestHandlerUtils;
import org.apache.solr.request.SolrQueryRequest;
@ -88,7 +89,7 @@ public class SystemInfoHandler extends RequestHandlerBase
// Solr Home
SimpleOrderedMap<Object> dirs = new SimpleOrderedMap<Object>();
dirs.add( "instance", new File( core.getSolrConfig().getInstanceDir() ).getAbsolutePath() );
dirs.add( "instance", new File( core.getResourceLoader().getInstanceDir() ).getAbsolutePath() );
dirs.add( "data", new File( core.getDataDir() ).getAbsolutePath() );
dirs.add( "index", new File( core.getIndexDir() ).getAbsolutePath() );
info.add( "directory", dirs );

View File

@ -82,7 +82,7 @@ public class SolrHighlighter
// Load the fragmenters
String xpath = "highlighting/fragmenter";
NamedListPluginLoader<SolrFragmenter> fragloader = new NamedListPluginLoader<SolrFragmenter>( xpath, fragmenters );
SolrFragmenter frag = fragloader.load( config, (NodeList)config.evaluate( xpath, XPathConstants.NODESET ) );
SolrFragmenter frag = fragloader.load( config.getResourceLoader(), (NodeList)config.evaluate( xpath, XPathConstants.NODESET ) );
if( frag == null ) {
frag = new GapFragmenter();
}
@ -92,7 +92,7 @@ public class SolrHighlighter
// Load the formatters
xpath = "highlighting/formatter";
NamedListPluginLoader<SolrFormatter> fmtloader = new NamedListPluginLoader<SolrFormatter>( xpath, formatters );
SolrFormatter fmt = fmtloader.load( config, (NodeList)config.evaluate( xpath, XPathConstants.NODESET ) );
SolrFormatter fmt = fmtloader.load( config.getResourceLoader(), (NodeList)config.evaluate( xpath, XPathConstants.NODESET ) );
if( fmt == null ) {
fmt = new HtmlFormatter();
}

View File

@ -23,11 +23,12 @@ import org.apache.lucene.document.Fieldable;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.analysis.TokenFilterFactory;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.analysis.TokenizerFactory;
@ -53,7 +54,6 @@ import java.util.logging.Logger;
*
* @version $Id$
*/
public final class IndexSchema {
final static Logger log = Logger.getLogger(IndexSchema.class.getName());
private final SolrConfig solrConfig;
@ -71,6 +71,9 @@ public final class IndexSchema {
this.solrConfig = solrConfig;
this.schemaFile=schemaFile;
readSchema(solrConfig);
SolrResourceLoader loader = solrConfig.getResourceLoader();
loader.inform( loader );
}
public SolrConfig getSolrConfig() {
@ -82,7 +85,7 @@ public final class IndexSchema {
* @see Config#openResource
*/
public InputStream getInputStream() {
return solrConfig.openResource(schemaFile);
return solrConfig.getResourceLoader().openResource(schemaFile);
}
/** Gets the name of the schema file. */
@ -320,9 +323,9 @@ public final class IndexSchema {
AbstractPluginLoader<FieldType> loader = new AbstractPluginLoader<FieldType>( "[schema.xml] fieldType", true, true) {
@Override
protected FieldType create( Config config, String name, String className, Node node ) throws Exception
protected FieldType create( ResourceLoader loader, String name, String className, Node node ) throws Exception
{
FieldType ft = (FieldType)solrConfig.newInstance(className);
FieldType ft = (FieldType)loader.newInstance(className);
ft.setTypeName(name);
String expression = "./analyzer[@type='query']";
@ -359,7 +362,7 @@ public final class IndexSchema {
String expression = "/schema/types/fieldtype | /schema/types/fieldType";
NodeList nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET);
loader.load( solrConfig, nodes );
loader.load( solrConfig.getResourceLoader(), nodes );
@ -377,7 +380,6 @@ public final class IndexSchema {
String name = DOMUtil.getAttr(attrs,"name","field definition");
log.finest("reading field def "+name);
String type = DOMUtil.getAttr(attrs,"type","field " + name);
String val;
FieldType ft = fieldTypes.get(type);
if (ft==null) {
@ -457,7 +459,7 @@ public final class IndexSchema {
similarity = new DefaultSimilarity();
log.fine("using default similarity");
} else {
similarity = (Similarity)solrConfig.newInstance(node.getNodeValue().trim());
similarity = (Similarity)solrConfig.getResourceLoader().newInstance(node.getNodeValue().trim());
log.fine("using similarity " + similarity.getClass().getName());
}
@ -592,69 +594,65 @@ public final class IndexSchema {
NamedNodeMap attrs = node.getAttributes();
String analyzerName = DOMUtil.getAttr(attrs,"class");
if (analyzerName != null) {
return (Analyzer)solrConfig.newInstance(analyzerName);
return (Analyzer)solrConfig.getResourceLoader().newInstance(analyzerName);
}
XPath xpath = XPathFactory.newInstance().newXPath();
Node tokNode = (Node)xpath.evaluate("./tokenizer", node, XPathConstants.NODE);
NodeList nList = (NodeList)xpath.evaluate("./filter", node, XPathConstants.NODESET);
if (tokNode==null){
// Load the Tokenizer
// Although an analyzer only allows a single Tokenizer, we load a list to make sure
// the configuration is ok
// --------------------------------------------------------------------------------
final ArrayList<TokenizerFactory> tokenizers = new ArrayList<TokenizerFactory>(1);
AbstractPluginLoader<TokenizerFactory> tokenizerLoader =
new AbstractPluginLoader<TokenizerFactory>( "[schema.xml] analyzer/tokenizer", false, false )
{
@Override
protected void init(TokenizerFactory plugin, Node node) throws Exception {
if( !tokenizers.isEmpty() ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"The schema defines multiple tokenizers for: "+node );
}
plugin.init( DOMUtil.toMapExcept(node.getAttributes(),"class") );
tokenizers.add( plugin );
}
@Override
protected TokenizerFactory register(String name, TokenizerFactory plugin) throws Exception {
return plugin; // does not need to do anything
}
};
tokenizerLoader.load( solrConfig.getResourceLoader(), (NodeList)xpath.evaluate("./tokenizer", node, XPathConstants.NODESET) );
// Make sure something was loaded
if( tokenizers.isEmpty() ) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,"analyzer without class or tokenizer & filter list");
}
TokenizerFactory tfac = readTokenizerFactory(solrConfig, tokNode);
/******
// oops, getChildNodes() includes text (newlines, etc) in addition
// to the actual child elements
NodeList nList = node.getChildNodes();
TokenizerFactory tfac = readTokenizerFactory(nList.item(0));
if (tfac==null) {
throw new SolrException( SolrException.StatusCode.SERVER_ERROR,"TokenizerFactory must be specified first in analyzer");
// Load the Filters
// --------------------------------------------------------------------------------
final ArrayList<TokenFilterFactory> filters = new ArrayList<TokenFilterFactory>();
AbstractPluginLoader<TokenFilterFactory> filterLoader =
new AbstractPluginLoader<TokenFilterFactory>( "[schema.xml] analyzer/filter", false, false )
{
@Override
protected void init(TokenFilterFactory plugin, Node node) throws Exception {
if( plugin != null ) {
plugin.init( DOMUtil.toMapExcept(node.getAttributes(),"class") );
filters.add( plugin );
}
******/
ArrayList<TokenFilterFactory> filters = new ArrayList<TokenFilterFactory>();
for (int i=0; i<nList.getLength(); i++) {
TokenFilterFactory filt = readTokenFilterFactory(solrConfig, nList.item(i));
if (filt != null) filters.add(filt);
}
return new TokenizerChain(tfac, filters.toArray(new TokenFilterFactory[filters.size()]));
@Override
protected TokenFilterFactory register(String name, TokenFilterFactory plugin) throws Exception {
return plugin; // does not need to do anything
}
};
filterLoader.load( solrConfig.getResourceLoader(), (NodeList)xpath.evaluate("./filter", node, XPathConstants.NODESET) );
// <tokenizer class="solr.StandardFilterFactory"/>
private TokenizerFactory readTokenizerFactory(SolrConfig solrConfig, Node node) {
// if (node.getNodeName() != "tokenizer") return null;
NamedNodeMap attrs = node.getAttributes();
String className = DOMUtil.getAttr(attrs,"class","tokenizer");
TokenizerFactory tfac = (TokenizerFactory)solrConfig.newInstance(className);
if (tfac instanceof SolrConfig.Initializable) {
((SolrConfig.Initializable)tfac).init(solrConfig, DOMUtil.toMapExcept(attrs,"class"));
}
else {
log.warning("calling the deprecated form of init; should be calling init(SolrConfig solrConfig, Map<String,String> args) " + className );
tfac.init(DOMUtil.toMapExcept(attrs,"class"));
}
return tfac;
}
// <tokenizer class="solr.StandardFilterFactory"/>
private TokenFilterFactory readTokenFilterFactory(SolrConfig solrConfig, Node node) {
// if (node.getNodeName() != "filter") return null;
NamedNodeMap attrs = node.getAttributes();
String className = DOMUtil.getAttr(attrs,"class","token filter");
TokenFilterFactory tfac = (TokenFilterFactory)solrConfig.newInstance(className);
if (tfac instanceof SolrConfig.Initializable) {
((SolrConfig.Initializable)tfac).init(solrConfig, DOMUtil.toMapExcept(attrs,"class"));
}
else {
log.warning("calling the deprecated form of init; should be calling init(SolrConfig solrConfig, Map<String,String> args) " + className );
tfac.init(DOMUtil.toMapExcept(attrs,"class"));
}
return tfac;
}
return new TokenizerChain(tokenizers.get(0), filters.toArray(new TokenFilterFactory[filters.size()]));
};
static abstract class DynamicReplacement implements Comparable<DynamicReplacement> {
View File
@ -22,9 +22,11 @@ import org.w3c.dom.NodeList;
import java.util.Map;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrResourceLoader;
import javax.xml.xpath.XPathConstants;
@ -82,14 +84,14 @@ public class CacheConfig {
config.args.put("name",config.nodeName);
}
SolrResourceLoader loader = solrConfig.getResourceLoader();
config.cacheImpl = (String)config.args.get("class");
config.regenImpl = (String)config.args.get("regenerator");
config.clazz = solrConfig.findClass(config.cacheImpl);
config.clazz = loader.findClass(config.cacheImpl);
if (config.regenImpl != null) {
config.regenerator = (CacheRegenerator) solrConfig.newInstance(config.regenImpl);
config.regenerator = (CacheRegenerator) loader.newInstance(config.regenImpl);
}
return config;
}
View File
@ -75,7 +75,7 @@ public class ChainedUpdateProcessorFactory extends UpdateRequestProcessorFactory
XPath xpath = XPathFactory.newInstance().newXPath();
try {
loader.load( core.getSolrConfig(), (NodeList) xpath.evaluate( "chain", node, XPathConstants.NODESET ) );
loader.load( core.getResourceLoader(), (NodeList) xpath.evaluate( "chain", node, XPathConstants.NODESET ) );
}
catch (XPathExpressionException e) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
View File
@ -21,11 +21,11 @@ import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrResourceLoader;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@ -78,9 +78,9 @@ public abstract class AbstractPluginLoader<T>
* @param node - the XML node defining this plugin
*/
@SuppressWarnings("unchecked")
protected T create( Config config, String name, String className, Node node ) throws Exception
protected T create( ResourceLoader loader, String name, String className, Node node ) throws Exception
{
return (T) config.newInstance( className, getDefaultPackages() );
return (T) loader.newInstance( className, getDefaultPackages() );
}
/**
@ -121,7 +121,7 @@ public abstract class AbstractPluginLoader<T>
* If a default element is defined, it will be returned from this function.
*
*/
public T load( Config config, NodeList nodes )
public T load( ResourceLoader loader, NodeList nodes )
{
List<PluginInitInfo> info = new ArrayList<PluginInitInfo>();
T defaultPlugin = null;
@ -137,7 +137,7 @@ public abstract class AbstractPluginLoader<T>
String className = DOMUtil.getAttr(node,"class", type);
String defaultStr = DOMUtil.getAttr(node,"default", null );
T plugin = create(config, name, className, node );
T plugin = create(loader, name, className, node );
log.info("created "+name+": " + plugin.getClass().getName() );
// Either initialize now or wait till everything has been registered
@ -149,7 +149,7 @@ public abstract class AbstractPluginLoader<T>
}
T old = register( name, plugin );
if( old != null ) {
if( old != null && !( name == null && !requireName ) ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Multiple "+type+" registered to the same name: "+name+" ignoring: "+old );
}
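With this change a ResourceLoader is all a caller needs to drive plugin loading; nothing in load() depends on Config/SolrConfig any more. A rough, hypothetical caller sketch follows (the class, names, and the boolean constructor flags are assumptions that mirror the schema loader earlier in this commit; they are not part of the change itself):

package org.apache.solr.core;

import java.util.HashMap;
import java.util.Map;

import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;

import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.request.QueryResponseWriter;
import org.apache.solr.util.plugin.AbstractPluginLoader;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/** Hypothetical caller (not part of this commit) showing the new load() signature. */
class ExampleWriterLoading {

  static Map<String,QueryResponseWriter> loadWriters(SolrConfig solrConfig, Node configRoot)
      throws Exception {
    final Map<String,QueryResponseWriter> writers = new HashMap<String,QueryResponseWriter>();

    AbstractPluginLoader<QueryResponseWriter> loader =
      // flag order assumed to be (type, preRegister, requireName), as in the schema loader above
      new AbstractPluginLoader<QueryResponseWriter>( "[solrconfig.xml] queryResponseWriter", true, true )
    {
      protected void init(QueryResponseWriter plugin, Node node) throws Exception {
        plugin.init( DOMUtil.childNodesToNamedList(node) );
      }

      protected QueryResponseWriter register(String name, QueryResponseWriter plugin) throws Exception {
        return writers.put( name, plugin );  // a non-null return signals a duplicate name
      }
    };

    XPath xpath = XPathFactory.newInstance().newXPath();
    // A ResourceLoader is now sufficient -- no Config/SolrConfig required.
    loader.load( solrConfig.getResourceLoader(),
        (NodeList)xpath.evaluate( "queryResponseWriter", configRoot, XPathConstants.NODESET ) );
    return writers;
  }
}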
View File
@ -0,0 +1,28 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.util.plugin;
import org.apache.solr.common.ResourceLoader;
/**
* @since solr 1.3
*/
public interface ResourceLoaderAware
{
void inform( ResourceLoader loader );
}
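For plugin authors, a factory that needs files from conf/ now keeps its plain init args and defers any resource access until inform() is called. The sketch below is a hypothetical factory, not part of this commit; it assumes the Lucene 2.2 Token/TokenFilter API and the ResourceLoader.openResource() method used elsewhere in Solr:

package org.apache.solr.analysis;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.util.plugin.ResourceLoaderAware;

/**
 * Hypothetical example (not part of this commit): a filter factory that drops
 * words listed in a file under conf/.  The file name arrives as a plain init
 * arg; the file itself is only opened once inform() supplies the ResourceLoader.
 */
public class ExampleDropWordsFilterFactory extends BaseTokenFilterFactory
  implements ResourceLoaderAware
{
  private final Set<String> drop = new HashSet<String>();

  public void inform(ResourceLoader loader) {
    String wordFile = args.get("words");  // e.g. words="dropwords.txt" in schema.xml
    if (wordFile == null) {
      return;
    }
    try {
      BufferedReader in = new BufferedReader(
          new InputStreamReader(loader.openResource(wordFile), "UTF-8"));
      try {
        for (String line = in.readLine(); line != null; line = in.readLine()) {
          line = line.trim();
          if (line.length() > 0) {
            drop.add(line);
          }
        }
      }
      finally {
        in.close();
      }
    }
    catch (IOException e) {
      throw new RuntimeException("unable to load " + wordFile, e);
    }
  }

  public TokenStream create(final TokenStream input) {
    return new TokenFilter(input) {
      public Token next() throws IOException {
        Token t = input.next();
        while (t != null && drop.contains(t.termText())) {
          t = input.next();
        }
        return t;
      }
    };
  }
}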
View File
@ -0,0 +1,28 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.util.plugin;
import org.apache.solr.core.SolrCore;
/**
* @since solr 1.3
*/
public interface SolrCoreAware
{
void inform( SolrCore core );
}
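On the core side the shape is the same: the SolrCore is not available when init() runs, so it arrives later via inform(). A minimal hypothetical handler (illustrative only, not part of this commit), built on the existing RequestHandlerBase:

package org.apache.solr.handler;

import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.util.plugin.SolrCoreAware;

/**
 * Hypothetical example (not part of this commit): a handler that needs the
 * enclosing SolrCore.  The core is delivered through inform(), after the core
 * itself has finished loading.
 */
public class ExampleCoreInfoHandler extends RequestHandlerBase
  implements SolrCoreAware
{
  private SolrCore core;

  public void inform(SolrCore core) {
    this.core = core;  // remember the core for use at request time
  }

  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    rsp.add("schemaName", core.getSchema().getSchemaName());
  }

  //////////////////////// SolrInfoMBean boilerplate ////////////////////////
  public String getDescription() { return "hypothetical SolrCoreAware example"; }
  public String getVersion()     { return "1.0"; }
  public String getSourceId()    { return "$Id$"; }
  public String getSource()      { return "$URL$"; }
}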
View File
@ -28,6 +28,7 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.core.SolrConfig;
/** Singleton that creates a Transformer for the XSLTServletFilter.
@ -72,7 +73,7 @@ public class TransformerProvider {
log.fine("Using cached Templates:" + filename);
}
} else {
lastTemplates = getTemplates(solrConfig, filename,cacheLifetimeSeconds);
lastTemplates = getTemplates(solrConfig.getResourceLoader(), filename,cacheLifetimeSeconds);
}
Transformer result = null;
@ -90,7 +91,7 @@ public class TransformerProvider {
}
/** Return a Templates object for the given filename */
private Templates getTemplates(SolrConfig solrConfig, String filename,int cacheLifetimeSeconds) throws IOException {
private Templates getTemplates(ResourceLoader loader, String filename,int cacheLifetimeSeconds) throws IOException {
Templates result = null;
lastFilename = null;
@ -98,7 +99,7 @@ public class TransformerProvider {
if(log.isLoggable(Level.FINE)) {
log.fine("compiling XSLT templates:" + filename);
}
final InputStream xsltStream = solrConfig.openResource("xslt/" + filename);
final InputStream xsltStream = loader.openResource("xslt/" + filename);
result = tFactory.newTemplates(new StreamSource(xsltStream));
} catch (Exception e) {
log.throwing(getClass().getName(), "newTemplates", e);
View File
@ -33,7 +33,7 @@ public class TestCapitalizationFilter extends BaseTokenTestCase {
args.put( CapitalizationFilterFactory.ONLY_FIRST_WORD, "true" );
CapitalizationFilterFactory factory = new CapitalizationFilterFactory();
factory.init( solrConfig, args );
factory.init( args );
assertEquals( "Kitten", factory.processWord( "kiTTEN", 0 ) );
factory.forceFirstLetter = true;
@ -63,7 +63,7 @@ public class TestCapitalizationFilter extends BaseTokenTestCase {
// Now try some prefixes
factory = new CapitalizationFilterFactory();
args.put( "okPrefix", "McK" ); // all words
factory.init( solrConfig, args );
factory.init( args );
out = tsToString( factory.create( new IterTokenStream( "McKinley" ) ) );
assertEquals( "McKinley", out );
View File
@ -44,7 +44,8 @@ public class TestKeepWordFilter extends BaseTokenTestCase {
// Test Stopwords
KeepWordFilterFactory factory = new KeepWordFilterFactory();
args.put( "ignoreCase", "true" );
factory.init( solrConfig, args );
factory.init( args );
factory.inform( solrConfig.getResourceLoader() );
factory.setWords( words );
List<Token> expect = tokens( "aaa BBB" );
@ -53,7 +54,8 @@ public class TestKeepWordFilter extends BaseTokenTestCase {
// Now force case
args.put( "ignoreCase", "false" );
factory.init( solrConfig, args );
factory.init( args );
factory.inform( solrConfig.getResourceLoader() );
expect = tokens( "aaa" );
real = getTokens(factory.create( new IterTokenStream(input) ));
View File
@ -50,7 +50,7 @@ public class TestPatternTokenizerFactory extends AnalysisTestCase
args.put( PatternTokenizerFactory.PATTERN, test[1] );
PatternTokenizerFactory tokenizer = new PatternTokenizerFactory();
tokenizer.init(solrConfig, args );
tokenizer.init( args );
TokenStream stream = tokenizer.create( new StringReader( test[2] ) );
String out = TestHyphenatedWordsFilter.tsToString( stream );
View File
@ -40,24 +40,24 @@ public class TestPhoneticFilter extends BaseTokenTestCase {
PhoneticFilterFactory ff = new PhoneticFilterFactory();
try {
ff.init( solrConfig, args );
ff.init( args );
fail( "missing encoder parameter" );
}
catch( Exception ex ) {}
args.put( PhoneticFilterFactory.ENCODER, "XXX" );
try {
ff.init( solrConfig, args );
ff.init( args );
fail( "unknown encoder parameter" );
}
catch( Exception ex ) {}
args.put( PhoneticFilterFactory.ENCODER, "Metaphone" );
ff.init( solrConfig, args );
ff.init( args );
assertTrue( ff.encoder instanceof Metaphone );
assertTrue( ff.inject ); // default
args.put( PhoneticFilterFactory.INJECT, "false" );
ff.init( solrConfig, args );
ff.init( args );
assertFalse( ff.inject );
}
View File
@ -0,0 +1,80 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
import org.apache.solr.analysis.KeywordTokenizerFactory;
import org.apache.solr.analysis.NGramFilterFactory;
import org.apache.solr.common.SolrException;
import org.apache.solr.handler.admin.LukeRequestHandler;
import org.apache.solr.handler.component.FacetComponent;
import org.apache.solr.request.JSONResponseWriter;
import org.apache.solr.util.plugin.ResourceLoaderAware;
import org.apache.solr.util.plugin.SolrCoreAware;
public class ResourceLoaderTest extends TestCase
{
public void testAwareCompatibility()
{
SolrResourceLoader loader = new SolrResourceLoader( "." );
Class clazz = ResourceLoaderAware.class;
// Check ResourceLoaderAware valid objects
loader.assertAwareCompatibility( clazz, new NGramFilterFactory() );
loader.assertAwareCompatibility( clazz, new KeywordTokenizerFactory() );
// Make sure it throws an error for invalid objects
Object[] invalid = new Object[] {
new NGramTokenFilter( null ),
"hello", new Float( 12.3f ),
new LukeRequestHandler(),
new JSONResponseWriter()
};
for( Object obj : invalid ) {
try {
loader.assertAwareCompatibility( clazz, obj );
Assert.fail( "Should be invalid class: "+obj + " FOR " + clazz );
}
catch( SolrException ex ) { } // OK
}
clazz = SolrCoreAware.class;
    // Check SolrCoreAware valid objects
loader.assertAwareCompatibility( clazz, new LukeRequestHandler() );
loader.assertAwareCompatibility( clazz, new FacetComponent() );
loader.assertAwareCompatibility( clazz, new JSONResponseWriter() );
// Make sure it throws an error for invalid objects
invalid = new Object[] {
new NGramFilterFactory(),
"hello", new Float( 12.3f ),
new KeywordTokenizerFactory()
};
for( Object obj : invalid ) {
try {
loader.assertAwareCompatibility( clazz, obj );
Assert.fail( "Should be invalid class: "+obj + " FOR " + clazz );
}
catch( SolrException ex ) { } // OK
}
}
}
View File
@ -103,7 +103,7 @@ public class DirectSolrConnection
SolrConfig config = null;
try {
config = new SolrConfig(instanceDir, SolrConfig.DEFAULT_CONF_FILE, null);
instanceDir = config.getInstanceDir();
instanceDir = config.getResourceLoader().getInstanceDir();
// If the Data directory is specified, initialize SolrCore directly
IndexSchema schema = new IndexSchema(config, instanceDir+"/conf/schema.xml");