mirror of https://github.com/apache/lucene.git
SOLR-1601: Schema browser does not indicate presence of charFilter
git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@884180 13f79535-47bb-0310-9956-ffa450edef68
commit 4afe02ed8e
parent 6c332be920
@@ -99,6 +99,7 @@ Bug Fixes
   the behavior of a non-distributed request since it only returned
   the id,score fields instead of all fields in addition to score. (yonik)
 
+* SOLR-1601: Schema browser does not indicate presence of charFilter. (koji)
 
 Other Changes
 ----------------------
@@ -44,6 +44,7 @@ import org.apache.lucene.search.ConstantScoreRangeQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.PriorityQueue;
+import org.apache.solr.analysis.CharFilterFactory;
 import org.apache.solr.analysis.TokenFilterFactory;
 import org.apache.solr.analysis.TokenizerChain;
 import org.apache.solr.analysis.TokenizerFactory;
@@ -391,25 +392,40 @@ public class LukeRequestHandler extends RequestHandlerBase
     SimpleOrderedMap<Object> aninfo = new SimpleOrderedMap<Object>();
     aninfo.add("className", analyzer.getClass().getName());
     if (analyzer instanceof TokenizerChain) {
-      SimpleOrderedMap<Object> tokenizer = new SimpleOrderedMap<Object>();
-      TokenizerChain tchain = (TokenizerChain)analyzer;
-      TokenizerFactory tfac = tchain.getTokenizerFactory();
-      tokenizer.add("className", tfac.getClass().getName());
-      tokenizer.add("args", tfac.getArgs());
-      aninfo.add("tokenizer", tokenizer);
-      TokenFilterFactory[] filtfacs = tchain.getTokenFilterFactories();
-
-      SimpleOrderedMap<Map<String, Object>> filters = new SimpleOrderedMap<Map<String, Object>>();
-      for (TokenFilterFactory filtfac : filtfacs) {
-        Map<String, Object> tok = new HashMap<String, Object>();
-        String className = filtfac.getClass().getName();
-        tok.put("className", className);
-        tok.put("args", filtfac.getArgs());
-        filters.add(className.substring(className.lastIndexOf('.')+1), tok);
-      }
-      if (filters.size() > 0) {
-        aninfo.add("filters", filters);
-      }
+
+      TokenizerChain tchain = (TokenizerChain)analyzer;
+
+      CharFilterFactory[] cfiltfacs = tchain.getCharFilterFactories();
+      SimpleOrderedMap<Map<String, Object>> cfilters = new SimpleOrderedMap<Map<String, Object>>();
+      for (CharFilterFactory cfiltfac : cfiltfacs) {
+        Map<String, Object> tok = new HashMap<String, Object>();
+        String className = cfiltfac.getClass().getName();
+        tok.put("className", className);
+        tok.put("args", cfiltfac.getArgs());
+        cfilters.add(className.substring(className.lastIndexOf('.')+1), tok);
+      }
+      if (cfilters.size() > 0) {
+        aninfo.add("charFilters", cfilters);
+      }
+
+      SimpleOrderedMap<Object> tokenizer = new SimpleOrderedMap<Object>();
+      TokenizerFactory tfac = tchain.getTokenizerFactory();
+      tokenizer.add("className", tfac.getClass().getName());
+      tokenizer.add("args", tfac.getArgs());
+      aninfo.add("tokenizer", tokenizer);
+
+      TokenFilterFactory[] filtfacs = tchain.getTokenFilterFactories();
+      SimpleOrderedMap<Map<String, Object>> filters = new SimpleOrderedMap<Map<String, Object>>();
+      for (TokenFilterFactory filtfac : filtfacs) {
+        Map<String, Object> tok = new HashMap<String, Object>();
+        String className = filtfac.getClass().getName();
+        tok.put("className", className);
+        tok.put("args", filtfac.getArgs());
+        filters.add(className.substring(className.lastIndexOf('.')+1), tok);
+      }
+      if (filters.size() > 0) {
+        aninfo.add("filters", filters);
+      }
     }
     return aninfo;
   }
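For reference, the handler change above walks the three stages of a TokenizerChain (char filters, tokenizer, token filters) through its factory accessors and reports each stage's class name and init args. Below is a minimal illustrative Java sketch, not part of this commit, that uses those same accessors to dump an analyzer pipeline; the class name AnalyzerPipelineDump is made up, and the null check on getCharFilterFactories() is a defensive assumption for chains configured without char filters.

import org.apache.solr.analysis.CharFilterFactory;
import org.apache.solr.analysis.TokenFilterFactory;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.analysis.TokenizerFactory;

// Illustrative sketch only (hypothetical class, not from the commit).
public class AnalyzerPipelineDump {

  // Prints each stage of a TokenizerChain, i.e. the same information the
  // patched LukeRequestHandler now reports under "charFilters", "tokenizer"
  // and "filters".
  public static void dump(TokenizerChain tchain) {
    // Char filters run first, rewriting the raw character stream before tokenization.
    CharFilterFactory[] cfiltfacs = tchain.getCharFilterFactories();
    if (cfiltfacs != null) { // assumption: guard chains built without char filters
      for (CharFilterFactory cf : cfiltfacs) {
        System.out.println("charFilter: " + cf.getClass().getName() + " args=" + cf.getArgs());
      }
    }

    // Exactly one tokenizer turns the (possibly rewritten) characters into tokens.
    TokenizerFactory tfac = tchain.getTokenizerFactory();
    System.out.println("tokenizer: " + tfac.getClass().getName() + " args=" + tfac.getArgs());

    // Token filters then post-process the token stream.
    for (TokenFilterFactory tf : tchain.getTokenFilterFactories()) {
      System.out.println("filter: " + tf.getClass().getName() + " args=" + tf.getArgs());
    }
  }
}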
@@ -282,7 +282,7 @@
     //Displays information about an Analyzer in the main content area
     displayAnalyzer: function(analyzer, type, shouldCollapse) {
       var tid = type.replace(' ', '');
-      var collapse = shouldCollapse && (analyzer.tokenizer != undefined || analyzer.filters != undefined);
+      var collapse = shouldCollapse && (analyzer.charFilters != undefined || analyzer.tokenizer != undefined || analyzer.filters != undefined);
       $('#mainInfo').append(solr.createNameValueText(type, function(p) {
         p.appendChild(document.createTextNode(analyzer.className + ' '));
         if (collapse) {
|
@ -298,6 +298,24 @@
|
|||
if (collapse) {
|
||||
adiv.style.display='none';
|
||||
}
|
||||
if (analyzer.charFilters != undefined) {
|
||||
adiv.appendChild(solr.createNameValueText('Char Filters', ''));
|
||||
var f = document.createElement('ol');
|
||||
$.each(analyzer.charFilters, function(i, item) {
|
||||
var fil = document.createElement('li');
|
||||
var filterText = item.className;
|
||||
if (item.args != undefined) {
|
||||
filterText += ' args:{'
|
||||
$.each(item.args, function(fi, fitem) {
|
||||
filterText += fi + ': ' + fitem + ' ';
|
||||
});
|
||||
filterText +='}';
|
||||
fil.innerHTML = filterText;
|
||||
f.appendChild(fil);
|
||||
}
|
||||
});
|
||||
adiv.appendChild(f);
|
||||
}
|
||||
if (analyzer.tokenizer != undefined) {
|
||||
adiv.appendChild(solr.createNameValueText("Tokenizer Class", analyzer.tokenizer.className));
|
||||
}
|
||||
|