mirror of https://github.com/apache/lucene.git
SOLR-1601: Schema browser does not indicate presence of charFilter
git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@884180 13f79535-47bb-0310-9956-ffa450edef68
parent 6c332be920
commit 4afe02ed8e
@@ -99,6 +99,7 @@ Bug Fixes
   the behavior of a non-distributed request since it only returned
   the id,score fields instead of all fields in addition to score. (yonik)
 
+* SOLR-1601: Schema browser does not indicate presence of charFilter. (koji)
 
 Other Changes
 ----------------------
@@ -44,6 +44,7 @@ import org.apache.lucene.search.ConstantScoreRangeQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.PriorityQueue;
+import org.apache.solr.analysis.CharFilterFactory;
 import org.apache.solr.analysis.TokenFilterFactory;
 import org.apache.solr.analysis.TokenizerChain;
 import org.apache.solr.analysis.TokenizerFactory;
@@ -391,25 +392,40 @@ public class LukeRequestHandler extends RequestHandlerBase
     SimpleOrderedMap<Object> aninfo = new SimpleOrderedMap<Object>();
     aninfo.add("className", analyzer.getClass().getName());
     if (analyzer instanceof TokenizerChain) {
-      SimpleOrderedMap<Object> tokenizer = new SimpleOrderedMap<Object>();
+
       TokenizerChain tchain = (TokenizerChain)analyzer;
-      TokenizerFactory tfac = tchain.getTokenizerFactory();
-      tokenizer.add("className", tfac.getClass().getName());
-      tokenizer.add("args", tfac.getArgs());
-      aninfo.add("tokenizer", tokenizer);
-      TokenFilterFactory[] filtfacs = tchain.getTokenFilterFactories();
-
+
+      CharFilterFactory[] cfiltfacs = tchain.getCharFilterFactories();
+      SimpleOrderedMap<Map<String, Object>> cfilters = new SimpleOrderedMap<Map<String, Object>>();
+      for (CharFilterFactory cfiltfac : cfiltfacs) {
+        Map<String, Object> tok = new HashMap<String, Object>();
+        String className = cfiltfac.getClass().getName();
+        tok.put("className", className);
+        tok.put("args", cfiltfac.getArgs());
+        cfilters.add(className.substring(className.lastIndexOf('.')+1), tok);
+      }
+      if (cfilters.size() > 0) {
+        aninfo.add("charFilters", cfilters);
+      }
+
+      SimpleOrderedMap<Object> tokenizer = new SimpleOrderedMap<Object>();
+      TokenizerFactory tfac = tchain.getTokenizerFactory();
+      tokenizer.add("className", tfac.getClass().getName());
+      tokenizer.add("args", tfac.getArgs());
+      aninfo.add("tokenizer", tokenizer);
+
+      TokenFilterFactory[] filtfacs = tchain.getTokenFilterFactories();
       SimpleOrderedMap<Map<String, Object>> filters = new SimpleOrderedMap<Map<String, Object>>();
       for (TokenFilterFactory filtfac : filtfacs) {
         Map<String, Object> tok = new HashMap<String, Object>();
         String className = filtfac.getClass().getName();
         tok.put("className", className);
         tok.put("args", filtfac.getArgs());
         filters.add(className.substring(className.lastIndexOf('.')+1), tok);
       }
       if (filters.size() > 0) {
         aninfo.add("filters", filters);
       }
     }
     return aninfo;
   }
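For context, here is a sketch of the analyzer entry the Luke response carries after this change, written as the JavaScript object the schema browser receives. The particular char filter, tokenizer, and filter factories and their args below are illustrative assumptions, not part of this commit; only the key layout mirrors the aninfo map built by the handler code above.

// Hypothetical analyzer entry as delivered to the schema browser JS.
// Factory class names and args are examples only; the structure follows
// the aninfo map built in LukeRequestHandler above.
var analyzer = {
  className: "org.apache.solr.analysis.TokenizerChain",
  charFilters: {                       // new section added by this commit
    MappingCharFilterFactory: {
      className: "org.apache.solr.analysis.MappingCharFilterFactory",
      args: {mapping: "mapping-ISOLatin1Accent.txt"}
    }
  },
  tokenizer: {
    className: "org.apache.solr.analysis.WhitespaceTokenizerFactory",
    args: {}
  },
  filters: {
    LowerCaseFilterFactory: {
      className: "org.apache.solr.analysis.LowerCaseFilterFactory",
      args: {}
    }
  }
};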
@@ -282,7 +282,7 @@
     //Displays information about an Analyzer in the main content area
     displayAnalyzer: function(analyzer, type, shouldCollapse) {
       var tid = type.replace(' ', '');
-      var collapse = shouldCollapse && (analyzer.tokenizer != undefined || analyzer.filters != undefined);
+      var collapse = shouldCollapse && (analyzer.charFilters != undefined || analyzer.tokenizer != undefined || analyzer.filters != undefined);
       $('#mainInfo').append(solr.createNameValueText(type, function(p) {
         p.appendChild(document.createTextNode(analyzer.className + ' '));
         if (collapse) {
@@ -298,6 +298,24 @@
       if (collapse) {
         adiv.style.display='none';
       }
+      if (analyzer.charFilters != undefined) {
+        adiv.appendChild(solr.createNameValueText('Char Filters', ''));
+        var f = document.createElement('ol');
+        $.each(analyzer.charFilters, function(i, item) {
+          var fil = document.createElement('li');
+          var filterText = item.className;
+          if (item.args != undefined) {
+            filterText += ' args:{'
+            $.each(item.args, function(fi, fitem) {
+              filterText += fi + ': ' + fitem + ' ';
+            });
+            filterText +='}';
+            fil.innerHTML = filterText;
+            f.appendChild(fil);
+          }
+        });
+        adiv.appendChild(f);
+      }
       if (analyzer.tokenizer != undefined) {
         adiv.appendChild(solr.createNameValueText("Tokenizer Class", analyzer.tokenizer.className));
       }
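Given the illustrative charFilters entry sketched earlier, the loop added above renders a 'Char Filters' heading followed by an ordered list. A minimal sketch of the text it builds for one list item, assuming jQuery's $ as used by this admin page and the hypothetical MappingCharFilterFactory entry from the earlier example:

// Sketch only: reproduces the string-building done by the new loop for the
// hypothetical entry from the example above.
var item = analyzer.charFilters.MappingCharFilterFactory;
var filterText = item.className;
if (item.args != undefined) {
  filterText += ' args:{';
  $.each(item.args, function(fi, fitem) {
    filterText += fi + ': ' + fitem + ' ';
  });
  filterText += '}';
}
// filterText is now roughly:
// "org.apache.solr.analysis.MappingCharFilterFactory args:{mapping: mapping-ISOLatin1Accent.txt }"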