mirror of https://github.com/apache/lucene.git
adding new KeywordTokenizerFactory
git-svn-id: https://svn.apache.org/repos/asf/incubator/solr/trunk@411882 13f79535-47bb-0310-9956-ffa450edef68
parent 4ce2a2d2a2
commit 003e9eaf96
@@ -14,7 +14,8 @@ New Features
 7. Added DisMaxRequestHandler and SolrPluginUtils. (Chris Hostetter)
 8. Support for customizing the QueryResponseWriter per request
    (Mike Baranczak / SOLR-16 / hossman)
+9. Added KeywordTokenizerFactory (hossman)
 
 Changes in runtime behavior
 1. classes reorganized into different packages, package names changed to Apache
 2. force read of document stored fields in QuerySenderListener
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2006 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.analysis;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.KeywordTokenizer;
+
+import java.io.Reader;
+
+/**
+ * @version $Id: LowerCaseTokenizerFactory.java 382610 2006-03-03 01:43:03Z yonik $
+ */
+public class KeywordTokenizerFactory extends BaseTokenizerFactory {
+  public TokenStream create(Reader input) {
+    return new KeywordTokenizer(input);
+  }
+}
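The new factory is a thin wrapper: it hands the field's Reader straight to Lucene's KeywordTokenizer, which emits the entire input as one unmodified token. As a rough illustration only (not part of this commit), the sketch below drives the factory by hand, assuming the Lucene 1.x/2.x TokenStream API that Solr built against at the time (next() returning a Token, and Token.termText()); the demo class name is hypothetical, and the sample string mirrors the value used in the test further down.

import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.solr.analysis.KeywordTokenizerFactory;

// Hypothetical demo class, not part of the commit.
public class KeywordTokenizerFactoryDemo {
  public static void main(String[] args) throws IOException {
    KeywordTokenizerFactory factory = new KeywordTokenizerFactory();

    // Analyze the same value the new test indexes into the "keywordtok" field.
    TokenStream stream = factory.create(new StringReader("How nOw broWn-ish C.o.w. ?"));

    // Old-style Lucene iteration: next() returns one Token per call, null at end.
    for (Token t = stream.next(); t != null; t = stream.next()) {
      System.out.println("token: [" + t.termText() + "]");
    }
    // KeywordTokenizer does not split, lowercase, or strip anything, so the loop
    // prints exactly one line: token: [How nOw broWn-ish C.o.w. ?]
  }
}

Because nothing is split or normalized, a field analyzed this way behaves like a verbatim string, which is what the test and schema changes below rely on.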
@@ -182,6 +182,20 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
     assertEquals("value", arrayParams[1]);
   }
 
+  public void testKeywordTokenizerFactory() {
+
+    assertU(adoc("id", "42",
+                 "keywordtok", "How nOw broWn-ish C.o.w. ?"));
+    assertU(commit());
+    assertQ("stored value matches?",
+            req("id:42")
+            ,"//str[.='How nOw broWn-ish C.o.w. ?']"
+            );
+    assertQ("query on exact matches?",
+            req("keywordtok:\"How nOw broWn-ish C.o.w. ?\"")
+            ,"//str[.='How nOw broWn-ish C.o.w. ?']"
+            );
+  }
 
 
   // /** this doesn't work, but if it did, this is how we'd test it. */
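Note that the second assertQ passes only because the whole field value was indexed as a single term: the quoted query reproduces the complete original string and therefore matches that one term exactly. A query on an individual word (for example keywordtok:nOw, a hypothetical query not in the test) would match nothing, since KeywordTokenizerFactory never produces per-word terms.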
@@ -95,6 +95,9 @@
   <fieldtype name="lowertok" class="solr.TextField">
     <analyzer><tokenizer class="solr.LowerCaseTokenizerFactory"/></analyzer>
   </fieldtype>
+  <fieldtype name="keywordtok" class="solr.TextField">
+    <analyzer><tokenizer class="solr.KeywordTokenizerFactory"/></analyzer>
+  </fieldtype>
   <fieldtype name="standardtok" class="solr.TextField">
     <analyzer><tokenizer class="solr.StandardTokenizerFactory"/></analyzer>
   </fieldtype>
@@ -270,6 +273,7 @@
   <!-- fields to test individual tokenizers and tokenfilters -->
   <field name="teststop" type="teststop" indexed="true" stored="true"/>
   <field name="lowertok" type="lowertok" indexed="true" stored="true"/>
+  <field name="keywordtok" type="keywordtok" indexed="true" stored="true"/>
   <field name="standardtok" type="standardtok" indexed="true" stored="true"/>
   <field name="HTMLstandardtok" type="HTMLstandardtok" indexed="true" stored="true"/>
   <field name="lettertok" type="lettertok" indexed="true" stored="true"/>