SOLR-657: Replace deprecated calls with the non-deprecated equivalents
git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@701485 13f79535-47bb-0310-9956-ffa450edef68
commit dbf9fc8683
parent b5081cdf69

@@ -41,6 +41,9 @@ New Features
    point and optimized status.
    See http://lucene.apache.org/java/2_3_2/api/org/apache/lucene/index/IndexDeletionPolicy.html
    (yonik, Noble Paul, Akshay Ukey via shalin)
+
+ 3. SOLR-657: Replace deprecated calls with the non-deprecated equivalents
+    (Lars Kotthoff via ryan)


 Optimizations
@@ -56,6 +59,7 @@ Bug Fixes

 2. SOLR-771: CoreAdminHandler STATUS should display 'normalized' paths (koji, hossman, shalin)


 Build
 ----------------------
 1. SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers)

@@ -134,7 +134,7 @@ public class SynonymFilterFactory extends BaseTokenFilterFactory implements Reso
     List<String> tokList = new ArrayList<String>();
     try {
       for( Token token = ts.next(); token != null; token = ts.next() ){
-        String text = token.termText();
+        String text = new String(token.termBuffer(), 0, token.termLength());
         if( text.length() > 0 )
           tokList.add( text );
       }

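For reference, a minimal sketch of the Token-text replacement applied above and repeated throughout this patch, assuming the Lucene 2.3/2.4-era Token API; the class name and sample value are illustrative only, not part of the commit:

import org.apache.lucene.analysis.Token;

public class TokenTextExample {
  public static void main(String[] args) {
    Token token = new Token("example", 0, 7);  // term text "example", offsets 0..7
    // Deprecated accessor: String text = token.termText();
    // Non-deprecated equivalent: copy the text out of the reusable term buffer.
    String text = new String(token.termBuffer(), 0, token.termLength());
    System.out.println(text);  // prints "example"
  }
}
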
@@ -20,6 +20,7 @@ package org.apache.solr.core;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.DocList;
 import org.apache.solr.search.DocIterator;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryResponse;
@@ -47,7 +48,7 @@ class QuerySenderListener extends AbstractSolrEventListener {
           };

           SolrQueryResponse rsp = new SolrQueryResponse();
-          core.execute(req,rsp);
+          core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);

           // Retrieve the Document instances (not just the ids) to warm
           // the OS disk cache, and any Solr document cache. Only the top

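A hedged sketch of the handler-resolution idiom used in the change above, assuming the Solr 1.3-era package layout; the helper class and method name are made up for illustration:

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;

class ExecuteByQt {
  // Resolve the request handler from the "qt" parameter instead of relying on
  // the deprecated two-argument core.execute(req, rsp) and req.getQueryType().
  static SolrQueryResponse execute(SolrCore core, SolrQueryRequest req) {
    SolrQueryResponse rsp = new SolrQueryResponse();
    SolrRequestHandler handler = core.getRequestHandler(req.getParams().get(CommonParams.QT));
    core.execute(handler, req, rsp);
    return rsp;
  }
}
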
@@ -39,7 +39,7 @@ import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.search.QParserPlugin;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.ValueSourceParser;
-import org.apache.solr.update.DirectUpdateHandler;
+import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.update.SolrIndexWriter;
 import org.apache.solr.update.UpdateHandler;
 import org.apache.solr.update.processor.LogUpdateProcessorFactory;
@@ -487,7 +487,7 @@ public final class SolrCore implements SolrInfoMBean {
     getSearcher(false,false,null);

     updateHandler = createUpdateHandler(
-        solrConfig.get("updateHandler/@class", DirectUpdateHandler.class.getName())
+        solrConfig.get("updateHandler/@class", DirectUpdateHandler2.class.getName())
     );

     infoRegistry.put("updateHandler", updateHandler);
@@ -1225,8 +1225,8 @@ public final class SolrCore implements SolrInfoMBean {

   public void execute(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
     if (handler==null) {
-      log.warn(logid+"Null Request Handler '" + req.getQueryType() +"' :" + req);
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"Null Request Handler '" + req.getQueryType() + "'", true);
+      log.warn(logid+"Null Request Handler '" + req.getParams().get(CommonParams.QT) +"' :" + req);
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"Null Request Handler '" + req.getParams().get(CommonParams.QT) + "'", true);
     }
     // setup response header and handle request
     final NamedList<Object> responseHeader = new SimpleOrderedMap<Object>();
@@ -1355,7 +1355,7 @@ public final class SolrCore implements SolrInfoMBean {
   * 'wt' parameter, attempts to find that one; otherwise return the default writer.
   */
  public final QueryResponseWriter getQueryResponseWriter(SolrQueryRequest request) {
-    return getQueryResponseWriter(request.getParam("wt"));
+    return getQueryResponseWriter(request.getParams().get(CommonParams.WT));
  }

  private final Map<String, QParserPlugin> qParserPlugins = new HashMap<String, QParserPlugin>();

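The same getParam-to-SolrParams migration recurs in most of the hunks below; a small illustrative sketch, assuming the Solr 1.3-era SolrQueryRequest and CommonParams (the class and method names here are hypothetical):

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.request.SolrQueryRequest;

class ParamAccessExample {
  static String writerName(SolrQueryRequest request) {
    // Deprecated: request.getParam("wt")
    // Non-deprecated: go through the request's SolrParams, ideally via a constant.
    return request.getParams().get(CommonParams.WT);
  }
}
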
@@ -376,7 +376,7 @@ public class LukeRequestHandler extends RequestHandlerBase
     finfo.add("fields", fields);
     finfo.add("dynamicFields", dynamicFields);
     finfo.add("uniqueKeyField", uniqueField.getName());
-    finfo.add("defaultSearchField", schema.getDefaultSearchFieldName());
+    finfo.add("defaultSearchField", schema.getSolrQueryParser(null).getField());
     finfo.add("types", types);
     return finfo;
   }

@@ -73,7 +73,7 @@ public class SystemInfoHandler extends RequestHandlerBase
     SimpleOrderedMap<Object> info = new SimpleOrderedMap<Object>();

     IndexSchema schema = core.getSchema();
-    info.add( "schema", schema != null ? schema.getName():"no schema!" );
+    info.add( "schema", schema != null ? schema.getSchemaName():"no schema!" );

     // Host
     InetAddress addr = InetAddress.getLocalHost();

@@ -300,7 +300,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
     TokenStream tokens = analyzer.tokenStream( null, new StringReader( query ) );
     Token token = tokens.next();
     while( token != null ) {
-      norm.append( token.termText() );
+      norm.append( new String(token.termBuffer(), 0, token.termLength()) );
       token = tokens.next();
     }
     return norm.toString();

@@ -18,6 +18,7 @@
 package org.apache.solr.request;

 import org.apache.lucene.document.Document;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.FastWriter;
 import org.apache.solr.common.SolrDocument;
@@ -54,7 +55,7 @@ public abstract class TextResponseWriter {
     this.schema = req.getSchema();
     this.req = req;
     this.rsp = rsp;
-    String indent = req.getParam("indent");
+    String indent = req.getParams().get("indent");
     if (indent != null && !"".equals(indent) && !"off".equals(indent)) {
       doIndent=true;
     }

@@ -19,6 +19,7 @@ package org.apache.solr.request;

 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.XML;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -61,18 +62,18 @@ final public class XMLWriter {

  public static void writeResponse(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {

-    String ver = req.getParam("version");
+    String ver = req.getParams().get(CommonParams.VERSION);

     writer.write(XML_START1);

-    String stylesheet = req.getParam("stylesheet");
+    String stylesheet = req.getParams().get("stylesheet");
     if (stylesheet != null && stylesheet.length() > 0) {
       writer.write(XML_STYLESHEET);
       writer.write(stylesheet);
       writer.write(XML_STYLESHEET_END);
     }

-    String noSchema = req.getParam("noSchema");
+    String noSchema = req.getParams().get("noSchema");
     // todo - change when schema becomes available?
     if (false && noSchema == null)
       writer.write(XML_START2_SCHEMA);
@@ -87,7 +88,7 @@ final public class XMLWriter {
     XMLWriter xw = new XMLWriter(writer, req.getSchema(), req, ver);
     xw.defaultFieldList = rsp.getReturnFields();

-    String indent = req.getParam("indent");
+    String indent = req.getParams().get("indent");
     if (indent != null) {
       if ("".equals(indent) || "off".equals(indent)) {
         xw.setIndent(false);

@@ -97,7 +97,7 @@ public class FieldQParserPlugin extends QParserPlugin {
         return null;
       else if (lst.size() == 1) {
         t = lst.get(0);
-        return new TermQuery(new Term(field, t.termText()));
+        return new TermQuery(new Term(field, new String(t.termBuffer(), 0, t.termLength())));
       } else {
         if (severalTokensAtSamePosition) {
           if (positionCount == 1) {
@@ -106,7 +106,7 @@ public class FieldQParserPlugin extends QParserPlugin {
             for (int i = 0; i < lst.size(); i++) {
               t = (org.apache.lucene.analysis.Token) lst.get(i);
               TermQuery currentQuery = new TermQuery(
-                  new Term(field, t.termText()));
+                  new Term(field, new String(t.termBuffer(), 0, t.termLength())));
               q.add(currentQuery, BooleanClause.Occur.SHOULD);
             }
             return q;
@@ -122,7 +122,7 @@ public class FieldQParserPlugin extends QParserPlugin {
                 mpq.add((Term[])multiTerms.toArray(new Term[0]));
                 multiTerms.clear();
               }
-              multiTerms.add(new Term(field, t.termText()));
+              multiTerms.add(new Term(field, new String(t.termBuffer(), 0, t.termLength())));
             }
             mpq.add((Term[])multiTerms.toArray(new Term[0]));
             return mpq;
@@ -132,7 +132,8 @@ public class FieldQParserPlugin extends QParserPlugin {
         PhraseQuery q = new PhraseQuery();
         q.setSlop(phraseSlop);
         for (int i = 0; i < lst.size(); i++) {
-          q.add(new Term(field, lst.get(i).termText()));
+          Token token = lst.get(i);
+          q.add(new Term(field, new String(token.termBuffer(), 0, token.termLength())));
         }
         return q;
       }

@@ -61,7 +61,7 @@ class LuceneQParser extends QParser {

     String defaultField = getParam(CommonParams.DF);
     if (defaultField==null) {
-      defaultField = getReq().getSchema().getDefaultSearchFieldName();
+      defaultField = getReq().getSchema().getSolrQueryParser(null).getField();
     }
     lparser = new SolrQueryParser(this, defaultField);

@@ -71,7 +71,7 @@ class LuceneQParser extends QParser {
       lparser.setDefaultOperator("AND".equals(opParam) ? QueryParser.Operator.AND : QueryParser.Operator.OR);
     } else {
       // try to get default operator from schema
-      String operator = getReq().getSchema().getQueryParserDefaultOperator();
+      String operator = getReq().getSchema().getSolrQueryParser(null).getField();
       lparser.setDefaultOperator("AND".equals(operator) ?
                                  QueryParser.Operator.AND : QueryParser.Operator.OR);
     }

@@ -97,7 +97,7 @@ if (c.query instanceof TermQuery) {
       filter = (Filter)cache.get(filterQuery);
     }
     if (filter == null) { // miss
-      filter = new QueryFilter(filterQuery); // construct new entry
+      filter = new CachingWrapperFilter(new QueryWrapperFilter(filterQuery)); // construct new entry
       synchronized (cache) {
         cache.put(filterQuery, filter); // cache it
       }

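A minimal sketch of the filter substitution above, assuming the Lucene 2.4 search API; the field and term values are placeholders:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.CachingWrapperFilter;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;

class FilterExample {
  static Filter cachedFilter(Query filterQuery) {
    // Deprecated: new QueryFilter(filterQuery) combined wrapping and caching in one class.
    // Non-deprecated equivalent composes the two steps explicitly:
    return new CachingWrapperFilter(new QueryWrapperFilter(filterQuery));
  }

  public static void main(String[] args) {
    System.out.println(cachedFilter(new TermQuery(new Term("field", "value"))));
  }
}
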
@@ -325,7 +325,7 @@ class MultiValueTokenStream extends TokenStream {
       }
       // create an modified token which is the offset into the concatenated
       // string of all values
-      Token offsetToken = new Token(nextToken.termText(),
+      Token offsetToken = new Token(new String(nextToken.termBuffer(), 0, nextToken.termLength()),
                                     nextToken.startOffset() + curOffset,
                                     nextToken.endOffset() + curOffset);
       offsetToken.setPositionIncrement(nextToken.getPositionIncrement() + extra*10);

@@ -178,7 +178,7 @@ public class SolrPluginUtils {
   public static int setReturnFields(SolrQueryRequest req,
                                     SolrQueryResponse res) {

-    return setReturnFields(req.getParam(FL), res);
+    return setReturnFields(req.getParams().get(org.apache.solr.common.params.CommonParams.FL), res);
   }

   /**
@@ -380,14 +380,14 @@ public class SolrPluginUtils {
                                         DocList results)
     throws IOException {

-    String debug = req.getParam(org.apache.solr.common.params.CommonParams.DEBUG_QUERY);
+    String debug = req.getParams().get(org.apache.solr.common.params.CommonParams.DEBUG_QUERY);

     NamedList dbg = null;
     if (debug!=null) {
       dbg = new SimpleOrderedMap();

       /* userQuery may have been pre-processes .. expose that */
-      dbg.add("rawquerystring", req.getQueryString());
+      dbg.add("rawquerystring", req.getParams().get(org.apache.solr.common.params.CommonParams.Q));
       dbg.add("querystring", userQuery);

       /* QueryParsing.toString isn't perfect, use it to see converted
@@ -399,7 +399,7 @@ public class SolrPluginUtils {

       dbg.add("explain", getExplainList
               (query, results, req.getSearcher(), req.getSchema()));
-      String otherQueryS = req.getParam("explainOther");
+      String otherQueryS = req.getParams().get(org.apache.solr.common.params.CommonParams.EXPLAIN_OTHER);
       if (otherQueryS != null && otherQueryS.length() > 0) {
         DocList otherResults = doSimpleQuery
           (otherQueryS,req.getSearcher(), req.getSchema(),0,10);
@@ -802,7 +802,7 @@ public class SolrPluginUtils {
   */
  public static Sort getSort(SolrQueryRequest req) {

-    String sort = req.getParam(org.apache.solr.common.params.CommonParams.SORT);
+    String sort = req.getParams().get(org.apache.solr.common.params.CommonParams.SORT);
    if (null == sort || sort.equals("")) {
      return null;
    }

@@ -131,7 +131,7 @@ class FindClasses {
     try {
       for (int i =0; i < jars.length; i++) {
         jarFiles[i] = new JarFile(jars[i]);
-        urls[i] = jars[i].toURL();
+        urls[i] = jars[i].toURI().toURL();
       }
     } catch (MalformedURLException e) {
       throw new RuntimeException

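For reference, a sketch of the File-to-URL change above; this is standard JDK behaviour, shown here with a placeholder path:

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;

class ToUrlExample {
  public static void main(String[] args) throws MalformedURLException {
    File jar = new File("lib/example.jar");
    // Deprecated since Java 6: jar.toURL() does not escape illegal URL characters.
    // Non-deprecated equivalent escapes the path correctly:
    URL url = jar.toURI().toURL();
    System.out.println(url);
  }
}
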
@@ -17,6 +17,7 @@

 package org.apache.solr.util;

+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.XML;
 import org.apache.solr.core.SolrConfig;
@@ -117,7 +118,7 @@ public class TestHarness {
   public TestHarness( String dataDirectory,
                       SolrConfig solrConfig,
                       String schemaFile) {
-     this( dataDirectory, solrConfig, new IndexSchema(solrConfig, schemaFile));
+     this( dataDirectory, solrConfig, new IndexSchema(solrConfig, schemaFile, null));
   }
   /**
    * @param dataDirectory path for index data, will not be cleaned up
@@ -300,7 +301,7 @@ public class TestHarness {
   * @see LocalSolrQueryRequest
   */
  public String query(SolrQueryRequest req) throws IOException, Exception {
-    return query(req.getQueryType(), req);
+    return query(req.getParams().get(CommonParams.QT), req);
  }

  /**

@@ -21,6 +21,7 @@ import org.apache.lucene.document.*;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.solr.common.params.AppendedSolrParams;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.DefaultSolrParams;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.SolrParams;
@@ -297,14 +298,14 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
     args.put("string", "string value");
     args.put("array", new String[] {"array", "value"});
     SolrQueryRequest req = new LocalSolrQueryRequest(null, null, null, 0, 20, args);
-    assertEquals("string value", req.getParam("string"));
-    assertEquals("array", req.getParam("array"));
+    assertEquals("string value", req.getParams().get("string"));
+    assertEquals("array", req.getParams().get("array"));

-    String[] stringParams = req.getParams("string");
+    String[] stringParams = req.getParams().getParams("string");
     assertEquals(1, stringParams.length);
     assertEquals("string value", stringParams[0]);

-    String[] arrayParams = req.getParams("array");
+    String[] arrayParams = req.getParams().getParams("array");
     assertEquals(2, arrayParams.length);
     assertEquals("array", arrayParams[0]);
     assertEquals("value", arrayParams[1]);
@@ -337,7 +338,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {

   public void testTermVectorFields() {

-    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile());
+    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile(), null);
     SchemaField f; // Solr field type
     Field luf; // Lucene field

@@ -506,7 +507,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
   }
   public void testCompressableFieldType() {

-    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile());
+    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile(), null);
     SchemaField f; // Solr field type
     Field luf; // Lucene field

@@ -538,7 +539,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {

     SolrQueryRequest req = req("q", "title:keyword", "fl", "id,title,test_hlt");
     SolrQueryResponse rsp = new SolrQueryResponse();
-    core.execute(req, rsp);
+    core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);

     DocList dl = (DocList) rsp.getValues().get("response");
     org.apache.lucene.document.Document d = req.getSearcher().doc(dl.iterator().nextDoc());
@@ -558,7 +559,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {

     SolrQueryRequest req = req("q", "title:keyword", "fl", "id,title");
     SolrQueryResponse rsp = new SolrQueryResponse();
-    core.execute(req, rsp);
+    core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);

     DocList dl = (DocList) rsp.getValues().get("response");
     DocIterator di = dl.iterator();

@@ -37,10 +37,10 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
     StringBuffer out = new StringBuffer();
     Token t = in.next();
     if (null != t)
-      out.append(t.termText());
+      out.append(new String(t.termBuffer(), 0, t.termLength()));

     for (t = in.next(); null != t; t = in.next()) {
-      out.append(" ").append(t.termText());
+      out.append(" ").append(new String(t.termBuffer(), 0, t.termLength()));
     }
     in.close();
     return out.toString();
@@ -49,7 +49,7 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
   public List<String> tok2str(Iterable<Token> tokLst) {
     ArrayList<String> lst = new ArrayList<String>();
     for ( Token t : tokLst ) {
-      lst.add( t.termText());
+      lst.add( new String(t.termBuffer(), 0, t.termLength()));
     }
     return lst;
   }
@@ -70,7 +70,7 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
     for (Iterator iter = a.iterator(); iter.hasNext();) {
       Token tok = (Token)iter.next();
       pos += tok.getPositionIncrement();
-      if (!tokAt(b, tok.termText(), pos
+      if (!tokAt(b, new String(tok.termBuffer(), 0, tok.termLength()), pos
                  , checkOff ? tok.startOffset() : -1
                  , checkOff ? tok.endOffset() : -1
                  ))
@@ -85,7 +85,7 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
     for (Iterator iter = lst.iterator(); iter.hasNext();) {
       Token tok = (Token)iter.next();
       pos += tok.getPositionIncrement();
-      if (pos==tokPos && tok.termText().equals(val)
+      if (pos==tokPos && new String(tok.termBuffer(), 0, tok.termLength()).equals(val)
           && (startOff==-1 || tok.startOffset()==startOff)
           && (endOff ==-1 || tok.endOffset() ==endOff )
          )

@@ -33,9 +33,9 @@ public class TestBufferedTokenStream extends BaseTokenTestCase {
   public static class AB_Q_Stream extends BufferedTokenStream {
     public AB_Q_Stream(TokenStream input) {super(input);}
     protected Token process(Token t) throws IOException {
-      if ("A".equals(t.termText())) {
+      if ("A".equals(new String(t.termBuffer(), 0, t.termLength()))) {
         Token t2 = read();
-        if (t2!=null && "B".equals(t2.termText())) t.setTermText("Q");
+        if (t2!=null && "B".equals(new String(t2.termBuffer(), 0, t2.termLength()))) t.setTermText("Q");
         if (t2!=null) pushBack(t2);
       }
       return t;
@@ -46,7 +46,8 @@ public class TestBufferedTokenStream extends BaseTokenTestCase {
   public static class AB_AAB_Stream extends BufferedTokenStream {
     public AB_AAB_Stream(TokenStream input) {super(input);}
     protected Token process(Token t) throws IOException {
-      if ("A".equals(t.termText()) && "B".equals(peek(1).termText()))
+      if ("A".equals(new String(t.termBuffer(), 0, t.termLength())) &&
+          "B".equals(new String(peek(1).termBuffer(), 0, peek(1).termLength())))
         write(t);
       return t;
     }

@@ -65,7 +65,7 @@ public class TestPatternTokenizerFactory extends AnalysisTestCase
       int i=0;
       for( Token t = stream.next(); null != t; t = stream.next() )
       {
-        assertEquals( "split: "+test[1] + " "+i, split[i++], t.termText() );
+        assertEquals( "split: "+test[1] + " "+i, split[i++], new String(t.termBuffer(), 0, t.termLength()) );
       }
     }
   }

@@ -259,7 +259,7 @@ public class TestSynonymMap extends AnalysisTestCase {
     Token[] tokens = ((SynonymMap)map.submap.get( src )).synonyms;
     boolean inc = false;
     for( Token token : tokens ){
-      if( exp.equals( token.termText() ) )
+      if( exp.equals( new String(token.termBuffer(), 0, token.termLength()) ) )
         inc = true;
     }
     assertTrue( inc );

@@ -147,17 +147,18 @@ public class TestWordDelimiterFilter extends AbstractSolrTestCase {

     int i=0;
     for(Token t; (t=wdf.next())!=null;) {
-      if (t.termText().equals("foo")) {
+      String termText = new String(t.termBuffer(), 0, t.termLength());
+      if (termText.equals("foo")) {
         assertEquals(5, t.startOffset());
         assertEquals(8, t.endOffset());
         i++;
       }
-      if (t.termText().equals("bar")) {
+      if (termText.equals("bar")) {
         assertEquals(9, t.startOffset());
         assertEquals(12, t.endOffset());
         i++;
       }
-      if (t.termText().equals("foobar")) {
+      if (termText.equals("foobar")) {
         assertEquals(5, t.startOffset());
         assertEquals(12, t.endOffset());
         i++;

@@ -35,7 +35,7 @@ public class HighlighterConfigTest extends AbstractSolrTestCase {

   public void testConfig()
   {
-    SolrHighlighter highlighter = SolrCore.getSolrCore().getHighlighter();
+    SolrHighlighter highlighter = h.getCore().getHighlighter();
     System.out.println( "highlighter" );

     assertTrue( highlighter instanceof DummyHighlighter );

@@ -54,7 +54,7 @@ public class HighlighterTest extends AbstractSolrTestCase {

   public void testConfig()
   {
-    SolrHighlighter highlighter = SolrCore.getSolrCore().getHighlighter();
+    SolrHighlighter highlighter = h.getCore().getHighlighter();
     System.out.println( "highlighter" );

     // Make sure we loaded the one formatter

@@ -99,11 +99,10 @@ public class DateFieldTest extends LegacyDateFieldTest {
   }

   public void testFormatter() {
-    DateFormat fmt = f.getThreadLocalDateFormat();
-    assertEquals("1970-01-01T00:00:00.005", fmt.format(new Date(5)));
-    assertEquals("1970-01-01T00:00:00", fmt.format(new Date(0)));
-    assertEquals("1970-01-01T00:00:00.37", fmt.format(new Date(370)));
-    assertEquals("1970-01-01T00:00:00.9", fmt.format(new Date(900)));
+    assertEquals("1970-01-01T00:00:00.005", f.formatDate(new Date(5)));
+    assertEquals("1970-01-01T00:00:00", f.formatDate(new Date(0)));
+    assertEquals("1970-01-01T00:00:00.37", f.formatDate(new Date(370)));
+    assertEquals("1970-01-01T00:00:00.9", f.formatDate(new Date(900)));

   }

@@ -95,11 +95,10 @@ public class LegacyDateFieldTest extends TestCase {
     assertItoR("1995-12-31T23:59:59Z", "1995-12-31T23:59:59");
   }
   public void testFormatter() {
-    DateFormat fmt = f.getThreadLocalDateFormat();
-    assertEquals("1970-01-01T00:00:00.005", fmt.format(new Date(5)));
+    assertEquals("1970-01-01T00:00:00.005", f.formatDate(new Date(5)));
     // all of this is broken behavior
-    assertEquals("1970-01-01T00:00:00.000", fmt.format(new Date(0)));
-    assertEquals("1970-01-01T00:00:00.370", fmt.format(new Date(370)));
-    assertEquals("1970-01-01T00:00:00.900", fmt.format(new Date(900)));
+    assertEquals("1970-01-01T00:00:00.000", f.formatDate(new Date(0)));
+    assertEquals("1970-01-01T00:00:00.370", f.formatDate(new Date(370)));
+    assertEquals("1970-01-01T00:00:00.900", f.formatDate(new Date(900)));
   }
 }

@@ -56,7 +56,7 @@ public class SolrRequestParserTest extends AbstractSolrTestCase {
     String body2 = "qwertasdfgzxcvb";
     String body3 = "1234567890";

-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();

     Map<String,String[]> args = new HashMap<String, String[]>();
     args.put( CommonParams.STREAM_BODY, new String[] {body1} );
@@ -110,7 +110,7 @@ public class SolrRequestParserTest extends AbstractSolrTestCase {
       return;
     }

-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();

     Map<String,String[]> args = new HashMap<String, String[]>();
     args.put( CommonParams.STREAM_URL, new String[] {url} );

@@ -37,7 +37,7 @@ public class DirectUpdateHandlerTest extends AbstractSolrTestCase {

   public void testRequireUniqueKey() throws Exception
   {
-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();

     UpdateHandler updater = core.getUpdateHandler();

@@ -51,7 +51,7 @@ public class DocumentBuilderTest extends AbstractSolrTestCase {

   public void testNullField()
   {
-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();

     // make sure a null value is not indexed
     SolrInputDocument doc = new SolrInputDocument();

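A hedged sketch of the test-side change repeated in the test hunks above, assuming the Solr 1.3-era AbstractSolrTestCase, which exposes a TestHarness field named h; the test class and resource file names are hypothetical:

import org.apache.solr.core.SolrCore;
import org.apache.solr.util.AbstractSolrTestCase;

public class CoreAccessExampleTest extends AbstractSolrTestCase {
  public String getSchemaFile()     { return "schema.xml"; }      // required by the base class
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  public void testCoreAccess() {
    // Deprecated singleton lookup: SolrCore core = SolrCore.getSolrCore();
    SolrCore core = h.getCore();   // non-deprecated: ask the test harness for the core
    assertNotNull(core);
  }
}
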
@@ -133,7 +133,7 @@ public class SolrPluginUtilsTest extends AbstractSolrTestCase {
     assertTrue(t+" sanity test isn't TermQuery: " + out.getClass(),
                out instanceof TermQuery);
     assertEquals(t+" sanity test is wrong field",
-                 h.getCore().getSchema().getDefaultSearchFieldName(),
+                 h.getCore().getSchema().getSolrQueryParser(null).getField(),
                  ((TermQuery)out).getTerm().field());

     t = "subject:XXXXXXXX";