SOLR-657: Replace deprecated calls with the non-deprecated equivalents

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@701485 13f79535-47bb-0310-9956-ffa450edef68
Ryan McKinley 2008-10-03 18:43:57 +00:00
parent b5081cdf69
commit dbf9fc8683
30 changed files with 79 additions and 69 deletions

View File

@@ -42,6 +42,9 @@ New Features
     See http://lucene.apache.org/java/2_3_2/api/org/apache/lucene/index/IndexDeletionPolicy.html
     (yonik, Noble Paul, Akshay Ukey via shalin)
+ 3. SOLR-657: Replace deprecated calls with the non-deprecated equivalents
+    (Lars Kotthoff via ryan)
 Optimizations
 ----------------------
@@ -56,6 +59,7 @@ Bug Fixes
 2. SOLR-771: CoreAdminHandler STATUS should display 'normalized' paths (koji, hossman, shalin)
 Build
 ----------------------
 1. SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers)

View File

@@ -134,7 +134,7 @@ public class SynonymFilterFactory extends BaseTokenFilterFactory implements Reso
       List<String> tokList = new ArrayList<String>();
       try {
         for( Token token = ts.next(); token != null; token = ts.next() ){
-          String text = token.termText();
+          String text = new String(token.termBuffer(), 0, token.termLength());
           if( text.length() > 0 )
             tokList.add( text );
         }
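
The same replacement for the deprecated Token.termText() recurs throughout the analysis code and tests in this commit: the term is rebuilt from the token's character buffer. A minimal sketch of the idiom, assuming the Lucene 2.x Token API; the TermTexts helper class is hypothetical and only illustrates the pattern:

import org.apache.lucene.analysis.Token;

// Hypothetical helper illustrating the replacement for the deprecated
// Token.termText(): copy termLength() chars out of termBuffer() into a String.
final class TermTexts {
  private TermTexts() {}

  static String termText(Token token) {
    return new String(token.termBuffer(), 0, token.termLength());
  }
}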

View File

@@ -20,6 +20,7 @@ package org.apache.solr.core;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.DocList;
 import org.apache.solr.search.DocIterator;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryResponse;
@@ -47,7 +48,7 @@ class QuerySenderListener extends AbstractSolrEventListener {
       };
       SolrQueryResponse rsp = new SolrQueryResponse();
-      core.execute(req,rsp);
+      core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);
       // Retrieve the Document instances (not just the ids) to warm
       // the OS disk cache, and any Solr document cache. Only the top
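
The deprecated two-argument SolrCore.execute(req, rsp) is replaced here, and again in the tests further down, by looking the handler up from the request's qt parameter and passing it explicitly. A minimal sketch of that call pattern, assuming the Solr 1.3-era SolrCore API; the class and method names below are illustrative only:

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;

// Illustrative only: resolve the request handler named by the qt parameter,
// then hand it to execute() instead of relying on the deprecated two-arg form.
class ExecuteWithHandler {
  void run(SolrCore core, SolrQueryRequest req, SolrQueryResponse rsp) {
    core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);
  }
}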

View File

@@ -39,7 +39,7 @@ import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.search.QParserPlugin;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.ValueSourceParser;
-import org.apache.solr.update.DirectUpdateHandler;
+import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.update.SolrIndexWriter;
 import org.apache.solr.update.UpdateHandler;
 import org.apache.solr.update.processor.LogUpdateProcessorFactory;
@@ -487,7 +487,7 @@ public final class SolrCore implements SolrInfoMBean {
     getSearcher(false,false,null);
     updateHandler = createUpdateHandler(
-      solrConfig.get("updateHandler/@class", DirectUpdateHandler.class.getName())
+      solrConfig.get("updateHandler/@class", DirectUpdateHandler2.class.getName())
     );
     infoRegistry.put("updateHandler", updateHandler);
@@ -1225,8 +1225,8 @@ public final class SolrCore implements SolrInfoMBean {
   public void execute(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
     if (handler==null) {
-      log.warn(logid+"Null Request Handler '" + req.getQueryType() +"' :" + req);
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"Null Request Handler '" + req.getQueryType() + "'", true);
+      log.warn(logid+"Null Request Handler '" + req.getParams().get(CommonParams.QT) +"' :" + req);
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"Null Request Handler '" + req.getParams().get(CommonParams.QT) + "'", true);
     }
     // setup response header and handle request
     final NamedList<Object> responseHeader = new SimpleOrderedMap<Object>();
@@ -1355,7 +1355,7 @@ public final class SolrCore implements SolrInfoMBean {
    * 'wt' parameter, attempts to find that one; otherwise return the default writer.
    */
   public final QueryResponseWriter getQueryResponseWriter(SolrQueryRequest request) {
-    return getQueryResponseWriter(request.getParam("wt"));
+    return getQueryResponseWriter(request.getParams().get(CommonParams.WT));
   }
   private final Map<String, QParserPlugin> qParserPlugins = new HashMap<String, QParserPlugin>();
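
The request-parameter changes in this file follow a single pattern: the deprecated per-request accessors (getParam, getQueryType, getQueryString) give way to the SolrParams object returned by getParams(), with a CommonParams constant used wherever one exists for the parameter name. A minimal sketch, assuming the Solr 1.3-era SolrQueryRequest/SolrParams API; the class and method names below are illustrative only:

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.SolrQueryRequest;

// Illustrative only: all parameter reads go through SolrParams.
class ParamAccess {
  String queryType(SolrQueryRequest req) {
    SolrParams params = req.getParams();
    return params.get(CommonParams.QT);           // was req.getQueryType()
  }

  String writerType(SolrQueryRequest req) {
    return req.getParams().get(CommonParams.WT);  // was req.getParam("wt")
  }
}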

View File

@@ -376,7 +376,7 @@ public class LukeRequestHandler extends RequestHandlerBase
     finfo.add("fields", fields);
     finfo.add("dynamicFields", dynamicFields);
     finfo.add("uniqueKeyField", uniqueField.getName());
-    finfo.add("defaultSearchField", schema.getDefaultSearchFieldName());
+    finfo.add("defaultSearchField", schema.getSolrQueryParser(null).getField());
     finfo.add("types", types);
     return finfo;
   }

View File

@@ -73,7 +73,7 @@ public class SystemInfoHandler extends RequestHandlerBase
     SimpleOrderedMap<Object> info = new SimpleOrderedMap<Object>();
     IndexSchema schema = core.getSchema();
-    info.add( "schema", schema != null ? schema.getName():"no schema!" );
+    info.add( "schema", schema != null ? schema.getSchemaName():"no schema!" );
     // Host
     InetAddress addr = InetAddress.getLocalHost();

View File

@@ -300,7 +300,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
     TokenStream tokens = analyzer.tokenStream( null, new StringReader( query ) );
     Token token = tokens.next();
     while( token != null ) {
-      norm.append( token.termText() );
+      norm.append( new String(token.termBuffer(), 0, token.termLength()) );
       token = tokens.next();
     }
     return norm.toString();

View File

@@ -18,6 +18,7 @@
 package org.apache.solr.request;
 import org.apache.lucene.document.Document;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.FastWriter;
 import org.apache.solr.common.SolrDocument;
@@ -54,7 +55,7 @@ public abstract class TextResponseWriter {
     this.schema = req.getSchema();
     this.req = req;
     this.rsp = rsp;
-    String indent = req.getParam("indent");
+    String indent = req.getParams().get("indent");
     if (indent != null && !"".equals(indent) && !"off".equals(indent)) {
       doIndent=true;
     }

View File

@@ -19,6 +19,7 @@ package org.apache.solr.request;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.XML;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -61,18 +62,18 @@ final public class XMLWriter {
   public static void writeResponse(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
-    String ver = req.getParam("version");
+    String ver = req.getParams().get(CommonParams.VERSION);
     writer.write(XML_START1);
-    String stylesheet = req.getParam("stylesheet");
+    String stylesheet = req.getParams().get("stylesheet");
     if (stylesheet != null && stylesheet.length() > 0) {
       writer.write(XML_STYLESHEET);
       writer.write(stylesheet);
       writer.write(XML_STYLESHEET_END);
     }
-    String noSchema = req.getParam("noSchema");
+    String noSchema = req.getParams().get("noSchema");
     // todo - change when schema becomes available?
     if (false && noSchema == null)
       writer.write(XML_START2_SCHEMA);
@@ -87,7 +88,7 @@ final public class XMLWriter {
     XMLWriter xw = new XMLWriter(writer, req.getSchema(), req, ver);
     xw.defaultFieldList = rsp.getReturnFields();
-    String indent = req.getParam("indent");
+    String indent = req.getParams().get("indent");
     if (indent != null) {
       if ("".equals(indent) || "off".equals(indent)) {
         xw.setIndent(false);

View File

@@ -97,7 +97,7 @@ public class FieldQParserPlugin extends QParserPlugin {
         return null;
       else if (lst.size() == 1) {
         t = lst.get(0);
-        return new TermQuery(new Term(field, t.termText()));
+        return new TermQuery(new Term(field, new String(t.termBuffer(), 0, t.termLength())));
       } else {
         if (severalTokensAtSamePosition) {
           if (positionCount == 1) {
@@ -106,7 +106,7 @@ public class FieldQParserPlugin extends QParserPlugin {
             for (int i = 0; i < lst.size(); i++) {
               t = (org.apache.lucene.analysis.Token) lst.get(i);
               TermQuery currentQuery = new TermQuery(
-                  new Term(field, t.termText()));
+                  new Term(field, new String(t.termBuffer(), 0, t.termLength())));
               q.add(currentQuery, BooleanClause.Occur.SHOULD);
             }
             return q;
@@ -122,7 +122,7 @@ public class FieldQParserPlugin extends QParserPlugin {
                 mpq.add((Term[])multiTerms.toArray(new Term[0]));
                 multiTerms.clear();
               }
-              multiTerms.add(new Term(field, t.termText()));
+              multiTerms.add(new Term(field, new String(t.termBuffer(), 0, t.termLength())));
             }
             mpq.add((Term[])multiTerms.toArray(new Term[0]));
             return mpq;
@@ -132,7 +132,8 @@ public class FieldQParserPlugin extends QParserPlugin {
         PhraseQuery q = new PhraseQuery();
         q.setSlop(phraseSlop);
         for (int i = 0; i < lst.size(); i++) {
-          q.add(new Term(field, lst.get(i).termText()));
+          Token token = lst.get(i);
+          q.add(new Term(field, new String(token.termBuffer(), 0, token.termLength())));
         }
         return q;
       }

View File

@@ -61,7 +61,7 @@ class LuceneQParser extends QParser {
     String defaultField = getParam(CommonParams.DF);
     if (defaultField==null) {
-      defaultField = getReq().getSchema().getDefaultSearchFieldName();
+      defaultField = getReq().getSchema().getSolrQueryParser(null).getField();
     }
     lparser = new SolrQueryParser(this, defaultField);
@@ -71,7 +71,7 @@ class LuceneQParser extends QParser {
       lparser.setDefaultOperator("AND".equals(opParam) ? QueryParser.Operator.AND : QueryParser.Operator.OR);
     } else {
       // try to get default operator from schema
-      String operator = getReq().getSchema().getQueryParserDefaultOperator();
+      String operator = getReq().getSchema().getSolrQueryParser(null).getField();
       lparser.setDefaultOperator("AND".equals(operator) ?
                                  QueryParser.Operator.AND : QueryParser.Operator.OR);
     }

View File

@@ -97,7 +97,7 @@ if (c.query instanceof TermQuery) {
       filter = (Filter)cache.get(filterQuery);
     }
     if (filter == null) { // miss
-      filter = new QueryFilter(filterQuery); // construct new entry
+      filter = new CachingWrapperFilter(new QueryWrapperFilter(filterQuery)); // construct new entry
       synchronized (cache) {
         cache.put(filterQuery, filter); // cache it
       }
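
The deprecated Lucene QueryFilter combined query wrapping and bit-set caching in one class; its replacement composes the two non-deprecated classes that split those roles. A minimal sketch, assuming the Lucene 2.x search API; the class name below is illustrative only:

import org.apache.lucene.search.CachingWrapperFilter;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;

// Illustrative only: wrap the query as a filter, then cache its results.
class FilterFactory {
  static Filter filterFor(Query filterQuery) {
    return new CachingWrapperFilter(new QueryWrapperFilter(filterQuery));
  }
}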

View File

@@ -325,7 +325,7 @@ class MultiValueTokenStream extends TokenStream {
     }
     // create an modified token which is the offset into the concatenated
     // string of all values
-    Token offsetToken = new Token(nextToken.termText(),
+    Token offsetToken = new Token(new String(nextToken.termBuffer(), 0, nextToken.termLength()),
                                   nextToken.startOffset() + curOffset,
                                   nextToken.endOffset() + curOffset);
     offsetToken.setPositionIncrement(nextToken.getPositionIncrement() + extra*10);

View File

@@ -178,7 +178,7 @@ public class SolrPluginUtils {
   public static int setReturnFields(SolrQueryRequest req,
                                     SolrQueryResponse res) {
-    return setReturnFields(req.getParam(FL), res);
+    return setReturnFields(req.getParams().get(org.apache.solr.common.params.CommonParams.FL), res);
   }
   /**
@@ -380,14 +380,14 @@ public class SolrPluginUtils {
                                         DocList results)
     throws IOException {
-    String debug = req.getParam(org.apache.solr.common.params.CommonParams.DEBUG_QUERY);
+    String debug = req.getParams().get(org.apache.solr.common.params.CommonParams.DEBUG_QUERY);
     NamedList dbg = null;
     if (debug!=null) {
       dbg = new SimpleOrderedMap();
       /* userQuery may have been pre-processes .. expose that */
-      dbg.add("rawquerystring", req.getQueryString());
+      dbg.add("rawquerystring", req.getParams().get(org.apache.solr.common.params.CommonParams.Q));
       dbg.add("querystring", userQuery);
       /* QueryParsing.toString isn't perfect, use it to see converted
@@ -399,7 +399,7 @@ public class SolrPluginUtils {
       dbg.add("explain", getExplainList
               (query, results, req.getSearcher(), req.getSchema()));
-      String otherQueryS = req.getParam("explainOther");
+      String otherQueryS = req.getParams().get(org.apache.solr.common.params.CommonParams.EXPLAIN_OTHER);
       if (otherQueryS != null && otherQueryS.length() > 0) {
         DocList otherResults = doSimpleQuery
           (otherQueryS,req.getSearcher(), req.getSchema(),0,10);
@@ -802,7 +802,7 @@ public class SolrPluginUtils {
    */
   public static Sort getSort(SolrQueryRequest req) {
-    String sort = req.getParam(org.apache.solr.common.params.CommonParams.SORT);
+    String sort = req.getParams().get(org.apache.solr.common.params.CommonParams.SORT);
     if (null == sort || sort.equals("")) {
       return null;
     }

View File

@@ -131,7 +131,7 @@ class FindClasses {
     try {
       for (int i =0; i < jars.length; i++) {
         jarFiles[i] = new JarFile(jars[i]);
-        urls[i] = jars[i].toURL();
+        urls[i] = jars[i].toURI().toURL();
       }
     } catch (MalformedURLException e) {
       throw new RuntimeException
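
File.toURL() is deprecated because it does not escape characters that are illegal in URLs; converting through toURI() first performs the escaping. A minimal sketch of the replacement:

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;

// Convert a File to a URL via URI so that spaces and other special
// characters are escaped correctly.
class FileUrls {
  static URL toUrl(File file) throws MalformedURLException {
    return file.toURI().toURL();
  }
}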

View File

@@ -17,6 +17,7 @@
 package org.apache.solr.util;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.XML;
 import org.apache.solr.core.SolrConfig;
@@ -117,7 +118,7 @@ public class TestHarness {
   public TestHarness( String dataDirectory,
                       SolrConfig solrConfig,
                       String schemaFile) {
-     this( dataDirectory, solrConfig, new IndexSchema(solrConfig, schemaFile));
+     this( dataDirectory, solrConfig, new IndexSchema(solrConfig, schemaFile, null));
   }
   /**
    * @param dataDirectory path for index data, will not be cleaned up
@@ -300,7 +301,7 @@ public class TestHarness {
    * @see LocalSolrQueryRequest
    */
   public String query(SolrQueryRequest req) throws IOException, Exception {
-    return query(req.getQueryType(), req);
+    return query(req.getParams().get(CommonParams.QT), req);
   }
   /**

View File

@@ -21,6 +21,7 @@ import org.apache.lucene.document.*;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.solr.common.params.AppendedSolrParams;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.DefaultSolrParams;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.SolrParams;
@@ -297,14 +298,14 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
     args.put("string", "string value");
     args.put("array", new String[] {"array", "value"});
     SolrQueryRequest req = new LocalSolrQueryRequest(null, null, null, 0, 20, args);
-    assertEquals("string value", req.getParam("string"));
-    assertEquals("array", req.getParam("array"));
-    String[] stringParams = req.getParams("string");
+    assertEquals("string value", req.getParams().get("string"));
+    assertEquals("array", req.getParams().get("array"));
+    String[] stringParams = req.getParams().getParams("string");
     assertEquals(1, stringParams.length);
     assertEquals("string value", stringParams[0]);
-    String[] arrayParams = req.getParams("array");
+    String[] arrayParams = req.getParams().getParams("array");
     assertEquals(2, arrayParams.length);
     assertEquals("array", arrayParams[0]);
     assertEquals("value", arrayParams[1]);
@@ -337,7 +338,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
   public void testTermVectorFields() {
-    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile());
+    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile(), null);
     SchemaField f; // Solr field type
     Field luf; // Lucene field
@@ -506,7 +507,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
   }
   public void testCompressableFieldType() {
-    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile());
+    IndexSchema ischema = new IndexSchema(solrConfig, getSchemaFile(), null);
     SchemaField f; // Solr field type
     Field luf; // Lucene field
@@ -538,7 +539,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
     SolrQueryRequest req = req("q", "title:keyword", "fl", "id,title,test_hlt");
     SolrQueryResponse rsp = new SolrQueryResponse();
-    core.execute(req, rsp);
+    core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);
     DocList dl = (DocList) rsp.getValues().get("response");
     org.apache.lucene.document.Document d = req.getSearcher().doc(dl.iterator().nextDoc());
@@ -558,7 +559,7 @@ public class BasicFunctionalityTest extends AbstractSolrTestCase {
     SolrQueryRequest req = req("q", "title:keyword", "fl", "id,title");
     SolrQueryResponse rsp = new SolrQueryResponse();
-    core.execute(req, rsp);
+    core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);
     DocList dl = (DocList) rsp.getValues().get("response");
     DocIterator di = dl.iterator();

View File

@@ -37,10 +37,10 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
     StringBuffer out = new StringBuffer();
     Token t = in.next();
     if (null != t)
-      out.append(t.termText());
+      out.append(new String(t.termBuffer(), 0, t.termLength()));
     for (t = in.next(); null != t; t = in.next()) {
-      out.append(" ").append(t.termText());
+      out.append(" ").append(new String(t.termBuffer(), 0, t.termLength()));
     }
     in.close();
     return out.toString();
@@ -49,7 +49,7 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
   public List<String> tok2str(Iterable<Token> tokLst) {
     ArrayList<String> lst = new ArrayList<String>();
     for ( Token t : tokLst ) {
-      lst.add( t.termText());
+      lst.add( new String(t.termBuffer(), 0, t.termLength()));
     }
     return lst;
   }
@@ -70,7 +70,7 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
     for (Iterator iter = a.iterator(); iter.hasNext();) {
       Token tok = (Token)iter.next();
       pos += tok.getPositionIncrement();
-      if (!tokAt(b, tok.termText(), pos
+      if (!tokAt(b, new String(tok.termBuffer(), 0, tok.termLength()), pos
                  , checkOff ? tok.startOffset() : -1
                  , checkOff ? tok.endOffset() : -1
                  ))
@@ -85,7 +85,7 @@ public abstract class BaseTokenTestCase extends AnalysisTestCase
     for (Iterator iter = lst.iterator(); iter.hasNext();) {
       Token tok = (Token)iter.next();
       pos += tok.getPositionIncrement();
-      if (pos==tokPos && tok.termText().equals(val)
+      if (pos==tokPos && new String(tok.termBuffer(), 0, tok.termLength()).equals(val)
          && (startOff==-1 || tok.startOffset()==startOff)
         && (endOff ==-1 || tok.endOffset() ==endOff )
        )

View File

@@ -33,9 +33,9 @@ public class TestBufferedTokenStream extends BaseTokenTestCase {
   public static class AB_Q_Stream extends BufferedTokenStream {
     public AB_Q_Stream(TokenStream input) {super(input);}
     protected Token process(Token t) throws IOException {
-      if ("A".equals(t.termText())) {
+      if ("A".equals(new String(t.termBuffer(), 0, t.termLength()))) {
         Token t2 = read();
-        if (t2!=null && "B".equals(t2.termText())) t.setTermText("Q");
+        if (t2!=null && "B".equals(new String(t2.termBuffer(), 0, t2.termLength()))) t.setTermText("Q");
         if (t2!=null) pushBack(t2);
       }
       return t;
@@ -46,7 +46,8 @@ public class TestBufferedTokenStream extends BaseTokenTestCase {
   public static class AB_AAB_Stream extends BufferedTokenStream {
     public AB_AAB_Stream(TokenStream input) {super(input);}
     protected Token process(Token t) throws IOException {
-      if ("A".equals(t.termText()) && "B".equals(peek(1).termText()))
+      if ("A".equals(new String(t.termBuffer(), 0, t.termLength())) &&
+          "B".equals(new String(peek(1).termBuffer(), 0, peek(1).termLength())))
         write(t);
       return t;
     }

View File

@@ -65,7 +65,7 @@ public class TestPatternTokenizerFactory extends AnalysisTestCase
     int i=0;
     for( Token t = stream.next(); null != t; t = stream.next() )
     {
-      assertEquals( "split: "+test[1] + " "+i, split[i++], t.termText() );
+      assertEquals( "split: "+test[1] + " "+i, split[i++], new String(t.termBuffer(), 0, t.termLength()) );
     }
   }
 }

View File

@@ -259,7 +259,7 @@ public class TestSynonymMap extends AnalysisTestCase {
     Token[] tokens = ((SynonymMap)map.submap.get( src )).synonyms;
     boolean inc = false;
     for( Token token : tokens ){
-      if( exp.equals( token.termText() ) )
+      if( exp.equals( new String(token.termBuffer(), 0, token.termLength()) ) )
         inc = true;
     }
     assertTrue( inc );

View File

@@ -147,17 +147,18 @@ public class TestWordDelimiterFilter extends AbstractSolrTestCase {
     int i=0;
     for(Token t; (t=wdf.next())!=null;) {
-      if (t.termText().equals("foo")) {
+      String termText = new String(t.termBuffer(), 0, t.termLength());
+      if (termText.equals("foo")) {
         assertEquals(5, t.startOffset());
         assertEquals(8, t.endOffset());
         i++;
       }
-      if (t.termText().equals("bar")) {
+      if (termText.equals("bar")) {
         assertEquals(9, t.startOffset());
         assertEquals(12, t.endOffset());
         i++;
       }
-      if (t.termText().equals("foobar")) {
+      if (termText.equals("foobar")) {
         assertEquals(5, t.startOffset());
         assertEquals(12, t.endOffset());
         i++;

View File

@@ -35,7 +35,7 @@ public class HighlighterConfigTest extends AbstractSolrTestCase {
   public void testConfig()
   {
-    SolrHighlighter highlighter = SolrCore.getSolrCore().getHighlighter();
+    SolrHighlighter highlighter = h.getCore().getHighlighter();
     System.out.println( "highlighter" );
     assertTrue( highlighter instanceof DummyHighlighter );
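
This test and the ones that follow drop the deprecated static SolrCore.getSolrCore() singleton in favor of the TestHarness field that AbstractSolrTestCase provides. A minimal sketch of the pattern, assuming the Solr 1.3-era test base class; the test class, method, and config file names below are illustrative only:

import org.apache.solr.core.SolrCore;
import org.apache.solr.util.AbstractSolrTestCase;

// Illustrative only: get the core from the test harness instead of the
// deprecated SolrCore.getSolrCore() singleton.
public class ExampleCoreTest extends AbstractSolrTestCase {
  public String getSchemaFile() { return "schema.xml"; }
  public String getSolrConfigFile() { return "solrconfig.xml"; }

  public void testCoreAccess() {
    SolrCore core = h.getCore();  // h is the TestHarness from AbstractSolrTestCase
    assertNotNull(core);
  }
}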

View File

@@ -54,7 +54,7 @@ public class HighlighterTest extends AbstractSolrTestCase {
   public void testConfig()
   {
-    SolrHighlighter highlighter = SolrCore.getSolrCore().getHighlighter();
+    SolrHighlighter highlighter = h.getCore().getHighlighter();
     System.out.println( "highlighter" );
     // Make sure we loaded the one formatter

View File

@@ -99,11 +99,10 @@ public class DateFieldTest extends LegacyDateFieldTest {
   }
   public void testFormatter() {
-    DateFormat fmt = f.getThreadLocalDateFormat();
-    assertEquals("1970-01-01T00:00:00.005", fmt.format(new Date(5)));
-    assertEquals("1970-01-01T00:00:00", fmt.format(new Date(0)));
-    assertEquals("1970-01-01T00:00:00.37", fmt.format(new Date(370)));
-    assertEquals("1970-01-01T00:00:00.9", fmt.format(new Date(900)));
+    assertEquals("1970-01-01T00:00:00.005", f.formatDate(new Date(5)));
+    assertEquals("1970-01-01T00:00:00", f.formatDate(new Date(0)));
+    assertEquals("1970-01-01T00:00:00.37", f.formatDate(new Date(370)));
+    assertEquals("1970-01-01T00:00:00.9", f.formatDate(new Date(900)));
   }

View File

@@ -95,11 +95,10 @@ public class LegacyDateFieldTest extends TestCase {
     assertItoR("1995-12-31T23:59:59Z", "1995-12-31T23:59:59");
   }
   public void testFormatter() {
-    DateFormat fmt = f.getThreadLocalDateFormat();
-    assertEquals("1970-01-01T00:00:00.005", fmt.format(new Date(5)));
+    assertEquals("1970-01-01T00:00:00.005", f.formatDate(new Date(5)));
     // all of this is broken behavior
-    assertEquals("1970-01-01T00:00:00.000", fmt.format(new Date(0)));
-    assertEquals("1970-01-01T00:00:00.370", fmt.format(new Date(370)));
-    assertEquals("1970-01-01T00:00:00.900", fmt.format(new Date(900)));
+    assertEquals("1970-01-01T00:00:00.000", f.formatDate(new Date(0)));
+    assertEquals("1970-01-01T00:00:00.370", f.formatDate(new Date(370)));
+    assertEquals("1970-01-01T00:00:00.900", f.formatDate(new Date(900)));
   }
 }

View File

@@ -56,7 +56,7 @@ public class SolrRequestParserTest extends AbstractSolrTestCase {
     String body2 = "qwertasdfgzxcvb";
     String body3 = "1234567890";
-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();
     Map<String,String[]> args = new HashMap<String, String[]>();
     args.put( CommonParams.STREAM_BODY, new String[] {body1} );
@@ -110,7 +110,7 @@ public class SolrRequestParserTest extends AbstractSolrTestCase {
       return;
     }
-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();
     Map<String,String[]> args = new HashMap<String, String[]>();
     args.put( CommonParams.STREAM_URL, new String[] {url} );

View File

@@ -37,7 +37,7 @@ public class DirectUpdateHandlerTest extends AbstractSolrTestCase {
   public void testRequireUniqueKey() throws Exception
   {
-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();
     UpdateHandler updater = core.getUpdateHandler();

View File

@@ -51,7 +51,7 @@ public class DocumentBuilderTest extends AbstractSolrTestCase {
   public void testNullField()
   {
-    SolrCore core = SolrCore.getSolrCore();
+    SolrCore core = h.getCore();
     // make sure a null value is not indexed
     SolrInputDocument doc = new SolrInputDocument();

View File

@@ -133,7 +133,7 @@ public class SolrPluginUtilsTest extends AbstractSolrTestCase {
     assertTrue(t+" sanity test isn't TermQuery: " + out.getClass(),
                out instanceof TermQuery);
     assertEquals(t+" sanity test is wrong field",
-                 h.getCore().getSchema().getDefaultSearchFieldName(),
+                 h.getCore().getSchema().getSolrQueryParser(null).getField(),
                  ((TermQuery)out).getTerm().field());
     t = "subject:XXXXXXXX";