SOLR-6976: Remove methods and classes deprecated in 4.x

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1653549 13f79535-47bb-0310-9956-ffa450edef68
Alan Woodward 2015-01-21 15:06:04 +00:00
parent 8932af3b4b
commit 344be1a6b1
58 changed files with 237 additions and 2791 deletions

View File

@ -183,6 +183,36 @@ Upgrading from Solr 4.x
* Due to changes in the underlying commons-codec package, users of the BeiderMorseFilterFactory
will need to rebuild their indexes after upgrading. See LUCENE-6058 for more details.
* CachedSqlEntityProcessor has been removed, use SqlEntityProcessor with the
cacheImpl parameter.
* HttpDataSource has been removed, use URLDataSource instead.
* LegacyHTMLStripCharFilter has been removed, use HTMLStripCharFilter instead.
* CoreAdminRequest.persist() call has been removed. All changes made via
CoreAdmin are persistent.
* SpellCheckResponse.getSuggestions() and getSuggestionFrequencies() have been
removed, use getAlternatives() and getAlternativeFrequencies() instead.
* SolrQuery deprecated methods have been removed (see the migration sketch after these notes):
- setMissing() is now setFacetMissing()
- getFacetSort() is now getFacetSortString()
- setFacetSort(boolean) should instead use setFacetSort(String) with
FacetParams.FACET_SORT_COUNT or FacetParams.FACET_SORT_INDEX
- setSortField(String, ORDER) should use setSort(SortClause)
- addSortField(String, ORDER) should use addSort(SortClause)
- removeSortField(String, ORDER) should use removeSort(SortClause)
- getSortFields() should use getSorts()
- set/getQueryType() should use set/getRequestHandler()
* ClientUtils deprecated date methods have been removed, use DateUtil instead.
* FacetParams.FacetDateOther has been removed, use FacetParams.FacetRangeOther instead.
* ShardParams.SHARD_KEYS has been removed, use ShardParams._ROUTE_ instead.
* The 'old-style' solr.xml format is no longer supported, and cores must be
defined using core.properties files. See
https://cwiki.apache.org/confluence/display/solr/Format+of+solr.xml
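
For SolrJ users, a minimal migration sketch for the SolrQuery changes listed above; the field name "price", the "/select" handler path, and the facet settings are illustrative values, not part of this commit:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.FacetParams;

SolrQuery query = new SolrQuery("*:*");
query.setRequestHandler("/select");                   // was: setQueryType("/select")
query.addSort(SolrQuery.SortClause.desc("price"));    // was: addSortField("price", SolrQuery.ORDER.desc)
query.setFacetMissing(true);                          // was: setMissing(true)
query.setFacetSort(FacetParams.FACET_SORT_COUNT);     // was: setFacetSort(true)
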
@ -749,6 +779,9 @@ Other Changes
* SOLR-6840: Remove support for old-style solr.xml (Erick Erickson, Alan Woodward)
* SOLR-6976: Remove classes and methods deprecated in 4.x (Alan Woodward, Noble
Paul, Chris Hostetter)
================== 4.10.3 ==================
Bug Fixes

View File

@ -1,41 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.dataimport;
/**
* This class enables caching of data obtained from the DB to avoid too many sql
* queries
* <p/>
* <p>
* Refer to <a
* href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache
* .org/solr/DataImportHandler</a> for more details.
* </p>
* <p/>
* <b>This API is experimental and subject to change</b>
*
* @since solr 1.3
* @deprecated - Use SqlEntityProcessor with cacheImpl parameter.
*/
@Deprecated
public class CachedSqlEntityProcessor extends SqlEntityProcessor {
@Override
protected void initCache(Context context) {
cacheSupport = new DIHCacheSupport(context, "SortedMapBackedCache");
}
}

View File

@ -1,39 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.dataimport;
/**
* <p>
* A data source implementation which can be used to read character files using
* HTTP.
* </p>
* <p/>
* <p>
* Refer to <a
* href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a>
* for more details.
* </p>
* <p/>
* <b>This API is experimental and may change in the future.</b>
*
* @since solr 1.3
* @deprecated use {@link org.apache.solr.handler.dataimport.URLDataSource} instead
*/
@Deprecated
public class HttpDataSource extends URLDataSource {
}

View File

@ -16,12 +16,12 @@
*/
package org.apache.solr.handler.dataimport.config;
import org.apache.solr.handler.dataimport.SolrWriter;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.apache.solr.handler.dataimport.SolrWriter;
public class ConfigNameConstants {
public static final String SCRIPT = "script";
@ -31,10 +31,6 @@ public class ConfigNameConstants {
public static final String PROPERTY_WRITER = "propertyWriter";
/**
* @deprecated use IMPORTER_NS_SHORT instead
*/
@Deprecated
public static final String IMPORTER_NS = "dataimporter";
public static final String IMPORTER_NS_SHORT = "dih";

View File

@ -1,5 +1,9 @@
package org.apache.solr.handler.dataimport;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import java.io.File;
import java.nio.file.Files;
import java.sql.Connection;
@ -17,11 +21,6 @@ import java.util.Locale;
import java.util.Map;
import java.util.Set;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -634,8 +633,7 @@ public abstract class AbstractSqlEntityProcessorTestCase extends
+ "newColumnName=''countryAdded_s'' newColumnValue=''country_added'' "
: "");
if (countryCached) {
sb.append(random().nextBoolean() ? "processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' "
: "processor=''CachedSqlEntityProcessor'' ");
sb.append("processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' ");
if (useSimpleCaches) {
sb.append("query=''SELECT CODE, COUNTRY_NAME FROM COUNTRIES WHERE DELETED != 'Y' AND CODE='${People.COUNTRY_CODE}' ''>\n");
} else {
@ -671,8 +669,7 @@ public abstract class AbstractSqlEntityProcessorTestCase extends
+ "newColumnName=''sportsAdded_s'' newColumnValue=''sport_added'' "
: "");
if (sportsCached) {
sb.append(random().nextBoolean() ? "processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' "
: "processor=''CachedSqlEntityProcessor'' ");
sb.append("processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' ");
if (useSimpleCaches) {
sb.append("query=''SELECT ID, SPORT_NAME FROM PEOPLE_SPORTS WHERE DELETED != 'Y' AND PERSON_ID=${People.ID} ORDER BY ID'' ");
} else {

View File

@ -16,15 +16,13 @@
*/
package org.apache.solr.hadoop;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -49,13 +47,14 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
@ThreadLeakAction({Action.WARN})
@ThreadLeakLingering(linger = 0)

View File

@ -16,7 +16,6 @@
*/
package org.apache.solr.morphlines.solr;
import org.junit.BeforeClass;
import org.junit.Test;
import org.kitesdk.morphline.api.Record;
import org.kitesdk.morphline.base.Fields;

View File

@ -35,7 +35,6 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.junit.BeforeClass;
import org.kitesdk.morphline.api.Record;
import org.kitesdk.morphline.base.Fields;
import org.kitesdk.morphline.base.Notifications;

View File

@ -1,71 +0,0 @@
package org.apache.solr.analysis;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.Reader;
import java.util.Map;
import org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory;
import org.apache.lucene.analysis.util.CharFilterFactory;
/**
* Factory for {@link LegacyHTMLStripCharFilter}.
* <pre class="prettyprint" >
* &lt;fieldType name="text_html_legacy" class="solr.TextField" positionIncrementGap="100"&gt;
* &lt;analyzer&gt;
* &lt;charFilter class="solr.LegacyHTMLStripCharFilterFactory"/&gt;
* &lt;tokenizer class="solr.WhitespaceTokenizerFactory"/&gt;
* &lt;/analyzer&gt;
* &lt;/fieldType&gt;
* </pre>
* <p>
* This factory is <b>NOT</b> recommended for new users and should be
* considered <b>UNSUPPORTED</b>.
* </p>
* <p>
* In Solr version 3.5 and earlier, <tt>HTMLStripCharFilter(Factory)</tt>
* had known bugs in the offsets it provided, triggering e.g. exceptions in
* highlighting.
* </p>
* <p>
* This class is provided as possible alternative for people who depend on
* the "broken" behavior of <tt>HTMLStripCharFilter</tt> in Solr version 3.5
* and earlier, and/or who don't like the changes introduced by the Solr 3.6+
* version of <tt>HTMLStripCharFilterFactory</tt>. (See the 3.6.0 release
* section of lucene/CHANGES.txt for a list of differences in behavior.)
* </p>
* @deprecated use {@link HTMLStripCharFilterFactory}
*/
@Deprecated
public class LegacyHTMLStripCharFilterFactory extends CharFilterFactory {
/** Creates a new LegacyHTMLStripCharFilterFactory */
public LegacyHTMLStripCharFilterFactory(Map<String,String> args) {
super(args);
if (!args.isEmpty()) {
throw new IllegalArgumentException("Unknown parameters: " + args);
}
}
@Override
public LegacyHTMLStripCharFilter create(Reader input) {
return new LegacyHTMLStripCharFilter(input);
}
}

View File

@ -17,10 +17,9 @@
package org.apache.solr.analysis;
import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.Analyzer;
import java.io.Reader;
import java.io.IOException;
/**
*
@ -37,14 +36,8 @@ public abstract class SolrAnalyzer extends Analyzer {
return posIncGap;
}
/** wrap the reader in a CharStream, if appropriate */
@Deprecated
public Reader charStream(Reader reader) {
return reader;
}
@Override
protected Reader initReader(String fieldName, Reader reader) {
return charStream(reader);
return reader;
}
}
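
With charStream(Reader) gone, SolrAnalyzer subclasses that wrapped the input reader should do so in initReader() instead. A hypothetical sketch under that assumption; the class name and filter choice are illustrative, not part of this commit:

import java.io.Reader;
import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.solr.analysis.SolrAnalyzer;

public class HtmlStrippingAnalyzer extends SolrAnalyzer {
  @Override
  protected TokenStreamComponents createComponents(String fieldName) {
    // simple whitespace tokenization; a real analyzer would add filters here
    return new TokenStreamComponents(new WhitespaceTokenizer());
  }

  @Override
  protected Reader initReader(String fieldName, Reader reader) {
    // reader wrapping that previously lived in charStream(reader) moves here
    return new HTMLStripCharFilter(reader);
  }
}
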

View File

@ -17,6 +17,7 @@
package org.apache.solr.client.solrj.embedded;
import com.google.common.base.Strings;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
@ -30,7 +31,6 @@ import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
@ -62,25 +62,10 @@ public class EmbeddedSolrServer extends SolrClient
/**
* Use the other constructor using a CoreContainer and a name.
* @deprecated use {@link #EmbeddedSolrServer(CoreContainer, String)} instead.
*/
@Deprecated
public EmbeddedSolrServer(SolrCore core)
{
if ( core == null ) {
throw new NullPointerException("SolrCore instance required");
}
CoreDescriptor dcore = core.getCoreDescriptor();
if (dcore == null)
throw new NullPointerException("CoreDescriptor required");
CoreContainer cores = dcore.getCoreContainer();
if (cores == null)
throw new NullPointerException("CoreContainer required");
coreName = dcore.getName();
coreContainer = cores;
_parser = new SolrRequestParsers( null );
this(core.getCoreDescriptor().getCoreContainer(), core.getName());
}
/**
@ -93,8 +78,10 @@ public class EmbeddedSolrServer extends SolrClient
if ( coreContainer == null ) {
throw new NullPointerException("CoreContainer instance required");
}
if (Strings.isNullOrEmpty(coreName))
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core name cannot be empty");
this.coreContainer = coreContainer;
this.coreName = coreName == null? "" : coreName;
this.coreName = coreName;
_parser = new SolrRequestParsers( null );
}
@ -208,7 +195,7 @@ public class EmbeddedSolrServer extends SolrClient
}
// Now write it out
NamedList<Object> normalized = getParsedResponse(req, rsp);
NamedList<Object> normalized = BinaryResponseWriter.getParsedResponse(req, rsp);
return normalized;
}
catch( IOException iox ) {
@ -227,18 +214,6 @@ public class EmbeddedSolrServer extends SolrClient
}
}
/**
* Returns a response object equivalent to what you get from the XML/JSON/javabin parser. Documents
* become SolrDocuments, DocList becomes SolrDocumentList etc.
*
* @deprecated use {@link BinaryResponseWriter#getParsedResponse(SolrQueryRequest, SolrQueryResponse)}
*/
@Deprecated
public NamedList<Object> getParsedResponse( SolrQueryRequest req, SolrQueryResponse rsp )
{
return BinaryResponseWriter.getParsedResponse(req, rsp);
}
/**
* Shutdown all cores within the EmbeddedSolrServer instance
*/
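
A hedged usage sketch of the remaining constructor, assuming a CoreContainer bootstrapped from a Solr home directory; the path and core name are placeholders:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.core.CoreContainer;

CoreContainer container = new CoreContainer("/path/to/solr/home");  // placeholder Solr home
container.load();
SolrClient client = new EmbeddedSolrServer(container, "collection1");  // was: new EmbeddedSolrServer(core)
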

View File

@ -1,26 +0,0 @@
package org.apache.solr.common;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @deprecated This interface has been kept for backwards compatibility and will
* be removed in (5.0). Use {@link org.apache.lucene.analysis.util.ResourceLoader}
*/
@Deprecated
public interface ResourceLoader extends org.apache.lucene.analysis.util.ResourceLoader {
}

View File

@ -1,29 +0,0 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Commonly reused classes and interfaces (deprecated package, do not add new classes)
</p>
</body>
</html>

View File

@ -18,6 +18,7 @@
package org.apache.solr.core;
import org.apache.lucene.analysis.util.CharFilterFactory;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
@ -26,7 +27,6 @@ import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.common.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.component.SearchComponent;
@ -546,10 +546,6 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
if (!live) {
//TODO: Does SolrCoreAware make sense here since in a multi-core context
// which core are we talking about ?
if (org.apache.solr.util.plugin.ResourceLoaderAware.class.isInstance(obj)) {
log.warn("Class [{}] uses org.apache.solr.util.plugin.ResourceLoaderAware " +
"which is deprecated. Change to org.apache.lucene.analysis.util.ResourceLoaderAware.", cname);
}
if( obj instanceof ResourceLoaderAware ) {
assertAwareCompatibility( ResourceLoaderAware.class, obj );
waitingForResources.add( (ResourceLoaderAware)obj );
@ -589,10 +585,6 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
assertAwareCompatibility( SolrCoreAware.class, obj );
waitingForCore.add( (SolrCoreAware)obj );
}
if (org.apache.solr.util.plugin.ResourceLoaderAware.class.isInstance(obj)) {
log.warn("Class [{}] uses org.apache.solr.util.plugin.ResourceLoaderAware " +
"which is deprecated. Change to org.apache.lucene.analysis.util.ResourceLoaderAware.", cName);
}
if( obj instanceof ResourceLoaderAware ) {
assertAwareCompatibility( ResourceLoaderAware.class, obj );
waitingForResources.add( (ResourceLoaderAware)obj );
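
Plugins that load resources now implement org.apache.lucene.analysis.util.ResourceLoaderAware directly, since the old org.apache.solr shims are gone. A hypothetical token filter factory as a sketch; the class name and the "my-stopwords.txt" resource are made up:

import java.io.IOException;
import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.lucene.analysis.util.TokenFilterFactory;

public class MyStopFilterFactory extends TokenFilterFactory implements ResourceLoaderAware {
  private CharArraySet stopWords;

  public MyStopFilterFactory(Map<String,String> args) {
    super(args);
    if (!args.isEmpty()) {
      throw new IllegalArgumentException("Unknown parameters: " + args);
    }
  }

  @Override
  public void inform(ResourceLoader loader) throws IOException {
    // resources are resolved through the Lucene ResourceLoader passed in by Solr
    stopWords = getWordSet(loader, "my-stopwords.txt", true);
  }

  @Override
  public TokenStream create(TokenStream input) {
    return new StopFilter(input, stopWords);
  }
}
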

View File

@ -1,44 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import org.apache.solr.common.util.NamedList;
/**
* Update handler which uses the JavaBin format
*
* @see org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec
* @see org.apache.solr.common.util.JavaBinCodec
*
* use {@link UpdateRequestHandler}
*/
@Deprecated
public class BinaryUpdateRequestHandler extends UpdateRequestHandler {
@Override
public void init(NamedList args) {
super.init(args);
setAssumeContentType("application/javabin");
log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
}
@Override
public String getDescription() {
return "Add/Update multiple documents with javabin format";
}
}

View File

@ -1,45 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import org.apache.solr.common.util.NamedList;
/**
* use {@link UpdateRequestHandler}
*/
@Deprecated
public class CSVRequestHandler extends UpdateRequestHandler {
@Override
public void init(NamedList args) {
super.init(args);
setAssumeContentType("application/csv");
// log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Add/Update multiple documents with CSV formatted rows";
}
}

View File

@ -1,44 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import org.apache.solr.common.util.NamedList;
/**
* use {@link UpdateRequestHandler}
*/
@Deprecated
public class JsonUpdateRequestHandler extends UpdateRequestHandler {
@Override
public void init(NamedList args) {
super.init(args);
setAssumeContentType("application/json");
// log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Add documents with JSON";
}
}

View File

@ -1,46 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import org.apache.solr.common.util.NamedList;
/**
* Add documents to solr using the STAX XML parser.
*
* use {@link UpdateRequestHandler}
*/
@Deprecated
public class XmlUpdateRequestHandler extends UpdateRequestHandler {
@Override
public void init(NamedList args) {
super.init(args);
setAssumeContentType("application/xml");
log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Add documents with XML";
}
}

View File

@ -1,43 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import org.apache.solr.common.util.NamedList;
/**
* Add documents to solr using the STAX XML parser, transforming it with XSLT first
*
* use {@link UpdateRequestHandler}
*/
@Deprecated
public class XsltUpdateRequestHandler extends UpdateRequestHandler {
@Override
public void init(NamedList args) {
super.init(args);
setAssumeContentType("application/xml");
log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Add documents with XML, transforming with XSLT first";
}
}
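
The removed handlers above were thin wrappers that preset a content type; the consolidated UpdateRequestHandler (typically registered at /update) dispatches on the request's content type instead, so SolrJ client code is unchanged. A hedged fragment; the URL and document values are placeholders:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument;

SolrClient client = new HttpSolrClient("http://localhost:8983/solr/collection1");  // placeholder URL
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "1");
client.add(doc);    // served by the consolidated update handler, whatever the wire format
client.commit();
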

View File

@ -92,7 +92,6 @@ public class HighlightComponent extends SearchComponent implements PluginInfoIni
PluginInfo pluginInfo = core.getSolrConfig().getPluginInfo(SolrHighlighter.class.getName()); //TODO deprecated configuration remove later
if (pluginInfo != null) {
highlighter = core.createInitInstance(pluginInfo, SolrHighlighter.class, null, DefaultSolrHighlighter.class.getName());
highlighter.initalize(core.getSolrConfig());
} else {
DefaultSolrHighlighter defHighlighter = new DefaultSolrHighlighter(core);
defHighlighter.init(PluginInfo.EMPTY_INFO);

View File

@ -282,7 +282,6 @@ public class HttpShardHandler extends ShardHandler {
clusterState = zkController.getClusterState();
String shardKeys = params.get(ShardParams._ROUTE_);
if(shardKeys == null) shardKeys = params.get(ShardParams.SHARD_KEYS); // deprecated
// This will be the complete list of slices we need to query for this request.
slices = new HashMap<>();
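
Requests that still pass the removed shard.keys parameter should switch to _route_; a short SolrJ fragment, with an illustrative route key:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.ShardParams;

SolrQuery query = new SolrQuery("*:*");
query.set(ShardParams._ROUTE_, "customerA!");  // was: query.set(ShardParams.SHARD_KEYS, "customerA!")
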

View File

@ -16,18 +16,6 @@
*/
package org.apache.solr.highlight;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
@ -59,7 +47,6 @@ import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
@ -71,6 +58,18 @@ import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
/**
*
* @since solr 1.3
@ -161,37 +160,6 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
boundaryScanners.put("", boundaryScanner);
boundaryScanners.put(null, boundaryScanner);
initialized = true;
}
//just for back-compat with the deprecated method
private boolean initialized = false;
@Override
@Deprecated
public void initalize( SolrConfig config) {
if (initialized) return;
SolrFragmenter frag = new GapFragmenter();
fragmenters.put("", frag);
fragmenters.put(null, frag);
SolrFormatter fmt = new HtmlFormatter();
formatters.put("", fmt);
formatters.put(null, fmt);
SolrEncoder enc = new DefaultEncoder();
encoders.put("", enc);
encoders.put(null, enc);
SolrFragListBuilder fragListBuilder = new SimpleFragListBuilder();
fragListBuilders.put( "", fragListBuilder );
fragListBuilders.put( null, fragListBuilder );
SolrFragmentsBuilder fragsBuilder = new ScoreOrderFragmentsBuilder();
fragmentsBuilders.put( "", fragsBuilder );
fragmentsBuilders.put( null, fragsBuilder );
SolrBoundaryScanner boundaryScanner = new SimpleBoundaryScanner();
boundaryScanners.put("", boundaryScanner);
boundaryScanners.put(null, boundaryScanner);
}
/**

View File

@ -108,9 +108,6 @@ import java.util.Set;
*/
public class PostingsSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized {
@Override
public void initalize(SolrConfig config) {}
@Override
public void init(PluginInfo info) {}

View File

@ -16,21 +16,20 @@ package org.apache.solr.highlight;
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.lucene.search.Query;
import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocList;
import org.apache.solr.util.SolrPluginUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public abstract class SolrHighlighter
{
@ -38,9 +37,6 @@ public abstract class SolrHighlighter
public static int DEFAULT_PHRASE_LIMIT = 5000;
public static Logger log = LoggerFactory.getLogger(SolrHighlighter.class);
@Deprecated
public abstract void initalize( SolrConfig config );
/**
* Check whether Highlighting is enabled for this request.
* @param params The params controlling Highlighting
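
Custom highlighter plugins now receive their configuration only through PluginInfoInitialized.init(PluginInfo); the abstract initalize(SolrConfig) hook no longer exists. A hypothetical no-op subclass as a sketch:

import java.io.IOException;
import org.apache.lucene.search.Query;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.highlight.SolrHighlighter;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocList;
import org.apache.solr.util.plugin.PluginInfoInitialized;

public class NoOpHighlighter extends SolrHighlighter implements PluginInfoInitialized {
  @Override
  public void init(PluginInfo info) {
    // read any plugin configuration from info; replaces the removed initalize(SolrConfig)
  }

  @Override
  public NamedList<Object> doHighlighting(DocList docs, Query query, SolrQueryRequest req,
                                          String[] defaultFields) throws IOException {
    return new SimpleOrderedMap<>();  // deliberately produces no highlighting
  }
}
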

View File

@ -27,22 +27,6 @@ interface CharStream {
*/
char readChar() throws java.io.IOException;
@Deprecated
/**
* Returns the column position of the character last read.
* @deprecated
* @see #getEndColumn
*/
int getColumn();
@Deprecated
/**
* Returns the line number of the character last read.
* @deprecated
* @see #getEndLine
*/
int getLine();
/**
* Returns the column number of the last character for current token (being
* matched after the last call to BeginToken).

View File

@ -108,15 +108,6 @@ public final class FastCharStream implements CharStream {
}
}
@Override
public final int getColumn() {
return bufferStart + bufferPosition;
}
@Override
public final int getLine() {
return 1;
}
@Override
public final int getEndColumn() {
return bufferStart + bufferPosition;
}

View File

@ -17,26 +17,6 @@
package org.apache.solr.request;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.EnumSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.Semaphore;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.LeafReader;
@ -95,6 +75,26 @@ import org.apache.solr.util.BoundedTreeSet;
import org.apache.solr.util.DateMathParser;
import org.apache.solr.util.DefaultSolrThreadFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.EnumSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.Semaphore;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* A class that generates simple Facet information for a request.
*

View File

@ -96,10 +96,6 @@ public final class SchemaField extends FieldProperties {
public boolean storeTermOffsets() { return (properties & STORE_TERMOFFSETS)!=0; }
public boolean omitNorms() { return (properties & OMIT_NORMS)!=0; }
/** @deprecated Use {@link #omitTermFreqAndPositions} */
@Deprecated
public boolean omitTf() { return omitTermFreqAndPositions(); }
public boolean omitTermFreqAndPositions() { return (properties & OMIT_TF_POSITIONS)!=0; }
public boolean omitPositions() { return (properties & OMIT_POSITIONS)!=0; }
public boolean storeOffsetsWithPositions() { return (properties & STORE_OFFSETS)!=0; }

View File

@ -33,7 +33,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.MapSolrParams;
@ -45,8 +44,8 @@ import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import java.io.IOException;
import java.util.Collections;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
@ -222,17 +221,6 @@ public class QueryParsing {
return new MapSolrParams(localParams);
}
/**
* Returns the Sort object represented by the string, or null if default sort
* by score descending should be used.
* @see #parseSortSpec
* @deprecated use {@link #parseSortSpec}
*/
@Deprecated
public static Sort parseSort(String sortSpec, SolrQueryRequest req) {
return parseSortSpec(sortSpec, req).getSort();
}
/**
* <p>
* The form of the sort specification string currently parsed is:

View File

@ -19,13 +19,11 @@ package org.apache.solr.search;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.solr.schema.SchemaField;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/***
* SortSpec encapsulates a Lucene Sort and a count of the number of documents
* to return.
@ -44,28 +42,6 @@ public class SortSpec
setSortAndFields(sort, Arrays.asList(fields));
}
/** @deprecated Specify both Sort and SchemaField[] when constructing */
@Deprecated
public SortSpec(Sort sort, int num) {
this(sort,0,num);
}
/** @deprecated Specify both Sort and SchemaField[] when constructing */
@Deprecated
public SortSpec(Sort sort, int offset, int num) {
setSort(sort);
this.offset=offset;
this.num=num;
}
/** @deprecated use {@link #setSortAndFields} */
@Deprecated
public void setSort( Sort s )
{
sort = s;
fields = Collections.unmodifiableList(Arrays.asList(new SchemaField[s.getSort().length]));
}
/**
* the specified SchemaFields must correspond one to one with the Sort's SortFields,
* using null where appropriate.
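
Callers of the removed QueryParsing.parseSort() and of the removed SortSpec constructors and setters go through parseSortSpec() and the resulting SortSpec instead. A hedged fragment; 'req' stands for an existing SolrQueryRequest and the sort string is arbitrary:

import org.apache.lucene.search.Sort;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SortSpec;

SortSpec spec = QueryParsing.parseSortSpec("price desc", req);
Sort sort = spec.getSort();  // null means the default sort, score descending
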

View File

@ -23,7 +23,6 @@ import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.Version;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.Config;
import org.apache.solr.core.MapSerializable;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.PluginInfo;
@ -48,13 +47,7 @@ public class SolrIndexConfig implements MapSerializable {
public static final String DEFAULT_MERGE_SCHEDULER_CLASSNAME = ConcurrentMergeScheduler.class.getName();
public final Version luceneVersion;
/**
* The explicit value of &lt;useCompoundFile&gt; specified on this index config
* @deprecated use {@link #getUseCompoundFile}
*/
@Deprecated
public final boolean useCompoundFile;
private boolean effectiveUseCompountFileSetting;
private boolean effectiveUseCompoundFileSetting;
public final int maxBufferedDocs;
public final int maxMergeDocs;
@ -84,7 +77,7 @@ public class SolrIndexConfig implements MapSerializable {
@SuppressWarnings("deprecation")
private SolrIndexConfig(SolrConfig solrConfig) {
luceneVersion = solrConfig.luceneMatchVersion;
useCompoundFile = effectiveUseCompountFileSetting = false;
effectiveUseCompoundFileSetting = false;
maxBufferedDocs = -1;
maxMergeDocs = -1;
maxIndexingThreads = IndexWriterConfig.DEFAULT_MAX_THREAD_STATES;
@ -134,8 +127,7 @@ public class SolrIndexConfig implements MapSerializable {
true);
defaultMergePolicyClassName = def.defaultMergePolicyClassName;
useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
effectiveUseCompountFileSetting = useCompoundFile;
effectiveUseCompoundFileSetting = solrConfig.getBool(prefix+"/useCompoundFile", def.getUseCompoundFile());
maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
@ -306,7 +298,7 @@ public class SolrIndexConfig implements MapSerializable {
}
public boolean getUseCompoundFile() {
return effectiveUseCompountFileSetting;
return effectiveUseCompoundFileSetting;
}
/**
@ -330,7 +322,7 @@ public class SolrIndexConfig implements MapSerializable {
if (useCFSArg instanceof Boolean) {
boolean cfs = ((Boolean)useCFSArg).booleanValue();
log.warn("Please update your config to specify <useCompoundFile>"+cfs+"</useCompoundFile> directly in your <indexConfig> settings.");
effectiveUseCompountFileSetting = cfs;
effectiveUseCompoundFileSetting = cfs;
} else {
log.error("MergePolicy's 'useCompoundFile' init arg is not a boolean, can not apply back compat logic to apply to the IndexWriterConfig: " + useCFSArg.toString());
}
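
Code that read the removed public useCompoundFile field should use the accessor instead; a one-line fragment, where 'solrConfig' stands for the core's SolrConfig:

boolean useCompoundFile = solrConfig.indexConfig.getUseCompoundFile();  // was: solrConfig.indexConfig.useCompoundFile
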

View File

@ -17,24 +17,6 @@ package org.apache.solr.update.processor;
* limitations under the License.
*/
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.solr.client.solrj.request.UpdateRequest;
@ -91,6 +73,24 @@ import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
// NOT mt-safe... create a new processor for each add thread
// TODO: we really should not wait for distrib after local? unless a certain replication factor is asked for
public class DistributedUpdateProcessor extends UpdateRequestProcessor {
@ -1254,7 +1254,6 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
SolrParams params = req.getParams();
String route = params.get(ShardParams._ROUTE_);
if(route == null) route = params.get(ShardParams.SHARD_KEYS);// deprecated . kept for backcompat
Collection<Slice> slices = coll.getRouter().getSearchSlices(route, params, coll);
List<Node> leaders = new ArrayList<>(slices.size());

View File

@ -1,26 +0,0 @@
package org.apache.solr.util.plugin;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @deprecated This interface has been kept for backwards compatibility and will
* be removed in (5.0). Use {@link org.apache.lucene.analysis.util.ResourceLoaderAware}.
*/
@Deprecated
public interface ResourceLoaderAware extends org.apache.lucene.analysis.util.ResourceLoaderAware {
}

View File

@ -1,321 +0,0 @@
package org.apache.solr.analysis;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.util.TestUtil;
import org.junit.Ignore;
public class LegacyHTMLStripCharFilterTest extends BaseTokenStreamTestCase {
//this is some text here is a link and another link . This is an entity: & plus a <. Here is an &
//
public void test() throws IOException {
String html = "<div class=\"foo\">this is some text</div> here is a <a href=\"#bar\">link</a> and " +
"another <a href=\"http://lucene.apache.org/\">link</a>. " +
"This is an entity: &amp; plus a &lt;. Here is an &. <!-- is a comment -->";
String gold = " this is some text here is a link and " +
"another link . " +
"This is an entity: & plus a <. Here is an &. ";
LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new StringReader(html));
StringBuilder builder = new StringBuilder();
int ch = -1;
char [] goldArray = gold.toCharArray();
int position = 0;
while ((ch = reader.read()) != -1){
char theChar = (char) ch;
builder.append(theChar);
assertTrue("\"" + theChar + "\"" + " at position: " + position + " does not equal: " + goldArray[position]
+ " Buffer so far: " + builder + "<EOB>", theChar == goldArray[position]);
position++;
}
assertEquals(gold, builder.toString());
}
//Some sanity checks, but not a full-fledged check
public void testHTML() throws Exception {
InputStream stream = getClass().getResourceAsStream("htmlStripReaderTest.html");
LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new InputStreamReader(stream, StandardCharsets.UTF_8));
StringBuilder builder = new StringBuilder();
int ch = -1;
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
String str = builder.toString();
assertTrue("Entity not properly escaped", str.indexOf("&lt;") == -1);//there is one > in the text
assertTrue("Forrest should have been stripped out", str.indexOf("forrest") == -1 && str.indexOf("Forrest") == -1);
assertTrue("File should start with 'Welcome to Solr' after trimming", str.trim().startsWith("Welcome to Solr"));
assertTrue("File should start with 'Foundation.' after trimming", str.trim().endsWith("Foundation."));
}
public void testGamma() throws Exception {
String test = "&Gamma;";
String gold = "\u0393";
Set<String> set = new HashSet<>();
set.add("reserved");
Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
StringBuilder builder = new StringBuilder();
int ch = 0;
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
String result = builder.toString();
// System.out.println("Resu: " + result + "<EOL>");
// System.out.println("Gold: " + gold + "<EOL>");
assertTrue(result + " is not equal to " + gold + "<EOS>", result.equals(gold) == true);
}
public void testEntities() throws Exception {
String test = "&nbsp; &lt;foo&gt; &Uuml;bermensch &#61; &Gamma; bar &#x393;";
String gold = " <foo> \u00DCbermensch = \u0393 bar \u0393";
Set<String> set = new HashSet<>();
set.add("reserved");
Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
StringBuilder builder = new StringBuilder();
int ch = 0;
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
String result = builder.toString();
// System.out.println("Resu: " + result + "<EOL>");
// System.out.println("Gold: " + gold + "<EOL>");
assertTrue(result + " is not equal to " + gold + "<EOS>", result.equals(gold) == true);
}
public void testMoreEntities() throws Exception {
String test = "&nbsp; &lt;junk/&gt; &nbsp; &#33; &#64; and &#8217;";
String gold = " <junk/> ! @ and ";
Set<String> set = new HashSet<>();
set.add("reserved");
Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
StringBuilder builder = new StringBuilder();
int ch = 0;
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
String result = builder.toString();
// System.out.println("Resu: " + result + "<EOL>");
// System.out.println("Gold: " + gold + "<EOL>");
assertTrue(result + " is not equal to " + gold, result.equals(gold) == true);
}
public void testReserved() throws Exception {
String test = "aaa bbb <reserved ccc=\"ddddd\"> eeee </reserved> ffff <reserved ggg=\"hhhh\"/> <other/>";
Set<String> set = new HashSet<>();
set.add("reserved");
Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
StringBuilder builder = new StringBuilder();
int ch = 0;
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
String result = builder.toString();
// System.out.println("Result: " + result);
assertTrue("Escaped tag not preserved: " + result.indexOf("reserved"), result.indexOf("reserved") == 9);
assertTrue("Escaped tag not preserved: " + result.indexOf("reserved", 15), result.indexOf("reserved", 15) == 38);
assertTrue("Escaped tag not preserved: " + result.indexOf("reserved", 41), result.indexOf("reserved", 41) == 54);
assertTrue("Other tag should be removed", result.indexOf("other") == -1);
}
public void testMalformedHTML() throws Exception {
String test = "a <a hr<ef=aa<a>> </close</a>";
String gold = "a <a hr<ef=aa > </close ";
Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test));
StringBuilder builder = new StringBuilder();
int ch = 0;
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
String result = builder.toString();
// System.out.println("Resu: " + result + "<EOL>");
// System.out.println("Gold: " + gold + "<EOL>");
assertTrue(result + " is not equal to " + gold + "<EOS>", result.equals(gold) == true);
}
public void testBufferOverflow() throws Exception {
StringBuilder testBuilder = new StringBuilder(LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 50);
testBuilder.append("ah<?> ??????");
appendChars(testBuilder, LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);
processBuffer(testBuilder.toString(), "Failed on pseudo proc. instr.");//processing instructions
testBuilder.setLength(0);
testBuilder.append("<!--");//comments
appendChars(testBuilder, 3*LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);//comments have two lookaheads
testBuilder.append("-->foo");
processBuffer(testBuilder.toString(), "Failed w/ comment");
testBuilder.setLength(0);
testBuilder.append("<?");
appendChars(testBuilder, LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);
testBuilder.append("?>");
processBuffer(testBuilder.toString(), "Failed with proc. instr.");
testBuilder.setLength(0);
testBuilder.append("<b ");
appendChars(testBuilder, LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);
testBuilder.append("/>");
processBuffer(testBuilder.toString(), "Failed on tag");
}
private void appendChars(StringBuilder testBuilder, int numChars) {
int i1 = numChars / 2;
for (int i = 0; i < i1; i++){
testBuilder.append('a').append(' ');//tack on enough to go beyond the mark readahead limit, since <?> makes LegacyHTMLStripCharFilter think it is a processing instruction
}
}
private void processBuffer(String test, String assertMsg) throws IOException {
// System.out.println("-------------------processBuffer----------");
Reader reader = new LegacyHTMLStripCharFilter(new BufferedReader(new StringReader(test)));//force the use of BufferedReader
int ch = 0;
StringBuilder builder = new StringBuilder();
try {
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
} finally {
// System.out.println("String (trimmed): " + builder.toString().trim() + "<EOS>");
}
assertTrue(assertMsg + "::: " + builder.toString() + " is not equal to " + test, builder.toString().equals(test) == true);
}
public void testComment() throws Exception {
String test = "<!--- three dashes, still a valid comment ---> ";
String gold = " ";
Reader reader = new LegacyHTMLStripCharFilter(new BufferedReader(new StringReader(test)));//force the use of BufferedReader
int ch = 0;
StringBuilder builder = new StringBuilder();
try {
while ((ch = reader.read()) != -1){
builder.append((char)ch);
}
} finally {
// System.out.println("String: " + builder.toString());
}
assertTrue(builder.toString() + " is not equal to " + gold + "<EOS>", builder.toString().equals(gold) == true);
}
public void doTestOffsets(String in) throws Exception {
LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new BufferedReader(new StringReader(in)));
int ch = 0;
int off = 0; // offset in the reader
int strOff = -1; // offset in the original string
while ((ch = reader.read()) != -1) {
int correctedOff = reader.correctOffset(off);
if (ch == 'X') {
strOff = in.indexOf('X',strOff+1);
assertEquals(strOff, correctedOff);
}
off++;
}
}
public void testOffsets() throws Exception {
doTestOffsets("hello X how X are you");
doTestOffsets("hello <p> X<p> how <p>X are you");
doTestOffsets("X &amp; X &#40; X &lt; &gt; X");
// test backtracking
doTestOffsets("X < &zz >X &# < X > < &l > &g < X");
}
@Ignore("broken offsets: see LUCENE-2208")
public void testRandom() throws Exception {
Analyzer analyzer = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer, tokenizer);
}
@Override
protected Reader initReader(String fieldName, Reader reader) {
return new LegacyHTMLStripCharFilter(new BufferedReader(reader));
}
};
int numRounds = RANDOM_MULTIPLIER * 10000;
checkRandomData(random(), analyzer, numRounds);
}
public void testRandomBrokenHTML() throws Exception {
int maxNumElements = 10000;
String text = TestUtil.randomHtmlishString(random(), maxNumElements);
Reader reader
= new LegacyHTMLStripCharFilter(new StringReader(text));
while (reader.read() != -1);
}
public void testRandomText() throws Exception {
StringBuilder text = new StringBuilder();
int minNumWords = 10;
int maxNumWords = 10000;
int minWordLength = 3;
int maxWordLength = 20;
int numWords = TestUtil.nextInt(random(), minNumWords, maxNumWords);
switch (TestUtil.nextInt(random(), 0, 4)) {
case 0: {
for (int wordNum = 0 ; wordNum < numWords ; ++wordNum) {
text.append(TestUtil.randomUnicodeString(random(), maxWordLength));
text.append(' ');
}
break;
}
case 1: {
for (int wordNum = 0 ; wordNum < numWords ; ++wordNum) {
text.append(TestUtil.randomRealisticUnicodeString
(random(), minWordLength, maxWordLength));
text.append(' ');
}
break;
}
default: { // ASCII 50% of the time
for (int wordNum = 0 ; wordNum < numWords ; ++wordNum) {
text.append(TestUtil.randomSimpleString(random()));
text.append(' ');
}
}
}
Reader reader = new LegacyHTMLStripCharFilter
(new StringReader(text.toString()));
while (reader.read() != -1);
}
}

View File

@ -17,32 +17,15 @@ package org.apache.solr.cloud;
* limitations under the License.
*/
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.CompositeIdRouter;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.servlet.SolrDispatchFilter;
import org.apache.solr.update.DirectUpdateHandler2;
import org.junit.BeforeClass;
import org.junit.Ignore;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class ShardRoutingTest extends AbstractFullDistribZkTestBase {
@ -137,7 +120,7 @@ public class ShardRoutingTest extends AbstractFullDistribZkTestBase {
private void doHashingTest() throws Exception {
log.info("### STARTING doHashingTest");
assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size());
String shardKeys = ShardParams.SHARD_KEYS;
String shardKeys = ShardParams._ROUTE_;
// for now, we know how ranges will be distributed to shards.
// may have to look it up in clusterstate if that assumption changes.
@ -282,12 +265,12 @@ public class ShardRoutingTest extends AbstractFullDistribZkTestBase {
assertEquals(1, nEnd - nStart); // short circuit should prevent distrib search
nStart = getNumRequests();
replica.client.solrClient.query( params("q","*:*", "shard.keys","b!") );
replica.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!") );
nEnd = getNumRequests();
assertEquals(1, nEnd - nStart); // short circuit should prevent distrib search
nStart = getNumRequests();
leader2.client.solrClient.query( params("q","*:*", "shard.keys","b!") );
leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!") );
nEnd = getNumRequests();
assertEquals(3, nEnd - nStart); // original + 2 phase distrib search. we could improve this!
@ -297,12 +280,12 @@ public class ShardRoutingTest extends AbstractFullDistribZkTestBase {
assertEquals(9, nEnd - nStart); // original + 2 phase distrib search * 4 shards.
nStart = getNumRequests();
leader2.client.solrClient.query( params("q","*:*", "shard.keys","b!,d!") );
leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!,d!") );
nEnd = getNumRequests();
assertEquals(5, nEnd - nStart); // original + 2 phase distrib search * 2 shards.
nStart = getNumRequests();
leader2.client.solrClient.query( params("q","*:*", "shard.keys","b!,f1!f2!") );
leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!,f1!f2!") );
nEnd = getNumRequests();
assertEquals(5, nEnd - nStart);
}
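
With shard.keys removed, client code routes requests with the _route_ parameter instead, as the test changes above show. A minimal SolrJ sketch of the pattern, assuming an already-configured SolrClient pointing at a collection that uses the composite-id router (the class and method names here are illustrative, not part of this commit):

import java.io.IOException;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.params.ShardParams;

public class RouteParamSketch {

  /** Counts documents on the shard(s) owning the given route key, e.g. "b!". */
  public static long countForRoute(SolrClient client, String routeKey)
      throws SolrServerException, IOException {
    SolrQuery q = new SolrQuery("*:*");
    // ShardParams._ROUTE_ ("_route_") replaces the removed "shard.keys" parameter
    q.set(ShardParams._ROUTE_, routeKey);
    QueryResponse rsp = client.query(q);
    return rsp.getResults().getNumFound();
  }
}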

View File

@ -17,17 +17,14 @@
package org.apache.solr.core;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.handler.admin.ShowFileRequestHandler;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.update.SolrIndexConfig;
import org.apache.solr.util.RefCounted;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.IndexSchemaFactory;
import org.apache.solr.update.SolrIndexConfig;
import org.junit.BeforeClass;
import org.junit.Test;
import org.w3c.dom.Node;
@ -36,7 +33,6 @@ import org.w3c.dom.NodeList;
import javax.xml.xpath.XPathConstants;
import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
public class TestConfig extends SolrTestCaseJ4 {
@ -117,7 +113,7 @@ public class TestConfig extends SolrTestCaseJ4 {
SolrIndexConfig sic = sc.indexConfig;
assertEquals("default ramBufferSizeMB", 100.0D, sic.ramBufferSizeMB, 0.0D);
assertEquals("default LockType", SolrIndexConfig.LOCK_TYPE_NATIVE, sic.lockType);
assertEquals("default useCompoundFile", false, sic.useCompoundFile);
assertEquals("default useCompoundFile", false, sic.getUseCompoundFile());
IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);
IndexWriterConfig iwc = sic.toIndexWriterConfig(indexSchema);
@ -140,7 +136,7 @@ public class TestConfig extends SolrTestCaseJ4 {
Double.parseDouble(System.getProperty("solr.tests.ramBufferSizeMB")),
sic.ramBufferSizeMB, 0.0D);
assertEquals("useCompoundFile sysprop",
Boolean.parseBoolean(System.getProperty("useCompoundFile")), sic.useCompoundFile);
Boolean.parseBoolean(System.getProperty("useCompoundFile")), sic.getUseCompoundFile());
}
}

View File

@ -16,15 +16,14 @@
*/
package org.apache.solr.highlight;
import java.io.IOException;
import org.apache.lucene.search.Query;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocList;
import java.io.IOException;
public class DummyHighlighter extends SolrHighlighter {
@Override
@ -35,9 +34,4 @@ public class DummyHighlighter extends SolrHighlighter {
return fragments;
}
@Override
public void initalize(SolrConfig config) {
// do nothing
}
}

View File

@ -20,10 +20,9 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.SortSpec;
import org.apache.solr.common.SolrException;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.SchemaField;
import org.junit.BeforeClass;
import org.junit.Test;
@ -79,7 +78,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
SortSpec spec;
SolrQueryRequest req = req();
sort = QueryParsing.parseSort("score desc", req);
sort = QueryParsing.parseSortSpec("score desc", req).getSort();
assertNull("sort", sort);//only 1 thing in the list, no Sort specified
spec = QueryParsing.parseSortSpec("score desc", req);
@ -89,7 +88,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals(0, spec.getSchemaFields().size());
// SOLR-4458 - using different case variations of asc and desc
sort = QueryParsing.parseSort("score aSc", req);
sort = QueryParsing.parseSortSpec("score aSc", req).getSort();
SortField[] flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.SCORE);
assertTrue(flds[0].getReverse());
@ -102,7 +101,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals(1, spec.getSchemaFields().size());
assertNull(spec.getSchemaFields().get(0));
sort = QueryParsing.parseSort("weight dEsC", req);
sort = QueryParsing.parseSortSpec("weight dEsC", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
@ -118,7 +117,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertNotNull(spec.getSchemaFields().get(0));
assertEquals("weight", spec.getSchemaFields().get(0).getName());
sort = QueryParsing.parseSort("weight desc,bday ASC", req);
sort = QueryParsing.parseSortSpec("weight desc,bday ASC", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
@ -127,7 +126,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals(flds[1].getField(), "bday");
assertEquals(flds[1].getReverse(), false);
//order aliases
sort = QueryParsing.parseSort("weight top,bday asc", req);
sort = QueryParsing.parseSortSpec("weight top,bday asc", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
@ -135,7 +134,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals(flds[1].getType(), SortField.Type.LONG);
assertEquals(flds[1].getField(), "bday");
assertEquals(flds[1].getReverse(), false);
sort = QueryParsing.parseSort("weight top,bday bottom", req);
sort = QueryParsing.parseSortSpec("weight top,bday bottom", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
@ -145,20 +144,20 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals(flds[1].getReverse(), false);
//test weird spacing
sort = QueryParsing.parseSort("weight DESC, bday asc", req);
sort = QueryParsing.parseSortSpec("weight DESC, bday asc", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
assertEquals(flds[1].getField(), "bday");
assertEquals(flds[1].getType(), SortField.Type.LONG);
//handles trailing commas
sort = QueryParsing.parseSort("weight desc,", req);
sort = QueryParsing.parseSortSpec("weight desc,", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
//test functions
sort = QueryParsing.parseSort("pow(weight, 2) desc", req);
sort = QueryParsing.parseSortSpec("pow(weight, 2) desc", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
//Not thrilled about the fragility of string matching here, but...
@ -166,12 +165,12 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals(flds[0].getField(), "pow(float(weight),const(2))");
//test functions (more deep)
sort = QueryParsing.parseSort("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req);
sort = QueryParsing.parseSortSpec("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
assertEquals(flds[0].getField(), "sum(product(float(r_f1),sum(float(d_f1),float(t_f1),const(1.0))),float(a_f1))");
sort = QueryParsing.parseSort("pow(weight, 2.0) desc", req);
sort = QueryParsing.parseSortSpec("pow(weight, 2.0) desc", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
//Not thrilled about the fragility of string matching here, but...
@ -202,19 +201,19 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
assertEquals("bday", schemaFlds.get(2).getName());
//handles trailing commas
sort = QueryParsing.parseSort("weight desc,", req);
sort = QueryParsing.parseSortSpec("weight desc,", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.FLOAT);
assertEquals(flds[0].getField(), "weight");
//Test literals in functions
sort = QueryParsing.parseSort("strdist(foo_s1, \"junk\", jw) desc", req);
sort = QueryParsing.parseSortSpec("strdist(foo_s1, \"junk\", jw) desc", req).getSort();
flds = sort.getSort();
assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
//the value sources get wrapped, so the out field is different than the input
assertEquals(flds[0].getField(), "strdist(str(foo_s1),literal(junk), dist=org.apache.lucene.search.spell.JaroWinklerDistance)");
sort = QueryParsing.parseSort("", req);
sort = QueryParsing.parseSortSpec("", req).getSort();
assertNull(sort);
spec = QueryParsing.parseSortSpec("", req);
@ -231,40 +230,40 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
//test some bad vals
try {
sort = QueryParsing.parseSort("weight, desc", req);
sort = QueryParsing.parseSortSpec("weight, desc", req).getSort();
assertTrue(false);
} catch (SolrException e) {
//expected
}
try {
sort = QueryParsing.parseSort("w", req);
sort = QueryParsing.parseSortSpec("w", req).getSort();
assertTrue(false);
} catch (SolrException e) {
//expected
}
try {
sort = QueryParsing.parseSort("weight desc, bday", req);
sort = QueryParsing.parseSortSpec("weight desc, bday", req).getSort();
assertTrue(false);
} catch (SolrException e) {
}
try {
//bad number of commas
sort = QueryParsing.parseSort("pow(weight,,2) desc, bday asc", req);
sort = QueryParsing.parseSortSpec("pow(weight,,2) desc, bday asc", req).getSort();
assertTrue(false);
} catch (SolrException e) {
}
try {
//bad function
sort = QueryParsing.parseSort("pow() desc, bday asc", req);
sort = QueryParsing.parseSortSpec("pow() desc, bday asc", req).getSort();
assertTrue(false);
} catch (SolrException e) {
}
try {
//bad number of parens
sort = QueryParsing.parseSort("pow((weight,2) desc, bday asc", req);
sort = QueryParsing.parseSortSpec("pow((weight,2) desc, bday asc", req).getSort();
assertTrue(false);
} catch (SolrException e) {
}
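
For callers of the removed QueryParsing.parseSort(String, SolrQueryRequest), the replacement used throughout this test is to parse a SortSpec and take the Sort from it. A rough sketch of that pattern in a SolrTestCaseJ4-based test; the core config, schema name and the "weight"/"bday" fields are assumed to exist in the test fixture and are not defined by this commit:

import org.apache.lucene.search.Sort;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SortSpec;
import org.junit.BeforeClass;
import org.junit.Test;

public class ParseSortSpecSketch extends SolrTestCaseJ4 {

  @BeforeClass
  public static void beforeTests() throws Exception {
    initCore("solrconfig.xml", "schema.xml"); // placeholder test core
  }

  @Test
  public void parseSortViaSortSpec() throws Exception {
    SolrQueryRequest req = req();
    try {
      // QueryParsing.parseSort(...) is gone; go through the SortSpec instead
      SortSpec spec = QueryParsing.parseSortSpec("weight desc, bday asc", req);
      Sort sort = spec.getSort();
      assertEquals(2, sort.getSort().length);
      assertEquals("weight", sort.getSort()[0].getField());
    } finally {
      req.close();
    }
  }
}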

View File

@ -17,12 +17,7 @@
package org.apache.solr.update.processor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.util.Constants;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
import org.apache.solr.client.solrj.request.UpdateRequest;
@ -30,20 +25,23 @@ import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.BinaryUpdateRequestHandler;
import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
*
*/

View File

@ -17,13 +17,6 @@
package org.apache.solr.client.solrj;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.HighlightParams;
@ -32,6 +25,13 @@ import org.apache.solr.common.params.StatsParams;
import org.apache.solr.common.params.TermsParams;
import org.apache.solr.common.util.DateUtil;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
/**
* This is an augmented SolrParams with get/set/add fields for common fields used
@ -443,14 +443,6 @@ public class SolrQuery extends ModifiableSolrParams
return this;
}
/**
* @deprecated use {@link #setFacetMissing(Boolean)}
*/
@Deprecated
public SolrQuery setMissing(String fld) {
return setFacetMissing(Boolean.valueOf(fld));
}
/** get facet sort
*
* @return facet sort or default of {@link FacetParams#FACET_SORT_COUNT}
@ -459,18 +451,6 @@ public class SolrQuery extends ModifiableSolrParams
return this.get(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT);
}
/** get facet sort
*
* @return facet sort or default of true. <br />
* true corresponds to
* {@link FacetParams#FACET_SORT_COUNT} and <br />false to {@link FacetParams#FACET_SORT_INDEX}
*
* @deprecated Use {@link #getFacetSortString()} instead.
*/
@Deprecated
public boolean getFacetSort() {
return this.get(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT).equals(FacetParams.FACET_SORT_COUNT);
}
/** set facet sort
*
@ -482,19 +462,6 @@ public class SolrQuery extends ModifiableSolrParams
return this;
}
/** set facet sort
*
* @param sort sort facets
* @return this
* @deprecated Use {@link #setFacetSort(String)} instead, true corresponds to
* {@link FacetParams#FACET_SORT_COUNT} and false to {@link FacetParams#FACET_SORT_INDEX}.
*/
@Deprecated
public SolrQuery setFacetSort(boolean sort) {
this.set(FacetParams.FACET_SORT, sort == true ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_INDEX);
return this;
}
/** add highlight field
*
* @param f field to enable for highlighting
@ -571,57 +538,6 @@ public class SolrQuery extends ModifiableSolrParams
return this.get(HighlightParams.SIMPLE_POST, "");
}
/**
* Replaces the sort string with a single sort field.
* @deprecated Use {@link #setSort(SortClause)} instead, which is part
* of an api handling a wider range of sort specifications.
*/
@Deprecated
public SolrQuery setSortField(String field, ORDER order) {
this.remove(CommonParams.SORT);
addValueToParam(CommonParams.SORT, toSortString(field, order));
return this;
}
/**
* Adds a sort field to the end of the sort string.
* @deprecated Use {@link #addSort(SortClause)} instead, which is part
* of an api handling a wider range of sort specifications.
*/
@Deprecated
public SolrQuery addSortField(String field, ORDER order) {
return addValueToParam(CommonParams.SORT, toSortString(field, order));
}
/**
* Removes a sort field from the sort string.
* @deprecated Use {@link #removeSort(SortClause)} instead, which is part
* of an api handling a wider range of sort specifications.
*/
@Deprecated
public SolrQuery removeSortField(String field, ORDER order) {
String[] sorts = getSortFields();
if (sorts != null) {
String removeSort = toSortString(field, order);
String s = join(sorts, ",", removeSort);
if (s.length()==0) s=null;
this.set(CommonParams.SORT, s);
}
return this;
}
/**
* Gets an array of sort specifications.
* @deprecated Use {@link #getSorts()} instead, which is part
* of an api handling a wider range of sort specifications.
*/
@Deprecated
public String[] getSortFields() {
String s = getSortField();
if (s==null) return null;
return s.trim().split(", *");
}
/**
* Gets the raw sort field, as it will be sent to Solr.
* <p>
@ -978,22 +894,6 @@ public class SolrQuery extends ModifiableSolrParams
return this.get(CommonParams.QT);
}
/**
* @deprecated See {@link #setRequestHandler(String)}.
*/
@Deprecated
public SolrQuery setQueryType(String qt) {
return setRequestHandler(qt);
}
/**
* @deprecated See {@link #getRequestHandler()}.
*/
@Deprecated
public String getQueryType() {
return getRequestHandler();
}
/**
* @return this
* @see ModifiableSolrParams#set(String,String[])
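
Taken together, the removed SolrQuery setters map onto the SortClause/String-based API that remains. A hedged migration sketch using only the replacement methods named in the deprecation notes above (the "/select" handler path and the field names are illustrative):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.FacetParams;

public class SolrQueryMigrationSketch {

  public static SolrQuery build() {
    SolrQuery q = new SolrQuery("*:*");

    // setSortField/addSortField/removeSortField -> the SortClause-based API
    q.setSort(new SolrQuery.SortClause("price", SolrQuery.ORDER.desc));
    q.addSort(new SolrQuery.SortClause("date", SolrQuery.ORDER.asc));
    q.removeSort("date");

    // setMissing(String) -> setFacetMissing(Boolean)
    q.setFacetMissing(Boolean.TRUE);

    // setFacetSort(boolean) -> setFacetSort(String) with the FacetParams constants
    q.setFacetSort(FacetParams.FACET_SORT_INDEX);

    // setQueryType(String) -> setRequestHandler(String)
    q.setRequestHandler("/select");
    return q;
  }
}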

View File

@ -923,9 +923,6 @@ public class CloudSolrClient extends SolrClient {
}
String shardKeys = reqParams.get(ShardParams._ROUTE_);
if(shardKeys == null) {
shardKeys = reqParams.get(ShardParams.SHARD_KEYS); // deprecated
}
// TODO: not a big deal because of the caching, but we could avoid looking
// at every shard

View File

@ -587,14 +587,6 @@ public class CoreAdminRequest extends SolrRequest
return req.process( client );
}
@Deprecated
public static CoreAdminResponse persist(String fileName, SolrClient client) throws SolrServerException, IOException
{
CoreAdminRequest.Persist req = new CoreAdminRequest.Persist();
req.setFileName(fileName);
return req.process(client);
}
public static CoreAdminResponse mergeIndexes(String name,
String[] indexDirs, String[] srcCores, SolrClient client) throws SolrServerException,
IOException {

View File

@ -84,11 +84,6 @@ public abstract class RangeFacet<B, G> {
public static class Numeric extends RangeFacet<Number, Number> {
@Deprecated
public Numeric(String name, Number start, Number end, Number gap, Number before, Number after) {
this(name, start, end, gap, before, after, null);
}
public Numeric(String name, Number start, Number end, Number gap, Number before, Number after, Number between) {
super(name, start, end, gap, before, after, between);
}
@ -97,11 +92,6 @@ public abstract class RangeFacet<B, G> {
public static class Date extends RangeFacet<java.util.Date, String> {
@Deprecated
public Date(String name, java.util.Date start, java.util.Date end, String gap, Number before, Number after) {
this(name, start, end, gap, before, after, null);
}
public Date(String name, java.util.Date start, java.util.Date end, String gap, Number before, Number after, Number between) {
super(name, start, end, gap, before, after, between);
}

View File

@ -210,18 +210,6 @@ public class SpellCheckResponse {
return alternativeFrequencies;
}
@Deprecated
/** @see #getAlternatives */
public List<String> getSuggestions() {
return alternatives;
}
@Deprecated
/** @see #getAlternativeFrequencies */
public List<Integer> getSuggestionFrequencies() {
return alternativeFrequencies;
}
}
public class Collation {
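
The removed accessors lived on the Suggestion inner class; getAlternatives() and getAlternativeFrequencies() return the same lists. A small sketch of reading them, assuming a Suggestion obtained from a spell-check response (the printing logic is illustrative only):

import java.util.List;

import org.apache.solr.client.solrj.response.SpellCheckResponse;

public class SpellingAlternativesSketch {

  /** Prints each alternative of one suggestion, with its frequency when available. */
  public static void printAlternatives(SpellCheckResponse.Suggestion suggestion) {
    // Suggestion.getSuggestions()/getSuggestionFrequencies() are gone;
    // the getAlternatives()/getAlternativeFrequencies() pair exposes the same data.
    List<String> alternatives = suggestion.getAlternatives();
    List<Integer> frequencies = suggestion.getAlternativeFrequencies();
    for (int i = 0; i < alternatives.size(); i++) {
      String freq = (frequencies != null && i < frequencies.size())
          ? String.valueOf(frequencies.get(i))
          : "n/a";
      System.out.println(suggestion.getToken() + " -> " + alternatives.get(i) + " (" + freq + ")");
    }
  }
}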

View File

@ -17,21 +17,6 @@
package org.apache.solr.client.solrj.util;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URLEncoder;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;
import java.nio.ByteBuffer;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
@ -43,6 +28,18 @@ import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.DateUtil;
import org.apache.solr.common.util.XML;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URLEncoder;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
/**
*
@ -186,44 +183,6 @@ public class ClientUtils
//---------------------------------------------------------------------------------------
/**
* @deprecated Use {@link org.apache.solr.common.util.DateUtil#DEFAULT_DATE_FORMATS}
*/
@Deprecated
public static final Collection<String> fmts = DateUtil.DEFAULT_DATE_FORMATS;
/**
* Parses the given date string into a {@link Date} using the default
* set of supported date formats.
*
* @deprecated Use {@link org.apache.solr.common.util.DateUtil#parseDate(String)}
*/
@Deprecated
public static Date parseDate( String d ) throws ParseException
{
return DateUtil.parseDate(d);
}
/**
* Returns a formatter that can be used by the current thread, if needed, to
* convert Date objects to the internal representation.
*
* @deprecated use {@link org.apache.solr.common.util.DateUtil#getThreadLocalDateFormat()}
*/
@Deprecated
public static DateFormat getThreadLocalDateFormat() {
return DateUtil.getThreadLocalDateFormat();
}
/**
* @deprecated Use {@link org.apache.solr.common.util.DateUtil#UTC}.
*/
@Deprecated
public static TimeZone UTC = DateUtil.UTC;
/**
* See: {@link org.apache.lucene.queryparser.classic queryparser syntax}
* for more information on Escaping Special Characters
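
Code that relied on the removed ClientUtils date helpers can call DateUtil directly, as the deprecation notes above already pointed to. A minimal sketch (the class and method names here are illustrative):

import java.text.DateFormat;
import java.text.ParseException;
import java.util.Date;

import org.apache.solr.common.util.DateUtil;

public class DateUtilSketch {

  public static Date parse(String solrDate) throws ParseException {
    // ClientUtils.parseDate(...) is gone; DateUtil performs the same parsing
    return DateUtil.parseDate(solrDate);
  }

  public static String format(Date date) {
    // ClientUtils.getThreadLocalDateFormat() is gone as well
    DateFormat fmt = DateUtil.getThreadLocalDateFormat();
    return fmt.format(date);
  }
}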

View File

@ -17,6 +17,12 @@ package org.apache.solr.common.cloud;
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.noggit.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -26,12 +32,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.noggit.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Immutable state of the cloud. Normally you can get the state by using
* {@link ZkStateReader#getClusterState()}.
@ -45,18 +45,6 @@ public class ClusterState implements JSONWriter.Writable {
private final Map<String, CollectionRef> collectionStates;
private Set<String> liveNodes;
/**
* Use this constructor when ClusterState is meant for publication.
*
* hashCode and equals will only depend on liveNodes and not clusterStateVersion.
*/
@Deprecated
public ClusterState(Set<String> liveNodes,
Map<String, DocCollection> collectionStates) {
this(null, liveNodes, collectionStates);
}
/**
* Use this constructor when ClusterState is meant for consumption.
*/

View File

@ -31,9 +31,9 @@ import static org.apache.solr.common.params.ShardParams._ROUTE_;
/** This document router is for custom sharding
*/
public class ImplicitDocRouter extends DocRouter {
public static final String NAME = "implicit";
// @Deprecated
// public static final String DEFAULT_SHARD_PARAM = "_shard_";
private static Logger log = LoggerFactory
.getLogger(ImplicitDocRouter.class);
@ -49,7 +49,6 @@ public class ImplicitDocRouter extends DocRouter {
}
if(shard == null) {
Object o = sdoc.getFieldValue(_ROUTE_);
if (o == null) o = sdoc.getFieldValue("_shard_"); // deprecated, kept for back-compat; remove later
if (o != null) {
shard = o.toString();
}
@ -58,7 +57,6 @@ public class ImplicitDocRouter extends DocRouter {
if (shard == null) {
shard = params.get(_ROUTE_);
if (shard == null) shard = params.get("_shard_"); // deprecated, for back compat
}
if (shard != null) {
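
With the "_shard_" fallback removed, documents indexed into an implicitly routed collection must carry the _route_ field (and update or query requests the _route_ parameter). A small sketch, assuming a collection using the implicit router and a shard with the given name (the id field and class name are illustrative):

import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ShardParams;

public class ImplicitRoutingSketch {

  public static SolrInputDocument docForShard(String id, String shardName) {
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", id);
    // the old "_shard_" field/param fallback is gone; only _route_ is honoured now
    doc.addField(ShardParams._ROUTE_, shardName);
    return doc;
  }
}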

View File

@ -24,11 +24,11 @@ package org.apache.solr.common.params;
*/
public class AppendedSolrParams extends DefaultSolrParams {
/**
* @deprecated (3.6) Use {@link SolrParams#wrapAppended(SolrParams, SolrParams)} instead.
*/
@Deprecated
public AppendedSolrParams(SolrParams main, SolrParams extra) {
public static AppendedSolrParams wrapAppended(SolrParams params, SolrParams extra) {
return new AppendedSolrParams(params, extra);
}
private AppendedSolrParams(SolrParams main, SolrParams extra) {
super(main, extra);
}

View File

@ -24,14 +24,11 @@ import java.util.LinkedHashSet;
*
*/
public class DefaultSolrParams extends SolrParams {
protected final SolrParams params;
protected final SolrParams defaults;
/**
* @deprecated (3.6) Use {@link SolrParams#wrapDefaults(SolrParams, SolrParams)} instead.
*/
@Deprecated
public DefaultSolrParams(SolrParams params, SolrParams defaults) {
protected DefaultSolrParams(SolrParams params, SolrParams defaults) {
assert params != null && defaults != null;
this.params = params;
this.defaults = defaults;

View File

@ -309,25 +309,6 @@ public interface FacetParams {
}
}
/**
* @deprecated Use {@link FacetRangeOther}
*/
@Deprecated
public enum FacetDateOther {
BEFORE, AFTER, BETWEEN, ALL, NONE;
@Override
public String toString() { return super.toString().toLowerCase(Locale.ROOT); }
public static FacetDateOther get(String label) {
try {
return valueOf(label.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
throw new SolrException
(SolrException.ErrorCode.BAD_REQUEST,
label+" is not a valid type of 'other' range facet information",e);
}
}
}
/**
* An enumeration of the legal values for {@link #FACET_DATE_INCLUDE} and {@link #FACET_RANGE_INCLUDE}
* <p>
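
FacetDateOther is gone; FacetRangeOther carries the same BEFORE/AFTER/BETWEEN/ALL/NONE values for both date and numeric range facets. A hedged sketch of requesting the extra range buckets from SolrJ (the field name and bounds are illustrative):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.FacetParams;

public class RangeOtherSketch {

  public static SolrQuery priceRangeFacet() {
    SolrQuery q = new SolrQuery("*:*");
    q.addNumericRangeFacet("price", 0, 1000, 100);
    // FacetRangeOther replaces the removed FacetDateOther enum
    q.set(FacetParams.FACET_RANGE_OTHER, FacetParams.FacetRangeOther.ALL.toString());
    return q;
  }
}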

View File

@ -46,10 +46,6 @@ public interface ShardParams {
/** Should things fail if there is an error? (true/false) */
public static final String SHARDS_TOLERANT = "shards.tolerant";
/** The deprecated routing key parameter, superseded by {@link #_ROUTE_} */
@Deprecated
public static final String SHARD_KEYS = "shard.keys";
/** query purpose for shard requests */
public static final String SHARDS_PURPOSE = "shards.purpose";

View File

@ -17,17 +17,17 @@
package org.apache.solr.common.params;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
/** SolrParams hold request parameters.
*
*
@ -282,7 +282,6 @@ public abstract class SolrParams implements Serializable {
}
}
@SuppressWarnings({"deprecation"})
public static SolrParams wrapDefaults(SolrParams params, SolrParams defaults) {
if (params == null)
return defaults;
@ -291,13 +290,12 @@ public abstract class SolrParams implements Serializable {
return new DefaultSolrParams(params,defaults);
}
@SuppressWarnings({"deprecation"})
public static SolrParams wrapAppended(SolrParams params, SolrParams defaults) {
if (params == null)
return defaults;
if (defaults == null)
return params;
return new AppendedSolrParams(params,defaults);
return AppendedSolrParams.wrapAppended(params,defaults);
}
/** Create a Map&lt;String,String&gt; from a NamedList given no keys are repeated */
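
With the DefaultSolrParams and AppendedSolrParams constructors no longer public, layered parameters are built through the static factories used above. A minimal sketch (the parameter names and values are illustrative):

import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;

public class ParamWrappingSketch {

  public static SolrParams withDefaults(SolrParams request) {
    ModifiableSolrParams defaults = new ModifiableSolrParams();
    defaults.set("rows", 10);
    defaults.set("df", "text");
    // new DefaultSolrParams(...) is no longer public; use the factory instead
    return SolrParams.wrapDefaults(request, defaults);
  }

  public static SolrParams withAppended(SolrParams request) {
    ModifiableSolrParams appends = new ModifiableSolrParams();
    appends.add("fq", "inStock:true");
    // new AppendedSolrParams(...) is now private; use the factory instead
    return SolrParams.wrapAppended(request, appends);
  }
}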

View File

@ -256,7 +256,7 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
SolrQuery query = new SolrQuery();
query.setQuery( "*:*" );
query.addSortField( "price", SolrQuery.ORDER.asc );
query.addSort(new SolrQuery.SortClause("price", SolrQuery.ORDER.asc));
QueryResponse rsp = client.query( query );
assertEquals(2, rsp.getResults().getNumFound());
@ -495,7 +495,7 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
SolrQuery query = new SolrQuery();
query.setQuery( "*:*" );
query.set( CommonParams.FL, "id,price,[docid],[explain style=nl],score,aaa:[value v=aaa],ten:[value v=10 t=int]" );
query.addSortField( "price", SolrQuery.ORDER.asc );
query.addSort(new SolrQuery.SortClause("price", SolrQuery.ORDER.asc));
QueryResponse rsp = client.query( query );
SolrDocumentList out = rsp.getResults();

View File

@ -70,14 +70,14 @@ public class SolrQueryTest extends LuceneTestCase {
b = q.removeFacetQuery("a:b");
Assert.assertEquals(null, q.getFacetQuery());
q.addSortField("price", SolrQuery.ORDER.asc);
q.addSortField("date", SolrQuery.ORDER.desc);
q.addSortField("qty", SolrQuery.ORDER.desc);
q.removeSortField("date", SolrQuery.ORDER.desc);
Assert.assertEquals(2, q.getSortFields().length);
q.removeSortField("price", SolrQuery.ORDER.asc);
q.removeSortField("qty", SolrQuery.ORDER.desc);
Assert.assertEquals(null, q.getSortFields());
q.addSort("price", SolrQuery.ORDER.asc);
q.addSort("date", SolrQuery.ORDER.desc);
q.addSort("qty", SolrQuery.ORDER.desc);
q.removeSort(new SortClause("date", SolrQuery.ORDER.desc));
Assert.assertEquals(2, q.getSorts().size());
q.removeSort(new SortClause("price", SolrQuery.ORDER.asc));
q.removeSort(new SortClause("qty", SolrQuery.ORDER.desc));
Assert.assertEquals(0, q.getSorts().size());
q.addHighlightField("hl1");
q.addHighlightField("hl2");
@ -104,34 +104,19 @@ public class SolrQueryTest extends LuceneTestCase {
// System.out.println(q);
}
/*
* Verifies that the old (deprecated) sort methods
* allow mix-and-match between the raw field and
* the itemized apis.
*/
public void testSortFieldRawStringAndMethods() {
SolrQuery q = new SolrQuery("dog");
q.set("sort", "price asc,date desc,qty desc");
q.removeSortField("date", SolrQuery.ORDER.desc);
Assert.assertEquals(2, q.getSortFields().length);
q.set("sort", "price asc, date desc, qty desc");
q.removeSortField("date", SolrQuery.ORDER.desc);
Assert.assertEquals(2, q.getSortFields().length);
}
/*
* Verifies that you can use removeSortField() twice, which
* did not work in 4.0
*/
public void testSortFieldRemoveAfterRemove() {
SolrQuery q = new SolrQuery("dog");
q.addSortField("price", SolrQuery.ORDER.asc);
q.addSortField("date", SolrQuery.ORDER.desc);
q.addSortField("qty", SolrQuery.ORDER.desc);
q.removeSortField("date", SolrQuery.ORDER.desc);
Assert.assertEquals(2, q.getSortFields().length);
q.removeSortField("qty", SolrQuery.ORDER.desc);
Assert.assertEquals(1, q.getSortFields().length);
q.addSort("price", SolrQuery.ORDER.asc);
q.addSort("date", SolrQuery.ORDER.desc);
q.addSort("qty", SolrQuery.ORDER.desc);
q.removeSort("date");
Assert.assertEquals(2, q.getSorts().size());
q.removeSort("qty");
Assert.assertEquals(1, q.getSorts().size());
}
/*
@ -140,9 +125,9 @@ public class SolrQueryTest extends LuceneTestCase {
*/
public void testSortFieldRemoveLast() {
SolrQuery q = new SolrQuery("dog");
q.addSortField("date", SolrQuery.ORDER.desc);
q.addSortField("qty", SolrQuery.ORDER.desc);
q.removeSortField("qty", SolrQuery.ORDER.desc);
q.addSort("date", SolrQuery.ORDER.desc);
q.addSort("qty", SolrQuery.ORDER.desc);
q.removeSort("qty");
Assert.assertEquals("date desc", q.getSortField());
}
@ -276,9 +261,9 @@ public class SolrQueryTest extends LuceneTestCase {
public void testFacetSortLegacy() {
SolrQuery q = new SolrQuery("dog");
assertTrue("expected default value to be true", q.getFacetSort());
q.setFacetSort(false);
assertFalse("expected set value to be false", q.getFacetSort());
assertEquals("expected default value to be SORT_COUNT", FacetParams.FACET_SORT_COUNT, q.getFacetSortString());
q.setFacetSort(FacetParams.FACET_SORT_INDEX);
assertEquals("expected set value to be SORT_INDEX", FacetParams.FACET_SORT_INDEX, q.getFacetSortString());
}
public void testFacetNumericRange() {
@ -343,7 +328,7 @@ public class SolrQueryTest extends LuceneTestCase {
assertEquals("foo", q.setFacetPrefix("foo").get( FacetParams.FACET_PREFIX, null ) );
assertEquals("foo", q.setFacetPrefix("a", "foo").getFieldParam( "a", FacetParams.FACET_PREFIX, null ) );
assertEquals( Boolean.TRUE, q.setMissing(Boolean.TRUE.toString()).getBool( FacetParams.FACET_MISSING ) );
assertEquals( Boolean.TRUE, q.setFacetMissing(Boolean.TRUE).getBool( FacetParams.FACET_MISSING ) );
assertEquals( Boolean.FALSE, q.setFacetMissing( Boolean.FALSE ).getBool( FacetParams.FACET_MISSING ) );
assertEquals( "true", q.setParam( "xxx", true ).getParams( "xxx" )[0] );

View File

@ -51,7 +51,7 @@ import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.JsonUpdateRequestHandler;
import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
@ -1063,20 +1063,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
public String toString() { return xml; }
}
/**
* @see IOUtils#rm(Path...)
*/
@Deprecated()
public static boolean recurseDelete(File f) {
try {
IOUtils.rm(f.toPath());
return true;
} catch (IOException e) {
System.err.println(e.toString());
return false;
}
}
public void clearIndex() {
assertU(delQ("*:*"));
}
@ -1095,7 +1081,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
DirectSolrConnection connection = new DirectSolrConnection(core);
SolrRequestHandler handler = core.getRequestHandler("/update/json");
if (handler == null) {
handler = new JsonUpdateRequestHandler();
handler = new UpdateRequestHandler();
handler.init(null);
}
return connection.request(handler, args, json);