mirror of https://github.com/apache/lucene.git
SOLR-2452: Merged with trunk up to r1138036
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/solr2452@1138052 13f79535-47bb-0310-9956-ffa450edef68
commit 560533247b
@@ -434,6 +434,10 @@ New features
   need to lookup by that field or perform deletions against it, for
   example in a near-real-time setting. (Mike McCandless)
 
+* SOLR-2533: Added support for rewriting Sort and SortFields using an
+  IndexSearcher. SortFields can have SortField.REWRITEABLE type which
+  requires they are rewritten before they are used. (Chris Male)
+
 Optimizations
 
 * LUCENE-2588: Don't store unnecessary suffixes when writing the terms
@@ -187,8 +187,12 @@ public class TieredMergePolicy extends MergePolicy {
 
   /** Sets the allowed number of segments per tier.  Smaller
    *  values mean more merging but fewer segments.
-   *  setMaxMergeAtOnce} otherwise you'll hit
-   *  Default is 10.0. */
+   *
+   *  <p><b>NOTE</b>: this value should be >= the {@link
+   *  #setMaxMergeAtOnce} otherwise you'll force too much
+   *  merging to occur.</p>
+   *
+   *  <p>Default is 10.0.</p> */
   public TieredMergePolicy setSegmentsPerTier(double v) {
     if (v < 2.0) {
       throw new IllegalArgumentException("segmentsPerTier must be >= 2.0 (got " + v + ")");
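The NOTE added above couples segmentsPerTier to maxMergeAtOnce. A minimal sketch of a writer configuration that respects that constraint; the analyzer and Version constant are illustrative assumptions from the 3.x-era API, not part of this patch:

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.TieredMergePolicy;
    import org.apache.lucene.util.Version;

    // Keep segmentsPerTier >= maxMergeAtOnce, per the javadoc above;
    // a smaller tier size forces more merging than intended.
    TieredMergePolicy mp = new TieredMergePolicy();
    mp.setMaxMergeAtOnce(10);      // merge at most 10 segments at a time
    mp.setSegmentsPerTier(10.0);   // allow 10 segments per tier (>= maxMergeAtOnce)

    IndexWriterConfig iwc =
        new IndexWriterConfig(Version.LUCENE_32, new WhitespaceAnalyzer(Version.LUCENE_32));
    iwc.setMergePolicy(mp);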
@@ -534,6 +534,7 @@ public class MemoryCodec extends Codec {
     private final FieldInfo field;
     private final BytesRefFSTEnum<BytesRef> fstEnum;
     private final ByteArrayDataInput buffer = new ByteArrayDataInput(null);
+    private boolean didDecode;
 
     private int docFreq;
     private long totalTermFreq;
@@ -544,16 +545,19 @@ public class MemoryCodec extends Codec {
       fstEnum = new BytesRefFSTEnum<BytesRef>(fst);
     }
 
-    private void readTermStats() throws IOException {
-      buffer.reset(current.output.bytes, 0, current.output.length);
-      docFreq = buffer.readVInt();
-      if (!field.omitTermFreqAndPositions) {
-        totalTermFreq = docFreq + buffer.readVLong();
-      } else {
-        totalTermFreq = 0;
+    private void decodeMetaData() throws IOException {
+      if (!didDecode) {
+        buffer.reset(current.output.bytes, 0, current.output.length);
+        docFreq = buffer.readVInt();
+        if (!field.omitTermFreqAndPositions) {
+          totalTermFreq = docFreq + buffer.readVLong();
+        } else {
+          totalTermFreq = 0;
+        }
+        current.output.offset = buffer.getPosition();
+        if (VERBOSE) System.out.println(" df=" + docFreq + " totTF=" + totalTermFreq + " offset=" + buffer.getPosition() + " len=" + current.output.length);
+        didDecode = true;
       }
-      current.output.offset = buffer.getPosition();
-      if (VERBOSE) System.out.println(" df=" + docFreq + " totTF=" + totalTermFreq + " offset=" + buffer.getPosition() + " len=" + current.output.length);
     }
 
     @Override
@@ -570,7 +574,8 @@ public class MemoryCodec extends Codec {
         }
       }
 
-      readTermStats();
+      didDecode = false;
+
       if (text.equals(current.input)) {
         if (VERBOSE) System.out.println("  found!");
         return SeekStatus.FOUND;
@@ -582,7 +587,8 @@ public class MemoryCodec extends Codec {
     }
 
     @Override
-    public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
+    public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
+      decodeMetaData();
       FSTDocsEnum docsEnum;
       if (reuse == null || !(reuse instanceof FSTDocsEnum)) {
         docsEnum = new FSTDocsEnum(field.omitTermFreqAndPositions, field.storePayloads);
@@ -596,10 +602,11 @@ public class MemoryCodec extends Codec {
     }
 
     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) {
+    public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
       if (field.omitTermFreqAndPositions) {
         return null;
       }
+      decodeMetaData();
       FSTDocsAndPositionsEnum docsAndPositionsEnum;
       if (reuse == null || !(reuse instanceof FSTDocsAndPositionsEnum)) {
         docsAndPositionsEnum = new FSTDocsAndPositionsEnum(field.omitTermFreqAndPositions, field.storePayloads);
@@ -626,18 +633,20 @@ public class MemoryCodec extends Codec {
         if (VERBOSE) System.out.println("  END");
         return null;
       }
-      readTermStats();
+      didDecode = false;
       if (VERBOSE) System.out.println("  term=" + field.name + ":" + current.input.utf8ToString());
       return current.input;
     }
 
     @Override
-    public int docFreq() {
+    public int docFreq() throws IOException {
+      decodeMetaData();
       return docFreq;
     }
 
     @Override
-    public long totalTermFreq() {
+    public long totalTermFreq() throws IOException {
+      decodeMetaData();
       return totalTermFreq;
     }
 
@@ -657,7 +666,6 @@ public class MemoryCodec extends Codec {
       // NOTE: we could add this...
       throw new UnsupportedOperationException();
     }
-
   }
 
   private final static class TermsReader extends Terms {
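The didDecode flag added above makes term-stat decoding lazy: seeking only marks the stats stale, and decodeMetaData() runs at most once per positioned term, when docFreq(), totalTermFreq(), docs() or docsAndPositions() actually need the stats. A generic sketch of the same idiom, with hypothetical names not taken from the patch:

    // Lazy decode: reset a dirty flag on every seek, decode on first read.
    final class LazyTermStats {
      private final byte[] encoded;  // per-term metadata, already in memory
      private boolean didDecode;     // false until an accessor needs the stats
      private int docFreq;

      LazyTermStats(byte[] encoded) { this.encoded = encoded; }

      void onSeek() { didDecode = false; }  // cheap: no decoding while enumerating

      int docFreq() {
        if (!didDecode) {                   // decode at most once per seek
          docFreq = encoded[0] & 0xFF;      // stand-in for buffer.readVInt()
          didDecode = true;
        }
        return docFreq;
      }
    }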
@@ -17,6 +17,7 @@ package org.apache.lucene.search;
  * limitations under the License.
  */
 
+import java.io.IOException;
 import java.util.Arrays;
 
 
@@ -149,6 +150,30 @@ public class Sort {
     return fields;
   }
 
+  /**
+   * Rewrites the SortFields in this Sort, returning a new Sort if any of the fields
+   * changes during their rewriting.
+   *
+   * @param searcher IndexSearcher to use in the rewriting
+   * @return {@code this} if the Sort/Fields have not changed, or a new Sort if there
+   *         is a change
+   * @throws IOException Can be thrown by the rewriting
+   * @lucene.experimental
+   */
+  public Sort rewrite(IndexSearcher searcher) throws IOException {
+    boolean changed = false;
+
+    SortField[] rewrittenSortFields = new SortField[fields.length];
+    for (int i = 0; i < fields.length; i++) {
+      rewrittenSortFields[i] = fields[i].rewrite(searcher);
+      if (fields[i] != rewrittenSortFields[i]) {
+        changed = true;
+      }
+    }
+
+    return (changed) ? new Sort(rewrittenSortFields) : this;
+  }
+
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder();
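For callers, the contract added here is that a Sort is passed through rewrite(IndexSearcher) before use; a minimal sketch, where the searcher and query are assumed to already exist:

    // rewrite() resolves any SortField.REWRITEABLE fields and returns the
    // same Sort instance when nothing needed rewriting.
    Sort sort = new Sort(new SortField("price", SortField.FLOAT));
    Sort rewritten = sort.rewrite(searcher);  // no-op here: FLOAT is already concrete
    TopDocs hits = searcher.search(query, null, 10, rewritten);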
@@ -90,6 +90,10 @@ public class SortField {
   /** Sort use byte[] index values. */
   public static final int BYTES = 12;
 
+  /** Force rewriting of SortField using {@link SortField#rewrite(IndexSearcher)}
+   * before it can be used for sorting */
+  public static final int REWRITEABLE = 13;
+
   /** Represents sorting by document score (relevance). */
   public static final SortField FIELD_SCORE = new SortField(null, SCORE);
 
@@ -476,8 +480,25 @@ public class SortField {
       case SortField.STRING_VAL:
         return new FieldComparator.TermValComparator(numHits, field);
 
+      case SortField.REWRITEABLE:
+        throw new IllegalStateException("SortField needs to be rewritten through Sort.rewrite(..) and SortField.rewrite(..)");
+
       default:
         throw new IllegalStateException("Illegal sort type: " + type);
     }
   }
+
+  /**
+   * Rewrites this SortField, returning a new SortField if a change is made.
+   * Subclasses should override this define their rewriting behavior when this
+   * SortField is of type {@link SortField#REWRITEABLE}
+   *
+   * @param searcher IndexSearcher to use during rewriting
+   * @return New rewritten SortField, or {@code this} if nothing has changed.
+   * @throws IOException Can be thrown by the rewriting
+   * @lucene.experimental
+   */
+  public SortField rewrite(IndexSearcher searcher) throws IOException {
+    return this;
+  }
 }
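ValueSourceSortField, further down in this commit, is the first user of this hook. A stripped-down sketch of the pattern, with a hypothetical subclass name:

    import java.io.IOException;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.SortField;

    // A placeholder SortField that resolves itself once a searcher is available,
    // mirroring what ValueSourceSortField does below.
    class LateBoundSortField extends SortField {
      LateBoundSortField(String field, boolean reverse) {
        super(field, SortField.REWRITEABLE, reverse);
      }

      @Override
      public SortField rewrite(IndexSearcher searcher) throws IOException {
        // Resolve to a concrete sort type now that the searcher is known.
        return new SortField(getField(), SortField.STRING, getReverse());
      }
    }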
@@ -114,7 +114,7 @@ public class RandomIndexWriter implements Closeable {
 
   private void switchDoDocValues() {
     // randomly enable / disable docValues
-    doDocValues = r.nextInt(10) != 0;
+    doDocValues = LuceneTestCase.rarely(r);
   }
 
   /**
@@ -64,7 +64,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
       }
     } else {
       // we are the fork, setup a crashing thread
-      final int crashTime = TEST_NIGHTLY ? _TestUtil.nextInt(random, 500, 4000) : _TestUtil.nextInt(random, 300, 1000);
+      final int crashTime = _TestUtil.nextInt(random, 3000, 4000);
       Thread t = new Thread() {
         @Override
         public void run() {
@@ -93,6 +93,8 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
         + "java");
     cmd.add("-Xmx512m");
     cmd.add("-Dtests.crashmode=true");
+    // passing NIGHTLY to this test makes it run for much longer, easier to catch it in the act...
+    cmd.add("-Dtests.nightly=true");
     cmd.add("-DtempDir=" + tempDir.getPath());
     cmd.add("-Dtests.seed=" + random.nextLong() + ":" + random.nextLong());
     cmd.add("-ea");
@@ -57,7 +57,7 @@ public class SearchWithSortTask extends ReadTask {
       SortField sortField0;
       if (field.equals("doc")) {
         sortField0 = SortField.FIELD_DOC;
-      } if (field.equals("score")) {
+      } else if (field.equals("score")) {
         sortField0 = SortField.FIELD_SCORE;
       } else if (field.equals("noscore")) {
         doScore = false;
@@ -0,0 +1,34 @@
+package org.apache.lucene.benchmark.byTask.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.benchmark.BenchmarkTestCase;
+import org.apache.lucene.benchmark.byTask.PerfRunData;
+import org.apache.lucene.benchmark.byTask.utils.Config;
+import org.apache.lucene.search.SortField;
+
+import java.util.Properties;
+
+public class SearchWithSortTaskTest extends BenchmarkTestCase {
+
+  public void testSetParams_docField() throws Exception {
+    SearchWithSortTask task = new SearchWithSortTask(new PerfRunData(new Config(new Properties())));
+    task.setParams("doc");
+    assertEquals(SortField.DOC, task.getSort().getSort()[0].getType());
+  }
+}
@@ -145,6 +145,9 @@ New Features
 * SOLR-2417: Add explain info directly to return documents using
   ?fl=id,[explain] (ryan)
 
+* SOLR-2533: Converted ValueSource.ValueSourceSortField over to new rewriteable Lucene
+  SortFields. ValueSourceSortField instances must be rewritten before they can be used.
+  This is done by SolrIndexSearcher when necessary. (Chris Male).
 
 Optimizations
 
@@ -266,6 +269,9 @@ New Features
   Karsten Sperling, Michael Gundlach, Oleg Gnatovskiy, Thomas Traeger,
   Harish Agarwal, yonik, Michael McCandless, Bill Bell)
 
+* SOLR-1331 -- Added a srcCore parameter to CoreAdminHandler's mergeindexes action
+  to merge one or more cores' indexes to a target core (shalin)
+
 Optimizations
 ----------------------
 
@@ -307,6 +313,8 @@ Other Changes
   any copies of the maven-ant-tasks jar in the Ant classpath, e.g. under
   ~/.ant/lib/ or under the Ant installation's lib/ directory. (Steve Rowe)
 
+* SOLR-2611: Fix typos in the example configuration (Eric Pugh via rmuir)
+
 ==================  3.2.0  ==================
 Versions of Major Components
 ---------------------
@@ -2,7 +2,7 @@
 {
   "id" : "978-0641723445",
   "cat" : ["book","hardcover"],
-  "title" : "The Lightning Thief",
+  "name" : "The Lightning Thief",
   "author" : "Rick Riordan",
   "series_t" : "Percy Jackson and the Olympians",
   "sequence_i" : 1,
@@ -15,7 +15,7 @@
 {
   "id" : "978-1423103349",
   "cat" : ["book","paperback"],
-  "title" : "The Sea of Monsters",
+  "name" : "The Sea of Monsters",
   "author" : "Rick Riordan",
   "series_t" : "Percy Jackson and the Olympians",
   "sequence_i" : 2,
@@ -24,4 +24,28 @@
   "price" : 6.49,
   "pages_i" : 304
 }
+,
+{
+  "id" : "978-1857995879",
+  "cat" : ["book","paperback"],
+  "name" : "Sophie's World : The Greek Philosophers",
+  "author" : "Jostein Gaarder",
+  "sequence_i" : 1,
+  "genre_s" : "fantasy",
+  "inStock" : true,
+  "price" : 3.07,
+  "pages_i" : 64
+}
+,
+{
+  "id" : "978-1933988177",
+  "cat" : ["book","paperback"],
+  "name" : "Lucene in Action, Second Edition",
+  "author" : "Michael McCandless",
+  "sequence_i" : 1,
+  "genre_s" : "IT",
+  "inStock" : true,
+  "price" : 30.50,
+  "pages_i" : 475
+}
 ]
@@ -275,7 +275,7 @@
        http://wiki.apache.org/solr/UpdateXmlMessages
 
        maxDocs - Maximum number of documents to add since the last
-         commit before automaticly triggering a new commit.
+         commit before automatically triggering a new commit.
 
        maxTime - Maximum amount of time that is allowed to pass
          since a document was added before automaticly
@@ -290,7 +290,7 @@
 
   <!-- Update Related Event Listeners
 
-       Various IndexWriter realted events can trigger Listeners to
+       Various IndexWriter related events can trigger Listeners to
        take actions.
 
        postCommit - fired after every commit or optimize command
@@ -486,7 +486,7 @@
        that.
 
        For most situations, this will not be useful unless you
-       frequently get the same search repeatedly with differnet sort
+       frequently get the same search repeatedly with different sort
        options, and none of them ever use "score"
     -->
    <!--
@@ -576,7 +576,7 @@
 
      handleSelect="true" will cause the SolrDispatchFilter to process
      the request and will result in consistent error handling and
-     formating for all types of requests.
+     formatting for all types of requests.
 
      handleSelect="false" will cause the SolrDispatchFilter to
      ignore "/select" requests and fallback to using the legacy
@@ -590,10 +590,10 @@
        those requests
 
        enableRemoteStreaming - enables use of the stream.file
-       and stream.url paramaters for specifying remote streams.
+       and stream.url parameters for specifying remote streams.
 
        multipartUploadLimitInKB - specifies the max size of
-       Multipart File Uploads that Solr will alow in a Request.
+       Multipart File Uploads that Solr will allow in a Request.
 
        *** WARNING ***
        The settings below authorize Solr to fetch remote files, You
@@ -626,7 +626,7 @@
           <cacheControl>max-age=30, public</cacheControl>
        </httpCaching>
      -->
-  <!-- To enable Solr to responde with automaticly generated HTTP
+  <!-- To enable Solr to respond with automatically generated HTTP
        Caching headers, and to response to Cache Validation requests
        correctly, set the value of never304="false"
 
@@ -640,12 +640,12 @@
        Last-Modified value (and validation against If-Modified-Since
        requests) will all be relative to when the current Searcher
        was opened.  You can change it to lastModFrom="dirLastMod" if
-       you want the value to exactly corrispond to when the physical
+       you want the value to exactly correspond to when the physical
        index was last modified.
 
        etagSeed="..." is an option you can change to force the ETag
        header (and validation against If-None-Match requests) to be
-       differnet even if the index has not changed (ie: when making
+       different even if the index has not changed (ie: when making
        significant changes to your config file)
 
        (lastModifiedFrom and etagSeed are both ignored if you use
@@ -754,7 +754,7 @@
        This example SearchHandler declaration shows off usage of the
        SearchHandler with many defaults declared
 
-       Note that multiple instances of hte same Request Handler
+       Note that multiple instances of the same Request Handler
        (SearchHandler) can be registered multiple times with different
        names (and different init parameters)
     -->
@@ -936,12 +936,12 @@
        </docs>
 
        Note: Each document must contain a field which serves as the
-       unique key. This key is used in the returned response to assoicate
-       ananalysis breakdown to the analyzed document.
+       unique key. This key is used in the returned response to associate
+       an analysis breakdown to the analyzed document.
 
        Like the FieldAnalysisRequestHandler, this handler also supports
        query analysis by sending either an "analysis.query" or "q"
-       request paraemter that holds the query text to be analyized. It
+       request parameter that holds the query text to be analyzed. It
        also supports the "analysis.showmatch" parameter which when set to
        true, all field tokens that match the query tokens will be marked
        as a "match".
@@ -957,7 +957,7 @@
     -->
   <requestHandler name="/admin/"
                   class="solr.admin.AdminHandlers" />
-  <!-- This single handler is equivilent to the following... -->
+  <!-- This single handler is equivalent to the following... -->
   <!--
   <requestHandler name="/admin/luke"       class="solr.admin.LukeRequestHandler" />
   <requestHandler name="/admin/system"     class="solr.admin.SystemInfoHandler" />
@@ -1026,7 +1026,7 @@
      Search components are registered to SolrCore and used by
      instances of SearchHandler (which can access them by name)
 
-     By default, the following components are avaliable:
+     By default, the following components are available:
 
      <searchComponent name="query"     class="solr.QueryComponent" />
      <searchComponent name="facet"     class="solr.FacetComponent" />
@@ -1079,7 +1079,7 @@
          component
       -->
 
-    <!-- a spellchecker built from a field of hte main index, and
+    <!-- a spellchecker built from a field of the main index, and
          written to disk
       -->
     <lst name="spellchecker">
@@ -59,6 +59,10 @@ public interface CoreAdminParams
    *  The directories are specified by multiple indexDir parameters. */
   public final static String INDEX_DIR = "indexDir";
 
+  /** If you merge indexes, what is the source core's name
+   *  More than one source core can be specified by multiple srcCore parameters */
+  public final static String SRC_CORE = "srcCore";
+
   /** The collection name in solr cloud */
   public final static String COLLECTION = "collection";
 
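SOLR-1331, further down in this commit, reads this parameter in CoreAdminHandler's mergeindexes action; a request using it might look like the following, where the host, port, and core names are illustrative:

    http://localhost:8983/solr/admin/cores?action=mergeindexes&core=core0&srcCore=core1&srcCore=core2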
@@ -17,6 +17,9 @@
 
 package org.apache.solr.handler.admin;
 
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.util.IOUtils;
 import org.apache.solr.cloud.CloudDescriptor;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CoreAdminParams;
@@ -42,7 +45,9 @@ import org.apache.lucene.store.Directory;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Date;
+import java.util.List;
 
 /**
  *
@@ -171,22 +176,53 @@ public class CoreAdminHandler extends RequestHandlerBase {
   }
 
   protected boolean handleMergeAction(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
-    boolean doPersist = false;
     SolrParams params = req.getParams();
-    SolrParams required = params.required();
-    String cname = required.get(CoreAdminParams.CORE);
+    String cname = params.required().get(CoreAdminParams.CORE);
     SolrCore core = coreContainer.getCore(cname);
     SolrQueryRequest wrappedReq = null;
 
+    SolrCore[] sourceCores = null;
+    RefCounted<SolrIndexSearcher>[] searchers = null;
+    // stores readers created from indexDir param values
+    IndexReader[] readersToBeClosed = null;
     if (core != null) {
       try {
-        doPersist = coreContainer.isPersistent();
-        String[] dirNames = required.getParams(CoreAdminParams.INDEX_DIR);
+        String[] dirNames = params.getParams(CoreAdminParams.INDEX_DIR);
+        if (dirNames == null || dirNames.length == 0) {
+          String[] sources = params.getParams("srcCore");
+          if (sources == null || sources.length == 0)
+            throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
+                "At least one indexDir or srcCore must be specified");
+
+          sourceCores = new SolrCore[sources.length];
+          for (int i = 0; i < sources.length; i++) {
+            String source = sources[i];
+            SolrCore srcCore = coreContainer.getCore(source);
+            if (srcCore == null)
+              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                  "Core: " + source + " does not exist");
+            sourceCores[i] = srcCore;
+          }
+        } else {
+          readersToBeClosed = new IndexReader[dirNames.length];
+          DirectoryFactory dirFactory = core.getDirectoryFactory();
+          for (int i = 0; i < dirNames.length; i++) {
+            readersToBeClosed[i] = IndexReader.open(dirFactory.open(dirNames[i]), true);
+          }
+        }
 
-        DirectoryFactory dirFactory = core.getDirectoryFactory();
-        Directory[] dirs = new Directory[dirNames.length];
-        for (int i = 0; i < dirNames.length; i++) {
-          dirs[i] = dirFactory.open(dirNames[i]);
+        IndexReader[] readers = null;
+        if (readersToBeClosed != null) {
+          readers = readersToBeClosed;
+        } else {
+          readers = new IndexReader[sourceCores.length];
+          searchers = new RefCounted[sourceCores.length];
+          for (int i = 0; i < sourceCores.length; i++) {
+            SolrCore solrCore = sourceCores[i];
+            // record the searchers so that we can decref
+            searchers[i] = solrCore.getSearcher();
+            readers[i] = searchers[i].get().getIndexReader();
+          }
         }
 
         UpdateRequestProcessorChain processorChain =
@@ -194,13 +230,24 @@ public class CoreAdminHandler extends RequestHandlerBase {
         wrappedReq = new LocalSolrQueryRequest(core, req.getParams());
         UpdateRequestProcessor processor =
                 processorChain.createProcessor(wrappedReq, rsp);
-        processor.processMergeIndexes(new MergeIndexesCommand(dirs, req));
+        processor.processMergeIndexes(new MergeIndexesCommand(readers, req));
       } finally {
+        if (searchers != null) {
+          for (RefCounted<SolrIndexSearcher> searcher : searchers) {
+            if (searcher != null) searcher.decref();
+          }
+        }
+        if (sourceCores != null) {
+          for (SolrCore solrCore : sourceCores) {
+            if (solrCore != null) solrCore.close();
+          }
+        }
+        if (readersToBeClosed != null) IOUtils.closeSafely(true, readersToBeClosed);
+        if (wrappedReq != null) wrappedReq.close();
         core.close();
-        wrappedReq.close();
       }
     }
-    return doPersist;
+    return coreContainer.isPersistent();
   }
 
   /**
@@ -475,26 +475,7 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {
 
   /** Returns a weighted sort according to this searcher */
   public Sort weightSort(Sort sort) throws IOException {
-    if (sort == null) return null;
-    SortField[] sorts = sort.getSort();
-
-    boolean needsWeighting = false;
-    for (SortField sf : sorts) {
-      if (sf instanceof SolrSortField) {
-        needsWeighting = true;
-        break;
-      }
-    }
-    if (!needsWeighting) return sort;
-
-    SortField[] newSorts = Arrays.copyOf(sorts, sorts.length);
-    for (int i=0; i<newSorts.length; i++) {
-      if (newSorts[i] instanceof SolrSortField) {
-        newSorts[i] = ((SolrSortField)newSorts[i]).weight(this);
-      }
-    }
-
-    return new Sort(newSorts);
+    return (sort != null) ? sort.rewrite(this) : null;
   }
 
 
@@ -26,8 +26,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.index.MultiFields;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.search.SolrSortField;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -102,20 +100,13 @@ public abstract class ValueSource implements Serializable {
     return new ValueSourceSortField(reverse);
   }
 
-  private static FieldComparatorSource dummyComparator = new FieldComparatorSource() {
-    @Override
-    public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unweighted use of sort " + fieldname);
-    }
-  };
-
-  class ValueSourceSortField extends SortField implements SolrSortField {
+  class ValueSourceSortField extends SortField {
     public ValueSourceSortField(boolean reverse) {
-      super(description(), dummyComparator, reverse);
+      super(description(), SortField.REWRITEABLE, reverse);
     }
 
     @Override
-    public SortField weight(IndexSearcher searcher) throws IOException {
+    public SortField rewrite(IndexSearcher searcher) throws IOException {
       Map context = newContext(searcher);
       createWeight(context, searcher);
       return new SortField(getField(), new ValueSourceComparatorSource(context), getReverse());
@@ -20,6 +20,7 @@
 
 package org.apache.solr.update;
 
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queryParser.ParseException;
@@ -277,10 +278,10 @@ public class DirectUpdateHandler2 extends UpdateHandler {
     try {
       log.info("start " + cmd);
 
-      Directory[] dirs = cmd.dirs;
-      if (dirs != null && dirs.length > 0) {
+      IndexReader[] readers = cmd.readers;
+      if (readers != null && readers.length > 0) {
         openWriter();
-        writer.addIndexes(dirs);
+        writer.addIndexes(readers);
         rc = 1;
       } else {
         rc = 0;
@@ -17,6 +17,7 @@
 
 package org.apache.solr.update;
 
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.request.SolrQueryRequest;
 
@@ -27,25 +28,21 @@ import org.apache.solr.request.SolrQueryRequest;
  *
  */
 public class MergeIndexesCommand extends UpdateCommand {
-  public Directory[] dirs;
+  public IndexReader[] readers;
 
-  public MergeIndexesCommand(SolrQueryRequest req) {
-    this(null, req);
-  }
-
-  public MergeIndexesCommand(Directory[] dirs, SolrQueryRequest req) {
+  public MergeIndexesCommand(IndexReader[] readers, SolrQueryRequest req) {
     super("mergeIndexes", req);
-    this.dirs = dirs;
+    this.readers = readers;
   }
 
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(commandName);
     sb.append(':');
-    if (dirs != null && dirs.length > 0) {
-      sb.append(dirs[0]);
-      for (int i = 1; i < dirs.length; i++) {
-        sb.append(",").append(dirs[i]);
+    if (readers != null && readers.length > 0) {
+      sb.append(readers[0].directory());
+      for (int i = 1; i < readers.length; i++) {
+        sb.append(",").append(readers[i].directory());
       }
     }
     return sb.toString();
@@ -119,6 +119,7 @@ public class CoreAdminRequest extends SolrRequest
 
   public static class MergeIndexes extends CoreAdminRequest {
     protected List<String> indexDirs;
+    protected List<String> srcCores;
 
     public MergeIndexes() {
       action = CoreAdminAction.MERGEINDEXES;
@@ -132,6 +133,14 @@ public class CoreAdminRequest extends SolrRequest
       return indexDirs;
     }
 
+    public List<String> getSrcCores() {
+      return srcCores;
+    }
+
+    public void setSrcCores(List<String> srcCores) {
+      this.srcCores = srcCores;
+    }
+
     @Override
     public SolrParams getParams() {
       if (action == null) {
@@ -145,6 +154,11 @@ public class CoreAdminRequest extends SolrRequest
           params.set(CoreAdminParams.INDEX_DIR, indexDir);
         }
       }
+      if (srcCores != null) {
+        for (String srcCore : srcCores) {
+          params.set(CoreAdminParams.SRC_CORE, srcCore);
+        }
+      }
       return params;
     }
   }
@@ -289,11 +303,12 @@ public class CoreAdminRequest extends SolrRequest
   }
 
   public static CoreAdminResponse mergeIndexes(String name,
-      String[] indexDirs, SolrServer server) throws SolrServerException,
+      String[] indexDirs, String[] srcCores, SolrServer server) throws SolrServerException,
       IOException {
     CoreAdminRequest.MergeIndexes req = new CoreAdminRequest.MergeIndexes();
     req.setCoreName(name);
     req.setIndexDirs(Arrays.asList(indexDirs));
+    req.setSrcCores(Arrays.asList(srcCores));
     return req.process(server);
   }
 }
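A sketch of calling the widened helper from SolrJ; the server URL and core names are illustrative, checked exceptions are omitted, and either source array may be empty, as the tests below show:

    SolrServer admin = new CommonsHttpSolrServer("http://localhost:8983/solr");
    // Merge the whole index of core1 into core0 via the core admin handler.
    CoreAdminRequest.mergeIndexes("core0",
        new String[0],            // no indexDir-based sources
        new String[] {"core1"},   // one srcCore-based source
        admin);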
@@ -17,6 +17,7 @@
 
 package org.apache.solr.client.solrj;
 
+import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -26,6 +27,8 @@ import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.util.ExternalPaths;
 
+import java.io.IOException;
+
 /**
  * Abstract base class for testing merge indexes command
  *
@@ -79,9 +82,9 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
 
   protected abstract String getIndexDirCore1();
 
-  public void testMergeIndexes() throws Exception {
+  private UpdateRequest setupCores() throws SolrServerException, IOException {
     UpdateRequest up = new UpdateRequest();
-    up.setAction(ACTION.COMMIT, true, true);
+    up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
     up.deleteByQuery("*:*");
     up.process(getSolrCore0());
     up.process(getSolrCore1());
@@ -119,11 +122,28 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
     assertEquals(1,
         getSolrCore1().query(new SolrQuery("id:BBB")).getResults().size());
 
+    return up;
+  }
+
+  public void testMergeIndexesByDirName() throws Exception {
+    UpdateRequest up = setupCores();
+
     // Now get the index directory of core1 and merge with core0
-    String indexDir = getIndexDirCore1();
-    String name = "core0";
-    SolrServer coreadmin = getSolrAdmin();
-    CoreAdminRequest.mergeIndexes(name, new String[] { indexDir }, coreadmin);
+    CoreAdminRequest.mergeIndexes("core0", new String[] {getIndexDirCore1()}, new String[0], getSolrAdmin());
+
+    // Now commit the merged index
+    up.clear(); // just do commit
+    up.process(getSolrCore0());
+
+    assertEquals(1,
+        getSolrCore0().query(new SolrQuery("id:AAA")).getResults().size());
+    assertEquals(1,
+        getSolrCore0().query(new SolrQuery("id:BBB")).getResults().size());
+  }
+
+  public void testMergeIndexesByCoreName() throws Exception {
+    UpdateRequest up = setupCores();
+    CoreAdminRequest.mergeIndexes("core0", new String[0], new String[] {"core1"}, getSolrAdmin());
 
     // Now commit the merged index
     up.clear(); // just do commit
@@ -96,7 +96,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
     //test functions
     sort = QueryParsing.parseSort("pow(weight, 2) desc", req);
     flds = sort.getSort();
-    assertEquals(flds[0].getType(), SortField.CUSTOM);
+    assertEquals(flds[0].getType(), SortField.REWRITEABLE);
     //Not thrilled about the fragility of string matching here, but...
     //the value sources get wrapped, so the out field is different than the input
     assertEquals(flds[0].getField(), "pow(float(weight),const(2))");
@@ -104,12 +104,12 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
     //test functions (more deep)
     sort = QueryParsing.parseSort("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req);
     flds = sort.getSort();
-    assertEquals(flds[0].getType(), SortField.CUSTOM);
+    assertEquals(flds[0].getType(), SortField.REWRITEABLE);
     assertEquals(flds[0].getField(), "sum(product(float(r_f1),sum(float(d_f1),float(t_f1),const(1.0))),float(a_f1))");
 
     sort = QueryParsing.parseSort("pow(weight, 2.0) desc", req);
     flds = sort.getSort();
-    assertEquals(flds[0].getType(), SortField.CUSTOM);
+    assertEquals(flds[0].getType(), SortField.REWRITEABLE);
     //Not thrilled about the fragility of string matching here, but...
     //the value sources get wrapped, so the out field is different than the input
     assertEquals(flds[0].getField(), "pow(float(weight),const(2.0))");
@@ -117,7 +117,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
 
     sort = QueryParsing.parseSort("pow(weight, 2.0) desc, weight desc, bday asc", req);
     flds = sort.getSort();
-    assertEquals(flds[0].getType(), SortField.CUSTOM);
+    assertEquals(flds[0].getType(), SortField.REWRITEABLE);
 
     //Not thrilled about the fragility of string matching here, but...
     //the value sources get wrapped, so the out field is different than the input
@@ -137,7 +137,7 @@ public class QueryParsingTest extends SolrTestCaseJ4 {
     //Test literals in functions
     sort = QueryParsing.parseSort("strdist(foo_s1, \"junk\", jw) desc", req);
     flds = sort.getSort();
-    assertEquals(flds[0].getType(), SortField.CUSTOM);
+    assertEquals(flds[0].getType(), SortField.REWRITEABLE);
     //the value sources get wrapped, so the out field is different than the input
     assertEquals(flds[0].getField(), "strdist(str(foo_s1),literal(junk), dist=org.apache.lucene.search.spell.JaroWinklerDistance)");
 