LUCENE-3606: Merge up to trunk r1212007

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene3606@1212008 13f79535-47bb-0310-9956-ffa450edef68
Uwe Schindler 2011-12-08 18:01:13 +00:00
commit a363e9b564
19 changed files with 249 additions and 254 deletions


@@ -166,16 +166,16 @@ public class BlockTreeTermsReader extends FieldsProducer {
}
protected void readHeader(IndexInput input) throws IOException {
CodecUtil.checkHeader(input, BlockTreeTermsWriter.CODEC_NAME,
BlockTreeTermsWriter.VERSION_START,
BlockTreeTermsWriter.VERSION_CURRENT);
CodecUtil.checkHeader(input, BlockTreeTermsWriter.TERMS_CODEC_NAME,
BlockTreeTermsWriter.TERMS_VERSION_START,
BlockTreeTermsWriter.TERMS_VERSION_CURRENT);
dirOffset = input.readLong();
}
protected void readIndexHeader(IndexInput input) throws IOException {
CodecUtil.checkHeader(input, BlockTreeTermsWriter.CODEC_NAME,
BlockTreeTermsWriter.VERSION_START,
BlockTreeTermsWriter.VERSION_CURRENT);
CodecUtil.checkHeader(input, BlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
BlockTreeTermsWriter.TERMS_INDEX_VERSION_START,
BlockTreeTermsWriter.TERMS_INDEX_VERSION_CURRENT);
indexDirOffset = input.readLong();
}
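
For orientation: the two reader methods above pair with writeHeader/writeTrailer in BlockTreeTermsWriter below. A minimal, self-contained sketch of that header/placeholder/trailer pattern, illustrative only, using plain java.io instead of Lucene's IndexInput/IndexOutput and made-up names:

import java.io.IOException;
import java.io.RandomAccessFile;

// Sketch of the seek-back trailer pattern: reserve a long right after
// the header, patch it with the directory start once known, read it back.
class DirPointerSketch {
  static final String HEADER = "DEMO_TERMS_DICT"; // stands in for TERMS_CODEC_NAME

  static void write(RandomAccessFile out) throws IOException {
    out.writeBytes(HEADER);      // writeHeader would also record a version
    out.writeLong(0);            // leave space for end index pointer
    out.writeBytes("...terms data...");
    long dirStart = out.getFilePointer();
    out.writeBytes("...directory...");
    out.seek(HEADER.length());   // writeTrailer: seek back past the header
    out.writeLong(dirStart);     // fill in the real directory offset
  }

  static long readDirOffset(RandomAccessFile in) throws IOException {
    in.readFully(new byte[HEADER.length()]); // readHeader: checkHeader here
    return in.readLong();                    // dirOffset = input.readLong()
  }
}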


@@ -99,18 +99,21 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
static final int OUTPUT_FLAG_IS_FLOOR = 0x1;
static final int OUTPUT_FLAG_HAS_TERMS = 0x2;
final static String CODEC_NAME = "BLOCK_TREE_TERMS_DICT";
// Initial format
public static final int VERSION_START = 0;
public static final int VERSION_CURRENT = VERSION_START;
/** Extension of terms file */
static final String TERMS_EXTENSION = "tim";
static final String TERMS_INDEX_EXTENSION = "tip";
final static String TERMS_CODEC_NAME = "BLOCK_TREE_TERMS_DICT";
// Initial format
public static final int TERMS_VERSION_START = 0;
public static final int TERMS_VERSION_CURRENT = TERMS_VERSION_START;
protected final IndexOutput out;
/** Extension of terms index file */
static final String TERMS_INDEX_EXTENSION = "tip";
final static String TERMS_INDEX_CODEC_NAME = "BLOCK_TREE_TERMS_INDEX";
// Initial format
public static final int TERMS_INDEX_VERSION_START = 0;
public static final int TERMS_INDEX_VERSION_CURRENT = TERMS_INDEX_VERSION_START;
private final IndexOutput out;
private final IndexOutput indexOut;
final int minItemsInBlock;
final int maxItemsInBlock;
@@ -178,22 +181,22 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
}
protected void writeHeader(IndexOutput out) throws IOException {
CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
CodecUtil.writeHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT);
out.writeLong(0); // leave space for end index pointer
}
protected void writeIndexHeader(IndexOutput out) throws IOException {
CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
CodecUtil.writeHeader(out, TERMS_INDEX_CODEC_NAME, TERMS_INDEX_VERSION_CURRENT);
out.writeLong(0); // leave space for end index pointer
}
protected void writeTrailer(long dirStart) throws IOException {
out.seek(CodecUtil.headerLength(CODEC_NAME));
protected void writeTrailer(IndexOutput out, long dirStart) throws IOException {
out.seek(CodecUtil.headerLength(TERMS_CODEC_NAME));
out.writeLong(dirStart);
}
protected void writeIndexTrailer(long dirStart) throws IOException {
indexOut.seek(CodecUtil.headerLength(CODEC_NAME));
protected void writeIndexTrailer(IndexOutput indexOut, long dirStart) throws IOException {
indexOut.seek(CodecUtil.headerLength(TERMS_INDEX_CODEC_NAME));
indexOut.writeLong(dirStart);
}
@@ -935,8 +938,8 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
indexOut.writeVLong(field.indexStartFP);
}
}
writeTrailer(dirStart);
writeIndexTrailer(indexDirStart);
writeTrailer(out, dirStart);
writeIndexTrailer(indexOut, indexDirStart);
} catch (IOException ioe2) {
ioe = ioe2;
} finally {


@@ -23,19 +23,16 @@ import java.util.Set;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.codecs.BlockTreeTermsReader;
import org.apache.lucene.index.codecs.BlockTreeTermsWriter;
import org.apache.lucene.index.codecs.PostingsFormat;
import org.apache.lucene.index.codecs.FieldsConsumer;
import org.apache.lucene.index.codecs.FieldsProducer;
import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsFormat;
import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsReader;
import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsWriter;
import org.apache.lucene.index.codecs.PostingsReaderBase;
import org.apache.lucene.index.codecs.PostingsWriterBase;
import org.apache.lucene.index.codecs.BlockTermsReader;
import org.apache.lucene.index.codecs.TermsIndexReaderBase;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
/**
* Appending postings impl
@@ -48,72 +45,39 @@ class AppendingPostingsFormat extends PostingsFormat {
}
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state)
throws IOException {
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
PostingsWriterBase docsWriter = new Lucene40PostingsWriter(state);
boolean success = false;
AppendingTermsIndexWriter indexWriter = null;
try {
indexWriter = new AppendingTermsIndexWriter(state);
FieldsConsumer ret = new AppendingTermsWriter(state, docsWriter, BlockTreeTermsWriter.DEFAULT_MIN_BLOCK_SIZE, BlockTreeTermsWriter.DEFAULT_MAX_BLOCK_SIZE);
success = true;
return ret;
} finally {
if (!success) {
docsWriter.close();
}
}
success = false;
try {
FieldsConsumer ret = new AppendingTermsDictWriter(indexWriter, state, docsWriter);
success = true;
return ret;
} finally {
if (!success) {
try {
docsWriter.close();
} finally {
indexWriter.close();
}
}
}
}
@Override
public FieldsProducer fieldsProducer(SegmentReadState state)
throws IOException {
PostingsReaderBase docsReader = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
TermsIndexReaderBase indexReader;
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
boolean success = false;
try {
indexReader = new AppendingTermsIndexReader(state.dir,
state.fieldInfos,
state.segmentInfo.name,
state.termsIndexDivisor,
BytesRef.getUTF8SortedAsUnicodeComparator(),
state.segmentSuffix, state.context);
success = true;
} finally {
if (!success) {
docsReader.close();
}
}
success = false;
try {
FieldsProducer ret = new AppendingTermsDictReader(indexReader,
state.dir, state.fieldInfos, state.segmentInfo.name,
docsReader,
state.context,
Lucene40PostingsFormat.TERMS_CACHE_SIZE,
state.segmentSuffix);
FieldsProducer ret = new AppendingTermsReader(
state.dir,
state.fieldInfos,
state.segmentInfo.name,
postings,
state.context,
state.segmentSuffix,
state.termsIndexDivisor);
success = true;
return ret;
} finally {
if (!success) {
try {
docsReader.close();
} finally {
indexReader.close();
}
postings.close();
}
}
}
@@ -122,7 +86,6 @@ class AppendingPostingsFormat extends PostingsFormat {
public void files(Directory dir, SegmentInfo segmentInfo, String segmentSuffix, Set<String> files)
throws IOException {
Lucene40PostingsReader.files(dir, segmentInfo, segmentSuffix, files);
BlockTermsReader.files(dir, segmentInfo, segmentSuffix, files);
FixedGapTermsIndexReader.files(dir, segmentInfo, segmentSuffix, files);
BlockTreeTermsReader.files(dir, segmentInfo, segmentSuffix, files);
}
}
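
The success flag in fieldsConsumer/fieldsProducer above is Lucene's usual idiom for not leaking half-constructed resources: if building the wrapper throws, the already-opened resource is closed; on success, ownership transfers to the returned object. A generic sketch of the idiom, illustrative and not from the commit:

import java.io.Closeable;
import java.io.IOException;

// Sketch of the success-flag idiom: close the underlying resource only
// when construction of its owner fails.
final class SuccessFlagSketch {
  interface Builder<T> { T build() throws IOException; }

  static <T> T own(Closeable resource, Builder<T> builder) throws IOException {
    boolean success = false;
    try {
      T result = builder.build(); // may throw
      success = true;             // ownership now belongs to result
      return result;
    } finally {
      if (!success) {
        resource.close();         // clean up only on failure
      }
    }
  }
}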


@@ -1,55 +0,0 @@
package org.apache.lucene.index.codecs.appending;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.codecs.PostingsReaderBase;
import org.apache.lucene.index.codecs.BlockTermsReader;
import org.apache.lucene.index.codecs.BlockTermsWriter;
import org.apache.lucene.index.codecs.TermsIndexReaderBase;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.CodecUtil;
public class AppendingTermsDictReader extends BlockTermsReader {
public AppendingTermsDictReader(TermsIndexReaderBase indexReader,
Directory dir, FieldInfos fieldInfos, String segment,
PostingsReaderBase postingsReader, IOContext context,
int termsCacheSize, String segmentSuffix) throws IOException {
super(indexReader, dir, fieldInfos, segment, postingsReader, context,
termsCacheSize, segmentSuffix);
}
@Override
protected void readHeader(IndexInput in) throws IOException {
CodecUtil.checkHeader(in, AppendingTermsDictWriter.CODEC_NAME,
BlockTermsWriter.VERSION_START, BlockTermsWriter.VERSION_CURRENT);
}
@Override
protected void seekDir(IndexInput in, long dirOffset) throws IOException {
in.seek(in.length() - Long.SIZE / 8);
long offset = in.readLong();
in.seek(offset);
}
}


@@ -1,47 +0,0 @@
package org.apache.lucene.index.codecs.appending;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.codecs.PostingsWriterBase;
import org.apache.lucene.index.codecs.BlockTermsWriter;
import org.apache.lucene.index.codecs.TermsIndexWriterBase;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.CodecUtil;
public class AppendingTermsDictWriter extends BlockTermsWriter {
final static String CODEC_NAME = "APPENDING_TERMS_DICT";
public AppendingTermsDictWriter(TermsIndexWriterBase indexWriter,
SegmentWriteState state, PostingsWriterBase postingsWriter)
throws IOException {
super(indexWriter, state, postingsWriter);
}
@Override
protected void writeHeader(IndexOutput out) throws IOException {
CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
}
@Override
protected void writeTrailer(long dirStart) throws IOException {
out.writeLong(dirStart);
}
}


@@ -1,45 +0,0 @@
package org.apache.lucene.index.codecs.appending;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.codecs.FixedGapTermsIndexWriter;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.CodecUtil;
public class AppendingTermsIndexWriter extends FixedGapTermsIndexWriter {
final static String CODEC_NAME = "APPENDING_TERMS_INDEX";
final static int VERSION_START = 0;
final static int VERSION_CURRENT = VERSION_START;
public AppendingTermsIndexWriter(SegmentWriteState state) throws IOException {
super(state);
}
@Override
protected void writeHeader(IndexOutput out) throws IOException {
CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
}
@Override
protected void writeTrailer(long dirStart) throws IOException {
out.writeLong(dirStart);
}
}


@@ -1,6 +1,6 @@
package org.apache.lucene.index.codecs.appending;
/*
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
@@ -18,30 +18,40 @@ package org.apache.lucene.index.codecs.appending;
*/
import java.io.IOException;
import java.util.Comparator;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
import org.apache.lucene.index.codecs.BlockTreeTermsReader;
import org.apache.lucene.index.codecs.PostingsReaderBase;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CodecUtil;
public class AppendingTermsIndexReader extends FixedGapTermsIndexReader {
/**
* Reads append-only terms from {@link AppendingTermsWriter}
* @lucene.experimental
*/
public class AppendingTermsReader extends BlockTreeTermsReader {
public AppendingTermsIndexReader(Directory dir, FieldInfos fieldInfos,
String segment, int indexDivisor, Comparator<BytesRef> termComp, String segmentSuffix, IOContext context)
throws IOException {
super(dir, fieldInfos, segment, indexDivisor, termComp, segmentSuffix, context);
public AppendingTermsReader(Directory dir, FieldInfos fieldInfos, String segment, PostingsReaderBase postingsReader,
IOContext ioContext, String segmentSuffix, int indexDivisor) throws IOException {
super(dir, fieldInfos, segment, postingsReader, ioContext, segmentSuffix, indexDivisor);
}
@Override
protected void readHeader(IndexInput input) throws IOException {
CodecUtil.checkHeader(input, AppendingTermsIndexWriter.CODEC_NAME,
AppendingTermsIndexWriter.VERSION_START, AppendingTermsIndexWriter.VERSION_START);
CodecUtil.checkHeader(input, AppendingTermsWriter.TERMS_CODEC_NAME,
AppendingTermsWriter.TERMS_VERSION_START,
AppendingTermsWriter.TERMS_VERSION_CURRENT);
}
@Override
protected void readIndexHeader(IndexInput input) throws IOException {
CodecUtil.checkHeader(input, AppendingTermsWriter.TERMS_INDEX_CODEC_NAME,
AppendingTermsWriter.TERMS_INDEX_VERSION_START,
AppendingTermsWriter.TERMS_INDEX_VERSION_CURRENT);
}
@Override
protected void seekDir(IndexInput input, long dirOffset) throws IOException {
input.seek(input.length() - Long.SIZE / 8);
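
Note why seekDir is overridden: an append-only directory cannot seek back to patch the reserved long, so AppendingTermsWriter's trailer (below) simply appends dirStart at the very end of the file, and the reader recovers it from the last eight bytes. A plain-java sketch of that variant, illustrative only:

import java.io.IOException;
import java.io.RandomAccessFile;

// Append-only variant of the trailer: the directory offset is the last
// long in the file; no seek-back is ever needed while writing.
class AppendOnlyTrailerSketch {
  static void write(RandomAccessFile out) throws IOException {
    out.writeBytes("...terms data...");
    long dirStart = out.getFilePointer();
    out.writeBytes("...directory...");
    out.writeLong(dirStart);              // writeTrailer: append, never seek
  }

  static long readDirOffset(RandomAccessFile in) throws IOException {
    in.seek(in.length() - Long.SIZE / 8); // the file's final 8 bytes
    return in.readLong();                 // seekDir then jumps to this offset
  }
}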


@@ -0,0 +1,64 @@
package org.apache.lucene.index.codecs.appending;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.codecs.BlockTreeTermsWriter;
import org.apache.lucene.index.codecs.PostingsWriterBase;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.CodecUtil;
/**
* Append-only version of {@link BlockTreeTermsWriter}
* @lucene.experimental
*/
public class AppendingTermsWriter extends BlockTreeTermsWriter {
final static String TERMS_CODEC_NAME = "APPENDING_TERMS_DICT";
final static int TERMS_VERSION_START = 0;
final static int TERMS_VERSION_CURRENT = TERMS_VERSION_START;
final static String TERMS_INDEX_CODEC_NAME = "APPENDING_TERMS_INDEX";
final static int TERMS_INDEX_VERSION_START = 0;
final static int TERMS_INDEX_VERSION_CURRENT = TERMS_INDEX_VERSION_START;
public AppendingTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter, int minItemsInBlock, int maxItemsInBlock) throws IOException {
super(state, postingsWriter, minItemsInBlock, maxItemsInBlock);
}
@Override
protected void writeHeader(IndexOutput out) throws IOException {
CodecUtil.writeHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT);
}
@Override
protected void writeIndexHeader(IndexOutput out) throws IOException {
CodecUtil.writeHeader(out, TERMS_INDEX_CODEC_NAME, TERMS_INDEX_VERSION_CURRENT);
}
@Override
protected void writeTrailer(IndexOutput out, long dirStart) throws IOException {
out.writeLong(dirStart);
}
@Override
protected void writeIndexTrailer(IndexOutput indexOut, long dirStart) throws IOException {
indexOut.writeLong(dirStart);
}
}


@@ -56,6 +56,22 @@ public class FieldValueFilter extends Filter {
this.field = field;
this.negate = negate;
}
/**
* Returns the field this filter is applied on.
* @return the field this filter is applied on.
*/
public String field() {
return field;
}
/**
* Returns <code>true</code> iff this filter is negated, otherwise <code>false</code>
* @return <code>true</code> iff this filter is negated, otherwise <code>false</code>
*/
public boolean negate() {
return negate;
}
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs)
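
A hypothetical usage sketch for the two new accessors; the two-argument constructor is visible above, and the org.apache.lucene.search package is an assumption:

import org.apache.lucene.search.FieldValueFilter;

// Hypothetical demo class, not part of the commit.
class FieldValueFilterDemo {
  static String describe(FieldValueFilter f) {
    // field() and negate() expose the state fixed in the constructor
    return (f.negate() ? "docs missing " : "docs having ") + f.field();
  }

  public static void main(String[] args) {
    System.out.println(describe(new FieldValueFilter("title", true)));
  }
}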


@@ -26,7 +26,7 @@ import java.nio.channels.ClosedChannelException; // javadoc @link
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.security.AccessController;
@@ -34,6 +34,7 @@ import java.security.PrivilegedExceptionAction;
import java.security.PrivilegedActionException;
import java.lang.reflect.Method;
import org.apache.lucene.util.MapBackedSet;
import org.apache.lucene.util.Constants;
/** File-based {@link Directory} implementation that uses
@@ -259,7 +260,7 @@ public class MMapDirectory extends FSDirectory {
private ByteBuffer curBuf; // redundant for speed: buffers[curBufIndex]
private boolean isClone = false;
private final Map<MMapIndexInput,Boolean> clones = new WeakHashMap<MMapIndexInput,Boolean>();
private final Set<MMapIndexInput> clones = new MapBackedSet<MMapIndexInput>(new WeakHashMap<MMapIndexInput,Boolean>());
MMapIndexInput(String resourceDescription, RandomAccessFile raf, long offset, long length, int chunkSizePower) throws IOException {
super(resourceDescription);
@@ -430,7 +431,7 @@ public class MMapDirectory extends FSDirectory {
// register the new clone in our clone list to clean it up on closing:
synchronized(this.clones) {
this.clones.put(clone, Boolean.TRUE);
this.clones.add(clone);
}
return clone;
@@ -449,7 +450,7 @@ public class MMapDirectory extends FSDirectory {
// for extra safety unset also all clones' buffers:
synchronized(this.clones) {
for (final MMapIndexInput clone : this.clones.keySet()) {
for (final MMapIndexInput clone : this.clones) {
assert clone.isClone;
clone.unsetBuffers();
}
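
The clones field now goes through Lucene's MapBackedSet, which presents a Map<K,Boolean> as a Set, so call sites read clones.add(...) and iterate the set directly instead of going through keySet(). The JDK ships the same adapter as Collections.newSetFromMap; a sketch of the technique, illustrative rather than the committed code:

import java.util.Collections;
import java.util.Set;
import java.util.WeakHashMap;

// A Set view over a WeakHashMap tracks clones without pinning them:
// entries disappear on their own once a clone becomes unreachable.
class WeakCloneTracker {
  private final Set<Object> clones =
      Collections.newSetFromMap(new WeakHashMap<Object, Boolean>());

  synchronized void register(Object clone) {
    clones.add(clone);          // was: clones.put(clone, Boolean.TRUE)
  }

  synchronized int liveCount() {
    return clones.size();       // iterate with: for (Object c : clones) ...
  }
}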


@@ -672,8 +672,7 @@ public class TestGrouping extends LuceneTestCase {
do {
// B/c of DV based impl we can't see the difference between an empty string and a null value.
// For that reason we don't generate empty string groups.
// randomValue = _TestUtil.randomRealisticUnicodeString(random);
randomValue = _TestUtil.randomSimpleString(random);
randomValue = _TestUtil.randomRealisticUnicodeString(random);
} while ("".equals(randomValue));
groups.add(new BytesRef(randomValue));


@@ -288,6 +288,9 @@ Bug Fixes
* SOLR-2509: StringIndexOutOfBoundsException in the spellchecker collate when the term contains
a hyphen. (Thomas Gambier caught the bug, Steffen Godskesen did the patch, via Erick Erickson)
* SOLR-1730: Made it clearer when a core failed to load as well as better logging when the
QueryElevationComponent fails to properly initialize (gsingers)
Other Changes
----------------------


@@ -215,7 +215,6 @@ public class CoreContainer
}
/**
* @exception generates an error if you attempt to set this value to false
* @deprecated all cores now abort on configuration error regardless of configuration
*/
@Deprecated
@@ -246,6 +245,9 @@ public class CoreContainer
}
solrConfigFilename = cores.getConfigFile().getName();
if (cores.cores.isEmpty()){
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No cores were created, please check the logs for errors");
}
return cores;
}


@@ -612,11 +612,14 @@ public final class SolrCore implements SolrInfoMBean {
// Finally tell anyone who wants to know
resourceLoader.inform( resourceLoader );
resourceLoader.inform( this ); // last call before the latch is released.
} catch (IOException e) {
log.error("", e);
} catch (Throwable e) {
log.error("Error in constructing the core", e);
latch.countDown();//release the latch, otherwise we block trying to do the close. This should be fine, since counting down on a latch of 0 is still fine
//close down the searcher and any other resources, if it exists, as this is not recoverable
close();
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, null, e, false);
} finally {
// allow firstSearcher events to fire
// allow firstSearcher events to fire and make sure it is released
latch.countDown();
}
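
The corrected handling above leans on a CountDownLatch property: countDown() on a latch already at zero is a no-op, so releasing the latch both in the catch block (so close() cannot deadlock) and again in finally is safe. A condensed sketch of the pattern, with assumed names:

import java.util.concurrent.CountDownLatch;

// Sketch of the constructor's latch discipline: waiters are always
// released, whether initialization succeeds or dies with a Throwable.
class LatchedInitSketch {
  private final CountDownLatch ready = new CountDownLatch(1);

  void init() {
    try {
      // ... build searchers, inform listeners ...
    } catch (Throwable t) {
      ready.countDown();  // unblock close(); the later countDown is a no-op
      close();            // unrecoverable: free resources before rethrow
      throw new RuntimeException("Error in constructing the core", t);
    } finally {
      ready.countDown();  // normal path: allow firstSearcher events to fire
    }
  }

  void close() { /* release searcher and other resources */ }
}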


@@ -199,7 +199,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
elevationCache.put(null, loadElevationMap( cfg ));
}
}
//in other words, we think this is in the data dir, not the conf dir
if (!exists){
// preload the first data
RefCounted<SolrIndexSearcher> searchHolder = null;
@@ -215,10 +215,10 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
}
catch( Exception ex ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
"Error initializing QueryElevationComponent.", ex );
"Error initializing QueryElevationComponent.", ex, false );
}
}
//get the elevation map from the data dir
Map<String, ElevationObj> getElevationMap( IndexReader reader, SolrCore core ) throws Exception
{
synchronized( elevationCache ) {
@@ -242,7 +242,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
return map;
}
}
//load up the elevation map
private Map<String, ElevationObj> loadElevationMap( Config cfg ) throws IOException
{
XPath xpath = XPathFactory.newInstance().newXPath();


@@ -96,7 +96,7 @@ public class SolrDispatchFilter implements Filter
}
catch( Throwable t ) {
// catch this so our filter still works
log.error( "Could not start Solr. Check solr/home property", t);
log.error( "Could not start Solr. Check solr/home property and the logs", t);
SolrConfig.severeErrors.add( t );
SolrCore.log( t );
}


@@ -185,7 +185,17 @@
<!-- test elevation -->
<searchComponent name="elevate" class="org.apache.solr.handler.component.QueryElevationComponent" >
<str name="queryFieldType">string</str>
<str name="config-file">elevate.xml</str>
<str name="config-file">${elevate.file:elevate.xml}</str>
</searchComponent>
<!-- SOLR-1730 -->
<!--<searchComponent name="badElevate" class="org.apache.solr.handler.component.QueryElevationComponent" >
<str name="queryFieldType">string</str>
<str name="config-file">foo.xml</str>
</searchComponent>-->
<searchComponent name="dataElevate" class="org.apache.solr.handler.component.QueryElevationComponent" >
<str name="queryFieldType">string</str>
<str name="config-file">${elevate.data.file:elevate-data.xml}</str>
</searchComponent>
<requestHandler name="/elevate" class="org.apache.solr.handler.component.SearchHandler">
@@ -197,6 +207,24 @@
</arr>
</requestHandler>
<requestHandler name="/dataElevate" class="org.apache.solr.handler.component.SearchHandler">
<lst name="defaults">
<str name="echoParams">explicit</str>
</lst>
<arr name="last-components">
<str>dataElevate</str>
</arr>
</requestHandler>
<!--<requestHandler name="/badElevate" class="org.apache.solr.handler.component.SearchHandler">
<lst name="defaults">
<str name="echoParams">explicit</str>
</lst>
<arr name="last-components">
<str>badElevate</str>
</arr>
</requestHandler>-->
<!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true" >
<requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="2048" />


@@ -0,0 +1,43 @@
package org.apache.solr.handler.component;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
import org.junit.Test;
/**
* SOLR-1730, tests what happens when a component fails to initialize properly
*
**/
public class BadComponentTest extends SolrTestCaseJ4{
@Test
public void testBadElevate() throws Exception {
try {
System.setProperty("elevate.file", "foo.xml");
initCore("solrconfig-elevate.xml", "schema12.xml");
assertTrue(false);
} catch (Throwable e) {
log.error("Exception", e);
assertTrue(true);
} finally {
System.clearProperty("elevate.file");
}
}
}


@@ -30,6 +30,7 @@ import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.QueryElevationParams;
import org.apache.solr.common.util.FileUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.component.QueryElevationComponent.ElevationObj;
@@ -45,7 +46,13 @@ public class QueryElevationComponentTest extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore("solrconfig-elevate.xml","schema12.xml");
//write out elevate-data.xml to the Data dir first by copying it from conf, which we know exists, this way we can test both conf and data configurations
createTempDir();
File parent = new File(TEST_HOME(), "conf");
File elevateFile = new File(parent, "elevate.xml");
File elevateDataFile = new File(dataDir, "elevate-data.xml");
FileUtils.copyFile(elevateFile, elevateDataFile);
initCore("solrconfig-elevate.xml", "schema12.xml");
}
@Before