merge trunk: 1341933 through 1342428

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1342429 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2012-05-24 20:57:43 +00:00
commit 0ce89ebf87
12 changed files with 78 additions and 23 deletions

View File

@ -567,8 +567,8 @@ you can now do this:
customizations won't run for certain MultiTermQuerys.
* LUCENE-2691: The near-real-time API has moved from IndexWriter to
IndexReader. Instead of IndexWriter.getReader(), call
IndexReader.open(IndexWriter) or IndexReader.reopen(IndexWriter).
DirectoryReader. Instead of IndexWriter.getReader(), call
DirectoryReader.open(IndexWriter) or DirectoryReader.openIfChanged(IndexWriter).
* LUCENE-2690: MultiTermQuery boolean rewrites per segment.
Also MultiTermQuery.getTermsEnum() now takes an AttributeSource. FuzzyTermsEnum
@ -615,7 +615,7 @@ you can now do this:
globally, or per-field). To define another Strategy, implement Analyzer.ReuseStrategy.
* LUCENE-3464: IndexReader.reopen has been renamed to
IndexReader.openIfChanged (a static method), and now returns null
DirectoryReader.openIfChanged (a static method), and now returns null
(instead of the old reader) if there are no changes to the index, to
prevent the common pitfall of accidentally closing the old reader.

View File

@ -21,6 +21,8 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.MultiDocsAndPositionsEnum;
import org.apache.lucene.index.MultiDocsAndPositionsEnum.EnumWithSlice;
import java.io.IOException;
/**
@ -51,6 +53,14 @@ public final class MappingMultiDocsAndPositionsEnum extends DocsAndPositionsEnum
/** Sets the {@link MergeState}, which is used to re-map sub-enum doc ids during merging. */
public void setMergeState(MergeState mergeState) {
this.mergeState = mergeState;
}
/** Returns the number of sub-enums wrapped by this multi-enum. */
public int getNumSubs() {
return numSubs;
}
/**
 * Returns the backing {@code subs} array of {@link EnumWithSlice} entries.
 * NOTE(review): exposes the internal array without copying — callers are
 * presumably expected not to modify it; confirm against call sites.
 */
public EnumWithSlice[] getSubs() {
return subs;
}
@Override
public int freq() throws IOException {

View File

@ -20,6 +20,8 @@ package org.apache.lucene.codecs;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.MultiDocsEnum;
import org.apache.lucene.index.MultiDocsEnum.EnumWithSlice;
import java.io.IOException;
/**
@ -50,6 +52,14 @@ public final class MappingMultiDocsEnum extends DocsEnum {
/** Sets the {@link MergeState}, which is used to re-map sub-enum doc ids during merging. */
public void setMergeState(MergeState mergeState) {
this.mergeState = mergeState;
}
/** Returns the number of sub-enums wrapped by this multi-enum. */
public int getNumSubs() {
return numSubs;
}
/**
 * Returns the backing {@code subs} array of {@link EnumWithSlice} entries.
 * NOTE(review): exposes the internal array without copying — callers are
 * presumably expected not to modify it; confirm against call sites.
 */
public EnumWithSlice[] getSubs() {
return subs;
}
@Override
public int freq() throws IOException {

View File

@ -1175,7 +1175,7 @@ public abstract class QueryParserBase {
} else if ('A' <= c && c <= 'F') {
return c - 'A' + 10;
} else {
throw new ParseException("None-hex character in unicode escape sequence: " + c);
throw new ParseException("Non-hex character in Unicode escape sequence: " + c);
}
}

View File

@ -22,7 +22,7 @@ INVALID_SYNTAX_ESCAPE_UNICODE_TRUNCATION = Truncated unicode escape sequence.
INVALID_SYNTAX_ESCAPE_CHARACTER = Term can not end with escape character.
#<CREATEDBY>Apache Lucene Community</CREATEDBY>
INVALID_SYNTAX_ESCAPE_NONE_HEX_UNICODE = None-hex character in unicode escape sequence: {0}
INVALID_SYNTAX_ESCAPE_NONE_HEX_UNICODE = Non-hex character in Unicode escape sequence: {0}
#<CREATEDBY>Apache Lucene Community</CREATEDBY>
NODE_ACTION_NOT_SUPPORTED = This node does not support this action.

View File

@ -1,6 +1,6 @@
package org.apache.lucene.search.suggest.fst;
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
@ -37,18 +37,18 @@ import org.apache.lucene.util.PriorityQueue;
* @lucene.internal
*/
public final class Sort {
public final static int MB = 1024 * 1024;
public final static int GB = MB * 1024;
public final static long MB = 1024 * 1024;
public final static long GB = MB * 1024;
/**
* Minimum recommended buffer size for sorting.
*/
public final static int MIN_BUFFER_SIZE_MB = 32;
public final static long MIN_BUFFER_SIZE_MB = 32;
/**
* Absolute minimum required buffer size for sorting.
*/
public static final int ABSOLUTE_MIN_SORT_BUFFER_SIZE = MB / 2;
public static final long ABSOLUTE_MIN_SORT_BUFFER_SIZE = MB / 2;
private static final String MIN_BUFFER_SIZE_MSG = "At least 0.5MB RAM buffer is needed";
/**
@ -60,7 +60,7 @@ public final class Sort {
* A bit more descriptive unit for constructors.
*
* @see #automatic()
* @see #megabytes(int)
* @see #megabytes(long)
*/
public static final class BufferSize {
final int bytes;
@ -70,11 +70,19 @@ public final class Sort {
throw new IllegalArgumentException("Buffer too large for Java ("
+ (Integer.MAX_VALUE / MB) + "mb max): " + bytes);
}
if (bytes < ABSOLUTE_MIN_SORT_BUFFER_SIZE) {
throw new IllegalArgumentException(MIN_BUFFER_SIZE_MSG + ": " + bytes);
}
this.bytes = (int) bytes;
}
public static BufferSize megabytes(int mb) {
/**
* Creates a {@link BufferSize} in MB. The given
* values must be &gt; 0 and &lt; 2048.
*/
public static BufferSize megabytes(long mb) {
return new BufferSize(mb * MB);
}
@ -105,7 +113,7 @@ public final class Sort {
sortBufferByteSize = Math.max(ABSOLUTE_MIN_SORT_BUFFER_SIZE, sortBufferByteSize);
}
}
return new BufferSize(Math.min(Integer.MAX_VALUE, sortBufferByteSize));
return new BufferSize(Math.min((long)Integer.MAX_VALUE, sortBufferByteSize));
}
}

View File

@ -63,7 +63,7 @@ public class TestSort extends LuceneTestCase {
public void testIntermediateMerges() throws Exception {
// Sort 20 mb worth of data with 1mb buffer, binary merging.
SortInfo info = checkSort(new Sort(Sort.DEFAULT_COMPARATOR, BufferSize.megabytes(1), Sort.defaultTempDir(), 2),
generateRandom(Sort.MB * 20));
generateRandom((int)Sort.MB * 20));
assertTrue(info.mergeRounds > 10);
}
@ -71,7 +71,7 @@ public class TestSort extends LuceneTestCase {
public void testSmallRandom() throws Exception {
// Sort 20 mb worth of data with 1mb buffer.
SortInfo sortInfo = checkSort(new Sort(Sort.DEFAULT_COMPARATOR, BufferSize.megabytes(1), Sort.defaultTempDir(), Sort.MAX_TEMPFILES),
generateRandom(Sort.MB * 20));
generateRandom((int)Sort.MB * 20));
assertEquals(1, sortInfo.mergeRounds);
}
@ -79,7 +79,7 @@ public class TestSort extends LuceneTestCase {
public void testLargerRandom() throws Exception {
// Sort 100MB worth of data with 15mb buffer.
checkSort(new Sort(Sort.DEFAULT_COMPARATOR, BufferSize.megabytes(16), Sort.defaultTempDir(), Sort.MAX_TEMPFILES),
generateRandom(Sort.MB * 100));
generateRandom((int)Sort.MB * 100));
}
private byte[][] generateRandom(int howMuchData) {
@ -152,4 +152,31 @@ public class TestSort extends LuceneTestCase {
w.close();
return file;
}
/**
 * Verifies {@code BufferSize.megabytes}: every value in the legal range
 * (1..2047 MB) is accepted, while 2048 (byte count would exceed
 * Integer.MAX_VALUE), 0 and -1 (below the 0.5 MB absolute minimum) are
 * rejected with {@link IllegalArgumentException}.
 */
public void testRamBuffer() {
// Random legal sizes: nextInt(2047) yields 0..2046, so 1+... covers 1..2047.
int numIters = atLeast(10000);
for (int i = 0; i < numIters; i++) {
BufferSize.megabytes(1+random().nextInt(2047));
}
// Explicit boundary checks at both ends of the legal range.
BufferSize.megabytes(2047);
BufferSize.megabytes(1);
try {
BufferSize.megabytes(2048);
fail("max mb is 2047");
} catch (IllegalArgumentException e) {
// expected: 2048 MB in bytes overflows the int-sized buffer limit
}
try {
BufferSize.megabytes(0);
fail("min mb is 0.5");
} catch (IllegalArgumentException e) {
// expected: below the 0.5 MB absolute minimum sort buffer size
}
try {
BufferSize.megabytes(-1);
fail("min mb is 0.5");
} catch (IllegalArgumentException e) {
// expected: negative sizes are below the minimum as well
}
}
}

View File

@ -657,7 +657,7 @@ public class SnapPuller {
if(!success){
try {
LOG.error("Unable to move index file from: " + indexFileInTmpDir
+ " to: " + indexFileInIndex + "Trying to do a copy");
+ " to: " + indexFileInIndex + " Trying to do a copy");
FileUtils.copyFile(indexFileInTmpDir,indexFileInIndex);
success = true;
} catch (IOException e) {

View File

@ -123,7 +123,7 @@ public class BasicDistributedZkTest extends AbstractDistributedZkTestCase {
@Override
public void doTest() throws Exception {
setLoggingLevel(null);
// setLoggingLevel(null);
del("*:*");
indexr(id,1, i1, 100, tlong, 100,t1,"now is the time for all good men"

View File

@ -59,7 +59,6 @@ import org.junit.Ignore;
* what we test now - the default update chain
*
*/
@Ignore
public class FullSolrCloudTest extends AbstractDistributedZkTestCase {
@BeforeClass
public static void beforeFullSolrCloudTest() throws Exception {

View File

@ -976,7 +976,7 @@
http://wiki.apache.org/solr/AnalysisRequestHandler
An analysis handler that provides a breakdown of the analysis
process of provided docuemnts. This handler expects a (single)
process of provided documents. This handler expects a (single)
content stream with the following format:
<docs>

View File

@ -4,11 +4,12 @@ java.util.logging.ConsoleHandler.level=FINEST
java.util.logging.ConsoleHandler.formatter=org.apache.solr.SolrLogFormatter
.level=SEVERE
#.level=INFO
#.level=SEVERE
.level=INFO
#org.apache.solr.update.processor.LogUpdateProcessor=FINEST
#org.apache.solr.update.processor.DistributedUpdateProcessor=FINEST
#org.apache.solr.update.PeerSync.level=FINEST
#org.apache.solr.update.UpdateLog.level=FINE
#org.apache.solr.update.TransactionLog.level=FINEST