Merged /lucene/dev/trunk:r1444646-1445015
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4765@1445016 13f79535-47bb-0310-9956-ffa450edef68
commit 73d9834a5b
@@ -73,25 +73,5 @@
         </excludes>
       </testResource>
     </testResources>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-source</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>add-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>${module-path}/src/examples</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
   </build>
 </project>
@@ -1930,7 +1930,7 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites
 
   <!-- Forbidden API Task -->
   <target name="install-forbidden-apis" unless="forbidden-apis.loaded" depends="ivy-availability-check,ivy-configure">
-    <ivy:cachepath organisation="de.thetaphi" module="forbiddenapis" revision="1.0"
+    <ivy:cachepath organisation="de.thetaphi" module="forbiddenapis" revision="1.1"
       inline="true" conf="default" transitive="true" pathid="forbidden-apis.classpath"/>
     <taskdef name="forbidden-apis" classname="de.thetaphi.forbiddenapis.AntTask" classpathref="forbidden-apis.classpath"/>
     <property name="forbidden-apis.loaded" value="true"/>
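For context, downstream targets depend on install-forbidden-apis and then invoke the <forbidden-apis> task that its taskdef defines. A minimal sketch of such a caller, assuming an illustrative target name, fileset, and signature sets bundled with the forbiddenapis JAR (this is not the build's literal check target):

<!-- Illustrative caller only: loads the task, then scans compiled classes. -->
<target name="check-forbidden-apis-example" depends="install-forbidden-apis">
  <forbidden-apis>
    <!-- signature sets shipped inside the forbiddenapis JAR -->
    <bundledSignatures name="jdk-unsafe-1.6"/>
    <bundledSignatures name="jdk-deprecated-1.6"/>
    <fileset dir="${build.dir}/classes" includes="**/*.class"/>
  </forbidden-apis>
</target>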
@@ -161,95 +161,90 @@ final class SegmentMerger
   }
 
   private void mergeDocValues(SegmentWriteState segmentWriteState) throws IOException {
-
-    if (codec.docValuesFormat() != null) {
-      DocValuesConsumer consumer = codec.docValuesFormat().fieldsConsumer(segmentWriteState);
-      boolean success = false;
-      try {
-        for (FieldInfo field : mergeState.fieldInfos) {
-          DocValuesType type = field.getDocValuesType();
-          if (type != null) {
-            if (type == DocValuesType.NUMERIC) {
-              List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
-              for (AtomicReader reader : mergeState.readers) {
-                NumericDocValues values = reader.getNumericDocValues(field.name);
-                if (values == null) {
-                  values = NumericDocValues.EMPTY;
-                }
-                toMerge.add(values);
-              }
-              consumer.mergeNumericField(field, mergeState, toMerge);
-            } else if (type == DocValuesType.BINARY) {
-              List<BinaryDocValues> toMerge = new ArrayList<BinaryDocValues>();
-              for (AtomicReader reader : mergeState.readers) {
-                BinaryDocValues values = reader.getBinaryDocValues(field.name);
-                if (values == null) {
-                  values = BinaryDocValues.EMPTY;
-                }
-                toMerge.add(values);
-              }
-              consumer.mergeBinaryField(field, mergeState, toMerge);
-            } else if (type == DocValuesType.SORTED) {
-              List<SortedDocValues> toMerge = new ArrayList<SortedDocValues>();
-              for (AtomicReader reader : mergeState.readers) {
-                SortedDocValues values = reader.getSortedDocValues(field.name);
-                if (values == null) {
-                  values = SortedDocValues.EMPTY;
-                }
-                toMerge.add(values);
-              }
-              consumer.mergeSortedField(field, mergeState, toMerge);
-            } else if (type == DocValuesType.SORTED_SET) {
-              List<SortedSetDocValues> toMerge = new ArrayList<SortedSetDocValues>();
-              for (AtomicReader reader : mergeState.readers) {
-                SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
-                if (values == null) {
-                  values = SortedSetDocValues.EMPTY;
-                }
-                toMerge.add(values);
-              }
-              consumer.mergeSortedSetField(field, mergeState, toMerge);
-            } else {
-              throw new AssertionError("type=" + type);
-            }
-          }
-        }
-        success = true;
-      } finally {
-        if (success) {
-          IOUtils.close(consumer);
-        } else {
-          IOUtils.closeWhileHandlingException(consumer);
-        }
-      }
-    }
+    DocValuesConsumer consumer = codec.docValuesFormat().fieldsConsumer(segmentWriteState);
+    boolean success = false;
+    try {
+      for (FieldInfo field : mergeState.fieldInfos) {
+        DocValuesType type = field.getDocValuesType();
+        if (type != null) {
+          if (type == DocValuesType.NUMERIC) {
+            List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
+            for (AtomicReader reader : mergeState.readers) {
+              NumericDocValues values = reader.getNumericDocValues(field.name);
+              if (values == null) {
+                values = NumericDocValues.EMPTY;
+              }
+              toMerge.add(values);
+            }
+            consumer.mergeNumericField(field, mergeState, toMerge);
+          } else if (type == DocValuesType.BINARY) {
+            List<BinaryDocValues> toMerge = new ArrayList<BinaryDocValues>();
+            for (AtomicReader reader : mergeState.readers) {
+              BinaryDocValues values = reader.getBinaryDocValues(field.name);
+              if (values == null) {
+                values = BinaryDocValues.EMPTY;
+              }
+              toMerge.add(values);
+            }
+            consumer.mergeBinaryField(field, mergeState, toMerge);
+          } else if (type == DocValuesType.SORTED) {
+            List<SortedDocValues> toMerge = new ArrayList<SortedDocValues>();
+            for (AtomicReader reader : mergeState.readers) {
+              SortedDocValues values = reader.getSortedDocValues(field.name);
+              if (values == null) {
+                values = SortedDocValues.EMPTY;
+              }
+              toMerge.add(values);
+            }
+            consumer.mergeSortedField(field, mergeState, toMerge);
+          } else if (type == DocValuesType.SORTED_SET) {
+            List<SortedSetDocValues> toMerge = new ArrayList<SortedSetDocValues>();
+            for (AtomicReader reader : mergeState.readers) {
+              SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
+              if (values == null) {
+                values = SortedSetDocValues.EMPTY;
+              }
+              toMerge.add(values);
+            }
+            consumer.mergeSortedSetField(field, mergeState, toMerge);
+          } else {
+            throw new AssertionError("type=" + type);
+          }
+        }
+      }
+      success = true;
+    } finally {
+      if (success) {
+        IOUtils.close(consumer);
+      } else {
+        IOUtils.closeWhileHandlingException(consumer);
+      }
+    }
   }
 
   private void mergeNorms(SegmentWriteState segmentWriteState) throws IOException {
-    if (codec.normsFormat() != null) {
-      DocValuesConsumer consumer = codec.normsFormat().normsConsumer(segmentWriteState);
-      boolean success = false;
-      try {
-        for (FieldInfo field : mergeState.fieldInfos) {
-          if (field.hasNorms()) {
-            List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
-            for (AtomicReader reader : mergeState.readers) {
-              NumericDocValues norms = reader.getNormValues(field.name);
-              if (norms == null) {
-                norms = NumericDocValues.EMPTY;
-              }
-              toMerge.add(norms);
-            }
-            consumer.mergeNumericField(field, mergeState, toMerge);
-          }
-        }
-        success = true;
-      } finally {
-        if (success) {
-          IOUtils.close(consumer);
-        } else {
-          IOUtils.closeWhileHandlingException(consumer);
-        }
-      }
-    }
+    DocValuesConsumer consumer = codec.normsFormat().normsConsumer(segmentWriteState);
+    boolean success = false;
+    try {
+      for (FieldInfo field : mergeState.fieldInfos) {
+        if (field.hasNorms()) {
+          List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
+          for (AtomicReader reader : mergeState.readers) {
+            NumericDocValues norms = reader.getNormValues(field.name);
+            if (norms == null) {
+              norms = NumericDocValues.EMPTY;
+            }
+            toMerge.add(norms);
+          }
+          consumer.mergeNumericField(field, mergeState, toMerge);
+        }
+      }
+      success = true;
+    } finally {
+      if (success) {
+        IOUtils.close(consumer);
+      } else {
+        IOUtils.closeWhileHandlingException(consumer);
+      }
+    }
   }
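Two things change in this hunk. First, the codec.docValuesFormat() != null / codec.normsFormat() != null guards disappear from the method bodies; the formats are assumed non-null here, with the decision of whether to merge doc values or norms at all presumably made by the caller. Second, both methods keep the same close-on-success idiom: on the normal path the consumer is closed (committing its output, and any exception from close() still propagates), while on failure it is closed with exception suppression so the original exception stays primary. A distilled, self-contained sketch of that idiom (the helper class and interface are hypothetical, not Lucene API; only IOUtils is real):

import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.util.IOUtils;

// Hypothetical helper distilling the try/finally pattern used by
// mergeDocValues() and mergeNorms() above.
final class CloseOnSuccess {
  interface IOBody { void run() throws IOException; }

  static void run(Closeable consumer, IOBody body) throws IOException {
    boolean success = false;
    try {
      body.run();
      success = true;
    } finally {
      if (success) {
        IOUtils.close(consumer);                       // may throw; nothing to mask
      } else {
        IOUtils.closeWhileHandlingException(consumer); // keeps the body's exception primary
      }
    }
  }
}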
@@ -114,6 +114,9 @@ Bug Fixes
 * SOLR-3655: A restarted node can briefly appear live and active before it really
   is in some cases. (Mark Miller)
 
+* SOLR-4426: NRTCachingDirectoryFactory does not initialize maxCachedMB and maxMergeSizeMB
+  if <directoryFactory> is not present in solrconfig.xml (Jack Krupansky via shalin)
+
 Optimizations
 ----------------------
 
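For reference, a minimal sketch of the <directoryFactory> element in solrconfig.xml whose absence triggered SOLR-4426; the values shown are the defaults the fix now falls back to when the element (or either parameter) is omitted:

<!-- Explicit configuration; omitting this element entirely is what
     previously left maxCachedMB/maxMergeSizeMB uninitialized. -->
<directoryFactory name="DirectoryFactory" class="solr.NRTCachingDirectoryFactory">
  <double name="maxCachedMB">48</double>
  <double name="maxMergeSizeMB">4</double>
</directoryFactory>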
@@ -31,18 +31,20 @@ import org.apache.solr.core.DirectoryFactory.DirContext;
  * Factory to instantiate {@link org.apache.lucene.store.NRTCachingDirectory}
  */
 public class NRTCachingDirectoryFactory extends StandardDirectoryFactory {
-  private double maxMergeSizeMB;
-  private double maxCachedMB;
+  public static final int DEFAULT_MAX_MERGE_SIZE_MB = 4;
+  private double maxMergeSizeMB = DEFAULT_MAX_MERGE_SIZE_MB;
+  public static final int DEFAULT_MAX_CACHED_MB = 48;
+  private double maxCachedMB = DEFAULT_MAX_CACHED_MB;
 
   @Override
   public void init(NamedList args) {
     super.init(args);
     SolrParams params = SolrParams.toSolrParams(args);
-    maxMergeSizeMB = params.getDouble("maxMergeSizeMB", 4);
+    maxMergeSizeMB = params.getDouble("maxMergeSizeMB", DEFAULT_MAX_MERGE_SIZE_MB);
     if (maxMergeSizeMB <= 0){
       throw new IllegalArgumentException("maxMergeSizeMB must be greater than 0");
     }
-    maxCachedMB = params.getDouble("maxCachedMB", 48);
+    maxCachedMB = params.getDouble("maxCachedMB", DEFAULT_MAX_CACHED_MB);
     if (maxCachedMB <= 0){
       throw new IllegalArgumentException("maxCachedMB must be greater than 0");
     }
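Once resolved, the two settings are handed straight to Lucene's NRTCachingDirectory, which keeps small merged segments and fresh NRT flushes in RAM and delegates everything else to the wrapped directory. A minimal sketch of that hand-off (the class name and path are hypothetical; the NRTCachingDirectory constructor signature is the real 4.x one):

import java.io.File;
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NRTCachingDirectory;

// Hypothetical stand-in for what the factory does with the resolved values:
// wrap the on-disk directory so small merges and NRT flushes stay in RAM.
final class NrtDirectoryExample {
  static Directory open(String path, double maxMergeSizeMB, double maxCachedMB) throws IOException {
    return new NRTCachingDirectory(FSDirectory.open(new File(path)), maxMergeSizeMB, maxCachedMB);
  }
}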