mirror of https://github.com/apache/lucene.git
LUCENE-7756: Only record the major Lucene version that created the index, and record the minimum Lucene version that contributed to segments.
This commit is contained in:
parent 3f172a019b
commit 23b002a0fd
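For orientation only (not part of the commit): a minimal sketch of how the metadata this change records could be inspected, using the APIs the patch itself introduces (SegmentInfos.getIndexCreatedVersionMajor, SegmentInfo.getMinVersion). The class name and the index-path argument are hypothetical.

    import java.nio.file.Paths;
    import org.apache.lucene.index.SegmentCommitInfo;
    import org.apache.lucene.index.SegmentInfos;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class ShowVersionMetadata {
      public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get(args[0]))) {
          SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
          // After this change, only the major version that created the index is recorded:
          System.out.println("index created by Lucene major version: "
              + infos.getIndexCreatedVersionMajor());
          for (SegmentCommitInfo sci : infos) {
            // Min version: oldest Lucene version that contributed to the segment;
            // null for segments written before this metadata existed.
            System.out.println(sci.info.name + " written by " + sci.info.getVersion()
                + ", min contributing version " + sci.info.getMinVersion());
          }
        }
      }
    }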
@@ -7,9 +7,12 @@ http://s.apache.org/luceneversions

 New Features

-* LUCENE-7703: SegmentInfos now record the Lucene version at index creation
-  time. (Adrien Grand)
+* LUCENE-7703: SegmentInfos now record the major Lucene version at index
+  creation time. (Adrien Grand)
+
+* LUCENE-7756: LeafReader.getMetaData now exposes the index created version as
+  well as the oldest Lucene version that contributed to the segment.
+  (Adrien Grand)

 API Changes
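A hedged usage sketch (not from the patch) of the LeafReader.getMetaData entry point mentioned in the LUCENE-7756 entry above; the demo class name is hypothetical and an existing index is assumed in dir.

    import java.io.IOException;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.LeafMetaData;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.store.Directory;

    class LeafMetaDataDemo {
      static void printLeafMetaData(Directory dir) throws IOException {
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
          for (LeafReaderContext ctx : reader.leaves()) {
            // LeafMetaData bundles the created major version, the oldest
            // contributing version, and the index sort of the leaf:
            LeafMetaData meta = ctx.reader().getMetaData();
            System.out.println("created major=" + meta.getCreatedVersionMajor()
                + ", min version=" + meta.getMinVersion()
                + ", sort=" + meta.getSort());
          }
        }
      }
    }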
@@ -65,7 +65,7 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {
       final Set<String> files = input.readSetOfStrings();
       final Map<String,String> attributes = input.readMapOfStrings();

-      si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
+      si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
       si.setFiles(files);
     } catch (Throwable exception) {
       priorE = exception;
@@ -114,7 +114,7 @@ public class Lucene62Codec extends Codec {
   }

   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return segmentInfosFormat;
   }

@@ -128,6 +128,9 @@ public class FixBrokenOffsets {
     }

    Directory destDir = FSDirectory.open(destPath);
+    // We need to maintain the same major version
+    int createdMajor = SegmentInfos.readLatestCommit(srcDir).getIndexCreatedVersionMajor();
+    new SegmentInfos(createdMajor).commit(destDir);
    IndexWriter writer = new IndexWriter(destDir, new IndexWriterConfig());
    writer.addIndexes(filtered);
    IOUtils.close(writer, reader, srcDir, destDir);
@@ -65,7 +65,7 @@ public class Lucene50RWSegmentInfoFormat extends Lucene50SegmentInfoFormat {
       final Set<String> files = input.readSetOfStrings();
       final Map<String,String> attributes = input.readMapOfStrings();

-      si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
+      si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
       si.setFiles(files);
     } catch (Throwable exception) {
       priorE = exception;
@@ -28,6 +28,11 @@ public class TestLucene50SegmentInfoFormat extends BaseSegmentInfoFormatTestCase
     return new Lucene60RWCodec();
   }

+  @Override
+  protected int getCreatedVersionMajor() {
+    return Version.LUCENE_6_0_0.major;
+  }
+
   @Override
   protected Version[] getVersions() {
     return new Version[] { Version.LUCENE_6_0_0 };
@@ -37,4 +42,9 @@ public class TestLucene50SegmentInfoFormat extends BaseSegmentInfoFormatTestCase
   protected boolean supportsIndexSort() {
     return false;
   }
+
+  @Override
+  protected boolean supportsMinVersion() {
+    return false;
+  }
 }
@@ -19,6 +19,7 @@ package org.apache.lucene.codecs.lucene53;

 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene62.Lucene62RWCodec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
+import org.apache.lucene.util.Version;

 /**
  * Tests Lucene53NormsFormat
@@ -26,6 +27,11 @@ import org.apache.lucene.index.BaseNormsFormatTestCase;
 public class TestLucene53NormsFormat extends BaseNormsFormatTestCase {
   private final Codec codec = new Lucene62RWCodec();

+  @Override
+  protected int getCreatedVersionMajor() {
+    return Version.LUCENE_6_2_0.major;
+  }
+
   @Override
   protected Codec getCodec() {
     return codec;
@@ -17,13 +17,25 @@
 package org.apache.lucene.codecs.lucene62;

 import org.apache.lucene.codecs.NormsFormat;
 import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.codecs.lucene53.Lucene53RWNormsFormat;
 import org.apache.lucene.codecs.lucene62.Lucene62Codec;

 /**
  * Read-write version of 6.2 codec for testing
  * @deprecated for test purposes only
  */
 @Deprecated
 public class Lucene62RWCodec extends Lucene62Codec {

+  private final SegmentInfoFormat segmentInfoFormat = new Lucene62RWSegmentInfoFormat();
+  private final NormsFormat normsFormat = new Lucene53RWNormsFormat();
+
+  @Override
+  public SegmentInfoFormat segmentInfoFormat() {
+    return segmentInfoFormat;
+  }
+
+  @Override
+  public NormsFormat normsFormat() {
+    return normsFormat;
+  }
@@ -0,0 +1,193 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.codecs.lucene62;

import java.io.IOException;
import java.util.Set;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Version;

/**
 * Read-write version of 6.2 SegmentInfoFormat for testing
 * @deprecated for test purposes only
 */
@Deprecated
public class Lucene62RWSegmentInfoFormat extends Lucene62SegmentInfoFormat {

  @Override
  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene62SegmentInfoFormat.SI_EXTENSION);

    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
      // Only add the file once we've successfully created it, else IFD assert can trip:
      si.addFile(fileName);
      CodecUtil.writeIndexHeader(output,
                                 Lucene62SegmentInfoFormat.CODEC_NAME,
                                 Lucene62SegmentInfoFormat.VERSION_CURRENT,
                                 si.getId(),
                                 "");
      Version version = si.getVersion();
      if (version.major < 5) {
        throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si);
      }
      // Write the Lucene version that created this segment, since 3.1
      output.writeInt(version.major);
      output.writeInt(version.minor);
      output.writeInt(version.bugfix);
      assert version.prerelease == 0;
      output.writeInt(si.maxDoc());

      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
      output.writeMapOfStrings(si.getDiagnostics());
      Set<String> files = si.files();
      for (String file : files) {
        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
        }
      }
      output.writeSetOfStrings(files);
      output.writeMapOfStrings(si.getAttributes());

      Sort indexSort = si.getIndexSort();
      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
      output.writeVInt(numSortFields);
      for (int i = 0; i < numSortFields; ++i) {
        SortField sortField = indexSort.getSort()[i];
        SortField.Type sortType = sortField.getType();
        output.writeString(sortField.getField());
        int sortTypeID;
        switch (sortField.getType()) {
          case STRING:
            sortTypeID = 0;
            break;
          case LONG:
            sortTypeID = 1;
            break;
          case INT:
            sortTypeID = 2;
            break;
          case DOUBLE:
            sortTypeID = 3;
            break;
          case FLOAT:
            sortTypeID = 4;
            break;
          case CUSTOM:
            if (sortField instanceof SortedSetSortField) {
              sortTypeID = 5;
              sortType = SortField.Type.STRING;
            } else if (sortField instanceof SortedNumericSortField) {
              sortTypeID = 6;
              sortType = ((SortedNumericSortField) sortField).getNumericType();
            } else {
              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
            }
            break;
          default:
            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
        }
        output.writeVInt(sortTypeID);
        if (sortTypeID == 5) {
          SortedSetSortField ssf = (SortedSetSortField) sortField;
          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
            output.writeByte((byte) 0);
          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
            output.writeByte((byte) 1);
          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
            output.writeByte((byte) 2);
          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
            output.writeByte((byte) 3);
          } else {
            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
          }
        } else if (sortTypeID == 6) {
          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
          if (snsf.getNumericType() == SortField.Type.LONG) {
            output.writeByte((byte) 0);
          } else if (snsf.getNumericType() == SortField.Type.INT) {
            output.writeByte((byte) 1);
          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
            output.writeByte((byte) 2);
          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
            output.writeByte((byte) 3);
          } else {
            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
          }
          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
            output.writeByte((byte) 0);
          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
            output.writeByte((byte) 1);
          } else {
            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
          }
        }
        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));

        // write missing value
        Object missingValue = sortField.getMissingValue();
        if (missingValue == null) {
          output.writeByte((byte) 0);
        } else {
          switch(sortType) {
            case STRING:
              if (missingValue == SortField.STRING_LAST) {
                output.writeByte((byte) 1);
              } else if (missingValue == SortField.STRING_FIRST) {
                output.writeByte((byte) 2);
              } else {
                throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
              }
              break;
            case LONG:
              output.writeByte((byte) 1);
              output.writeLong(((Long) missingValue).longValue());
              break;
            case INT:
              output.writeByte((byte) 1);
              output.writeInt(((Integer) missingValue).intValue());
              break;
            case DOUBLE:
              output.writeByte((byte) 1);
              output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
              break;
            case FLOAT:
              output.writeByte((byte) 1);
              output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
              break;
            default:
              throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
          }
        }
      }

      CodecUtil.writeFooter(output);
    }
  }

}
@@ -19,7 +19,6 @@ package org.apache.lucene.codecs.lucene62;

 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;

 /**
@@ -27,13 +26,23 @@ import org.apache.lucene.util.Version;
  */
 public class TestLucene62SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {

+  @Override
+  protected int getCreatedVersionMajor() {
+    return Version.LUCENE_6_2_0.major;
+  }
+
   @Override
   protected Version[] getVersions() {
-    return new Version[] { Version.LATEST };
+    return new Version[] { Version.LUCENE_6_2_0 };
   }

   @Override
   protected Codec getCodec() {
-    return TestUtil.getDefaultCodec();
+    return new Lucene62RWCodec();
   }
+
+  @Override
+  protected boolean supportsMinVersion() {
+    return false;
+  }
 }
@@ -693,10 +693,18 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
        System.out.println("\nTEST: index=" + name);
      }
      Directory dir = newDirectory(oldIndexDirs.get(name));
+
+     final SegmentInfos oldSegInfos = SegmentInfos.readLatestCommit(dir);
+
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
      w.forceMerge(1);
      w.close();

      final SegmentInfos segInfos = SegmentInfos.readLatestCommit(dir);
+     assertEquals(oldSegInfos.getIndexCreatedVersionMajor(), segInfos.getIndexCreatedVersionMajor());
      assertEquals(Version.LATEST, segInfos.asList().get(0).info.getVersion());
+     assertEquals(oldSegInfos.asList().get(0).info.getMinVersion(), segInfos.asList().get(0).info.getMinVersion());

      dir.close();
    }
  }
@@ -707,26 +715,30 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
        System.out.println("\nTEST: old index " + name);
      }
      Directory oldDir = oldIndexDirs.get(name);
-     Version indexCreatedVersion = SegmentInfos.readLatestCommit(oldDir).getIndexCreatedVersion();
+     SegmentInfos infos = SegmentInfos.readLatestCommit(oldDir);

      Directory targetDir = newDirectory();
-     // Simulate writing into an index that was created on the same version
-     new SegmentInfos(indexCreatedVersion).commit(targetDir);
+     if (infos.getCommitLuceneVersion().major != Version.LATEST.major) {
+       // both indexes are not compatible
+       Directory targetDir2 = newDirectory();
+       IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random())));
+       IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> w.addIndexes(oldDir));
+       assertTrue(e.getMessage(), e.getMessage().startsWith("Cannot use addIndexes(Directory) with indexes that have been created by a different Lucene version."));
+       w.close();
+       targetDir2.close();
+
+       // for the next test, we simulate writing to an index that was created on the same major version
+       new SegmentInfos(infos.getIndexCreatedVersionMajor()).commit(targetDir);
+     }

      IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
      w.addIndexes(oldDir);
      w.close();
-     targetDir.close();
-
-     // Now check that we forbid calling addIndexes with a different version
-     targetDir = newDirectory();
-     IndexWriter oldWriter = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
-     IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> oldWriter.addIndexes(oldDir));
-     assertTrue(e.getMessage(), e.getMessage().startsWith("Cannot use addIndexes(Directory) with indexes that have been created by a different Lucene version."));

      if (VERBOSE) {
        System.out.println("\nTEST: done adding indices; now close");
      }
-     oldWriter.close();

      targetDir.close();
    }
@@ -734,9 +746,22 @@ public class TestBackwardsCompatibility extends LuceneTestCase {

  public void testAddOldIndexesReader() throws IOException {
    for (String name : oldNames) {
-     DirectoryReader reader = DirectoryReader.open(oldIndexDirs.get(name));
+     Directory oldDir = oldIndexDirs.get(name);
+     SegmentInfos infos = SegmentInfos.readLatestCommit(oldDir);
+     DirectoryReader reader = DirectoryReader.open(oldDir);

      Directory targetDir = newDirectory();
+     if (infos.getCommitLuceneVersion().major != Version.LATEST.major) {
+       Directory targetDir2 = newDirectory();
+       IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random())));
+       IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TestUtil.addIndexesSlowly(w, reader));
+       assertEquals(e.getMessage(), "Cannot merge a segment that has been created with major version 6 into this index which has been created by major version 7");
+       w.close();
+       targetDir2.close();
+
+       // for the next test, we simulate writing to an index that was created on the same major version
+       new SegmentInfos(infos.getIndexCreatedVersionMajor()).commit(targetDir);
+     }
      IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
      TestUtil.addIndexesSlowly(w, reader);
      w.close();
@@ -1245,11 +1270,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
      SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
      // those indexes are created by a single version so we can
      // compare the commit version with the created version
-     if (infos.getCommitLuceneVersion().onOrAfter(Version.LUCENE_7_0_0)) {
-       assertEquals(infos.getCommitLuceneVersion(), infos.getIndexCreatedVersion());
-     } else {
-       assertNull(infos.getIndexCreatedVersion());
-     }
+     assertEquals(infos.getCommitLuceneVersion().major, infos.getIndexCreatedVersionMajor());
    }
  }

@@ -1316,7 +1337,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
    }
  }

- private int checkAllSegmentsUpgraded(Directory dir, Version indexCreatedVersion) throws IOException {
+ private int checkAllSegmentsUpgraded(Directory dir, int indexCreatedVersion) throws IOException {
    final SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
    if (VERBOSE) {
      System.out.println("checkAllSegmentsUpgraded: " + infos);
@@ -1325,7 +1346,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
      assertEquals(Version.LATEST, si.info.getVersion());
    }
    assertEquals(Version.LATEST, infos.getCommitLuceneVersion());
-   assertEquals(indexCreatedVersion, infos.getIndexCreatedVersion());
+   assertEquals(indexCreatedVersion, infos.getIndexCreatedVersionMajor());
    return infos.size();
  }

@@ -1343,7 +1364,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
      System.out.println("testUpgradeOldIndex: index=" + name);
    }
    Directory dir = newDirectory(oldIndexDirs.get(name));
-   Version indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersion();
+   int indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor();

    newIndexUpgrader(dir).upgrade();

@@ -1360,7 +1381,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
    try {
      for (Map.Entry<String,Directory> entry : oldIndexDirs.entrySet()) {
        String name = entry.getKey();
-       Version indexCreatedVersion = SegmentInfos.readLatestCommit(entry.getValue()).getIndexCreatedVersion();
+       int indexCreatedVersion = SegmentInfos.readLatestCommit(entry.getValue()).getIndexCreatedVersionMajor();
        Path dir = createTempDir(name);
        TestUtil.unzip(getDataInputStream("index." + name + ".zip"), dir);

@@ -1413,7 +1434,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
    }
    Directory dir = newDirectory(oldIndexDirs.get(name));
    assertEquals("Original index must be single segment", 1, getNumberOfSegments(dir));
-   Version indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersion();
+   int indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor();

    // create a bunch of dummy segments
    int id = 40;
@@ -1472,7 +1493,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {

    newIndexUpgrader(dir).upgrade();

-   checkAllSegmentsUpgraded(dir, null);
+   checkAllSegmentsUpgraded(dir, 6);

    dir.close();
  }
@@ -1598,7 +1619,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {

    DirectoryReader reader = DirectoryReader.open(dir);
    assertEquals(1, reader.leaves().size());
-   Sort sort = reader.leaves().get(0).reader().getIndexSort();
+   Sort sort = reader.leaves().get(0).reader().getMetaData().getSort();
    assertNotNull(sort);
    assertEquals("<long: \"dateDV\">!", sort.toString());
    reader.close();
@@ -16,7 +16,6 @@
 */
package org.apache.lucene.index;

-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Path;
@@ -94,14 +93,11 @@ public class TestFixBrokenOffsets extends LuceneTestCase {
    for(int i=0;i<leaves.size();i++) {
      codecReaders[i] = (CodecReader) leaves.get(i).reader();
    }
-   w.addIndexes(codecReaders);
+   IndexWriter finalW2 = w;
+   e = expectThrows(IllegalArgumentException.class, () -> finalW2.addIndexes(codecReaders));
+   assertEquals("Cannot merge a segment that has been created with major version 6 into this index which has been created by major version 7", e.getMessage());
    reader.close();
    w.close();

-   // NOT OK: broken offsets were copied into a 7.0 segment:
-   ByteArrayOutputStream output = new ByteArrayOutputStream(1024);
-   RuntimeException re = expectThrows(RuntimeException.class, () -> {TestUtil.checkIndex(tmpDir2, false, true, output);});
-   assertEquals("term [66 6f 6f]: doc 0: pos 1: startOffset 7 < lastStartOffset 10; consider using the FixBrokenOffsets tool in Lucene's backward-codecs module to correct your index", re.getMessage());
    tmpDir2.close();

    // Now run the tool and confirm the broken offsets are fixed:
@@ -36,16 +36,16 @@ public class TestIndexWriterOnOldIndex extends LuceneTestCase {
    Directory dir = newFSDirectory(path);
    for (OpenMode openMode : OpenMode.values()) {
      Directory tmpDir = newDirectory(dir);
-     assertEquals(null /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersion());
+     assertEquals(6 /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
      IndexWriter w = new IndexWriter(tmpDir, newIndexWriterConfig().setOpenMode(openMode));
      w.commit();
      w.close();
      switch (openMode) {
        case CREATE:
-         assertEquals(Version.LATEST, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersion());
+         assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
          break;
        default:
-         assertEquals(null /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersion());
+         assertEquals(6 /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
      }
      tmpDir.close();
    }
@@ -55,6 +55,7 @@ import org.apache.lucene.util.Version;
 */
public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
  final static BytesRef SI_VERSION = new BytesRef(" version ");
+ final static BytesRef SI_MIN_VERSION = new BytesRef(" min version ");
  final static BytesRef SI_DOCCOUNT = new BytesRef(" number of documents ");
  final static BytesRef SI_USECOMPOUND = new BytesRef(" uses compound file ");
  final static BytesRef SI_NUM_DIAG = new BytesRef(" diagnostics ");
@@ -88,7 +89,21 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
      } catch (ParseException pe) {
        throw new CorruptIndexException("unable to parse version string: " + pe.getMessage(), input, pe);
      }

+     SimpleTextUtil.readLine(input, scratch);
+     assert StringHelper.startsWith(scratch.get(), SI_MIN_VERSION);
+     Version minVersion;
+     try {
+       String versionString = readString(SI_MIN_VERSION.length, scratch);
+       if (versionString.equals("null")) {
+         minVersion = null;
+       } else {
+         minVersion = Version.parse(versionString);
+       }
+     } catch (ParseException pe) {
+       throw new CorruptIndexException("unable to parse version string: " + pe.getMessage(), input, pe);
+     }
+
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_DOCCOUNT);
      final int docCount = Integer.parseInt(readString(SI_DOCCOUNT.length, scratch));
@@ -288,7 +303,7 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {

      SimpleTextUtil.checkFooter(input);

-     SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount,
+     SegmentInfo info = new SegmentInfo(directory, version, minVersion, segmentName, docCount,
                                         isCompoundFile, null, Collections.unmodifiableMap(diagnostics),
                                         id, Collections.unmodifiableMap(attributes), indexSort);
      info.setFiles(files);
@@ -345,7 +360,15 @@
    SimpleTextUtil.write(output, SI_VERSION);
    SimpleTextUtil.write(output, si.getVersion().toString(), scratch);
    SimpleTextUtil.writeNewline(output);

+   SimpleTextUtil.write(output, SI_MIN_VERSION);
+   if (si.getMinVersion() == null) {
+     SimpleTextUtil.write(output, "null", scratch);
+   } else {
+     SimpleTextUtil.write(output, si.getMinVersion().toString(), scratch);
+   }
+   SimpleTextUtil.writeNewline(output);
+
    SimpleTextUtil.write(output, SI_DOCCOUNT);
    SimpleTextUtil.write(output, Integer.toString(si.maxDoc()), scratch);
    SimpleTextUtil.writeNewline(output);
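For illustration only (not part of the commit): after this change, the segment-info section of a SimpleText .si file would carry a "min version" line next to the existing "version" line, roughly like the lines below; "null" is written for segments that predate the new metadata. The exact label spacing comes from the SI_* constants above, and the values here are made up.

    version 7.0.0
    min version 7.0.0
    number of documents 42
    uses compound file true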
@@ -37,7 +37,6 @@ import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.DataOutput; // javadocs
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.Version;

 /**
@@ -244,7 +243,7 @@ public class Lucene62SegmentInfoFormat extends SegmentInfoFormat {
        indexSort = null;
      }

-     si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
+     si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
@@ -256,153 +255,8 @@
  }

  @Override
- public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
-   final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene62SegmentInfoFormat.SI_EXTENSION);
-
-   try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
-     // Only add the file once we've successfully created it, else IFD assert can trip:
-     si.addFile(fileName);
-     CodecUtil.writeIndexHeader(output,
-                                Lucene62SegmentInfoFormat.CODEC_NAME,
-                                Lucene62SegmentInfoFormat.VERSION_CURRENT,
-                                si.getId(),
-                                "");
-     Version version = si.getVersion();
-     if (version.major < 5) {
-       throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si);
-     }
-     // Write the Lucene version that created this segment, since 3.1
-     output.writeInt(version.major);
-     output.writeInt(version.minor);
-     output.writeInt(version.bugfix);
-     assert version.prerelease == 0;
-     output.writeInt(si.maxDoc());
-
-     output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
-     output.writeMapOfStrings(si.getDiagnostics());
-     Set<String> files = si.files();
-     for (String file : files) {
-       if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
-         throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
-       }
-     }
-     output.writeSetOfStrings(files);
-     output.writeMapOfStrings(si.getAttributes());
-
-     Sort indexSort = si.getIndexSort();
-     int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
-     output.writeVInt(numSortFields);
-     for (int i = 0; i < numSortFields; ++i) {
-       SortField sortField = indexSort.getSort()[i];
-       SortField.Type sortType = sortField.getType();
-       output.writeString(sortField.getField());
-       int sortTypeID;
-       switch (sortField.getType()) {
-         case STRING:
-           sortTypeID = 0;
-           break;
-         case LONG:
-           sortTypeID = 1;
-           break;
-         case INT:
-           sortTypeID = 2;
-           break;
-         case DOUBLE:
-           sortTypeID = 3;
-           break;
-         case FLOAT:
-           sortTypeID = 4;
-           break;
-         case CUSTOM:
-           if (sortField instanceof SortedSetSortField) {
-             sortTypeID = 5;
-             sortType = SortField.Type.STRING;
-           } else if (sortField instanceof SortedNumericSortField) {
-             sortTypeID = 6;
-             sortType = ((SortedNumericSortField) sortField).getNumericType();
-           } else {
-             throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
-           }
-           break;
-         default:
-           throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-       }
-       output.writeVInt(sortTypeID);
-       if (sortTypeID == 5) {
-         SortedSetSortField ssf = (SortedSetSortField) sortField;
-         if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
-           output.writeByte((byte) 0);
-         } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
-           output.writeByte((byte) 1);
-         } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
-           output.writeByte((byte) 2);
-         } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
-           output.writeByte((byte) 3);
-         } else {
-           throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
-         }
-       } else if (sortTypeID == 6) {
-         SortedNumericSortField snsf = (SortedNumericSortField) sortField;
-         if (snsf.getNumericType() == SortField.Type.LONG) {
-           output.writeByte((byte) 0);
-         } else if (snsf.getNumericType() == SortField.Type.INT) {
-           output.writeByte((byte) 1);
-         } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
-           output.writeByte((byte) 2);
-         } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
-           output.writeByte((byte) 3);
-         } else {
-           throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
-         }
-         if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
-           output.writeByte((byte) 0);
-         } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
-           output.writeByte((byte) 1);
-         } else {
-           throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
-         }
-       }
-       output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
-
-       // write missing value
-       Object missingValue = sortField.getMissingValue();
-       if (missingValue == null) {
-         output.writeByte((byte) 0);
-       } else {
-         switch(sortType) {
-           case STRING:
-             if (missingValue == SortField.STRING_LAST) {
-               output.writeByte((byte) 1);
-             } else if (missingValue == SortField.STRING_FIRST) {
-               output.writeByte((byte) 2);
-             } else {
-               throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
-             }
-             break;
-           case LONG:
-             output.writeByte((byte) 1);
-             output.writeLong(((Long) missingValue).longValue());
-             break;
-           case INT:
-             output.writeByte((byte) 1);
-             output.writeInt(((Integer) missingValue).intValue());
-             break;
-           case DOUBLE:
-             output.writeByte((byte) 1);
-             output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
-             break;
-           case FLOAT:
-             output.writeByte((byte) 1);
-             output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
-             break;
-           default:
-             throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-         }
-       }
-     }
-
-     CodecUtil.writeFooter(output);
-   }
- }
+ public void write(Directory dir, SegmentInfo info, IOContext ioContext) throws IOException {
+   throw new UnsupportedOperationException("This format can only be used for reading");
+ }

  /** File extension used to store {@link SegmentInfo}. */
@@ -37,7 +37,6 @@ import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
 import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat;
 import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat;
 import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
-import org.apache.lucene.codecs.lucene62.Lucene62SegmentInfoFormat;
 import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;

@@ -55,7 +54,7 @@ import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 public class Lucene70Codec extends Codec {
   private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat();
   private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat();
-  private final SegmentInfoFormat segmentInfosFormat = new Lucene62SegmentInfoFormat();
+  private final SegmentInfoFormat segmentInfosFormat = new Lucene70SegmentInfoFormat();
   private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
   private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();

@@ -0,0 +1,439 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.codecs.lucene70;

import java.io.IOException;
import java.util.Map;
import java.util.Set;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.SegmentInfoFormat;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexWriter; // javadocs
import org.apache.lucene.index.SegmentInfo; // javadocs
import org.apache.lucene.index.SegmentInfos; // javadocs
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataOutput; // javadocs
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Version;

/**
 * Lucene 7.0 Segment info format.
 * <p>
 * Files:
 * <ul>
 *   <li><tt>.si</tt>: Header, SegVersion, SegSize, IsCompoundFile, Diagnostics, Files, Attributes, IndexSort, Footer
 * </ul>
 * Data types:
 * <ul>
 *   <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
 *   <li>SegSize --> {@link DataOutput#writeInt Int32}</li>
 *   <li>SegVersion --> {@link DataOutput#writeString String}</li>
 *   <li>SegMinVersion --> {@link DataOutput#writeString String}</li>
 *   <li>Files --> {@link DataOutput#writeSetOfStrings Set<String>}</li>
 *   <li>Diagnostics,Attributes --> {@link DataOutput#writeMapOfStrings Map<String,String>}</li>
 *   <li>IsCompoundFile --> {@link DataOutput#writeByte Int8}</li>
 *   <li>IndexSort --> {@link DataOutput#writeVInt Int32} count, followed by {@code count} SortField</li>
 *   <li>SortField --> {@link DataOutput#writeString String} field name, followed by {@link DataOutput#writeVInt Int32} sort type ID,
 *       followed by {@link DataOutput#writeByte Int8} indicating reversed sort, followed by a type-specific encoding of the optional missing value
 *   <li>Footer --> {@link CodecUtil#writeFooter CodecFooter}</li>
 * </ul>
 * Field Descriptions:
 * <ul>
 *   <li>SegVersion is the code version that created the segment.</li>
 *   <li>SegMinVersion is the minimum code version that contributed documents to the segment.</li>
 *   <li>SegSize is the number of documents contained in the segment index.</li>
 *   <li>IsCompoundFile records whether the segment is written as a compound file or
 *       not. If this is -1, the segment is not a compound file. If it is 1, the segment
 *       is a compound file.</li>
 *   <li>The Diagnostics Map is privately written by {@link IndexWriter}, as a debugging aid,
 *       for each segment it creates. It includes metadata like the current Lucene
 *       version, OS, Java version, why the segment was created (merge, flush,
 *       addIndexes), etc.</li>
 *   <li>Files is a list of files referred to by this segment.</li>
 * </ul>
 *
 * @see SegmentInfos
 * @lucene.experimental
 */
public class Lucene70SegmentInfoFormat extends SegmentInfoFormat {

  /** Sole constructor. */
  public Lucene70SegmentInfoFormat() {
  }

  @Override
  public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
    try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
      Throwable priorE = null;
      SegmentInfo si = null;
      try {
        int format = CodecUtil.checkIndexHeader(input, Lucene70SegmentInfoFormat.CODEC_NAME,
                                                Lucene70SegmentInfoFormat.VERSION_START,
                                                Lucene70SegmentInfoFormat.VERSION_CURRENT,
                                                segmentID, "");
        final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
        byte hasMinVersion = input.readByte();
        final Version minVersion;
        switch (hasMinVersion) {
          case 0:
            minVersion = null;
            break;
          case 1:
            minVersion = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
            break;
          default:
            throw new CorruptIndexException("Illegal boolean value " + hasMinVersion, input);
        }

        final int docCount = input.readInt();
        if (docCount < 0) {
          throw new CorruptIndexException("invalid docCount: " + docCount, input);
        }
        final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;

        final Map<String,String> diagnostics = input.readMapOfStrings();
        final Set<String> files = input.readSetOfStrings();
        final Map<String,String> attributes = input.readMapOfStrings();

        int numSortFields = input.readVInt();
        Sort indexSort;
        if (numSortFields > 0) {
          SortField[] sortFields = new SortField[numSortFields];
          for(int i=0;i<numSortFields;i++) {
            String fieldName = input.readString();
            int sortTypeID = input.readVInt();
            SortField.Type sortType;
            SortedSetSelector.Type sortedSetSelector = null;
            SortedNumericSelector.Type sortedNumericSelector = null;
            switch(sortTypeID) {
              case 0:
                sortType = SortField.Type.STRING;
                break;
              case 1:
                sortType = SortField.Type.LONG;
                break;
              case 2:
                sortType = SortField.Type.INT;
                break;
              case 3:
                sortType = SortField.Type.DOUBLE;
                break;
              case 4:
                sortType = SortField.Type.FLOAT;
                break;
              case 5:
                sortType = SortField.Type.STRING;
                byte selector = input.readByte();
                if (selector == 0) {
                  sortedSetSelector = SortedSetSelector.Type.MIN;
                } else if (selector == 1) {
                  sortedSetSelector = SortedSetSelector.Type.MAX;
                } else if (selector == 2) {
                  sortedSetSelector = SortedSetSelector.Type.MIDDLE_MIN;
                } else if (selector == 3) {
                  sortedSetSelector = SortedSetSelector.Type.MIDDLE_MAX;
                } else {
                  throw new CorruptIndexException("invalid index SortedSetSelector ID: " + selector, input);
                }
                break;
              case 6:
                byte type = input.readByte();
                if (type == 0) {
                  sortType = SortField.Type.LONG;
                } else if (type == 1) {
                  sortType = SortField.Type.INT;
                } else if (type == 2) {
                  sortType = SortField.Type.DOUBLE;
                } else if (type == 3) {
                  sortType = SortField.Type.FLOAT;
                } else {
                  throw new CorruptIndexException("invalid index SortedNumericSortField type ID: " + type, input);
                }
                byte numericSelector = input.readByte();
                if (numericSelector == 0) {
                  sortedNumericSelector = SortedNumericSelector.Type.MIN;
                } else if (numericSelector == 1) {
                  sortedNumericSelector = SortedNumericSelector.Type.MAX;
                } else {
                  throw new CorruptIndexException("invalid index SortedNumericSelector ID: " + numericSelector, input);
                }
                break;
              default:
                throw new CorruptIndexException("invalid index sort field type ID: " + sortTypeID, input);
            }
            byte b = input.readByte();
            boolean reverse;
            if (b == 0) {
              reverse = true;
            } else if (b == 1) {
              reverse = false;
            } else {
              throw new CorruptIndexException("invalid index sort reverse: " + b, input);
            }

            if (sortedSetSelector != null) {
              sortFields[i] = new SortedSetSortField(fieldName, reverse, sortedSetSelector);
            } else if (sortedNumericSelector != null) {
              sortFields[i] = new SortedNumericSortField(fieldName, sortType, reverse, sortedNumericSelector);
            } else {
              sortFields[i] = new SortField(fieldName, sortType, reverse);
            }

            Object missingValue;
            b = input.readByte();
            if (b == 0) {
              missingValue = null;
            } else {
              switch(sortType) {
                case STRING:
                  if (b == 1) {
                    missingValue = SortField.STRING_LAST;
                  } else if (b == 2) {
                    missingValue = SortField.STRING_FIRST;
                  } else {
                    throw new CorruptIndexException("invalid missing value flag: " + b, input);
                  }
                  break;
                case LONG:
                  if (b != 1) {
                    throw new CorruptIndexException("invalid missing value flag: " + b, input);
                  }
                  missingValue = input.readLong();
                  break;
                case INT:
                  if (b != 1) {
                    throw new CorruptIndexException("invalid missing value flag: " + b, input);
                  }
                  missingValue = input.readInt();
                  break;
                case DOUBLE:
                  if (b != 1) {
                    throw new CorruptIndexException("invalid missing value flag: " + b, input);
                  }
                  missingValue = Double.longBitsToDouble(input.readLong());
                  break;
                case FLOAT:
                  if (b != 1) {
                    throw new CorruptIndexException("invalid missing value flag: " + b, input);
                  }
                  missingValue = Float.intBitsToFloat(input.readInt());
                  break;
                default:
                  throw new AssertionError("unhandled sortType=" + sortType);
              }
            }
            if (missingValue != null) {
              sortFields[i].setMissingValue(missingValue);
            }
          }
          indexSort = new Sort(sortFields);
        } else if (numSortFields < 0) {
          throw new CorruptIndexException("invalid index sort field count: " + numSortFields, input);
        } else {
          indexSort = null;
        }

        si = new SegmentInfo(dir, version, minVersion, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
        si.setFiles(files);
      } catch (Throwable exception) {
        priorE = exception;
      } finally {
        CodecUtil.checkFooter(input, priorE);
      }
      return si;
    }
  }

  @Override
  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene70SegmentInfoFormat.SI_EXTENSION);

    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
      // Only add the file once we've successfully created it, else IFD assert can trip:
      si.addFile(fileName);
      CodecUtil.writeIndexHeader(output,
                                 Lucene70SegmentInfoFormat.CODEC_NAME,
                                 Lucene70SegmentInfoFormat.VERSION_CURRENT,
                                 si.getId(),
                                 "");
      Version version = si.getVersion();
      if (version.major < 7) {
        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
      }
      // Write the Lucene version that created this segment, since 3.1
      output.writeInt(version.major);
      output.writeInt(version.minor);
      output.writeInt(version.bugfix);

      // Write the min Lucene version that contributed docs to the segment, since 7.0
      if (si.getMinVersion() != null) {
        output.writeByte((byte) 1);
        Version minVersion = si.getMinVersion();
        output.writeInt(minVersion.major);
        output.writeInt(minVersion.minor);
        output.writeInt(minVersion.bugfix);
      } else {
        output.writeByte((byte) 0);
      }

      assert version.prerelease == 0;
      output.writeInt(si.maxDoc());

      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
      output.writeMapOfStrings(si.getDiagnostics());
      Set<String> files = si.files();
      for (String file : files) {
        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
        }
      }
      output.writeSetOfStrings(files);
      output.writeMapOfStrings(si.getAttributes());

      Sort indexSort = si.getIndexSort();
      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
      output.writeVInt(numSortFields);
      for (int i = 0; i < numSortFields; ++i) {
        SortField sortField = indexSort.getSort()[i];
        SortField.Type sortType = sortField.getType();
        output.writeString(sortField.getField());
        int sortTypeID;
        switch (sortField.getType()) {
          case STRING:
            sortTypeID = 0;
            break;
          case LONG:
            sortTypeID = 1;
            break;
          case INT:
            sortTypeID = 2;
            break;
          case DOUBLE:
            sortTypeID = 3;
            break;
          case FLOAT:
            sortTypeID = 4;
            break;
          case CUSTOM:
            if (sortField instanceof SortedSetSortField) {
              sortTypeID = 5;
              sortType = SortField.Type.STRING;
            } else if (sortField instanceof SortedNumericSortField) {
              sortTypeID = 6;
              sortType = ((SortedNumericSortField) sortField).getNumericType();
            } else {
              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
            }
            break;
          default:
            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
        }
        output.writeVInt(sortTypeID);
        if (sortTypeID == 5) {
          SortedSetSortField ssf = (SortedSetSortField) sortField;
          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
            output.writeByte((byte) 0);
          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
            output.writeByte((byte) 1);
          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
            output.writeByte((byte) 2);
          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
            output.writeByte((byte) 3);
          } else {
            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
          }
        } else if (sortTypeID == 6) {
          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
          if (snsf.getNumericType() == SortField.Type.LONG) {
            output.writeByte((byte) 0);
          } else if (snsf.getNumericType() == SortField.Type.INT) {
            output.writeByte((byte) 1);
          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
            output.writeByte((byte) 2);
          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
            output.writeByte((byte) 3);
          } else {
            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
          }
          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
            output.writeByte((byte) 0);
          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
            output.writeByte((byte) 1);
          } else {
            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
          }
        }
        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));

        // write missing value
        Object missingValue = sortField.getMissingValue();
        if (missingValue == null) {
          output.writeByte((byte) 0);
        } else {
          switch(sortType) {
            case STRING:
              if (missingValue == SortField.STRING_LAST) {
                output.writeByte((byte) 1);
              } else if (missingValue == SortField.STRING_FIRST) {
                output.writeByte((byte) 2);
              } else {
                throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
              }
              break;
            case LONG:
              output.writeByte((byte) 1);
              output.writeLong(((Long) missingValue).longValue());
              break;
            case INT:
              output.writeByte((byte) 1);
              output.writeInt(((Integer) missingValue).intValue());
              break;
            case DOUBLE:
              output.writeByte((byte) 1);
              output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
              break;
            case FLOAT:
              output.writeByte((byte) 1);
              output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
              break;
            default:
              throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
          }
        }
      }

      CodecUtil.writeFooter(output);
    }
  }

  /** File extension used to store {@link SegmentInfo}. */
  public final static String SI_EXTENSION = "si";
  static final String CODEC_NAME = "Lucene70SegmentInfo";
  static final int VERSION_START = 0;
  static final int VERSION_CURRENT = VERSION_START;
}
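As an aside (not from the patch): the optional min-version field written above is a single flag byte followed by three ints. A self-contained sketch of that encoding using plain java.io streams, for clarity; the int[] {major, minor, bugfix} standing in for Lucene's Version class is an assumption of this sketch.

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class MinVersionEncodingDemo {
      static void writeMinVersion(DataOutputStream out, int[] v) throws IOException {
        if (v == null) {
          out.writeByte(0);           // no min version recorded (e.g. pre-7.0 segment)
        } else {
          out.writeByte(1);
          out.writeInt(v[0]);         // major
          out.writeInt(v[1]);         // minor
          out.writeInt(v[2]);         // bugfix
        }
      }

      static int[] readMinVersion(DataInputStream in) throws IOException {
        byte hasMinVersion = in.readByte();
        switch (hasMinVersion) {
          case 0: return null;
          case 1: return new int[] { in.readInt(), in.readInt(), in.readInt() };
          default: throw new IOException("Illegal boolean value " + hasMinVersion);
        }
      }
    }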
@@ -697,7 +697,7 @@ public final class CheckIndex implements Closeable {
        long startOpenReaderNS = System.nanoTime();
        if (infoStream != null)
          infoStream.print(" test: open reader.........");
-       reader = new SegmentReader(info, IOContext.DEFAULT);
+       reader = new SegmentReader(info, sis.getIndexCreatedVersionMajor(), IOContext.DEFAULT);
        msg(infoStream, String.format(Locale.ROOT, "OK [took %.3f sec]", nsToSec(System.nanoTime()-startOpenReaderNS)));

        segInfoStat.openReaderPassed = true;
@@ -178,7 +178,7 @@ class DocumentsWriterPerThread {
    assert numDocsInRAM == 0 : "num docs " + numDocsInRAM;
    deleteSlice = deleteQueue.newSlice();

-   segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, segmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), indexWriterConfig.getIndexSort());
+   segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, Version.LATEST, segmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), indexWriterConfig.getIndexSort());
    assert numDocsInRAM == 0;
    if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {
      infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segmentName + " delQueue=" + deleteQueue);
@@ -27,7 +27,6 @@ import org.apache.lucene.codecs.NormsProducer;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.StoredFieldsReader;
 import org.apache.lucene.codecs.TermVectorsReader;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;

@@ -104,8 +103,8 @@ public abstract class FilterCodecReader extends CodecReader {
  }

  @Override
- public Sort getIndexSort() {
-   return in.getIndexSort();
+ public LeafMetaData getMetaData() {
+   return in.getMetaData();
  }

  @Override
@@ -20,7 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 import java.util.Iterator;

-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -398,9 +397,9 @@ public abstract class FilterLeafReader extends LeafReader {
  }

  @Override
- public Sort getIndexSort() {
+ public LeafMetaData getMetaData() {
    ensureOpen();
-   return in.getIndexSort();
+   return in.getMetaData();
  }

  @Override
@@ -30,7 +30,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map.Entry;
-import java.util.Objects;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
@ -855,7 +854,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
|
|||
// against an index that's currently open for
|
||||
// searching. In this case we write the next
|
||||
// segments_N file with no segments:
|
||||
final SegmentInfos sis = new SegmentInfos(Version.LATEST);
|
||||
final SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
|
||||
try {
|
||||
final SegmentInfos previous = SegmentInfos.readLatestCommit(directory);
|
||||
sis.updateGenerationVersionAndCounter(previous);
|
||||
|
@ -2654,12 +2653,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
infoStream.message("IW", "addIndexes: process directory " + dir);
}
SegmentInfos sis = SegmentInfos.readLatestCommit(dir); // read infos from dir
if (Objects.equals(segmentInfos.getIndexCreatedVersion(), sis.getIndexCreatedVersion()) == false) {
if (segmentInfos.getIndexCreatedVersionMajor() != sis.getIndexCreatedVersionMajor()) {
throw new IllegalArgumentException("Cannot use addIndexes(Directory) with indexes that have been created "
+ "by a different Lucene version. The current index was generated by "
+ segmentInfos.getIndexCreatedVersion()
+ " while one of the directories contains an index that was generated with "
+ sis.getIndexCreatedVersion());
+ "by a different Lucene version. The current index was generated by Lucene "
+ segmentInfos.getIndexCreatedVersionMajor()
+ " while one of the directories contains an index that was generated with Lucene "
+ sis.getIndexCreatedVersionMajor());
}
totalMaxDoc += sis.totalMaxDoc();
commits.add(sis);
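For illustration, a minimal hedged sketch of the failure a caller should now expect when feeding addIndexes(Directory...) an index created by a different major version (directory setup assumed; variable names are hypothetical, not from this patch):

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;

// dest holds an index created by Lucene 7, legacy one created by Lucene 6 (both assumed to exist).
static void copySegments(Directory dest, Directory legacy) throws Exception {
  try (IndexWriter writer = new IndexWriter(dest, new IndexWriterConfig())) {
    // Now throws IllegalArgumentException: the two indexes record different
    // indexCreatedVersionMajor values.
    writer.addIndexes(legacy);
  }
}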
@ -2747,7 +2746,26 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {

return seqNo;
}

private void validateMergeReader(CodecReader leaf) {
LeafMetaData segmentMeta = leaf.getMetaData();
if (segmentInfos.getIndexCreatedVersionMajor() != segmentMeta.getCreatedVersionMajor()) {
throw new IllegalArgumentException("Cannot merge a segment that has been created with major version "
+ segmentMeta.getCreatedVersionMajor() + " into this index which has been created by major version "
+ segmentInfos.getIndexCreatedVersionMajor());
}

if (segmentInfos.getIndexCreatedVersionMajor() >= 7 && segmentMeta.getMinVersion() == null) {
throw new IllegalStateException("Indexes created on or after Lucene 7 must record the created version major, but " + leaf + " hides it");
}

Sort leafIndexSort = segmentMeta.getSort();
if (config.getIndexSort() != null && leafIndexSort != null
&& config.getIndexSort().equals(leafIndexSort) == false) {
throw new IllegalArgumentException("cannot change index sort from " + leafIndexSort + " to " + config.getIndexSort());
}
}

/**
 * Merges the provided indexes into this index.
 *
@ -2801,12 +2819,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
flush(false, true);

String mergedName = newSegmentName();

for (CodecReader leaf : readers) {
numDocs += leaf.numDocs();
Sort leafIndexSort = leaf.getIndexSort();
if (indexSort != null && leafIndexSort != null && indexSort.equals(leafIndexSort) == false) {
throw new IllegalArgumentException("cannot change index sort from " + leafIndexSort + " to " + indexSort);
}
validateMergeReader(leaf);
}

// Best-effort up front check:

@ -2818,7 +2834,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// abortable so that IW.close(false) is able to stop it
TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(directory);

SegmentInfo info = new SegmentInfo(directoryOrig, Version.LATEST, mergedName, -1,
// We set the min version to null for now, it will be set later by SegmentMerger
SegmentInfo info = new SegmentInfo(directoryOrig, Version.LATEST, null, mergedName, -1,
false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());

SegmentMerger merger = new SegmentMerger(Arrays.asList(readers), info, infoStream, trackingDir,
@ -2907,7 +2924,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {

//System.out.println("copy seg=" + info.info.name + " version=" + info.info.getVersion());
// Same SI as before but we change directory and name
SegmentInfo newInfo = new SegmentInfo(directoryOrig, info.info.getVersion(), segName, info.info.maxDoc(),
SegmentInfo newInfo = new SegmentInfo(directoryOrig, info.info.getVersion(), info.info.getMinVersion(), segName, info.info.maxDoc(),
info.info.getUseCompoundFile(), info.info.getCodec(),
info.info.getDiagnostics(), info.info.getId(), info.info.getAttributes(), info.info.getIndexSort());
SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getDelGen(),

@ -4117,7 +4134,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// ConcurrentMergePolicy we keep deterministic segment
// names.
final String mergeSegmentName = newSegmentName();
SegmentInfo si = new SegmentInfo(directoryOrig, Version.LATEST, mergeSegmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
// We set the min version to null for now, it will be set later by SegmentMerger
SegmentInfo si = new SegmentInfo(directoryOrig, Version.LATEST, null, mergeSegmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
Map<String,String> details = new HashMap<>();
details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
details.put("mergeFactor", Integer.toString(merge.segments.size()));

@ -4322,7 +4340,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// Let the merge wrap readers
List<CodecReader> mergeReaders = new ArrayList<>();
for (SegmentReader reader : merge.readers) {
mergeReaders.add(merge.wrapForMerge(reader));
CodecReader wrappedReader = merge.wrapForMerge(reader);
validateMergeReader(wrappedReader);
mergeReaders.add(wrappedReader);
}
final SegmentMerger merger = new SegmentMerger(mergeReaders,
merge.info.info, infoStream, dirWrapper,

@ -4608,7 +4628,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {

// For infoStream output
synchronized SegmentInfos toLiveInfos(SegmentInfos sis) {
final SegmentInfos newSIS = new SegmentInfos(sis.getIndexCreatedVersion());
final SegmentInfos newSIS = new SegmentInfos(sis.getIndexCreatedVersionMajor());
final Map<SegmentCommitInfo,SegmentCommitInfo> liveSIS = new HashMap<>();
for(SegmentCommitInfo info : segmentInfos) {
liveSIS.put(info, info);
@ -0,0 +1,74 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.index;

import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Version;

/**
* Provides read-only metadata about a leaf.
* @lucene.experimental
*/
public final class LeafMetaData {

private final int createdVersionMajor;
private final Version minVersion;
private final Sort sort;

/** Expert: Sole constructor. Public for use by custom {@link LeafReader} impls. */
public LeafMetaData(int createdVersionMajor, Version minVersion, Sort sort) {
this.createdVersionMajor = createdVersionMajor;
if (createdVersionMajor > Version.LATEST.major) {
throw new IllegalArgumentException("createdVersionMajor is in the future: " + createdVersionMajor);
}
if (createdVersionMajor < 6) {
throw new IllegalArgumentException("createdVersionMajor must be >= 6, got: " + createdVersionMajor);
}
if (minVersion != null && minVersion.onOrAfter(Version.LUCENE_7_0_0) == false) {
throw new IllegalArgumentException("minVersion must be >= 7.0.0: " + minVersion);
}
if (createdVersionMajor >= 7 && minVersion == null) {
throw new IllegalArgumentException("minVersion must be set when createdVersionMajor is >= 7");
}
this.minVersion = minVersion;
this.sort = sort;
}

/** Get the Lucene version that created this index. This can be used to implement
* backward compatibility on top of the codec API. A return value of {@code 6}
* indicates that the created version is unknown. */
public int getCreatedVersionMajor() {
return createdVersionMajor;
}

/**
* Return the minimum Lucene version that contributed documents to this index,
* or {@code null} if this information is not available.
*/
public Version getMinVersion() {
return minVersion;
}

/**
* Return the order in which documents from this index are sorted, or
* {@code null} if documents are in no particular order.
*/
public Sort getSort() {
return sort;
}

}
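A hedged usage sketch for the new class (the reader variable is assumed; this is not code from the patch): all three pieces of leaf metadata are now available from one object:

import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;

static String describe(LeafReader leaf) {
  LeafMetaData meta = leaf.getMetaData();
  return "createdMajor=" + meta.getCreatedVersionMajor() // 6 means the created version is unknown
      + " minVersion=" + meta.getMinVersion()            // null if unknown (pre-7.0 segments)
      + " sort=" + meta.getSort();                       // null if the leaf is unsorted
}

Note the constructor invariant above: once the created major is >= 7, minVersion must be non-null, so null checks are only needed for indexes carried over from 6.x.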
@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.io.IOException;

import org.apache.lucene.index.IndexReader.CacheHelper;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Bits;

/** {@code LeafReader} is an abstract class, providing an interface for accessing an

@ -246,6 +245,8 @@ public abstract class LeafReader extends IndexReader {
 */
public abstract void checkIntegrity() throws IOException;

/** Returns null if this leaf is unsorted, or the {@link Sort} that it was sorted by */
public abstract Sort getIndexSort();
/**
 * Return metadata about this leaf.
 * @lucene.experimental */
public abstract LeafMetaData getMetaData();
}
@ -24,7 +24,6 @@ import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.NormsProducer;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Bits;

/** This is a hack to make index sorting fast, with a {@link LeafReader} that always returns merge instances when you ask for the codec readers. */

@ -235,7 +234,7 @@ class MergeReaderWrapper extends LeafReader {
}

@Override
public Sort getIndexSort() {
return in.getIndexSort();
public LeafMetaData getMetaData() {
return in.getMetaData();
}
}

@ -231,7 +231,7 @@ public class MergeState {
List<CodecReader> readers = new ArrayList<>(originalReaders.size());

for (CodecReader leaf : originalReaders) {
Sort segmentSort = leaf.getIndexSort();
Sort segmentSort = leaf.getMetaData().getSort();

if (segmentSort == null) {
// This segment was written by flush, so documents are not yet sorted, so we sort them now:
@ -28,6 +28,7 @@ import java.util.TreeMap;

import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.Version;

/** An {@link LeafReader} which reads multiple, parallel indexes. Each index
 * added must have the same number of documents, but typically each contains

@ -56,7 +57,7 @@ public class ParallelLeafReader extends LeafReader {
private final boolean closeSubReaders;
private final int maxDoc, numDocs;
private final boolean hasDeletions;
private final Sort indexSort;
private final LeafMetaData metaData;
private final SortedMap<String,LeafReader> fieldToReader = new TreeMap<>();
private final SortedMap<String,LeafReader> tvFieldToReader = new TreeMap<>();

@ -104,16 +105,25 @@ public class ParallelLeafReader extends LeafReader {
FieldInfos.Builder builder = new FieldInfos.Builder();

Sort indexSort = null;
int createdVersionMajor = -1;

// build FieldInfos and fieldToReader map:
for (final LeafReader reader : this.parallelReaders) {
Sort leafIndexSort = reader.getIndexSort();
LeafMetaData leafMetaData = reader.getMetaData();

Sort leafIndexSort = leafMetaData.getSort();
if (indexSort == null) {
indexSort = leafIndexSort;
} else if (leafIndexSort != null && indexSort.equals(leafIndexSort) == false) {
throw new IllegalArgumentException("cannot combine LeafReaders that have different index sorts: saw both sort=" + indexSort + " and " + leafIndexSort);
}

if (createdVersionMajor == -1) {
createdVersionMajor = leafMetaData.getCreatedVersionMajor();
} else if (createdVersionMajor != leafMetaData.getCreatedVersionMajor()) {
throw new IllegalArgumentException("cannot combine LeafReaders that have different creation versions: saw both version=" + createdVersionMajor + " and " + leafMetaData.getCreatedVersionMajor());
}

final FieldInfos readerFieldInfos = reader.getFieldInfos();
for (FieldInfo fieldInfo : readerFieldInfos) {
// NOTE: first reader having a given field "wins":

@ -126,8 +136,24 @@ public class ParallelLeafReader extends LeafReader {
}
}
}
if (createdVersionMajor == -1) {
// empty reader
createdVersionMajor = Version.LATEST.major;
}

Version minVersion = Version.LATEST;
for (final LeafReader reader : this.parallelReaders) {
Version leafVersion = reader.getMetaData().getMinVersion();
if (leafVersion == null) {
minVersion = null;
break;
} else if (minVersion.onOrAfter(leafVersion)) {
minVersion = leafVersion;
}
}

fieldInfos = builder.finish();
this.indexSort = indexSort;
this.metaData = new LeafMetaData(createdVersionMajor, minVersion, indexSort);

// build Fields instance
for (final LeafReader reader : this.parallelReaders) {

@ -358,8 +384,8 @@ public class ParallelLeafReader extends LeafReader {
}

@Override
public Sort getIndexSort() {
return indexSort;
public LeafMetaData getMetaData() {
return metaData;
}

}
@ -140,7 +140,7 @@ class ReadersAndUpdates {
public SegmentReader getReader(IOContext context) throws IOException {
if (reader == null) {
// We steal returned ref:
reader = new SegmentReader(info, context);
reader = new SegmentReader(info, writer.segmentInfos.getIndexCreatedVersionMajor(), context);
if (liveDocs == null) {
liveDocs = reader.getLiveDocs();
}

@ -552,7 +552,7 @@ class ReadersAndUpdates {

// reader could be null e.g. for a just merged segment (from
// IndexWriter.commitMergedDeletes).
final SegmentReader reader = this.reader == null ? new SegmentReader(info, IOContext.READONCE) : this.reader;
final SegmentReader reader = this.reader == null ? new SegmentReader(info, writer.segmentInfos.getIndexCreatedVersionMajor(), IOContext.READONCE) : this.reader;
try {
// clone FieldInfos so that we can update their dvGen separately from
// the reader's infos and write them to a new fieldInfos_gen file
@ -77,7 +77,13 @@ public final class SegmentInfo {
// The format expected is "x.y" - "2.x" for pre-3.0 indexes (or null), and
// specific versions afterwards ("3.0.0", "3.1.0" etc.).
// see o.a.l.util.Version.
private Version version;
private final Version version;

// Tracks the minimum version that contributed documents to a segment. For
// flush segments, that is the version that wrote it. For merged segments,
// this is the minimum minVersion of all the segments that have been merged
// into this segment
Version minVersion;

void setDiagnostics(Map<String, String> diagnostics) {
this.diagnostics = Objects.requireNonNull(diagnostics);

@ -94,12 +100,13 @@ public final class SegmentInfo {
 * <p>Note: this is public only to allow access from
 * the codecs package.</p>
 */
public SegmentInfo(Directory dir, Version version, String name, int maxDoc,
public SegmentInfo(Directory dir, Version version, Version minVersion, String name, int maxDoc,
boolean isCompoundFile, Codec codec, Map<String,String> diagnostics,
byte[] id, Map<String,String> attributes, Sort indexSort) {
assert !(dir instanceof TrackingDirectoryWrapper);
this.dir = Objects.requireNonNull(dir);
this.version = Objects.requireNonNull(version);
this.minVersion = minVersion;
this.name = Objects.requireNonNull(name);
this.maxDoc = maxDoc;
this.isCompoundFile = isCompoundFile;

@ -233,6 +240,14 @@ public final class SegmentInfo {
return version;
}

/**
 * Return the minimum Lucene version that contributed documents to this
 * segment, or {@code null} if it is unknown.
 */
public Version getMinVersion() {
return minVersion;
}

/** Return the id that uniquely identifies this segment. */
public byte[] getId() {
return id.clone();
@ -161,16 +161,19 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
/** Version of the oldest segment in the index, or null if there are no segments. */
private Version minSegmentLuceneVersion;

/** The Lucene version that was used to create the index. */
private final Version indexCreatedVersion;
/** The Lucene version major that was used to create the index. */
private final int indexCreatedVersionMajor;

/** Sole constructor.
 *  @param indexCreatedVersion the Lucene version at index creation time, or {@code null} if the index was created before 7.0 */
public SegmentInfos(Version indexCreatedVersion) {
if (indexCreatedVersion != null && indexCreatedVersion.onOrAfter(Version.LUCENE_7_0_0) == false) {
throw new IllegalArgumentException("indexCreatedVersion may only be non-null if the index was created on or after 7.0, got " + indexCreatedVersion);
 *  @param indexCreatedVersionMajor the Lucene version major at index creation time, or 6 if the index was created before 7.0 */
public SegmentInfos(int indexCreatedVersionMajor) {
if (indexCreatedVersionMajor > Version.LATEST.major) {
throw new IllegalArgumentException("indexCreatedVersionMajor is in the future: " + indexCreatedVersionMajor);
}
this.indexCreatedVersion = indexCreatedVersion;
if (indexCreatedVersionMajor < 6) {
throw new IllegalArgumentException("indexCreatedVersionMajor must be >= 6, got: " + indexCreatedVersionMajor);
}
this.indexCreatedVersionMajor = indexCreatedVersionMajor;
}

/** Returns {@link SegmentCommitInfo} at the provided
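A short hedged sketch of the constructor change from the caller's side. The class name here is hypothetical, and it is placed in org.apache.lucene.index on the assumption that commit(Directory) is package-private, as the tests further down suggest:

package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.Directory;

class CreatedMajorStamp { // hypothetical helper, not part of this patch
  // Writes an empty segments_N that records the given creation major
  // (e.g. Version.LATEST.major); previously this took a full Version.
  static void stamp(Directory dir, int createdVersionMajor) throws IOException {
    new SegmentInfos(createdVersionMajor).commit(dir);
  }
}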
@ -314,24 +317,9 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
throw new IndexFormatTooOldException(input, "this index is too old (version: " + luceneVersion + ")");
}

Version indexCreatedVersion;
int indexCreatedVersion = 6;
if (format >= VERSION_70) {
byte b = input.readByte();
switch (b) {
case 0:
// version is not known: pre-7.0 index that has been modified since the 7.0 upgrade
indexCreatedVersion = null;
break;
case 1:
// version is known: index has been created on or after 7.0
indexCreatedVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
break;
default:
throw new CorruptIndexException("Illegal byte value for a boolean: " + b + ", expected 0 or 1", input);
}
} else {
// pre-7.0 index that has not been modified since the 7.0 upgrade
indexCreatedVersion = null;
indexCreatedVersion = input.readVInt();
}

SegmentInfos infos = new SegmentInfos(indexCreatedVersion);

@ -399,6 +387,14 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
}

if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
}

if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
}
}

infos.userData = input.readMapOfStrings();

@ -495,16 +491,7 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
out.writeVInt(Version.LATEST.bugfix);
//System.out.println(Thread.currentThread().getName() + ": now write " + out.getName() + " with version=" + version);

if (indexCreatedVersion != null) {
// 7.0+ index
out.writeByte((byte) 1);
out.writeVInt(indexCreatedVersion.major);
out.writeVInt(indexCreatedVersion.minor);
out.writeVInt(indexCreatedVersion.bugfix);
} else {
// pre-7.0 index
out.writeByte((byte) 0);
}
out.writeVInt(indexCreatedVersionMajor);

out.writeLong(version);
out.writeInt(counter); // write counter
@ -531,6 +518,9 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
// write infos
for (SegmentCommitInfo siPerCommit : this) {
SegmentInfo si = siPerCommit.info;
if (indexCreatedVersionMajor >= 7 && si.minVersion == null) {
throw new IllegalStateException("Segments must record minVersion if they have been created on or after Lucene 7: " + si);
}
out.writeString(si.name);
byte segmentID[] = si.getId();
// TODO: remove this in lucene 6, we don't need to include 4.x segments in commits anymore

@ -917,6 +907,10 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo

/** applies all changes caused by committing a merge to this SegmentInfos */
void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) {
if (indexCreatedVersionMajor >= 7 && merge.info.info.minVersion == null) {
throw new IllegalArgumentException("All segments must record the minVersion for indices created on or after Lucene 7");
}

final Set<SegmentCommitInfo> mergedAway = new HashSet<>(merge.segments);
boolean inserted = false;
int newSegIdx = 0;

@ -981,6 +975,10 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo

/** Appends the provided {@link SegmentCommitInfo}. */
public void add(SegmentCommitInfo si) {
if (indexCreatedVersionMajor >= 7 && si.info.minVersion == null) {
throw new IllegalArgumentException("All segments must record the minVersion for indices created on or after Lucene 7");
}

segments.add(si);
}
@ -1038,10 +1036,11 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
return minSegmentLuceneVersion;
}

/** Return the version that was used to initially create the index. This
 *  version is set when the index is first created and then never changes.
 *  This returns {@code null} if the index was created before 7.0. */
public Version getIndexCreatedVersion() {
return indexCreatedVersion;
/** Return the version major that was used to initially create the index.
 *  This version is set when the index is first created and then never
 *  changes. This information was added as of version 7.0 so older
 *  indices report 6 as a creation version. */
public int getIndexCreatedVersionMajor() {
return indexCreatedVersionMajor;
}
}
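For tooling, a hedged sketch of reading the recorded major back from an existing index (the path is hypothetical):

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

static int readCreatedMajor() throws IOException {
  try (Directory dir = FSDirectory.open(Paths.get("/path/to/index"))) { // hypothetical path
    // Reports 6 for any index created before 7.0, per the javadoc above.
    return SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor();
  }
}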
@ -30,6 +30,7 @@ import org.apache.lucene.codecs.TermVectorsWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.Version;

/**
 * The SegmentMerger class combines two or more Segments, represented by an

@ -59,6 +60,19 @@ final class SegmentMerger {
this.codec = segmentInfo.getCodec();
this.context = context;
this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
Version minVersion = Version.LATEST;
for (CodecReader reader : readers) {
Version leafMinVersion = reader.getMetaData().getMinVersion();
if (leafMinVersion == null) {
minVersion = null;
break;
}
if (minVersion.onOrAfter(leafMinVersion)) {
minVersion = leafMinVersion;
}
}
assert segmentInfo.minVersion == null : "The min version should be set by SegmentMerger for merged segments";
segmentInfo.minVersion = minVersion;
if (mergeState.infoStream.isEnabled("SM")) {
if (segmentInfo.getIndexSort() != null) {
mergeState.infoStream.message("SM", "index sort during merge: " + segmentInfo.getIndexSort());
@ -30,7 +30,6 @@ import org.apache.lucene.codecs.NormsProducer;
import org.apache.lucene.codecs.PointsReader;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.search.Sort;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Bits;

@ -46,6 +45,7 @@ import org.apache.lucene.util.IOUtils;
public final class SegmentReader extends CodecReader {

private final SegmentCommitInfo si;
private final LeafMetaData metaData;
private final Bits liveDocs;

// Normally set to si.maxDoc - si.delDocCount, unless we

@ -68,8 +68,9 @@ public final class SegmentReader extends CodecReader {
 * @throws IOException if there is a low-level IO error
 */
// TODO: why is this public?
public SegmentReader(SegmentCommitInfo si, IOContext context) throws IOException {
public SegmentReader(SegmentCommitInfo si, int createdVersionMajor, IOContext context) throws IOException {
this.si = si;
this.metaData = new LeafMetaData(createdVersionMajor, si.info.getMinVersion(), si.info.getIndexSort());

// We pull liveDocs/DV updates from disk:
this.isNRT = false;
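Since this expert constructor now takes the creation major, direct callers follow the same pattern as StandardDirectoryReader further down; a minimal hedged sketch (commit reading assumed, helper name hypothetical):

import java.io.IOException;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

// Opens a reader for the first segment of the latest commit in dir.
static SegmentReader openFirstSegment(Directory dir) throws IOException {
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  return new SegmentReader(sis.info(0), sis.getIndexCreatedVersionMajor(), IOContext.READ);
}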
@ -133,6 +134,7 @@ public final class SegmentReader extends CodecReader {
throw new IllegalArgumentException("maxDoc=" + si.info.maxDoc() + " but liveDocs.size()=" + liveDocs.length());
}
this.si = si;
this.metaData = sr.getMetaData();
this.liveDocs = liveDocs;
this.isNRT = isNRT;
this.numDocs = numDocs;

@ -330,7 +332,7 @@ public final class SegmentReader extends CodecReader {
}

@Override
public Sort getIndexSort() {
return si.info.getIndexSort();
public LeafMetaData getMetaData() {
return metaData;
}
}
|
@ -26,7 +26,6 @@ import org.apache.lucene.codecs.NormsProducer;
|
|||
import org.apache.lucene.codecs.PointsReader;
|
||||
import org.apache.lucene.codecs.StoredFieldsReader;
|
||||
import org.apache.lucene.codecs.TermVectorsReader;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.util.Bits;
|
||||
|
||||
/**
|
||||
|
@ -128,8 +127,8 @@ public final class SlowCodecReaderWrapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Sort getIndexSort() {
|
||||
return reader.getIndexSort();
|
||||
public LeafMetaData getMetaData() {
|
||||
return reader.getMetaData();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -61,7 +61,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
boolean success = false;
try {
for (int i = sis.size()-1; i >= 0; i--) {
readers[i] = new SegmentReader(sis.info(i), IOContext.READ);
readers[i] = new SegmentReader(sis.info(i), sis.getIndexCreatedVersionMajor(), IOContext.READ);
}

// This may throw CorruptIndexException if there are too many docs, so

@ -181,7 +181,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
if (oldReader == null || commitInfo.info.getUseCompoundFile() != oldReader.getSegmentInfo().info.getUseCompoundFile()) {

// this is a new reader; in case we hit an exception we can decRef it safely
newReader = new SegmentReader(commitInfo, IOContext.READ);
newReader = new SegmentReader(commitInfo, infos.getIndexCreatedVersionMajor(), IOContext.READ);
newReaders[i] = newReader;
} else {
if (oldReader.isNRT) {
@ -98,7 +98,7 @@ public class EarlyTerminatingSortingCollector extends FilterCollector {

@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
Sort segmentSort = context.reader().getIndexSort();
Sort segmentSort = context.reader().getMetaData().getSort();
if (segmentSort != null && canEarlyTerminate(sort, segmentSort) == false) {
throw new IllegalStateException("Cannot early terminate with sort order " + sort + " if segments are sorted with " + segmentSort);
}
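The same metadata path works for any collector that needs the leaf's sort; a simplified hedged sketch (the prefix-compatibility test of canEarlyTerminate is reduced to equals here, and the helper name is hypothetical):

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Sort;

static boolean canTerminateEarly(LeafReaderContext context, Sort requiredSort) {
  Sort segmentSort = context.reader().getMetaData().getSort(); // was: reader().getIndexSort()
  // null sort: a flush segment whose docs are not yet sorted, so no early termination.
  return segmentSort != null && segmentSort.equals(requiredSort);
}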
@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.codecs.lucene70;

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

public class TestLucene70SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {

@Override
protected Version[] getVersions() {
return new Version[] { Version.LATEST };
}

@Override
protected Codec getCodec() {
return TestUtil.getDefaultCodec();
}
}
@ -217,7 +217,7 @@ public class TestCodecs extends LuceneTestCase {
final FieldInfos fieldInfos = builder.finish();
final Directory dir = newDirectory();
Codec codec = Codec.getDefault();
final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

this.write(si, fieldInfos, dir, fields);
final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random())));

@ -274,7 +274,7 @@ public class TestCodecs extends LuceneTestCase {
}

Codec codec = Codec.getDefault();
final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
this.write(si, fieldInfos, dir, fields);

if (VERBOSE) {
@ -55,6 +55,7 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

// TODO:
//   - old parallel indices are only pruned on commit/close; can we do it on refresh?

@ -414,7 +415,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {

SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
assert infos.size() == 1;
final LeafReader parLeafReader = new SegmentReader(infos.info(0), IOContext.DEFAULT);
final LeafReader parLeafReader = new SegmentReader(infos.info(0), Version.LATEST.major, IOContext.DEFAULT);

//checkParallelReader(leaf, parLeafReader, schemaGen);

@ -213,12 +213,12 @@ public class TestDoc extends LuceneTestCase {
private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
throws Exception {
IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
SegmentReader r1 = new SegmentReader(si1, context);
SegmentReader r2 = new SegmentReader(si2, context);
SegmentReader r1 = new SegmentReader(si1, Version.LATEST.major, context);
SegmentReader r2 = new SegmentReader(si2, Version.LATEST.major, context);

final Codec codec = Codec.getDefault();
TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, null, merged, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(r1, r2),
si, InfoStream.getDefault(), trackingDir,

@ -244,7 +244,7 @@ public class TestDoc extends LuceneTestCase {

private void printSegment(PrintWriter out, SegmentCommitInfo si)
throws Exception {
SegmentReader reader = new SegmentReader(si, newIOContext(random()));
SegmentReader reader = new SegmentReader(si, Version.LATEST.major, newIOContext(random()));

for (int i = 0; i < reader.numDocs(); i++)
out.println(reader.document(i));
@ -33,6 +33,7 @@ import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

public class TestDocumentWriter extends LuceneTestCase {
private Directory dir;

@ -62,7 +63,7 @@ public class TestDocumentWriter extends LuceneTestCase {
SegmentCommitInfo info = writer.newestSegment();
writer.close();
//After adding the document, we should be able to read it back in
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
assertTrue(reader != null);
Document doc = reader.document(0);
assertTrue(doc != null);

@ -123,7 +124,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));

PostingsEnum termPositions = MultiFields.getTermPositionsEnum(reader, "repeated", new BytesRef("repeated"));
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

@ -194,7 +195,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));

PostingsEnum termPositions = MultiFields.getTermPositionsEnum(reader, "f1", new BytesRef("a"));
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

@ -236,7 +237,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));

PostingsEnum termPositions = reader.postings(new Term("preanalyzed", "term1"), PostingsEnum.ALL);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -2094,7 +2094,7 @@ public class TestIndexSorting extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: now compare r1=" + r1 + " r2=" + r2);
}
assertEquals(sort, getOnlyLeafReader(r2).getIndexSort());
assertEquals(sort, getOnlyLeafReader(r2).getMetaData().getSort());
assertReaderEquals("left: sorted by hand; right: sorted by Lucene", r1, r2);
IOUtils.close(w1, w2, r1, r2, dir1, dir2);
}

@ -2805,7 +2805,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
w.commit();
w.close();
assertEquals(Version.LATEST, SegmentInfos.readLatestCommit(dir).getIndexCreatedVersion());
assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor());
dir.close();
}
@ -39,6 +39,7 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

public class TestIndexWriterThreadsToSegments extends LuceneTestCase {

@ -331,7 +332,7 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
SegmentInfo si = TestUtil.getDefaultCodec().segmentInfoFormat().read(dir, segName, id, IOContext.DEFAULT);
si.setCodec(codec);
SegmentCommitInfo sci = new SegmentCommitInfo(si, 0, -1, -1, -1);
SegmentReader sr = new SegmentReader(sci, IOContext.DEFAULT);
SegmentReader sr = new SegmentReader(sci, Version.LATEST.major, IOContext.DEFAULT);
try {
thread0Count += sr.docFreq(new Term("field", "threadID0"));
thread1Count += sr.docFreq(new Term("field", "threadID1"));

@ -124,6 +124,7 @@ public class TestOneMergeWrappingMergePolicy extends LuceneTestCase {
final SegmentInfo si = new SegmentInfo(
dir, // dir
Version.LATEST, // version
Version.LATEST, // min version
TestUtil.randomSimpleString(random()), // name
random().nextInt(), // maxDoc
random().nextBoolean(), // isCompoundFile
@ -30,13 +30,15 @@ import java.util.Collections;
public class TestSegmentInfos extends LuceneTestCase {

public void testIllegalCreatedVersion() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SegmentInfos(Version.LUCENE_6_5_0));
assertEquals("indexCreatedVersion may only be non-null if the index was created on or after 7.0, got 6.5.0", e.getMessage());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SegmentInfos(5));
assertEquals("indexCreatedVersionMajor must be >= 6, got: 5", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new SegmentInfos(8));
assertEquals("indexCreatedVersionMajor is in the future: 8", e.getMessage());
}

// LUCENE-5954
public void testVersionsNoSegments() throws IOException {
SegmentInfos sis = new SegmentInfos(Version.LATEST);
SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
BaseDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false);
sis.commit(dir);

@ -53,8 +55,8 @@ public class TestSegmentInfos extends LuceneTestCase {
byte id[] = StringHelper.randomId();
Codec codec = Codec.getDefault();

SegmentInfos sis = new SegmentInfos(Version.LATEST);
SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_7_0_0, Version.LUCENE_7_0_0, "_0", 1, false, Codec.getDefault(),
Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);

@ -63,7 +65,7 @@ public class TestSegmentInfos extends LuceneTestCase {
sis.add(commitInfo);
sis.commit(dir);
sis = SegmentInfos.readLatestCommit(dir);
assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
assertEquals(Version.LUCENE_7_0_0, sis.getMinSegmentLuceneVersion());
assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
dir.close();
}

@ -75,15 +77,15 @@ public class TestSegmentInfos extends LuceneTestCase {
byte id[] = StringHelper.randomId();
Codec codec = Codec.getDefault();

SegmentInfos sis = new SegmentInfos(Version.LATEST);
SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_7_0_0, Version.LUCENE_7_0_0, "_0", 1, false, Codec.getDefault(),
Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
sis.add(commitInfo);

info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_1", 1, false, Codec.getDefault(),
info = new SegmentInfo(dir, Version.LUCENE_7_0_0, Version.LUCENE_7_0_0, "_1", 1, false, Codec.getDefault(),
Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);

@ -92,7 +94,7 @@ public class TestSegmentInfos extends LuceneTestCase {

sis.commit(dir);
sis = SegmentInfos.readLatestCommit(dir);
assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
assertEquals(Version.LUCENE_7_0_0, sis.getMinSegmentLuceneVersion());
assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
dir.close();
}
@ -60,8 +60,8 @@ public class TestSegmentMerger extends LuceneTestCase {
SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
DocHelper.setupDoc(doc2);
SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
reader1 = new SegmentReader(info1, newIOContext(random()));
reader2 = new SegmentReader(info2, newIOContext(random()));
reader1 = new SegmentReader(info1, Version.LATEST.major, newIOContext(random()));
reader2 = new SegmentReader(info2, Version.LATEST.major, newIOContext(random()));
}

@Override

@ -84,7 +84,7 @@ public class TestSegmentMerger extends LuceneTestCase {

public void testMerge() throws IOException {
final Codec codec = Codec.getDefault();
final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, mergedSegment, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, null, mergedSegment, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(reader1, reader2),
si, InfoStream.getDefault(), mergedDir,

@ -97,6 +97,7 @@ public class TestSegmentMerger extends LuceneTestCase {
SegmentReader mergedReader = new SegmentReader(new SegmentCommitInfo(
mergeState.segmentInfo,
0, -1L, -1L, -1L),
Version.LATEST.major,
newIOContext(random()));
assertTrue(mergedReader != null);
assertTrue(mergedReader.numDocs() == 2);
@ -29,6 +29,7 @@ import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

public class TestSegmentReader extends LuceneTestCase {
private Directory dir;

@ -42,7 +43,7 @@ public class TestSegmentReader extends LuceneTestCase {
dir = newDirectory();
DocHelper.setupDoc(testDoc);
SegmentCommitInfo info = DocHelper.writeDoc(random(), dir, testDoc);
reader = new SegmentReader(info, IOContext.READ);
reader = new SegmentReader(info, Version.LATEST.major, IOContext.READ);
}

@Override

@ -27,6 +27,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

public class TestSegmentTermDocs extends LuceneTestCase {
private Document testDoc = new Document();

@ -53,7 +54,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {

public void testTermDocs() throws IOException {
//After adding the document, we should be able to read it back in
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
assertTrue(reader != null);

TermsEnum terms = reader.fields().terms(DocHelper.TEXT_FIELD_2_KEY).iterator();

@ -71,7 +72,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
public void testBadSeek() throws IOException {
{
//After adding the document, we should be able to read it back in
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
assertTrue(reader != null);
PostingsEnum termDocs = TestUtil.docs(random(), reader,
"textField2",

@ -84,7 +85,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
}
{
//After adding the document, we should be able to read it back in
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
assertTrue(reader != null);
PostingsEnum termDocs = TestUtil.docs(random(), reader,
"junk",
@ -25,6 +25,7 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;

@ -34,8 +35,8 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.Version;

/**
 * Wraps a Terms with a {@link org.apache.lucene.index.LeafReader}, typically from term vectors.

@ -165,8 +166,8 @@ public class TermVectorLeafReader extends LeafReader {
}

@Override
public Sort getIndexSort() {
return null;
public LeafMetaData getMetaData() {
return new LeafMetaData(Version.LATEST.major, null, null);
}

@Override
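Custom LeafReaders built outside the index (like the wrapper above and MemoryIndex below) now satisfy the abstract getMetaData(); a minimal hedged sketch of the override, using a non-null minVersion so the created-major >= 7 constructor check in LeafMetaData passes:

import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.util.Version;

// In a custom LeafReader subclass: freshly built in memory, no index sort.
@Override
public LeafMetaData getMetaData() {
  return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
}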
@ -40,7 +40,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;

@ -58,6 +57,7 @@ import org.apache.lucene.util.IntBlockPool.SliceWriter;
import org.apache.lucene.util.RecyclingByteBlockAllocator;
import org.apache.lucene.util.RecyclingIntBlockAllocator;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;

/**
 * High-performance single-document main memory Apache Lucene fulltext search index.

@ -1625,8 +1625,8 @@ public class MemoryIndex {
}

@Override
public Sort getIndexSort() {
return null;
public LeafMetaData getMetaData() {
return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
}

@Override
@ -133,13 +133,13 @@ public class IndexSplitter {
public void split(Path destDir, String[] segs) throws IOException {
Files.createDirectories(destDir);
FSDirectory destFSDir = FSDirectory.open(destDir);
SegmentInfos destInfos = new SegmentInfos(infos.getIndexCreatedVersion());
SegmentInfos destInfos = new SegmentInfos(infos.getIndexCreatedVersionMajor());
destInfos.counter = infos.counter;
for (String n : segs) {
SegmentCommitInfo infoPerCommit = getInfo(n);
SegmentInfo info = infoPerCommit.info;
// Same info just changing the dir:
SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.name, info.maxDoc(),
SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.getMinVersion(), info.name, info.maxDoc(),
info.getUseCompoundFile(), info.getCodec(), info.getDiagnostics(), info.getId(), new HashMap<>(), null);
destInfos.add(new SegmentCommitInfo(newInfo, infoPerCommit.getDelCount(),
infoPerCommit.getDelGen(), infoPerCommit.getFieldInfosGen(),
@ -139,7 +139,7 @@ public abstract class ReplicaNode extends Node {
SegmentInfos infos;
if (segmentsFileName == null) {
// No index here yet:
infos = new SegmentInfos(Version.LATEST);
infos = new SegmentInfos(Version.LATEST.major);
message("top: init: no segments in index");
} else {
message("top: init: read existing segments commit " + segmentsFileName);
@ -638,7 +638,8 @@ public abstract class BaseCompoundFormatTestCase extends BaseIndexFileFormatTest

/** Returns a new fake segment */
protected static SegmentInfo newSegmentInfo(Directory dir, String name) {
return new SegmentInfo(dir, Version.LATEST, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
Version minVersion = random().nextBoolean() ? null : Version.LATEST;
return new SegmentInfo(dir, Version.LATEST, minVersion, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
}

/** Creates a file of the specified size with random data. */

@ -347,7 +347,8 @@ public abstract class BaseFieldInfoFormatTestCase extends BaseIndexFileFormatTes

/** Returns a new fake segment */
protected static SegmentInfo newSegmentInfo(Directory dir, String name) {
return new SegmentInfo(dir, Version.LATEST, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
Version minVersion = random().nextBoolean() ? null : Version.LATEST;
return new SegmentInfo(dir, Version.LATEST, minVersion, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
}

@Override
@ -147,6 +147,22 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
/** Returns the codec to run tests against */
protected abstract Codec getCodec();

/** Returns the major version that this codec is compatible with. */
protected int getCreatedVersionMajor() {
return Version.LATEST.major;
}

/** Set the created version of the given {@link Directory} and return it. */
protected final <D extends Directory> D applyCreatedVersionMajor(D d) throws IOException {
if (SegmentInfos.getLastCommitGeneration(d) != -1) {
throw new IllegalArgumentException("Cannot set the created version on a Directory that already has segments");
}
if (getCreatedVersionMajor() != Version.LATEST.major || random().nextBoolean()) {
new SegmentInfos(getCreatedVersionMajor()).commit(d);
}
return d;
}

private Codec savedCodec;

public void setUp() throws Exception {
@@ -195,7 +211,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   /** The purpose of this test is to make sure that bulk merge doesn't accumulate useless data over runs. */
   public void testMergeStability() throws Exception {
     assumeTrue("merge is not stable", mergeIsStable());
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
 
     // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
     // do not use RIW which will change things up!
@@ -214,7 +230,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     w.close();
     DirectoryReader reader = DirectoryReader.open(dir);
 
-    Directory dir2 = newDirectory();
+    Directory dir2 = applyCreatedVersionMajor(newDirectory());
     mp = newTieredMergePolicy();
     mp.setNoCFSRatio(0);
     cfg = new IndexWriterConfig(new MockAnalyzer(random())).setUseCompoundFile(false).setMergePolicy(mp);
@@ -245,7 +261,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
       avoidCodecs.add(new MockRandomPostingsFormat().getName());
       Codec.setDefault(new RandomCodec(random(), avoidCodecs));
     }
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter w = new IndexWriter(dir, cfg);
     // we need to index enough documents so that constant overhead doesn't dominate
@@ -286,7 +302,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   /** Calls close multiple times on closeable codec apis */
   public void testMultiClose() throws IOException {
     // first make a one doc index
-    Directory oneDocIndex = newDirectory();
+    Directory oneDocIndex = applyCreatedVersionMajor(newDirectory());
     IndexWriter iw = new IndexWriter(oneDocIndex, new IndexWriterConfig(new MockAnalyzer(random())));
     Document oneDoc = new Document();
     FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -303,7 +319,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     Directory dir = newFSDirectory(createTempDir("justSoYouGetSomeChannelErrors"));
     Codec codec = getCodec();
 
-    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, "_0", 1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
     FieldInfo proto = oneDocReader.getFieldInfos().fieldInfo("field");
     FieldInfo field = new FieldInfo(proto.name, proto.number, proto.hasVectors(), proto.omitsNorms(), proto.hasPayloads(),
                                     proto.getIndexOptions(), proto.getDocValuesType(), proto.getDocValuesGen(), new HashMap<>(),
@@ -499,7 +515,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   // codec does not corrupt the index or leak file handles.
   public void testRandomExceptions() throws Exception {
     // disable slow things: we don't rely upon sleeps here.
-    MockDirectoryWrapper dir = newMockDirectory();
+    MockDirectoryWrapper dir = applyCreatedVersionMajor(newMockDirectory());
     dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     dir.setUseSlowOpenClosers(false);
     dir.setRandomIOExceptionRate(0.001); // more rare
@@ -449,7 +449,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCase {
       norms[i] = longs.getAsLong();
     }
 
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
     IndexWriterConfig conf = newIndexWriterConfig(analyzer);conf.setMergePolicy(NoMergePolicy.INSTANCE);
     conf.setSimilarity(new CannedNormSimilarity(norms));
@@ -585,7 +585,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCase {
    *
    */
   public void testUndeadNorms() throws Exception {
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     RandomIndexWriter w = new RandomIndexWriter(random(), dir);
     int numDocs = atLeast(500);
     List<Integer> toDelete = new ArrayList<>();
@@ -646,7 +646,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCase {
       norms[i] = random().nextLong();
     }
 
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
     IndexWriterConfig conf = newIndexWriterConfig(analyzer);conf.setMergePolicy(NoMergePolicy.INSTANCE);
     conf.setSimilarity(new CannedNormSimilarity(norms));
@@ -48,13 +48,18 @@ import org.apache.lucene.util.Version;
  * if there is some bug in a given si Format that this
  * test fails to catch then this test needs to be improved! */
 public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
 
+  /** Whether this format records min versions. */
+  protected boolean supportsMinVersion() {
+    return true;
+  }
+
   /** Test files map */
   public void testFiles() throws Exception {
     Directory dir = newDirectory();
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -68,7 +73,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     Directory dir = newDirectory();
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     Set<String> originalFiles = Collections.singleton("_123.a");
     info.setFiles(originalFiles);
@@ -97,7 +102,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     Map<String,String> diagnostics = new HashMap<>();
     diagnostics.put("key1", "value1");
     diagnostics.put("key2", "value2");
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        diagnostics, id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -120,7 +125,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     Map<String,String> attributes = new HashMap<>();
     attributes.put("key1", "value1");
     attributes.put("key2", "value2");
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.emptyMap(), id, attributes, null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -140,7 +145,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     Codec codec = getCodec();
     Directory dir = newDirectory();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -153,15 +158,22 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
   public void testVersions() throws Exception {
     Codec codec = getCodec();
     for (Version v : getVersions()) {
-      Directory dir = newDirectory();
-      byte id[] = StringHelper.randomId();
-      SegmentInfo info = new SegmentInfo(dir, v, "_123", 1, false, codec,
-                                         Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
-      info.setFiles(Collections.<String>emptySet());
-      codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
-      SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
-      assertEquals(info2.getVersion(), v);
-      dir.close();
+      for (Version minV : new Version[] { v, null}) {
+        Directory dir = newDirectory();
+        byte id[] = StringHelper.randomId();
+        SegmentInfo info = new SegmentInfo(dir, v, minV, "_123", 1, false, codec,
+                                           Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
+        info.setFiles(Collections.<String>emptySet());
+        codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
+        SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
+        assertEquals(info2.getVersion(), v);
+        if (supportsMinVersion()) {
+          assertEquals(info2.getMinVersion(), minV);
+        } else {
+          assertEquals(info2.getMinVersion(), null);
+        }
+        dir.close();
+      }
     }
   }
 
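testVersions now round-trips each supported version paired with both a concrete min version and null. A sketch of the round-trip for a single (v, minV) pair, showing why the assertion forks on supportsMinVersion() (names as in the test above):

    SegmentInfo written = new SegmentInfo(dir, v, minV, "_123", 1, false, codec,
        Collections.emptyMap(), id, new HashMap<>(), null);
    written.setFiles(Collections.emptySet());
    codec.segmentInfoFormat().write(dir, written, IOContext.DEFAULT);
    SegmentInfo read = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    // Formats that predate min-version support have nowhere to store minV, so
    // they must come back with null rather than an invented value.
    assertEquals(supportsMinVersion() ? minV : null, read.getMinVersion());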
@@ -262,7 +274,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     Directory dir = newDirectory();
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), sort);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -292,7 +304,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
 
@@ -325,7 +337,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
 
@@ -358,7 +370,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -392,7 +404,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec,
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -442,7 +454,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
                      TestUtil.randomUnicodeString(random()));
     }
 
-    SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, codec, diagnostics, id, attributes, null);
+    SegmentInfo info = new SegmentInfo(dir, version, null, name, docCount, isCompoundFile, codec, diagnostics, id, attributes, null);
     info.setFiles(files);
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
     SegmentInfo info2 = codec.segmentInfoFormat().read(dir, name, id, IOContext.DEFAULT);
@@ -611,7 +611,7 @@ public class RandomPostingsTester {
   // maxAllowed = the "highest" we can index, but we will still
   // randomly index at lower IndexOption
   public FieldsProducer buildIndex(Codec codec, Directory dir, IndexOptions maxAllowed, boolean allowPayloads, boolean alwaysTestMax) throws IOException {
-    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, "_0", maxDoc, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", maxDoc, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
 
     int maxIndexOption = Arrays.asList(IndexOptions.values()).indexOf(maxAllowed);
     if (LuceneTestCase.VERBOSE) {
@@ -26,6 +26,7 @@ import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.LeafMetaData;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
@@ -39,6 +40,7 @@ import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import junit.framework.Assert;
 
@@ -260,8 +262,8 @@ public class QueryUtils {
       protected void doClose() throws IOException {}
 
       @Override
-      public Sort getIndexSort() {
-        return null;
+      public LeafMetaData getMetaData() {
+        return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
       }
 
       @Override
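The getIndexSort() override is replaced rather than supplemented: LeafReader.getMetaData() now bundles the index sort with the created major and the min segment version in one LeafMetaData object. A consumer-side sketch (the describe helper is illustrative, not from this commit):

    import org.apache.lucene.index.LeafMetaData;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.util.Version;

    // Everything that used to come from getIndexSort() now comes off one
    // LeafMetaData object, together with the new version information.
    static void describe(LeafReader reader) {
      LeafMetaData meta = reader.getMetaData();
      int createdMajor = meta.getCreatedVersionMajor(); // major that created the index
      Version minVersion = meta.getMinVersion();        // oldest contributing version, may be null
      Sort sort = meta.getSort();                       // index sort, may be null
      System.out.println(createdMajor + " / " + minVersion + " / " + sort);
    }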
@@ -23,9 +23,8 @@ import java.util.Map;
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
 import org.apache.lucene.index.MultiDocValues.OrdinalMap;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.Version;
 
 /**
  * This class forces a composite reader (eg a {@link
@@ -47,6 +46,7 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
 
   private final CompositeReader in;
   private final Fields fields;
+  private final LeafMetaData metaData;
 
   /** This method is sugar for getting an {@link LeafReader} from
    * an {@link IndexReader} of any kind. If the reader is already atomic,
@@ -66,6 +66,17 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
     in = reader;
     fields = MultiFields.getFields(in);
     in.registerParentReader(this);
+    if (reader.leaves().isEmpty()) {
+      metaData = new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
+    } else {
+      Version minVersion = reader.leaves().stream()
+          .map(LeafReaderContext::reader)
+          .map(LeafReader::getMetaData)
+          .map(LeafMetaData::getMinVersion)
+          .reduce((v1, v2) -> v1 == null ? null : v2 == null ? null : v2.onOrAfter(v1) ? v1 : v2)
+          .get();
+      metaData = new LeafMetaData(reader.leaves().get(0).reader().getMetaData().getCreatedVersionMajor(), minVersion, null);
+    }
   }
 
   @Override
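The reduce above makes null absorbing: if any leaf does not know its min version, the wrapper reports null for the whole composite; otherwise it keeps the oldest version seen across leaves. A standalone sketch of that combiner (the MinVersions class is illustrative):

    import java.util.List;
    import org.apache.lucene.util.Version;

    final class MinVersions {
      // null (unknown) is absorbing; otherwise keep the earlier of the two.
      static Version older(Version v1, Version v2) {
        if (v1 == null || v2 == null) {
          return null;
        }
        return v2.onOrAfter(v1) ? v1 : v2;
      }

      // Fold per-leaf min versions exactly like the stream reduce above.
      static Version acrossLeaves(List<Version> perLeaf) {
        return perLeaf.stream().reduce(MinVersions::older).orElse(null);
      }
    }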
@@ -263,7 +274,7 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return null;
+  public LeafMetaData getMetaData() {
+    return metaData;
   }
 }
@@ -25,6 +25,7 @@ import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.LeafMetaData;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReader;
|
@ -38,11 +39,11 @@ import org.apache.lucene.index.SortedSetDocValues;
|
|||
import org.apache.lucene.index.StoredFieldVisitor;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.util.BitSetIterator;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.FixedBitSet;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@@ -455,8 +456,8 @@ public class TestDocSet extends LuceneTestCase {
       }
 
       @Override
-      public Sort getIndexSort() {
-        return null;
+      public LeafMetaData getMetaData() {
+        return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
       }
 
       @Override