LUCENE-5992: add SI tests; remove FieldInfos from SegmentInfoWriter.write

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1629774 13f79535-47bb-0310-9956-ffa450edef68
Author: Michael McCandless
Date:   2014-10-06 22:01:51 +00:00
Parent: c3c3f29a7d
Commit: 55fa29e030

14 changed files with 401 additions and 16 deletions

lucene/CHANGES.txt

@@ -151,6 +151,8 @@ API Changes
to return an instance optimized for merging instead of searching.
(Mike McCandless, Robert Muir)
+ * LUCENE-5992: Remove FieldInfos from SegmentInfoWriter.write API. (Robert Muir, Mike McCandless)

Bug Fixes

* LUCENE-5650: Enforce read-only access to any path outside the temporary

Lucene40SegmentInfoWriter.java

@@ -22,7 +22,6 @@ import java.util.Collections;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.SegmentInfoWriter;
- import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
@@ -43,7 +42,7 @@ public final class Lucene40SegmentInfoWriter extends SegmentInfoWriter {
/** Save a single segment's info. */
@Override
- public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
+ public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
si.addFile(fileName);

TestLucene40SegmentInfoFormat.java (new file)

@@ -0,0 +1,58 @@
package org.apache.lucene.codecs.lucene40;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
import org.apache.lucene.util.Version;

/**
 * Tests Lucene40SegmentInfoFormat
 */
public class TestLucene40SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {

  @Override
  protected Version[] getVersions() {
    // NOTE: some of these bugfix releases we never actually "wrote",
    // but staying on the safe side...
    return new Version[] {
      Version.LUCENE_4_0_0_ALPHA,
      Version.LUCENE_4_0_0_BETA,
      Version.LUCENE_4_0_0,
      Version.LUCENE_4_1_0,
      Version.LUCENE_4_2_0,
      Version.LUCENE_4_2_1,
      Version.LUCENE_4_3_0,
      Version.LUCENE_4_3_1,
      Version.LUCENE_4_4_0,
      Version.LUCENE_4_5_0,
      Version.LUCENE_4_5_1,
    };
  }

  @Override
  @Deprecated
  protected void assertIDEquals(byte[] expected, byte[] actual) {
    assertNull(actual); // we don't support IDs
  }

  @Override
  protected Codec getCodec() {
    return new Lucene40RWCodec();
  }
}

Lucene46SegmentInfoWriter.java

@@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.SegmentInfoWriter;
- import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
@@ -43,7 +42,7 @@ final class Lucene46SegmentInfoWriter extends SegmentInfoWriter {
/** Save a single segment's info. */
@Override
- public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
+ public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
si.addFile(fileName);

TestLucene46SegmentInfoFormat.java (new file)

@@ -0,0 +1,57 @@
package org.apache.lucene.codecs.lucene46;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
import org.apache.lucene.util.Version;

/**
 * Tests Lucene46SegmentInfoFormat
 */
public class TestLucene46SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {

  @Override
  protected Version[] getVersions() {
    // NOTE: some of these bugfix releases we never actually "wrote",
    // but staying on the safe side...
    return new Version[] {
      Version.LUCENE_4_6_0,
      Version.LUCENE_4_6_1,
      Version.LUCENE_4_7_0,
      Version.LUCENE_4_7_1,
      Version.LUCENE_4_7_2,
      Version.LUCENE_4_8_0,
      Version.LUCENE_4_8_1,
      Version.LUCENE_4_9_0,
      Version.LUCENE_4_10_0,
      Version.LUCENE_4_10_1
    };
  }

  @Override
  @Deprecated
  protected void assertIDEquals(byte[] expected, byte[] actual) {
    assertNull(actual); // we don't support IDs
  }

  @Override
  protected Codec getCodec() {
    return new Lucene46RWCodec();
  }
}

SimpleTextSegmentInfoWriter.java

@@ -22,7 +22,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.lucene.codecs.SegmentInfoWriter;
- import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
@@ -51,7 +50,7 @@ public class SimpleTextSegmentInfoWriter extends SegmentInfoWriter {
final static BytesRef SI_ID = new BytesRef(" id ");
@Override
- public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
+ public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
String segFileName = IndexFileNames.segmentFileName(si.name, "", SimpleTextSegmentInfoFormat.SI_EXTENSION);
si.addFile(segFileName);

TestSimpleTextSegmentInfoFormat.java (new file)

@@ -0,0 +1,39 @@
package org.apache.lucene.codecs.simpletext;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
import org.apache.lucene.util.Version;

/**
 * Tests SimpleTextSegmentInfoFormat
 */
public class TestSimpleTextSegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
  private final Codec codec = new SimpleTextCodec();

  @Override
  protected Version[] getVersions() {
    return new Version[] { Version.LATEST };
  }

  @Override
  protected Codec getCodec() {
    return codec;
  }
}

SegmentInfoWriter.java

@@ -19,7 +19,6 @@ package org.apache.lucene.codecs;
import java.io.IOException;
- import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
@@ -39,5 +38,5 @@ public abstract class SegmentInfoWriter {
* Write {@link SegmentInfo} data.
* @throws IOException If an I/O error occurs
*/
- public abstract void write(Directory dir, SegmentInfo info, FieldInfos fis, IOContext ioContext) throws IOException;
+ public abstract void write(Directory dir, SegmentInfo info, IOContext ioContext) throws IOException;
}
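For codec implementors the migration is mechanical: drop the FieldInfos parameter from the write override and stop relying on it there (per-field metadata is written separately by the FieldInfosFormat, not by the SI file). A minimal sketch of a writer built against the new signature could look like the following; the class name PlainSegmentInfoWriter, the "plainsi" extension, and the on-disk layout are illustrative only and not part of this commit:

import java.io.IOException;

import org.apache.lucene.codecs.SegmentInfoWriter;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;

// Illustrative sketch only: a bare-bones SegmentInfoWriter using the new
// three-argument signature. Real writers (e.g. Lucene50SegmentInfoWriter below)
// also write codec headers/footers and validate the segment version.
public class PlainSegmentInfoWriter extends SegmentInfoWriter {

  @Override
  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(si.name, "", "plainsi");
    si.addFile(fileName); // the writer must register its own output in the segment's file set
    boolean success = false;
    IndexOutput out = dir.createOutput(fileName, ioContext);
    try {
      out.writeString(si.getVersion().toString());
      out.writeInt(si.getDocCount());
      out.writeByte((byte) (si.getUseCompoundFile() ? 1 : 0));
      success = true;
    } finally {
      if (success) {
        out.close();
      } else {
        IOUtils.closeWhileHandlingException(out);
      }
    }
  }
}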

Lucene50SegmentInfoWriter.java

@@ -22,7 +22,6 @@ import java.util.Set;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.SegmentInfoWriter;
- import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
@@ -46,7 +45,7 @@ public class Lucene50SegmentInfoWriter extends SegmentInfoWriter {
/** Save a single segment's info. */
@Override
- public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
+ public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
si.addFile(fileName);

DocumentsWriterPerThread.java

@@ -517,7 +517,7 @@ class DocumentsWriterPerThread {
// creating CFS so that 1) .si isn't slurped into CFS,
// and 2) .si reflects useCompoundFile=true change
// above:
- codec.segmentInfoFormat().getSegmentInfoWriter().write(directory, newSegment.info, flushedSegment.fieldInfos, context);
+ codec.segmentInfoFormat().getSegmentInfoWriter().write(directory, newSegment.info, context);
// TODO: ideally we would freeze newSegment here!!
// because any changes after writing the .si will be

IndexWriter.java

@@ -2594,7 +2594,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// above:
success = false;
try {
- codec.segmentInfoFormat().getSegmentInfoWriter().write(trackingDir, info, mergeState.mergeFieldInfos, context);
+ codec.segmentInfoFormat().getSegmentInfoWriter().write(trackingDir, info, context);
success = true;
} finally {
if (!success) {
@@ -4060,7 +4060,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// above:
boolean success2 = false;
try {
- codec.segmentInfoFormat().getSegmentInfoWriter().write(directory, merge.info.info, mergeState.mergeFieldInfos, context);
+ codec.segmentInfoFormat().getSegmentInfoWriter().write(directory, merge.info.info, context);
success2 = true;
} finally {
if (!success2) {

TestLucene50SegmentInfoFormat.java (new file)

@@ -0,0 +1,39 @@
package org.apache.lucene.codecs.lucene50;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

/**
 * Tests Lucene50SegmentInfoFormat
 */
public class TestLucene50SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {

  @Override
  protected Version[] getVersions() {
    return new Version[] { Version.LATEST };
  }

  @Override
  protected Codec getCodec() {
    return TestUtil.getDefaultCodec();
  }
}

CrankySegmentInfoFormat.java

@@ -23,7 +23,6 @@ import java.util.Random;
import org.apache.lucene.codecs.SegmentInfoFormat;
import org.apache.lucene.codecs.SegmentInfoReader;
import org.apache.lucene.codecs.SegmentInfoWriter;
- import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
@@ -57,11 +56,11 @@ class CrankySegmentInfoFormat extends SegmentInfoFormat {
}
@Override
- public void write(Directory dir, SegmentInfo info, FieldInfos fis, IOContext ioContext) throws IOException {
+ public void write(Directory dir, SegmentInfo info, IOContext ioContext) throws IOException {
if (random.nextInt(100) == 0) {
throw new IOException("Fake IOException from SegmentInfoWriter.write()");
}
- delegate.write(dir, info, fis, ioContext);
+ delegate.write(dir, info, ioContext);
}
}
}

BaseSegmentInfoFormatTestCase.java (new file)

@@ -0,0 +1,196 @@
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.Version;

/**
 * Abstract class to do basic tests for si format.
 * NOTE: This test focuses on the si impl, nothing else.
 * The [stretch] goal is for this test to be
 * so thorough in testing a new si format that if this
 * test passes, then all Lucene/Solr tests should also pass. I.e.,
 * if there is some bug in a given si format that this
 * test fails to catch then this test needs to be improved!
 */
public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {

  /** Test files map */
  public void testFiles() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
                                       Collections.<String,String>emptyMap(), StringHelper.randomId());
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().getSegmentInfoWriter().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().getSegmentInfoReader().read(dir, "_123", IOContext.DEFAULT);
    assertEquals(info.files(), info2.files());
    dir.close();
  }

  /** Tests SI writer adds itself to files... */
  public void testAddsSelfToFiles() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
                                       Collections.<String,String>emptyMap(), StringHelper.randomId());
    Set<String> originalFiles = Collections.singleton("_123.a");
    info.setFiles(originalFiles);
    codec.segmentInfoFormat().getSegmentInfoWriter().write(dir, info, IOContext.DEFAULT);

    Set<String> modifiedFiles = info.files();
    assertTrue(modifiedFiles.containsAll(originalFiles));
    assertTrue("did you forget to add yourself to files()", modifiedFiles.size() > originalFiles.size());

    SegmentInfo info2 = codec.segmentInfoFormat().getSegmentInfoReader().read(dir, "_123", IOContext.DEFAULT);
    assertEquals(info.files(), info2.files());
    dir.close();
  }

  /** Test diagnostics map */
  public void testDiagnostics() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    Map<String,String> diagnostics = new HashMap<>();
    diagnostics.put("key1", "value1");
    diagnostics.put("key2", "value2");
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
                                       diagnostics, StringHelper.randomId());
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().getSegmentInfoWriter().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().getSegmentInfoReader().read(dir, "_123", IOContext.DEFAULT);
    assertEquals(diagnostics, info2.getDiagnostics());
    dir.close();
  }

  /** Test unique ID */
  public void testUniqueID() throws Exception {
    Codec codec = getCodec();
    Directory dir = newDirectory();
    byte id[] = StringHelper.randomId();
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
                                       Collections.<String,String>emptyMap(), id);
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().getSegmentInfoWriter().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().getSegmentInfoReader().read(dir, "_123", IOContext.DEFAULT);
    assertIDEquals(id, info2.getId());
    dir.close();
  }

  /** Test versions */
  public void testVersions() throws Exception {
    Codec codec = getCodec();
    for (Version v : getVersions()) {
      Directory dir = newDirectory();
      SegmentInfo info = new SegmentInfo(dir, v, "_123", 1, false, codec,
                                         Collections.<String,String>emptyMap(), StringHelper.randomId());
      info.setFiles(Collections.<String>emptySet());
      codec.segmentInfoFormat().getSegmentInfoWriter().write(dir, info, IOContext.DEFAULT);
      SegmentInfo info2 = codec.segmentInfoFormat().getSegmentInfoReader().read(dir, "_123", IOContext.DEFAULT);
      assertEquals(info2.getVersion(), v);
      dir.close();
    }
  }

  /**
   * Sets some otherwise hard-to-test properties:
   * random segment names, ID values, document count, etc and round-trips
   */
  public void testRandom() throws Exception {
    Codec codec = getCodec();
    Version[] versions = getVersions();
    for (int i = 0; i < 10; i++) {
      Directory dir = newDirectory();
      Version version = versions[random().nextInt(versions.length)];
      String name = "_" + Integer.toString(random().nextInt(Integer.MAX_VALUE), Character.MAX_RADIX);
      int docCount = TestUtil.nextInt(random(), 1, IndexWriter.MAX_DOCS);
      boolean isCompoundFile = random().nextBoolean();
      Set<String> files = new HashSet<>();
      int numFiles = random().nextInt(10);
      for (int j = 0; j < numFiles; j++) {
        String file = IndexFileNames.segmentFileName(name, "", Integer.toString(j));
        files.add(file);
        dir.createOutput(file, IOContext.DEFAULT).close();
      }
      Map<String,String> diagnostics = new HashMap<>();
      int numDiags = random().nextInt(10);
      for (int j = 0; j < numDiags; j++) {
        diagnostics.put(TestUtil.randomUnicodeString(random()),
                        TestUtil.randomUnicodeString(random()));
      }
      byte id[] = new byte[StringHelper.ID_LENGTH];
      random().nextBytes(id);
      SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, codec, diagnostics, id);
      info.setFiles(files);
      codec.segmentInfoFormat().getSegmentInfoWriter().write(dir, info, IOContext.DEFAULT);
      SegmentInfo info2 = codec.segmentInfoFormat().getSegmentInfoReader().read(dir, name, IOContext.DEFAULT);
      assertEquals(info, info2);
      dir.close();
    }
  }

  protected final void assertEquals(SegmentInfo expected, SegmentInfo actual) {
    assertSame(expected.dir, actual.dir);
    assertEquals(expected.name, actual.name);
    assertEquals(expected.files(), actual.files());
    // we don't assert this, because SI format has nothing to do with it... set by SIS
    // assertSame(expected.getCodec(), actual.getCodec());
    assertEquals(expected.getDiagnostics(), actual.getDiagnostics());
    assertEquals(expected.getDocCount(), actual.getDocCount());
    assertIDEquals(expected.getId(), actual.getId());
    assertEquals(expected.getUseCompoundFile(), actual.getUseCompoundFile());
    assertEquals(expected.getVersion(), actual.getVersion());
  }

  /** Returns the versions this SI should test */
  protected abstract Version[] getVersions();

  /**
   * assert that unique id is equal.
   * @deprecated only exists to be overridden by old codecs that didn't support this
   */
  @Deprecated
  protected void assertIDEquals(byte expected[], byte actual[]) {
    assertArrayEquals(expected, actual);
  }

  @Override
  protected void addRandomFields(Document doc) {
    doc.add(new StoredField("foobar", TestUtil.randomSimpleString(random())));
  }

  @Override
  public void testRamBytesUsed() throws IOException {
    assumeTrue("not applicable for this format", true);
  }
}
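The SI tests added by this commit (TestLucene40SegmentInfoFormat, TestLucene46SegmentInfoFormat, TestSimpleTextSegmentInfoFormat, TestLucene50SegmentInfoFormat) are thin subclasses of this base class. A hypothetical out-of-tree SI format would hook in the same way; MyCodec below is a made-up placeholder and not part of this change:

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
import org.apache.lucene.util.Version;

// Illustrative sketch: MyCodec is hypothetical. A subclass only supplies the codec
// whose SegmentInfoFormat should be exercised and the index versions it can write;
// the inherited tests (files, diagnostics, IDs, random round-trips) do the rest.
public class TestMySegmentInfoFormat extends BaseSegmentInfoFormatTestCase {

  @Override
  protected Version[] getVersions() {
    return new Version[] { Version.LATEST };
  }

  @Override
  protected Codec getCodec() {
    return new MyCodec(); // hypothetical codec under test
  }
}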