LUCENE-6265: add extrasfs to return bonus files/subdirs with directories

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1661251 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2015-02-21 00:09:35 +00:00
parent e168bfa2f5
commit 7c36d012f6
19 changed files with 224 additions and 78 deletions
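Every file below is adjusted to the same discipline: once ExtrasFS can drop a bonus "extraN" file or subdirectory into any newly created directory, a test may no longer assert an exact directory listing; it must either filter what it lists or assert relative to a baseline. A minimal standalone sketch of the filtering style (the file names are illustrative, not taken from the commit):

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class FilteredListingDemo {
  public static void main(String[] args) throws IOException {
    Path dir = Files.createTempDirectory("extras-demo");
    Files.createFile(dir.resolve("_0.cfs"));  // a "real" index file
    Files.createFile(dir.resolve("extra1"));  // bonus junk, as ExtrasFS would add

    // Brittle: asserting the directory holds exactly one file now fails.
    // Robust: count only the names the test actually cares about.
    int indexFiles = 0;
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
      for (Path p : stream) {
        if (p.getFileName().toString().startsWith("_")) {
          indexFiles++;
        }
      }
    }
    System.out.println(indexFiles); // 1, regardless of extras
  }
}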

View File

@@ -30,6 +30,7 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -45,8 +46,8 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
@@ -271,15 +272,13 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
}
boolean fail = false;
try (DirectoryStream<Path> stream = Files.newDirectoryStream(segsPath)) {
for (Path path : stream) {
SegmentIDAndGen segIDGen = new SegmentIDAndGen(path.getFileName().toString());
if (liveIDs.contains(segIDGen.segID) == false) {
if (DEBUG) System.out.println("TEST: fail seg=" + path.getFileName() + " is not live but still has a parallel index");
fail = true;
}
}
for(Path path : segSubDirs(segsPath)) {
SegmentIDAndGen segIDGen = new SegmentIDAndGen(path.getFileName().toString());
if (liveIDs.contains(segIDGen.segID) == false) {
if (DEBUG) System.out.println("TEST: fail seg=" + path.getFileName() + " is not live but still has a parallel index");
fail = true;
}
}
assertFalse(fail);
}
@@ -486,24 +485,22 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
long currentSchemaGen = getCurrentSchemaGen();
if (Files.exists(segsPath)) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(segsPath)) {
for (Path path : stream) {
if (Files.isDirectory(path)) {
SegmentIDAndGen segIDGen = new SegmentIDAndGen(path.getFileName().toString());
assert segIDGen.schemaGen <= currentSchemaGen;
if (liveIDs.contains(segIDGen.segID) == false && (closedSegments.contains(segIDGen) || (removeOldGens && segIDGen.schemaGen < currentSchemaGen))) {
if (DEBUG) System.out.println("TEST: remove " + segIDGen);
try {
IOUtils.rm(path);
closedSegments.remove(segIDGen);
} catch (IOException ioe) {
// OK, we'll retry later
if (DEBUG) System.out.println("TEST: ignore ioe during delete " + path + ":" + ioe);
}
}
for (Path path : segSubDirs(segsPath)) {
if (Files.isDirectory(path)) {
SegmentIDAndGen segIDGen = new SegmentIDAndGen(path.getFileName().toString());
assert segIDGen.schemaGen <= currentSchemaGen;
if (liveIDs.contains(segIDGen.segID) == false && (closedSegments.contains(segIDGen) || (removeOldGens && segIDGen.schemaGen < currentSchemaGen))) {
if (DEBUG) System.out.println("TEST: remove " + segIDGen);
try {
IOUtils.rm(path);
closedSegments.remove(segIDGen);
} catch (IOException ioe) {
// OK, we'll retry later
if (DEBUG) System.out.println("TEST: ignore ioe during delete " + path + ":" + ioe);
}
}
}
}
}
}
@@ -1365,5 +1362,22 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
}
}
// TODO: maybe the leading id could be further restricted? It's from StringHelper.idToString:
static final Pattern SEG_GEN_SUB_DIR_PATTERN = Pattern.compile("^[a-z0-9]+_([0-9]+)$");
private static List<Path> segSubDirs(Path segsPath) throws IOException {
List<Path> result = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(segsPath)) {
for (Path path : stream) {
// Must be of the form <segIDString>_<longGen>
if (Files.isDirectory(path) && SEG_GEN_SUB_DIR_PATTERN.matcher(path.getFileName().toString()).matches()) {
result.add(path);
}
}
}
return result;
}
// TODO: test exceptions
}
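The new segSubDirs helper above is this commit's filtering idea in action: only subdirectories whose names parse as <segIDString>_<schemaGen> are treated as parallel indices, so any bonus entries ExtrasFS created are ignored. A standalone check of what the pattern accepts (sample names invented for illustration):

import java.util.regex.Pattern;

public class SegDirPatternDemo {
  static final Pattern SEG_GEN_SUB_DIR_PATTERN = Pattern.compile("^[a-z0-9]+_([0-9]+)$");

  public static void main(String[] args) {
    // Lowercase alphanumeric segment id, underscore, numeric schema gen: accepted.
    System.out.println(SEG_GEN_SUB_DIR_PATTERN.matcher("a1b2c3_42").matches()); // true
    // ExtrasFS-style bonus names carry no _<gen> suffix: rejected.
    System.out.println(SEG_GEN_SUB_DIR_PATTERN.matcher("extra7").matches());    // false
  }
}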

View File

@@ -482,7 +482,7 @@ public void testFilesOpenClose() throws IOException {
// expected
}
Files.delete(dirFile);
IOUtils.rm(dirFile);
// Make sure we still get a CorruptIndexException (not NPE):
try {
@@ -1056,7 +1056,7 @@ public void testFilesOpenClose() throws IOException {
public void testIndexExistsOnNonExistentDirectory() throws Exception {
Path tempDir = createTempDir("testIndexExistsOnNonExistentDirectory");
Files.delete(tempDir);
IOUtils.rm(tempDir);
Directory dir = newFSDirectory(tempDir);
assertFalse(DirectoryReader.indexExists(dir));
dir.close();
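Both hunks here swap Files.delete for IOUtils.rm for the same reason: Files.delete throws DirectoryNotEmptyException if ExtrasFS happened to seed the directory with a bonus file, while IOUtils.rm removes the whole tree. A simplified sketch of the difference (not Lucene's actual IOUtils implementation):

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

public class RecursiveRm {
  // Files.delete(dir) fails on a non-empty directory; deleting children
  // first succeeds no matter what ExtrasFS added.
  static void rm(Path path) throws IOException {
    Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
        Files.delete(file);
        return FileVisitResult.CONTINUE;
      }
      @Override
      public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
        if (exc != null) throw exc;
        Files.delete(dir);
        return FileVisitResult.CONTINUE;
      }
    });
  }

  public static void main(String[] args) throws IOException {
    Path dir = Files.createTempDirectory("rm-demo");
    Files.createFile(dir.resolve("extra1")); // would make Files.delete(dir) throw
    rm(dir);                                 // succeeds anyway
    System.out.println(Files.exists(dir));   // false
  }
}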

View File

@@ -264,7 +264,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
assertEquals(1, sis.getGeneration());
// no inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(1, sis.getGeneration());
dir.close();
@@ -284,12 +284,12 @@
dir.createOutput(IndexFileNames.SEGMENTS + "_2", IOContext.DEFAULT).close();
// ensure inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(2, sis.getGeneration());
// add another trash commit
dir.createOutput(IndexFileNames.SEGMENTS + "_4", IOContext.DEFAULT).close();
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(4, sis.getGeneration());
dir.close();
@@ -305,19 +305,19 @@
assertEquals(0, sis.counter);
// no inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(0, sis.counter);
// add trash per-segment file
dir.createOutput(IndexFileNames.segmentFileName("_0", "", "foo"), IOContext.DEFAULT).close();
// ensure inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(1, sis.counter);
// add trash per-segment file
dir.createOutput(IndexFileNames.segmentFileName("_3", "", "foo"), IOContext.DEFAULT).close();
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(4, sis.counter);
// ensure we write _4 segment next
@@ -346,14 +346,14 @@
assertEquals(1, sis.info(0).getNextDelGen());
// no inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(1, sis.info(0).getNextDelGen());
// add trash per-segment deletes file
dir.createOutput(IndexFileNames.fileNameFromGeneration("_0", "del", 2), IOContext.DEFAULT).close();
// ensure inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(3, sis.info(0).getNextDelGen());
dir.close();
@@ -373,7 +373,7 @@
dir.createOutput(IndexFileNames.SEGMENTS + "_", IOContext.DEFAULT).close();
// no inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(1, sis.getGeneration());
dir.close();
@@ -396,11 +396,25 @@
dir.createOutput("_1_A", IOContext.DEFAULT).close();
// no inflation
IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
assertEquals(1, sis.info(0).getNextDelGen());
dir.close();
}
// IFD's inflater is "raw" and expects to see only codec files,
// and rightfully so; this helper filters everything else out first.
static void inflateGens(SegmentInfos sis, Collection<String> files, InfoStream stream) {
List<String> filtered = new ArrayList<>();
for (String file : files) {
if (IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches() ||
file.startsWith(IndexFileNames.SEGMENTS) ||
file.startsWith(IndexFileNames.PENDING_SEGMENTS)) {
filtered.add(file);
}
}
IndexFileDeleter.inflateGens(sis, filtered, stream);
}
// LUCENE-5919
public void testExcInDecRef() throws Exception {
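The static inflateGens helper above narrows IFD's view to file names it is designed to parse before delegating to the real IndexFileDeleter.inflateGens. Schematically, using an approximation of the codec-file regex (the authoritative pattern lives in IndexFileNames.CODEC_FILE_PATTERN):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public class InflateGensFilterDemo {
  // Approximation of IndexFileNames.CODEC_FILE_PATTERN, for illustration only.
  static final Pattern CODEC_FILE_PATTERN = Pattern.compile("_[a-z0-9]+(_.*)?\\..*");

  public static void main(String[] args) {
    List<String> filtered = new ArrayList<>();
    for (String file : Arrays.asList("_0.cfs", "segments_2", "pending_segments_3", "extra1")) {
      if (CODEC_FILE_PATTERN.matcher(file).matches()
          || file.startsWith("segments")            // IndexFileNames.SEGMENTS
          || file.startsWith("pending_segments")) { // IndexFileNames.PENDING_SEGMENTS
        filtered.add(file);
      }
    }
    // "extra1" never reaches the gen inflater.
    System.out.println(filtered); // [_0.cfs, segments_2, pending_segments_3]
  }
}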

View File

@@ -1303,11 +1303,9 @@ public class TestIndexWriter extends LuceneTestCase {
if (iter == 1) {
// we run a full commit so there should be a segments file etc.
assertTrue(files.contains("segments_1"));
assertEquals(files.toString(), files.size(), 4);
} else {
// this is an NRT reopen - no segments files yet
assertEquals(files.toString(), files.size(), 3);
assertFalse(files.contains("segments_1"));
}
w.addDocument(doc);
w.forceMerge(1);
@@ -1416,6 +1414,8 @@
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
}
String[] origFiles = dir.listAll();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy())
@@ -1424,13 +1424,13 @@
// Creating over empty dir should not create any files,
// or, at most the write.lock file
final int extraFileCount;
if (files.length == 1) {
assertTrue(files[0].endsWith("write.lock"));
extraFileCount = 1;
final int extraFileCount = files.length - origFiles.length;
if (extraFileCount == 1) {
assertTrue(Arrays.asList(files).contains(IndexWriter.WRITE_LOCK_NAME));
} else {
assertEquals(0, files.length);
extraFileCount = 0;
Arrays.sort(origFiles);
Arrays.sort(files);
assertArrayEquals(origFiles, files);
}
Document doc = new Document();
@@ -1444,10 +1444,14 @@
// Adding just one document does not call flush yet.
int computedExtraFileCount = 0;
for (String file : dir.listAll()) {
if (file.lastIndexOf('.') < 0
// don't count stored fields and term vectors in
|| !Arrays.asList("fdx", "fdt", "tvx", "tvd", "tvf").contains(file.substring(file.lastIndexOf('.') + 1))) {
++computedExtraFileCount;
if (IndexWriter.WRITE_LOCK_NAME.equals(file) ||
file.startsWith(IndexFileNames.SEGMENTS) ||
IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
if (file.lastIndexOf('.') < 0
// don't count stored fields and term vectors in
|| !Arrays.asList("fdx", "fdt", "tvx", "tvd", "tvf").contains(file.substring(file.lastIndexOf('.') + 1))) {
++computedExtraFileCount;
}
}
}
assertEquals("only the stored and term vector files should exist in the directory", extraFileCount, computedExtraFileCount);
@@ -1462,12 +1466,12 @@
// After rollback, IW should remove all files
writer.rollback();
String allFiles[] = dir.listAll();
assertTrue("no files should exist in the directory after rollback", allFiles.length == 0 || Arrays.equals(allFiles, new String[] { IndexWriter.WRITE_LOCK_NAME }));
assertEquals("no files should exist in the directory after rollback", origFiles.length + extraFileCount, allFiles.length);
// Since we rolled-back above, that close should be a no-op
writer.close();
allFiles = dir.listAll();
assertTrue("expected a no-op close after IW.rollback()", allFiles.length == 0 || Arrays.equals(allFiles, new String[] { IndexWriter.WRITE_LOCK_NAME }));
assertEquals("expected a no-op close after IW.rollback()", origFiles.length + extraFileCount, allFiles.length);
dir.close();
}
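The second recurring fix, used throughout this file: capture the directory listing before the operation under test and assert relative to that baseline, instead of asserting absolute counts that a bonus file would break. In miniature (names invented for illustration):

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class BaselineCountDemo {
  static int count(Path dir) throws IOException {
    int n = 0;
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
      for (Path ignored : stream) {
        n++;
      }
    }
    return n;
  }

  public static void main(String[] args) throws IOException {
    Path dir = Files.createTempDirectory("baseline-demo");
    Files.createFile(dir.resolve("extra1")); // pre-existing junk, as ExtrasFS would add

    int before = count(dir);                 // baseline includes the junk
    Files.createFile(dir.resolve("foo.txt"));
    int after = count(dir);

    System.out.println(after - before);      // 1: assert only what the test itself changed
  }
}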

View File

@@ -945,6 +945,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
MockDirectoryWrapper dir = newMockDirectory();
dir.setFailOnCreateOutput(false);
dir.setEnableVirusScanner(false); // we check for specific list of files
int fileCount = dir.listAll().length;
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
@@ -961,7 +962,7 @@
assertTrue(failure.failOnCommit && failure.failOnDeleteFile);
w.rollback();
String files[] = dir.listAll();
assertTrue(files.length == 0 || Arrays.equals(files, new String[] { IndexWriter.WRITE_LOCK_NAME }));
assertTrue(files.length == fileCount || (files.length == fileCount+1 && Arrays.asList(files).contains(IndexWriter.WRITE_LOCK_NAME)));
dir.close();
}
}

View File

@@ -155,8 +155,9 @@ public class TestRollingUpdates extends LuceneTestCase {
totalBytes += sipc.sizeInBytes();
}
long totalBytes2 = 0;
for(String fileName : dir.listAll()) {
if (!fileName.startsWith(IndexFileNames.SEGMENTS)) {
if (IndexFileNames.CODEC_FILE_PATTERN.matcher(fileName).matches()) {
totalBytes2 += dir.fileLength(fileName);
}
}

View File

@@ -21,6 +21,7 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.util.IOUtils;
@@ -143,7 +144,9 @@ public class TestDirectory extends BaseDirectoryTestCase {
try {
Files.createDirectory(path.resolve("subdir"));
FSDirectory fsDir = new SimpleFSDirectory(path);
assertEquals(0, new RAMDirectory(fsDir, newIOContext(random())).listAll().length);
RAMDirectory ramDir = new RAMDirectory(fsDir, newIOContext(random()));
List<String> files = Arrays.asList(ramDir.listAll());
assertFalse(files.contains("subdir"));
} finally {
IOUtils.rm(path);
}

View File

@@ -20,6 +20,9 @@ package org.apache.lucene.store;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@@ -75,7 +78,9 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {
try {
Files.createDirectory(path.resolve("subdir"));
fsDir = new SimpleFSDirectory(path);
assertEquals(0, new RAMDirectory(fsDir, newIOContext(random())).listAll().length);
RAMDirectory ramDir = new RAMDirectory(fsDir, newIOContext(random()));
List<String> files = Arrays.asList(ramDir.listAll());
assertFalse(files.contains("subdir"));
} finally {
IOUtils.close(fsDir);
IOUtils.rm(path);

View File

@@ -51,7 +51,7 @@ public class TestLeaveFilesIfTestFails extends WithNestedTests {
Result r = JUnitCore.runClasses(Nested1.class);
Assert.assertEquals(1, r.getFailureCount());
Assert.assertTrue(Nested1.file != null && Files.exists(Nested1.file));
Files.delete(Nested1.file);
IOUtils.rm(Nested1.file);
}
public static class Nested2 extends WithNestedTests.AbstractNestedTest {

View File

@@ -353,18 +353,23 @@ public abstract class BaseCompoundFormatTestCase extends BaseIndexFileFormatTestCase
createRandomFile(dir, segment + ".big6", 3 * chunk + 1);
createRandomFile(dir, segment + ".big7", 1000 * chunk);
String files[] = dir.listAll();
List<String> files = new ArrayList<>();
for (String file : dir.listAll()) {
if (file.startsWith(segment)) {
files.add(file);
}
}
SegmentInfo si = newSegmentInfo(dir, "_123");
si.setFiles(Arrays.asList(files));
si.setFiles(files);
si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
for (int i = 0; i < files.length; i++) {
IndexInput check = dir.openInput(files[i], newIOContext(random()));
IndexInput test = cfs.openInput(files[i], newIOContext(random()));
assertSameStreams(files[i], check, test);
assertSameSeekBehavior(files[i], check, test);
for (String file : files) {
IndexInput check = dir.openInput(file, newIOContext(random()));
IndexInput test = cfs.openInput(file, newIOContext(random()));
assertSameStreams(file, check, test);
assertSameSeekBehavior(file, check, test);
test.close();
check.close();
}
@@ -379,8 +384,11 @@ public abstract class BaseCompoundFormatTestCase extends BaseIndexFileFormatTestCase
final int FILE_COUNT = atLeast(500);
List<String> files = new ArrayList<>();
for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++) {
IndexOutput out = dir.createOutput("_123." + fileIdx, newIOContext(random()));
String file = "_123." + fileIdx;
files.add(file);
IndexOutput out = dir.createOutput(file, newIOContext(random()));
out.writeByte((byte) fileIdx);
out.close();
}
@@ -388,7 +396,7 @@ public abstract class BaseCompoundFormatTestCase extends BaseIndexFileFormatTestCase
assertEquals(0, dir.getFileHandleCount());
SegmentInfo si = newSegmentInfo(dir, "_123");
si.setFiles(Arrays.asList(dir.listAll()));
si.setFiles(files);
si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
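A third defensive pattern appears in the two hunks above: instead of re-listing the directory and trusting dir.listAll() to contain exactly what was written, the test records each name as it creates the file and hands that explicit list to setFiles. The bookkeeping in isolation (directory calls elided):

import java.util.ArrayList;
import java.util.List;

public class RecordWritesDemo {
  public static void main(String[] args) {
    List<String> files = new ArrayList<>();
    for (int fileIdx = 0; fileIdx < 3; fileIdx++) {
      String file = "_123." + fileIdx;
      files.add(file);  // remember what we created ourselves...
      // dir.createOutput(file, context) would go here in the real test
    }
    // ...and pass this list to setFiles, never dir.listAll(), which may hold extras.
    System.out.println(files); // [_123.0, _123.1, _123.2]
  }
}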

View File

@@ -161,9 +161,11 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
private Map<String, Long> bytesUsedByExtension(Directory d) throws IOException {
Map<String, Long> bytesUsedByExtension = new HashMap<>();
for (String file : d.listAll()) {
final String ext = IndexFileNames.getExtension(file);
final long previousLength = bytesUsedByExtension.containsKey(ext) ? bytesUsedByExtension.get(ext) : 0;
bytesUsedByExtension.put(ext, previousLength + d.fileLength(file));
if (IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
final String ext = IndexFileNames.getExtension(file);
final long previousLength = bytesUsedByExtension.containsKey(ext) ? bytesUsedByExtension.get(ext) : 0;
bytesUsedByExtension.put(ext, previousLength + d.fileLength(file));
}
}
bytesUsedByExtension.keySet().removeAll(excludedExtensionsFromByteCounts());

View File

@@ -0,0 +1,74 @@
package org.apache.lucene.mockfile;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileAttribute;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
/**
* Adds extra files/subdirectories when directories are created.
* <p>
* Lucene shouldn't care about these, but sometimes operating systems
* create special files themselves (.DS_Store, thumbs.db, .nfsXXX, ...),
* so we add them and see what breaks.
* <p>
* When a directory is created, sometimes a file or directory named
* "extraNNNN" will be included with it.
* All other filesystem operations are passed through as normal.
*/
public class ExtrasFS extends FilterFileSystemProvider {
final AtomicLong counter = new AtomicLong();
final Random random;
/**
* Create a new instance, wrapping {@code delegate}.
*/
public ExtrasFS(FileSystem delegate, Random random) {
super("extras://", delegate);
this.random = random;
}
@Override
public void createDirectory(Path dir, FileAttribute<?>... attrs) throws IOException {
super.createDirectory(dir, attrs);
// ok, we created the directory successfully.
if (random.nextInt(4) == 0) {
// let's add a bogus file... if this fails, we don't care, it's best effort.
try {
Path target = dir.resolve("extra" + counter.incrementAndGet());
if (random.nextBoolean()) {
super.createDirectory(target);
} else {
Files.createFile(target);
}
} catch (Exception ignored) {
// best effort
}
}
}
// TODO: would be great if we overrode attributes, so file size was always zero for
// our fake files. But this is tricky because it's hooked into several places.
// Currently MDW has a hack so we don't break disk full tests.
}
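For context on how this class gets installed: the test framework layers it over the default filesystem, as TestRuleTemporaryFilesCleanup does further down. A hedged usage sketch; the getFileSystem(null) call mirrors the wiring shown there, and the rest of FilterFileSystemProvider's contract is test-framework internal:

import java.nio.file.DirectoryStream;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Random;

import org.apache.lucene.mockfile.ExtrasFS;

public class ExtrasFSUsage {
  public static void main(String[] args) throws Exception {
    // Wrap the default filesystem: every createDirectory now has a
    // 1-in-4 chance of also producing a bogus "extraN" file or subdirectory.
    FileSystem fs = new ExtrasFS(FileSystems.getDefault(), new Random(42)).getFileSystem(null);

    Path dir = fs.getPath(System.getProperty("java.io.tmpdir"), "extras-demo");
    Files.createDirectory(dir);
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
      for (Path p : stream) {
        System.out.println(p.getFileName()); // possibly "extra1"
      }
    }
  }
}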

View File

@@ -157,9 +157,11 @@ public abstract class BaseDirectoryTestCase extends LuceneTestCase {
public void testDeleteFile() throws Exception {
Directory dir = getDirectory(createTempDir("testDeleteFile"));
int count = dir.listAll().length;
dir.createOutput("foo.txt", IOContext.DEFAULT).close();
assertEquals(count+1, dir.listAll().length);
dir.deleteFile("foo.txt");
assertEquals(0, dir.listAll().length);
assertEquals(count, dir.listAll().length);
dir.close();
}
@@ -434,6 +436,9 @@
try {
String[] files = dir.listAll();
for (String file : files) {
if (!file.startsWith(name)) {
continue;
}
//System.out.println("file:" + file);
try {
IndexInput input = dir.openInput(file, newIOContext(random()));
@@ -749,8 +754,7 @@
// delete it
Files.delete(path.resolve("afile"));
// directory is empty
assertEquals(0, fsdir.listAll().length);
int fileCount = fsdir.listAll().length;
// fsync it
try {
@@ -760,8 +764,8 @@
// ok
}
// directory is still empty
assertEquals(0, fsdir.listAll().length);
// no new files created
assertEquals(fileCount, fsdir.listAll().length);
fsdir.close();
}

View File

@@ -285,8 +285,13 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
else {
// hack
long size = 0;
for (String file : in.listAll())
size += in.fileLength(file);
for (String file : in.listAll()) {
// hack 2: see TODO in ExtrasFS (ideally it would always return 0 byte
// size for extras it creates, even though the size of non-regular files is not defined)
if (!file.startsWith("extra")) {
size += in.fileLength(file);
}
}
return size;
}
}

View File

@@ -1304,7 +1304,9 @@ public abstract class LuceneTestCase extends Assert {
public static BaseDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
for (String file : d.listAll()) {
impl.copyFrom(d, file, file, newIOContext(r));
if (file.startsWith(IndexFileNames.SEGMENTS) || IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
impl.copyFrom(d, file, file, newIOContext(r));
}
}
return wrapDirectory(r, impl, rarely(r));
}

View File

@@ -17,6 +17,7 @@ import java.util.Random;
import java.util.Set;
import org.apache.lucene.mockfile.DisableFsyncFS;
import org.apache.lucene.mockfile.ExtrasFS;
import org.apache.lucene.mockfile.HandleLimitFS;
import org.apache.lucene.mockfile.LeakFS;
import org.apache.lucene.mockfile.VerboseFS;
@@ -155,6 +156,9 @@ final class TestRuleTemporaryFilesCleanup extends TestRuleAdapter {
fs = new WindowsFS(fs).getFileSystem(null);
}
}
if (allowed(avoid, ExtrasFS.class)) {
fs = new ExtrasFS(fs, new Random(random.nextLong())).getFileSystem(null);
}
}
if (LuceneTestCase.VERBOSE) {
System.out.println("filesystem: " + fs.provider());

View File

@@ -73,6 +73,7 @@ import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;
@@ -1208,7 +1209,9 @@ public final class TestUtil {
public static RAMDirectory ramCopyOf(Directory dir) throws IOException {
RAMDirectory ram = new RAMDirectory();
for (String file : dir.listAll()) {
ram.copyFrom(dir, file, file, IOContext.DEFAULT);
if (file.startsWith(IndexFileNames.SEGMENTS) || IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
ram.copyFrom(dir, file, file, IOContext.DEFAULT);
}
}
return ram;
}

View File

@@ -341,7 +341,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
@Test
public void testSolrHomeDoesntExist() throws Exception {
File homeDir = solrHomeDirectory;
Files.delete(homeDir.toPath());
IOUtils.rm(homeDir.toPath());
CoreContainer cc = null;
try {
cc = init();

View File

@@ -28,6 +28,7 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks;
import org.apache.lucene.util.QuickPatchThreadsFilter;
import org.apache.lucene.util.TestUtil;
@@ -125,6 +126,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
QuickPatchThreadsFilter.class
})
@SuppressSysoutChecks(bugUrl = "Solr dumps tons of logs to console.")
@SuppressFileSystems("ExtrasFS") // might be ok, the failures with e.g. nightly runs might be "normal"
public abstract class SolrTestCaseJ4 extends LuceneTestCase {
public static final String DEFAULT_TEST_CORENAME = "collection1";
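The suppression above is the escape hatch for the whole mechanism: TestRuleTemporaryFilesCleanup consults the annotation (the allowed(avoid, ExtrasFS.class) check shown earlier) and skips installing ExtrasFS for the class. A sketch of applying it to a single, hypothetical test class instead of all of Solr:

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;

// Hypothetical test class, for illustration only.
@SuppressFileSystems("ExtrasFS")
public class MyStrictDirectoryTest extends LuceneTestCase {
  public void testExactListing() throws Exception {
    // Exact directory-content assertions are safe here: no bonus files appear.
  }
}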