[CORE] Cut over to Path API for file deletion

Today we use the File API for file deletion as well as for recursive
directory deletion. That API only returns a boolean indicating whether
an operation succeeded, hiding the actual reason for a failure. The
Path API throws an actual exception that can provide better insight
and debugging information.

Closes #8366
Simon Willnauer 2014-11-06 15:12:16 +01:00
parent 9ebce349e1
commit 95171e2bc2
34 changed files with 244 additions and 153 deletions
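The contrast described in the commit message is easy to reproduce in isolation. A minimal, self-contained sketch (the path used here is made up for illustration; only java.io.File and java.nio.file are involved):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class DeleteContrast {
    public static void main(String[] args) {
        // Legacy File API: a bare boolean, no hint whether the file was
        // missing, locked, read-only, or a non-empty directory.
        boolean ok = new File("work/stale.tmp").delete();
        if (!ok) {
            System.out.println("delete failed, reason unknown");
        }

        // Path API: the failure reason is carried by the exception type and
        // message (NoSuchFileException, DirectoryNotEmptyException, ...).
        try {
            Files.delete(Paths.get("work/stale.tmp"));
        } catch (IOException e) {
            System.out.println("delete failed: " + e);
        }
    }
}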

View File

@@ -1,3 +1,5 @@
 @defaultMessage Convert to URI
 java.net.URL#getPath()
 java.net.URL#getFile()
+
+java.io.File#delete() @ use Files.delete for real exception, IOUtils.deleteFilesIgnoringExceptions if you dont care
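The message on that forbidden signature names the two replacements this commit standardizes on: Files.delete when the caller needs the real failure, and a swallow-everything helper when it does not. A rough sketch of both, assuming the FileSystemUtils.deleteFilesIgnoringExceptions helper introduced later in this diff; the method names and parameters of the sketch itself are hypothetical:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.elasticsearch.common.io.FileSystemUtils;

class DeletionHelpers {
    // Outcome matters: let the IOException (NoSuchFileException,
    // AccessDeniedException, ...) escape to the caller.
    static void deleteOrFail(Path stateFile) throws IOException {
        Files.delete(stateFile);
    }

    // Outcome does not matter: best-effort cleanup that never throws.
    static void cleanupQuietly(Path... tempFiles) {
        FileSystemUtils.deleteFilesIgnoringExceptions(tempFiles);
    }
}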

View File

@@ -51,7 +51,7 @@ public interface BlobContainer {
      */
     OutputStream createOutput(String blobName) throws IOException;
 
-    boolean deleteBlob(String blobName) throws IOException;
+    void deleteBlob(String blobName) throws IOException;
 
     void deleteBlobsByPrefix(String blobNamePrefix) throws IOException;

View File

@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.common.blobstore;
 
+import java.io.IOException;
+
 /**
  *
  */
@@ -25,7 +27,7 @@ public interface BlobStore {
 
     BlobContainer blobContainer(BlobPath path);
 
-    void delete(BlobPath path);
+    void delete(BlobPath path) throws IOException;
 
     void close();
 }

View File

@@ -30,6 +30,8 @@ import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.FileSystemUtils;
 
 import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Path;
 
 /**
  *
@@ -65,8 +67,12 @@ public class FsBlobContainer extends AbstractBlobContainer {
         return builder.immutableMap();
     }
 
-    public boolean deleteBlob(String blobName) throws IOException {
-        return new File(path, blobName).delete();
+    @Override
+    public void deleteBlob(String blobName) throws IOException {
+        Path blobPath = new File(path, blobName).toPath();
+        if (Files.exists(blobPath)) {
+            Files.delete(blobPath);
+        }
     }
 
     @Override

View File

@@ -19,6 +19,7 @@
 package org.elasticsearch.common.blobstore.fs;
 
+import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
@@ -30,6 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 
 import java.io.File;
+import java.io.IOException;
 
 /**
  *
@@ -74,8 +76,8 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {
     }
 
     @Override
-    public void delete(BlobPath path) {
-        FileSystemUtils.deleteRecursively(buildPath(path));
+    public void delete(BlobPath path) throws IOException {
+        IOUtils.rm(buildPath(path).toPath());
     }
 
     @Override

View File

@@ -73,7 +73,7 @@ public class URLBlobContainer extends AbstractBlobContainer {
      * This operation is not supported by URLBlobContainer
      */
     @Override
-    public boolean deleteBlob(String blobName) throws IOException {
+    public void deleteBlob(String blobName) throws IOException {
         throw new UnsupportedOperationException("URL repository is read only");
     }

View File

@ -22,6 +22,7 @@ package org.elasticsearch.common.http.client;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import java.io.*; import java.io.*;
@ -346,7 +347,7 @@ public class HttpDownloadHelper {
// Try to delete the garbage we'd otherwise leave // Try to delete the garbage we'd otherwise leave
// behind. // behind.
IOUtils.closeWhileHandlingException(os, is); IOUtils.closeWhileHandlingException(os, is);
dest.delete(); FileSystemUtils.deleteFilesIgnoringExceptions(dest.toPath());
} else { } else {
IOUtils.close(os, is); IOUtils.close(os, is);
} }
@ -385,7 +386,7 @@ public class HttpDownloadHelper {
} else { } else {
IOUtils.closeWhileHandlingException(is, os); IOUtils.closeWhileHandlingException(is, os);
if (dest != null && dest.exists()) { if (dest != null && dest.exists()) {
dest.delete(); FileSystemUtils.deleteFilesIgnoringExceptions(dest.toPath());
} }
} }
} }

View File

@ -78,84 +78,34 @@ public class FileSystemUtils {
} }
/** /**
* Deletes the given files recursively. if <tt>deleteRoots</tt> is set to <code>true</code> * Returns an array of {@link Path} build from the correspondent element
* the given root files will be deleted as well. Otherwise only their content is deleted. * in the input array using {@link java.io.File#toPath()}
* @param files the files to get paths for
*/ */
public static boolean deleteRecursively(File[] roots, boolean deleteRoots) { public static Path[] toPaths(File... files) {
Path[] paths = new Path[files.length];
boolean deleted = true; for (int i = 0; i < files.length; i++) {
for (File root : roots) { paths[i] = files[i].toPath();
deleted &= deleteRecursively(root, deleteRoots);
} }
return deleted; return paths;
} }
/** /**
* Deletes all subdirectories of the given roots recursively. * Deletes all subdirectories in the given path recursively
* @throws java.lang.IllegalArgumentException if the given path is not a directory
*/ */
public static boolean deleteSubDirectories(File[] roots) { public static void deleteSubDirectories(Path... paths) throws IOException {
for (Path path : paths) {
boolean deleted = true; try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
for (File root : roots) { for (Path subPath : stream) {
if (root.isDirectory()) { if (Files.isDirectory(subPath)) {
File[] files = root.listFiles(new FileFilter() { IOUtils.rm(subPath);
@Override
public boolean accept(File pathname) {
return pathname.isDirectory();
}
});
deleted &= deleteRecursively(files, true);
}
}
return deleted;
}
/**
* Deletes the given files recursively including the given roots.
*/
public static boolean deleteRecursively(File... roots) {
return deleteRecursively(roots, true);
}
/**
* Delete the supplied {@link java.io.File} - for directories,
* recursively delete any nested directories or files as well.
*
* @param root the root <code>File</code> to delete
* @param deleteRoot whether or not to delete the root itself or just the content of the root.
* @return <code>true</code> if the <code>File</code> was deleted,
* otherwise <code>false</code>
*/
public static boolean deleteRecursively(File root, boolean deleteRoot) {
if (root != null && root.exists()) {
if (root.isDirectory()) {
File[] children = root.listFiles();
if (children != null) {
for (File aChildren : children) {
deleteRecursively(aChildren, true);
} }
} }
} }
if (deleteRoot) {
return root.delete();
} else {
return true;
}
} }
return false;
} }
/**
* Ensure that any writes to the given file is written to the storage device that contains it.
* @param fileToSync the file to fsync
* @param isDir if true, the given file is a directory (we open for read and ignore IOExceptions,
* because not all file systems and operating systems allow to fsync on a directory)
*/
public static void syncFile(File fileToSync, boolean isDir) throws IOException {
IOUtils.fsync(fileToSync.toPath(), isDir);
}
/** /**
* Check that a directory exists, is a directory and is readable * Check that a directory exists, is a directory and is readable
@ -181,11 +131,19 @@ public class FileSystemUtils {
private FileSystemUtils() {} private FileSystemUtils() {}
public static void tryDeleteFile(File file) { /**
try { * Temporary solution until LUCENE-6051 is fixed
file.delete(); * @see org.apache.lucene.util.IOUtils#deleteFilesIgnoringExceptions(java.nio.file.Path...)
} catch (SecurityException e1) { */
// ignore public static void deleteFilesIgnoringExceptions(Path... files) {
for (Path name : files) {
if (name != null) {
try {
Files.delete(name);
} catch (Throwable ignored) {
// ignore
}
}
} }
} }

View File

@ -22,6 +22,7 @@ package org.elasticsearch.gateway.local;
import com.carrotsearch.hppc.ObjectFloatOpenHashMap; import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
import com.carrotsearch.hppc.ObjectOpenHashSet; import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.*;
@ -197,7 +198,11 @@ public class LocalGateway extends AbstractLifecycleComponent<Gateway> implements
@Override @Override
public void reset() throws Exception { public void reset() throws Exception {
FileSystemUtils.deleteRecursively(nodeEnv.nodeDataLocations()); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.nodeDataLocations()));
} catch (Exception ex) {
logger.debug("failed to delete shard locations", ex);
}
} }
@Override @Override

View File

@ -21,6 +21,7 @@ package org.elasticsearch.gateway.local.state.meta;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
@ -49,6 +50,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@ -243,7 +245,11 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
if (!newMetaData.hasIndex(current.index())) { if (!newMetaData.hasIndex(current.index())) {
logger.debug("[{}] deleting index that is no longer part of the metadata (indices: [{}])", current.index(), newMetaData.indices().keys()); logger.debug("[{}] deleting index that is no longer part of the metadata (indices: [{}])", current.index(), newMetaData.indices().keys());
if (nodeEnv.hasNodeFile()) { if (nodeEnv.hasNodeFile()) {
FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(current.index()))); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.indexLocations(new Index(current.index()))));
} catch (Exception ex) {
logger.debug("[{}] failed to delete index", ex, current.index());
}
} }
try { try {
nodeIndexDeletedAction.nodeIndexStoreDeleted(event.state(), current.index(), event.state().nodes().localNodeId()); nodeIndexDeletedAction.nodeIndexStoreDeleted(event.state(), current.index(), event.state().nodes().localNodeId());
@ -280,7 +286,11 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
if (indexMetaData != null) { if (indexMetaData != null) {
if (danglingTimeout.millis() == 0) { if (danglingTimeout.millis() == 0) {
logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, timeout set to 0, deleting now", indexName); logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, timeout set to 0, deleting now", indexName);
FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(indexName))); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.indexLocations(new Index(indexName))));
} catch (Exception ex) {
logger.debug("[{}] failed to delete dangling index", ex, indexName);
}
} else { } else {
logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, scheduling to delete in [{}], auto import to cluster state [{}]", indexName, danglingTimeout, autoImportDangled); logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, scheduling to delete in [{}], auto import to cluster state [{}]", indexName, danglingTimeout, autoImportDangled);
danglingIndices.put(indexName, new DanglingIndex(indexName, threadPool.schedule(danglingTimeout, ThreadPool.Names.SAME, new RemoveDanglingIndex(indexName)))); danglingIndices.put(indexName, new DanglingIndex(indexName, threadPool.schedule(danglingTimeout, ThreadPool.Names.SAME, new RemoveDanglingIndex(indexName))));
@ -517,7 +527,11 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
if (!name.startsWith("metadata-")) { if (!name.startsWith("metadata-")) {
continue; continue;
} }
stateFile.delete(); try {
Files.delete(stateFile.toPath());
} catch (Exception ex) {
logger.debug("failed to delete file " + stateFile, ex);
}
} }
} }
@ -576,7 +590,11 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
return; return;
} }
logger.warn("[{}] deleting dangling index", index); logger.warn("[{}] deleting dangling index", index);
FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(index))); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.indexLocations(new Index(index))));
} catch (Exception ex) {
logger.debug("failed to delete dangling index", ex);
}
} }
} }
} }

View File

@@ -208,7 +208,9 @@ public abstract class MetaDataStateFormat<T> {
             if (file.getName().equals(fileName)) {
                 continue;
             }
-            Files.delete(file.toPath());
+            if (Files.exists(file.toPath())) {
+                Files.delete(file.toPath());
+            }
         }
     }
 }
} }

View File

@ -37,6 +37,7 @@ import org.elasticsearch.gateway.local.state.meta.MetaDataStateFormat;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import java.io.*; import java.io.*;
import java.nio.file.Files;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@ -317,7 +318,12 @@ public class LocalGatewayShardsState extends AbstractComponent implements Cluste
if (!name.startsWith("shards-")) { if (!name.startsWith("shards-")) {
continue; continue;
} }
stateFile.delete(); try {
Files.delete(stateFile.toPath());
} catch (Exception ex) {
logger.debug("Failed to delete state file {}", ex, stateFile);
}
} }
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.gateway.none; package org.elasticsearch.gateway.none;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
@ -117,7 +118,11 @@ public class NoneGateway extends AbstractLifecycleComponent<Gateway> implements
if (!newMetaData.hasIndex(current.index())) { if (!newMetaData.hasIndex(current.index())) {
logger.debug("[{}] deleting index that is no longer part of the metadata (indices: [{}])", current.index(), newMetaData.indices().keys()); logger.debug("[{}] deleting index that is no longer part of the metadata (indices: [{}])", current.index(), newMetaData.indices().keys());
if (nodeEnv.hasNodeFile()) { if (nodeEnv.hasNodeFile()) {
FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(current.index()))); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.indexLocations(new Index(current.index()))));
} catch (Exception ex) {
logger.debug("failed to delete shard locations", ex);
}
} }
try { try {
nodeIndexDeletedAction.nodeIndexStoreDeleted(event.state(), current.index(), event.state().nodes().localNodeId()); nodeIndexDeletedAction.nodeIndexStoreDeleted(event.state(), current.index(), event.state().nodes().localNodeId());

View File

@ -51,6 +51,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.EOFException; import java.io.EOFException;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays; import java.util.Arrays;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
@ -290,7 +291,11 @@ public class LocalIndexShardGateway extends AbstractIndexShardComponent implemen
} }
indexShard.performRecoveryFinalization(true); indexShard.performRecoveryFinalization(true);
recoveringTranslogFile.delete(); try {
Files.delete(recoveringTranslogFile.toPath());
} catch (Exception ex) {
logger.debug("Failed to delete recovering translog file {}", ex, recoveringTranslogFile);
}
for (final String type : typesToUpdate) { for (final String type : typesToUpdate) {
final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch latch = new CountDownLatch(1);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.store.fs; package org.elasticsearch.index.store.fs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -81,7 +82,11 @@ public abstract class FsIndexStore extends AbstractIndexStore {
if (indexService.hasShard(shardId.id())) { if (indexService.hasShard(shardId.id())) {
throw new ElasticsearchIllegalStateException(shardId + " allocated, can't be deleted"); throw new ElasticsearchIllegalStateException(shardId + " allocated, can't be deleted");
} }
FileSystemUtils.deleteRecursively(shardLocations(shardId)); try {
IOUtils.rm(FileSystemUtils.toPaths(shardLocations(shardId)));
} catch (Exception ex) {
logger.debug("failed to delete shard locations", ex);
}
} }
public File[] shardLocations(ShardId shardId) { public File[] shardLocations(ShardId shardId) {

View File

@ -41,6 +41,7 @@ import org.elasticsearch.index.translog.*;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.channels.ClosedChannelException; import java.nio.channels.ClosedChannelException;
import java.nio.file.Files;
import java.util.Collections; import java.util.Collections;
import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReadWriteLock;
@ -215,9 +216,9 @@ public class FsTranslog extends AbstractIndexShardComponent implements Translog
continue; continue;
} }
try { try {
file.delete(); Files.delete(file.toPath());
} catch (Exception e) { } catch (Exception ex) {
// ignore logger.debug("failed to delete " + file, ex);
} }
} }
} }

View File

@ -19,11 +19,15 @@
package org.elasticsearch.index.translog.fs; package org.elasticsearch.index.translog.fs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.io.FileSystemUtils;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.RandomAccessFile; import java.io.RandomAccessFile;
import java.nio.channels.FileChannel; import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
/** /**
@ -70,12 +74,14 @@ public class RafReference {
if (refCount.decrementAndGet() <= 0) { if (refCount.decrementAndGet() <= 0) {
try { try {
raf.close(); raf.close();
if (delete) {
file.delete();
}
} catch (IOException e) { } catch (IOException e) {
// ignore // ignore
} finally {
if (delete) {
FileSystemUtils.deleteFilesIgnoringExceptions(file.toPath());
}
} }
} }
} }
} }

View File

@ -20,6 +20,7 @@
package org.elasticsearch.indices.store; package org.elasticsearch.indices.store;
import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.store.StoreRateLimiting;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
@ -323,7 +324,11 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
File[] shardLocations = nodeEnv.shardLocations(shardId); File[] shardLocations = nodeEnv.shardLocations(shardId);
if (FileSystemUtils.exists(shardLocations)) { if (FileSystemUtils.exists(shardLocations)) {
logger.debug("{} deleting shard that is no longer used", shardId); logger.debug("{} deleting shard that is no longer used", shardId);
FileSystemUtils.deleteRecursively(shardLocations); try {
IOUtils.rm(FileSystemUtils.toPaths(shardLocations));
} catch (Exception ex) {
logger.debug("failed to delete shard locations", ex);
}
} }
} }
} else { } else {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.plugins;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ElasticsearchTimeoutException;
@ -32,6 +33,7 @@ import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.internal.InternalSettingsPreparer;
import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.HttpsURLConnection;
@ -222,12 +224,20 @@ public class PluginManager {
// ignore // ignore
} }
} }
pluginFile.delete(); try {
Files.delete(pluginFile.toPath());
} catch (Exception ex) {
log("Failed to delete plugin file" + pluginFile + " " + ex);
}
} }
if (FileSystemUtils.hasExtensions(extractLocation, ".java")) { if (FileSystemUtils.hasExtensions(extractLocation, ".java")) {
debug("Plugin installation assumed to be site plugin, but contains source code, aborting installation..."); debug("Plugin installation assumed to be site plugin, but contains source code, aborting installation...");
FileSystemUtils.deleteRecursively(extractLocation); try {
IOUtils.rm(extractLocation.toPath());
} catch(Exception ex) {
debug("Failed to remove site plugin from path " + extractLocation + " - " + ex.getMessage());
}
throw new IllegalArgumentException("Plugin installation assumed to be site plugin, but contains source code, aborting installation."); throw new IllegalArgumentException("Plugin installation assumed to be site plugin, but contains source code, aborting installation.");
} }
@ -237,7 +247,9 @@ public class PluginManager {
if (binFile.exists() && binFile.isDirectory()) { if (binFile.exists() && binFile.isDirectory()) {
File toLocation = pluginHandle.binDir(environment); File toLocation = pluginHandle.binDir(environment);
debug("Found bin, moving to " + toLocation.getAbsolutePath()); debug("Found bin, moving to " + toLocation.getAbsolutePath());
FileSystemUtils.deleteRecursively(toLocation); if (toLocation.exists()) {
IOUtils.rm(toLocation.toPath());
}
if (!binFile.renameTo(toLocation)) { if (!binFile.renameTo(toLocation)) {
throw new IOException("Could not move ["+ binFile.getAbsolutePath() + "] to [" + toLocation.getAbsolutePath() + "]"); throw new IOException("Could not move ["+ binFile.getAbsolutePath() + "] to [" + toLocation.getAbsolutePath() + "]");
} }
@ -294,27 +306,33 @@ public class PluginManager {
File pluginToDelete = pluginHandle.extractedDir(environment); File pluginToDelete = pluginHandle.extractedDir(environment);
if (pluginToDelete.exists()) { if (pluginToDelete.exists()) {
debug("Removing: " + pluginToDelete.getPath()); debug("Removing: " + pluginToDelete.getPath());
if (!FileSystemUtils.deleteRecursively(pluginToDelete, true)) { try {
IOUtils.rm(pluginToDelete.toPath());
} catch (IOException ex){
throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " + throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
pluginToDelete.toString()); pluginToDelete.toString(), ex);
} }
removed = true; removed = true;
} }
pluginToDelete = pluginHandle.distroFile(environment); pluginToDelete = pluginHandle.distroFile(environment);
if (pluginToDelete.exists()) { if (pluginToDelete.exists()) {
debug("Removing: " + pluginToDelete.getPath()); debug("Removing: " + pluginToDelete.getPath());
if (!pluginToDelete.delete()) { try {
Files.delete(pluginToDelete.toPath());
} catch (Exception ex) {
throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " + throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
pluginToDelete.toString()); pluginToDelete.toString(), ex);
} }
removed = true; removed = true;
} }
File binLocation = pluginHandle.binDir(environment); File binLocation = pluginHandle.binDir(environment);
if (binLocation.exists()) { if (binLocation.exists()) {
debug("Removing: " + binLocation.getPath()); debug("Removing: " + binLocation.getPath());
if (!FileSystemUtils.deleteRecursively(binLocation)) { try {
IOUtils.rm(binLocation.toPath());
} catch (IOException ex){
throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " + throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
binLocation.toString()); binLocation.toString(), ex);
} }
removed = true; removed = true;
} }

View File

@ -18,13 +18,16 @@
*/ */
package org.elasticsearch.benchmark.fs; package org.elasticsearch.benchmark.fs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
import java.io.File; import java.io.File;
import java.io.RandomAccessFile; import java.io.RandomAccessFile;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.channels.FileChannel; import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.util.Random; import java.util.Random;
/** /**
@ -33,7 +36,7 @@ import java.util.Random;
public class FsAppendBenchmark { public class FsAppendBenchmark {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
new File("work/test.log").delete(); FileSystemUtils.deleteFilesIgnoringExceptions(Paths.get("work/test.log"));
RandomAccessFile raf = new RandomAccessFile("work/test.log", "rw"); RandomAccessFile raf = new RandomAccessFile("work/test.log", "rw");
raf.setLength(0); raf.setLength(0);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.cluster.allocation; package org.elasticsearch.cluster.allocation;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
@ -196,7 +197,7 @@ public class ClusterRerouteTests extends ElasticsearchIntegrationTest {
logger.info("--> deleting the shard data [{}] ", Arrays.toString(shardLocation)); logger.info("--> deleting the shard data [{}] ", Arrays.toString(shardLocation));
assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // verify again after cluster was shut down assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // verify again after cluster was shut down
assertThat(FileSystemUtils.deleteRecursively(shardLocation), equalTo(true)); IOUtils.rm(FileSystemUtils.toPaths(shardLocation));
logger.info("--> starting nodes back, will not allocate the shard since it has no data, but the index will be there"); logger.info("--> starting nodes back, will not allocate the shard since it has no data, but the index will be there");
node_1 = internalCluster().startNode(commonSettings); node_1 = internalCluster().startNode(commonSettings);

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import com.google.common.io.Files; import com.google.common.io.Files;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
@ -98,7 +99,7 @@ public class FileBasedMappingsTests extends ElasticsearchTestCase {
assertEquals(ImmutableSet.of("f", "g", "h"), properties.keySet()); assertEquals(ImmutableSet.of("f", "g", "h"), properties.keySet());
} }
} finally { } finally {
FileSystemUtils.deleteRecursively(configDir); IOUtils.rm(configDir.toPath());
} }
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.translog.fs; package org.elasticsearch.index.translog.fs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.translog.AbstractSimpleTranslogTests; import org.elasticsearch.index.translog.AbstractSimpleTranslogTests;
@ -26,6 +27,8 @@ import org.elasticsearch.index.translog.Translog;
import org.junit.AfterClass; import org.junit.AfterClass;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
/** /**
* *
@ -49,7 +52,7 @@ public class FsBufferedTranslogTests extends AbstractSimpleTranslogTests {
} }
@AfterClass @AfterClass
public static void cleanup() { public static void cleanup() throws IOException {
FileSystemUtils.deleteRecursively(new File("data/fs-buf-translog"), true); IOUtils.rm(Paths.get("data/fs-buf-translog"));
} }
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.translog.fs; package org.elasticsearch.index.translog.fs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.translog.AbstractSimpleTranslogTests; import org.elasticsearch.index.translog.AbstractSimpleTranslogTests;
@ -26,6 +27,8 @@ import org.elasticsearch.index.translog.Translog;
import org.junit.AfterClass; import org.junit.AfterClass;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
/** /**
* *
@ -45,7 +48,7 @@ public class FsSimpleTranslogTests extends AbstractSimpleTranslogTests {
} }
@AfterClass @AfterClass
public static void cleanup() { public static void cleanup() throws IOException {
FileSystemUtils.deleteRecursively(new File("data/fs-simple-translog"), true); IOUtils.rm(Paths.get("data/fs-simple-translog"));
} }
} }

View File

@ -20,6 +20,7 @@ package org.elasticsearch.plugins;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.HttpClients;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ElasticsearchTimeoutException;
@ -46,6 +47,7 @@ import org.junit.Test;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.net.URI; import java.net.URI;
import java.nio.file.Paths;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.io.FileSystemUtilsTests.assertFileContent; import static org.elasticsearch.common.io.FileSystemUtilsTests.assertFileContent;
@ -65,12 +67,12 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
private static final String PLUGIN_DIR = "plugins"; private static final String PLUGIN_DIR = "plugins";
@After @After
public void afterTest() { public void afterTest() throws IOException {
deletePluginsFolder(); deletePluginsFolder();
} }
@Before @Before
public void beforeTest() { public void beforeTest() throws IOException {
deletePluginsFolder(); deletePluginsFolder();
} }
@ -123,8 +125,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
assertThat(toolFile.canExecute(), is(true)); assertThat(toolFile.canExecute(), is(true));
} finally { } finally {
// we need to clean up the copied dirs // we need to clean up the copied dirs
FileSystemUtils.deleteRecursively(pluginBinDir); IOUtils.rm(pluginBinDir.toPath(), pluginConfigDir.toPath());
FileSystemUtils.deleteRecursively(pluginConfigDir);
} }
} }
@ -206,7 +207,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
assertFileContent(pluginConfigDir, "dir/subdir/testsubdir.txt.new", "version2\n"); assertFileContent(pluginConfigDir, "dir/subdir/testsubdir.txt.new", "version2\n");
} finally { } finally {
// we need to clean up the copied dirs // we need to clean up the copied dirs
FileSystemUtils.deleteRecursively(pluginConfigDir); IOUtils.rm(pluginConfigDir.toPath());
} }
} }
@ -230,7 +231,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
assertDirectoryExists(pluginBinDir); assertDirectoryExists(pluginBinDir);
} finally { } finally {
// we need to clean up the copied dirs // we need to clean up the copied dirs
FileSystemUtils.deleteRecursively(pluginBinDir); IOUtils.rm(pluginBinDir.toPath());
} }
} }
@ -463,8 +464,8 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
return false; return false;
} }
private void deletePluginsFolder() { private void deletePluginsFolder() throws IOException {
FileSystemUtils.deleteRecursively(new File(PLUGIN_DIR)); IOUtils.rm(Paths.get(PLUGIN_DIR));
} }
@Test @Test

View File

@ -31,6 +31,7 @@ import org.junit.Test;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.util.Map; import java.util.Map;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
@ -73,8 +74,8 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file"));
logger.info("--> delete both files"); logger.info("--> delete both files");
assertThat(testFileNoExt.delete(), equalTo(true)); Files.delete(testFileNoExt.toPath());
assertThat(testFileWithExt.delete(), equalTo(true)); Files.delete(testFileWithExt.toPath());
resourceWatcherService.notifyNow(); resourceWatcherService.notifyNow();
logger.info("--> verify that file with extension was correctly removed"); logger.info("--> verify that file with extension was correctly removed");

View File

@ -22,6 +22,7 @@ package org.elasticsearch.snapshots;
import com.carrotsearch.randomizedtesting.LifecycleScope; import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.ListenableActionFuture;
@ -45,6 +46,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.SnapshotMetaData; import org.elasticsearch.cluster.metadata.SnapshotMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.store.support.AbstractIndexStore; import org.elasticsearch.index.store.support.AbstractIndexStore;
@ -55,6 +57,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
import org.junit.Test; import org.junit.Test;
import java.io.File; import java.io.File;
import java.nio.file.Files;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@ -732,8 +735,8 @@ public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests {
File testIndex1 = new File(indices, "test-idx-1"); File testIndex1 = new File(indices, "test-idx-1");
File testIndex2 = new File(indices, "test-idx-2"); File testIndex2 = new File(indices, "test-idx-2");
File testIndex2Shard0 = new File(testIndex2, "0"); File testIndex2Shard0 = new File(testIndex2, "0");
new File(testIndex1, "snapshot-test-snap-1").delete(); FileSystemUtils.deleteFilesIgnoringExceptions(new File(testIndex1, "snapshot-test-snap-1").toPath());
new File(testIndex2Shard0, "snapshot-test-snap-1").delete(); FileSystemUtils.deleteFilesIgnoringExceptions(new File(testIndex2Shard0, "snapshot-test-snap-1").toPath());
logger.info("--> delete snapshot"); logger.info("--> delete snapshot");
client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-1").get(); client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-1").get();
@ -768,7 +771,7 @@ public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests {
logger.info("--> delete index metadata and shard metadata"); logger.info("--> delete index metadata and shard metadata");
File metadata = new File(repo, "metadata-test-snap-1"); File metadata = new File(repo, "metadata-test-snap-1");
assertThat(metadata.delete(), equalTo(true)); Files.delete(metadata.toPath());
logger.info("--> delete snapshot"); logger.info("--> delete snapshot");
client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-1").get(); client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-1").get();

View File

@@ -58,8 +58,8 @@ public class BlobContainerWrapper implements BlobContainer {
     }
 
     @Override
-    public boolean deleteBlob(String blobName) throws IOException {
-        return delegate.deleteBlob(blobName);
+    public void deleteBlob(String blobName) throws IOException {
+        delegate.deleteBlob(blobName);
     }
 
     @Override

View File

@@ -22,6 +22,8 @@ import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
 
+import java.io.IOException;
+
 /**
  *
  */
@@ -39,7 +41,7 @@ public class BlobStoreWrapper implements BlobStore {
     }
 
     @Override
-    public void delete(BlobPath path) {
+    public void delete(BlobPath path) throws IOException {
         delegate.delete(path);
     }

View File

@@ -262,9 +262,9 @@ public class MockRepository extends FsRepository {
         }
 
         @Override
-        public boolean deleteBlob(String blobName) throws IOException {
+        public void deleteBlob(String blobName) throws IOException {
             maybeIOExceptionOrBlock(blobName);
-            return super.deleteBlob(blobName);
+            super.deleteBlob(blobName);
         }
 
         @Override

View File

@ -19,6 +19,7 @@
package org.elasticsearch.stresstest.fullrestart; package org.elasticsearch.stresstest.fullrestart;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.count.CountResponse;
@ -200,7 +201,11 @@ public class FullRestartStressTest {
File[] nodeDatas = ((InternalNode) node).injector().getInstance(NodeEnvironment.class).nodeDataLocations(); File[] nodeDatas = ((InternalNode) node).injector().getInstance(NodeEnvironment.class).nodeDataLocations();
node.close(); node.close();
if (clearNodeWork && !settings.get("gateway.type").equals("local")) { if (clearNodeWork && !settings.get("gateway.type").equals("local")) {
FileSystemUtils.deleteRecursively(nodeDatas); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeDatas));
} catch (Exception ex) {
logger.debug("failed to remove node data locations", ex);
}
} }
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.stresstest.rollingrestart; package org.elasticsearch.stresstest.rollingrestart;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
@ -170,7 +171,12 @@ public class RollingRestartStressTest {
File[] nodeData = ((InternalNode) nodes[nodeIndex]).injector().getInstance(NodeEnvironment.class).nodeDataLocations(); File[] nodeData = ((InternalNode) nodes[nodeIndex]).injector().getInstance(NodeEnvironment.class).nodeDataLocations();
nodes[nodeIndex].close(); nodes[nodeIndex].close();
if (clearNodeData) { if (clearNodeData) {
FileSystemUtils.deleteRecursively(nodeData); try {
IOUtils.rm(FileSystemUtils.toPaths(nodeData));
} catch (Exception ex) {
logger.debug("Failed to delete node data directories", ex);
}
} }
try { try {

View File

@ -99,6 +99,7 @@ import java.io.Closeable;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.util.*; import java.util.*;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
@ -164,7 +165,7 @@ public final class InternalTestCluster extends TestCluster {
/* sorted map to make traverse order reproducible, concurrent since we do checks on it not within a sync block */ /* sorted map to make traverse order reproducible, concurrent since we do checks on it not within a sync block */
private final NavigableMap<String, NodeAndClient> nodes = new TreeMap<>(); private final NavigableMap<String, NodeAndClient> nodes = new TreeMap<>();
private final Set<File> dataDirToClean = new HashSet<>(); private final Set<Path> dataDirToClean = new HashSet<>();
private final String clusterName; private final String clusterName;
@ -788,7 +789,7 @@ public final class InternalTestCluster extends TestCluster {
if (callback.clearData(name)) { if (callback.clearData(name)) {
NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node);
if (nodeEnv.hasNodeFile()) { if (nodeEnv.hasNodeFile()) {
FileSystemUtils.deleteRecursively(nodeEnv.nodeDataLocations()); IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.nodeDataLocations()));
} }
} }
node = (InternalNode) nodeBuilder().settings(node.settings()).settings(newSettings).node(); node = (InternalNode) nodeBuilder().settings(node.settings()).settings(newSettings).node();
@ -953,12 +954,17 @@ public final class InternalTestCluster extends TestCluster {
private void wipeDataDirectories() { private void wipeDataDirectories() {
if (!dataDirToClean.isEmpty()) { if (!dataDirToClean.isEmpty()) {
boolean deleted = false;
try { try {
deleted = FileSystemUtils.deleteSubDirectories(dataDirToClean.toArray(new File[dataDirToClean.size()])); for (Path path : dataDirToClean) {
try {
FileSystemUtils.deleteSubDirectories(path);
logger.info("Successfully wiped data directory for node location: {}", path);
} catch (IOException e) {
logger.info("Failed to wipe data directory for node location: {}", path);
}
}
} finally { } finally {
logger.info("Wipe data directory for all nodes locations: {} success: {}", this.dataDirToClean, deleted); dataDirToClean.clear();
this.dataDirToClean.clear();
} }
} }
} }
@ -1399,7 +1405,7 @@ public final class InternalTestCluster extends TestCluster {
assert !nodeAndClient.node().isClosed(); assert !nodeAndClient.node().isClosed();
NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, nodeAndClient.node); NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, nodeAndClient.node);
if (nodeEnv.hasNodeFile()) { if (nodeEnv.hasNodeFile()) {
dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataLocations())); dataDirToClean.addAll(Arrays.asList(FileSystemUtils.toPaths(nodeEnv.nodeDataLocations())));
} }
nodes.put(nodeAndClient.name, nodeAndClient); nodes.put(nodeAndClient.name, nodeAndClient);
applyDisruptionSchemeToNode(nodeAndClient); applyDisruptionSchemeToNode(nodeAndClient);

View File

@ -19,17 +19,19 @@
package org.elasticsearch.watcher; package org.elasticsearch.watcher;
import com.carrotsearch.randomizedtesting.LifecycleScope; import com.carrotsearch.randomizedtesting.LifecycleScope;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.test.ElasticsearchTestCase; import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test; import org.junit.Test;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List; import java.util.List;
import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.io.Files.*; import static com.google.common.io.Files.*;
import static org.elasticsearch.common.io.FileSystemUtils.deleteRecursively;
import static org.hamcrest.Matchers.*; import static org.hamcrest.Matchers.*;
/** /**
@ -114,7 +116,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), hasSize(0)); assertThat(changes.notifications(), hasSize(0));
testFile.delete(); Files.delete(testFile.toPath());
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains(equalTo("onFileDeleted: test.txt"))); assertThat(changes.notifications(), contains(equalTo("onFileDeleted: test.txt")));
@ -160,8 +162,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), hasSize(0)); assertThat(changes.notifications(), hasSize(0));
new File(testDir, "test1.txt").delete(); Files.delete(new File(testDir, "test1.txt").toPath());
new File(testDir, "test2.txt").delete(); Files.delete(new File(testDir, "test2.txt").toPath());
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains( assertThat(changes.notifications(), contains(
@ -173,7 +175,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), hasSize(0)); assertThat(changes.notifications(), hasSize(0));
new File(testDir, "test0.txt").delete(); Files.delete(new File(testDir, "test0.txt").toPath());
touch(new File(testDir, "test2.txt")); touch(new File(testDir, "test2.txt"));
touch(new File(testDir, "test4.txt")); touch(new File(testDir, "test4.txt"));
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
@ -187,8 +189,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
changes.notifications().clear(); changes.notifications().clear();
new File(testDir, "test3.txt").delete(); Files.delete(new File(testDir, "test3.txt").toPath());
new File(testDir, "test4.txt").delete(); Files.delete(new File(testDir, "test4.txt").toPath());
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains( assertThat(changes.notifications(), contains(
equalTo("onFileDeleted: test-dir/test3.txt"), equalTo("onFileDeleted: test-dir/test3.txt"),
@ -197,7 +199,9 @@ public class FileWatcherTest extends ElasticsearchTestCase {
changes.notifications().clear(); changes.notifications().clear();
deleteRecursively(testDir); if (testDir.exists()) {
IOUtils.rm(testDir.toPath());
}
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains( assertThat(changes.notifications(), contains(
@ -261,7 +265,10 @@ public class FileWatcherTest extends ElasticsearchTestCase {
assertThat(changes.notifications(), hasSize(0)); assertThat(changes.notifications(), hasSize(0));
// Delete a directory, check notifications for // Delete a directory, check notifications for
deleteRecursively(new File(testDir, "first-level")); Path path = new File(testDir, "first-level").toPath();
if (Files.exists(path)) {
IOUtils.rm(path);
}
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains( assertThat(changes.notifications(), contains(
equalTo("onFileDeleted: test-dir/first-level/file1.txt"), equalTo("onFileDeleted: test-dir/first-level/file1.txt"),
@ -294,7 +301,9 @@ public class FileWatcherTest extends ElasticsearchTestCase {
changes.notifications().clear(); changes.notifications().clear();
deleteRecursively(subDir); if (subDir.exists()) {
IOUtils.rm(subDir.toPath());
}
touch(subDir); touch(subDir);
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains( assertThat(changes.notifications(), contains(
@ -306,7 +315,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
changes.notifications().clear(); changes.notifications().clear();
subDir.delete(); Files.delete(subDir.toPath());
subDir.mkdir(); subDir.mkdir();
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
@ -330,8 +339,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
fileWatcher.init(); fileWatcher.init();
changes.notifications().clear(); changes.notifications().clear();
new File(testDir, "test0.txt").delete(); Files.delete(new File(testDir, "test0.txt").toPath());
new File(testDir, "test1.txt").delete(); Files.delete(new File(testDir, "test1.txt").toPath());
fileWatcher.checkAndNotify(); fileWatcher.checkAndNotify();
assertThat(changes.notifications(), contains( assertThat(changes.notifications(), contains(
equalTo("onFileDeleted: test-dir/test0.txt"), equalTo("onFileDeleted: test-dir/test0.txt"),