Add File.java to forbidden APIs
This commit cuts over all of core (not quite all tests) to java.nio.Path. It also adds the File class to the core forbidden APIs to prevent its usage. This commit also resolves #8254 since we now consistently use the NIO Path API. The changes in this commit allow for more information if IO operations fail, since the NIO API throws exceptions instead of returning boolean values. The built-in methods used in this commit are also more resilient to encoding errors like unmappable characters and throw exceptions if those characters are present in a file. Closes #8254 Closes #8666
This commit is contained in:
parent
d8f16178d3
commit
a6510f9245
|
@ -114,3 +114,17 @@ org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter()
|
|||
|
||||
@defaultMessage Don't interrupt threads use FutureUtils#cancel(Future<T>) instead
|
||||
java.util.concurrent.Future#cancel(boolean)
|
||||
|
||||
@defaultMessage Use java.nio.file.Path / java.nio.file.Files instead of java.io.File API
|
||||
java.util.jar.JarFile
|
||||
java.util.zip.ZipFile
|
||||
java.io.File
|
||||
java.io.FileInputStream
|
||||
java.io.FileOutputStream
|
||||
java.io.PrintStream#<init>(java.lang.String,java.lang.String)
|
||||
java.io.PrintWriter#<init>(java.lang.String,java.lang.String)
|
||||
java.util.Formatter#<init>(java.lang.String,java.lang.String,java.util.Locale)
|
||||
java.io.RandomAccessFile
|
||||
java.nio.file.Path#toFile()
|
||||
|
||||
|
||||
|
|
2
pom.xml
2
pom.xml
|
@ -1218,7 +1218,7 @@
|
|||
<exclude>org/elasticsearch/common/logging/log4j/ConsoleAppender*</exclude>
|
||||
<exclude>org/elasticsearch/common/http/client/HttpDownloadHelper*</exclude>
|
||||
<exclude>org/elasticsearch/common/cli/Terminal*</exclude>
|
||||
<exclude>org/elasticsearch/plugins/PluginManager.class</exclude>
|
||||
<exclude>org/elasticsearch/plugins/PluginManager$SysOut.class</exclude>
|
||||
<exclude>org/elasticsearch/common/http/client/HttpDownloadHelper.class</exclude>
|
||||
<exclude>org/elasticsearch/bootstrap/Bootstrap.class</exclude>
|
||||
<exclude>org/elasticsearch/Version.class</exclude>
|
||||
|
|
|
@ -42,6 +42,7 @@ import org.elasticsearch.snapshots.SnapshotsService;
|
|||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
|
@ -86,7 +87,7 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeOperation
|
|||
@Override
|
||||
protected void masterOperation(final SnapshotsStatusRequest request,
|
||||
final ClusterState state,
|
||||
final ActionListener<SnapshotsStatusResponse> listener) throws ElasticsearchException {
|
||||
final ActionListener<SnapshotsStatusResponse> listener) throws Exception {
|
||||
ImmutableList<SnapshotMetaData.Entry> currentSnapshots = snapshotsService.currentSnapshots(request.repository(), request.snapshots());
|
||||
|
||||
if (currentSnapshots.isEmpty()) {
|
||||
|
@ -136,7 +137,7 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeOperation
|
|||
}
|
||||
|
||||
private SnapshotsStatusResponse buildResponse(SnapshotsStatusRequest request, ImmutableList<SnapshotMetaData.Entry> currentSnapshots,
|
||||
TransportNodesSnapshotsStatus.NodesSnapshotStatus nodeSnapshotStatuses) {
|
||||
TransportNodesSnapshotsStatus.NodesSnapshotStatus nodeSnapshotStatuses) throws IOException {
|
||||
// First process snapshot that are currently processed
|
||||
ImmutableList.Builder<SnapshotStatus> builder = ImmutableList.builder();
|
||||
Set<SnapshotId> currentSnapshotIds = newHashSet();
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.action.support.master;
|
|||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.ActionRunnable;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.TransportAction;
|
||||
import org.elasticsearch.cluster.ClusterChangedEvent;
|
||||
|
@ -32,6 +33,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
|
|||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.discovery.MasterNotDiscoveredException;
|
||||
import org.elasticsearch.node.NodeClosedException;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
@ -64,7 +66,7 @@ public abstract class TransportMasterNodeOperationAction<Request extends MasterN
|
|||
|
||||
protected abstract Response newResponse();
|
||||
|
||||
protected abstract void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws ElasticsearchException;
|
||||
protected abstract void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception;
|
||||
|
||||
protected boolean localExecute(Request request) {
|
||||
return false;
|
||||
|
@ -126,20 +128,12 @@ public abstract class TransportMasterNodeOperationAction<Request extends MasterN
|
|||
);
|
||||
|
||||
} else {
|
||||
try {
|
||||
threadPool.executor(executor).execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
masterOperation(request, clusterService.state(), listener);
|
||||
} catch (Throwable e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (Throwable t) {
|
||||
listener.onFailure(t);
|
||||
}
|
||||
threadPool.executor(executor).execute(new ActionRunnable(listener) {
|
||||
@Override
|
||||
protected void doRun() throws Exception {
|
||||
masterOperation(request, clusterService.state(), listener);
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
if (nodes.masterNode() == null) {
|
||||
|
|
|
@ -40,6 +40,8 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.*;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
@ -149,15 +151,12 @@ public class Bootstrap {
|
|||
|
||||
if (pidFile != null) {
|
||||
try {
|
||||
File fPidFile = new File(pidFile);
|
||||
if (fPidFile.getParentFile() != null) {
|
||||
FileSystemUtils.mkdirs(fPidFile.getParentFile());
|
||||
}
|
||||
FileOutputStream outputStream = new FileOutputStream(fPidFile);
|
||||
Path fPidFile = Paths.get(pidFile);
|
||||
Files.createDirectories(fPidFile.getParent());
|
||||
OutputStream outputStream = Files.newOutputStream(fPidFile, StandardOpenOption.DELETE_ON_CLOSE);
|
||||
outputStream.write(Long.toString(JvmInfo.jvmInfo().pid()).getBytes(Charsets.UTF_8));
|
||||
outputStream.close();
|
||||
|
||||
fPidFile.deleteOnExit();
|
||||
outputStream.flush(); // make those bytes visible...
|
||||
// don't close this stream we will delete on JVM exit
|
||||
} catch (Exception e) {
|
||||
String errorMessage = buildErrorMessage("pid", e);
|
||||
System.err.println(errorMessage);
|
||||
|
|
|
@ -70,10 +70,13 @@ import org.elasticsearch.river.RiverIndexName;
|
|||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
@ -303,17 +306,17 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
}
|
||||
|
||||
// now add config level mappings
|
||||
File mappingsDir = new File(environment.configFile(), "mappings");
|
||||
if (mappingsDir.exists() && mappingsDir.isDirectory()) {
|
||||
Path mappingsDir = environment.configFile().resolve("mappings");
|
||||
if (Files.isDirectory(mappingsDir)) {
|
||||
// first index level
|
||||
File indexMappingsDir = new File(mappingsDir, request.index());
|
||||
if (indexMappingsDir.exists() && indexMappingsDir.isDirectory()) {
|
||||
Path indexMappingsDir = mappingsDir.resolve(request.index());
|
||||
if (Files.isDirectory(indexMappingsDir)) {
|
||||
addMappings(mappings, indexMappingsDir);
|
||||
}
|
||||
|
||||
// second is the _default mapping
|
||||
File defaultMappingsDir = new File(mappingsDir, "_default");
|
||||
if (defaultMappingsDir.exists() && defaultMappingsDir.isDirectory()) {
|
||||
Path defaultMappingsDir = mappingsDir.resolve("_default");
|
||||
if (Files.isDirectory(defaultMappingsDir)) {
|
||||
addMappings(mappings, defaultMappingsDir);
|
||||
}
|
||||
}
|
||||
|
@ -485,28 +488,30 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
return XContentFactory.xContent(mappingSource).createParser(mappingSource).mapAndClose();
|
||||
}
|
||||
|
||||
private void addMappings(Map<String, Map<String, Object>> mappings, File mappingsDir) {
|
||||
File[] mappingsFiles = mappingsDir.listFiles();
|
||||
for (File mappingFile : mappingsFiles) {
|
||||
if (mappingFile.isHidden()) {
|
||||
continue;
|
||||
}
|
||||
int lastDotIndex = mappingFile.getName().lastIndexOf('.');
|
||||
String mappingType = lastDotIndex != -1 ? mappingFile.getName().substring(0, lastDotIndex) : mappingFile.getName();
|
||||
try {
|
||||
String mappingSource = Streams.copyToString(new InputStreamReader(new FileInputStream(mappingFile), Charsets.UTF_8));
|
||||
if (mappings.containsKey(mappingType)) {
|
||||
XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource));
|
||||
} else {
|
||||
mappings.put(mappingType, parseMapping(mappingSource));
|
||||
private void addMappings(Map<String, Map<String, Object>> mappings, Path mappingsDir) throws IOException {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(mappingsDir)) {
|
||||
for (Path mappingFile : stream) {
|
||||
final String fileName = mappingFile.getFileName().toString();
|
||||
if (Files.isHidden(mappingFile)) {
|
||||
continue;
|
||||
}
|
||||
int lastDotIndex = fileName.lastIndexOf('.');
|
||||
String mappingType = lastDotIndex != -1 ? mappingFile.getFileName().toString().substring(0, lastDotIndex) : mappingFile.getFileName().toString();
|
||||
try (BufferedReader reader = Files.newBufferedReader(mappingFile, Charsets.UTF_8)) {
|
||||
String mappingSource = Streams.copyToString(reader);
|
||||
if (mappings.containsKey(mappingType)) {
|
||||
XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource));
|
||||
} else {
|
||||
mappings.put(mappingType, parseMapping(mappingSource));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.warn("failed to read / parse mapping [" + mappingType + "] from location [" + mappingFile + "], ignoring...", e);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.warn("failed to read / parse mapping [" + mappingType + "] from location [" + mappingFile + "], ignoring...", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private List<IndexTemplateMetaData> findTemplates(CreateIndexClusterStateUpdateRequest request, ClusterState state, IndexTemplateFilter indexTemplateFilter) {
|
||||
private List<IndexTemplateMetaData> findTemplates(CreateIndexClusterStateUpdateRequest request, ClusterState state, IndexTemplateFilter indexTemplateFilter) throws IOException {
|
||||
List<IndexTemplateMetaData> templates = Lists.newArrayList();
|
||||
for (ObjectCursor<IndexTemplateMetaData> cursor : state.metaData().templates().values()) {
|
||||
IndexTemplateMetaData template = cursor.value;
|
||||
|
@ -516,22 +521,21 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
}
|
||||
|
||||
// see if we have templates defined under config
|
||||
File templatesDir = new File(environment.configFile(), "templates");
|
||||
if (templatesDir.exists() && templatesDir.isDirectory()) {
|
||||
File[] templatesFiles = templatesDir.listFiles();
|
||||
if (templatesFiles != null) {
|
||||
for (File templatesFile : templatesFiles) {
|
||||
if (templatesFile.isFile()) {
|
||||
final Path templatesDir = environment.configFile().resolve("templates");
|
||||
if (Files.exists(templatesDir) && Files.isDirectory(templatesDir)) {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(templatesDir)) {
|
||||
for (Path templatesFile : stream) {
|
||||
if (Files.isRegularFile(templatesFile)) {
|
||||
XContentParser parser = null;
|
||||
try {
|
||||
byte[] templatesData = Streams.copyToByteArray(templatesFile);
|
||||
final byte[] templatesData = Files.readAllBytes(templatesFile);
|
||||
parser = XContentHelper.createParser(templatesData, 0, templatesData.length);
|
||||
IndexTemplateMetaData template = IndexTemplateMetaData.Builder.fromXContent(parser, templatesFile.getName());
|
||||
IndexTemplateMetaData template = IndexTemplateMetaData.Builder.fromXContent(parser, templatesFile.getFileName().toString());
|
||||
if (indexTemplateFilter.apply(request, template)) {
|
||||
templates.add(template);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.warn("[{}] failed to read template [{}] from config", e, request.index(), templatesFile.getAbsolutePath());
|
||||
logger.warn("[{}] failed to read template [{}] from config", e, request.index(), templatesFile.toAbsolutePath());
|
||||
} finally {
|
||||
Releasables.closeWhileHandlingException(parser);
|
||||
}
|
||||
|
|
|
@ -1427,256 +1427,6 @@ public class Base64 {
|
|||
return obj;
|
||||
} // end decodeObject
|
||||
|
||||
|
||||
/**
|
||||
* Convenience method for encoding data to a file.
|
||||
* <p/>
|
||||
* <p>As of v 2.3, if there is a error,
|
||||
* the method will throw an java.io.IOException. <b>This is new to v2.3!</b>
|
||||
* In earlier versions, it just returned false, but
|
||||
* in retrospect that's a pretty poor way to handle it.</p>
|
||||
*
|
||||
* @param dataToEncode byte array of data to encode in base64 form
|
||||
* @param filename Filename for saving encoded data
|
||||
* @throws java.io.IOException if there is an error
|
||||
* @throws NullPointerException if dataToEncode is null
|
||||
* @since 2.1
|
||||
*/
|
||||
public static void encodeToFile(byte[] dataToEncode, String filename)
|
||||
throws java.io.IOException {
|
||||
|
||||
if (dataToEncode == null) {
|
||||
throw new NullPointerException("Data to encode was null.");
|
||||
} // end iff
|
||||
|
||||
Base64.OutputStream bos = null;
|
||||
try {
|
||||
bos = new Base64.OutputStream(
|
||||
new java.io.FileOutputStream(filename), Base64.ENCODE);
|
||||
bos.write(dataToEncode);
|
||||
} // end try
|
||||
catch (java.io.IOException e) {
|
||||
throw e; // Catch and throw to execute finally{} block
|
||||
} // end catch: java.io.IOException
|
||||
finally {
|
||||
try {
|
||||
bos.close();
|
||||
} catch (Exception e) {
|
||||
}
|
||||
} // end finally
|
||||
|
||||
} // end encodeToFile
|
||||
|
||||
|
||||
/**
|
||||
* Convenience method for decoding data to a file.
|
||||
* <p/>
|
||||
* <p>As of v 2.3, if there is a error,
|
||||
* the method will throw an java.io.IOException. <b>This is new to v2.3!</b>
|
||||
* In earlier versions, it just returned false, but
|
||||
* in retrospect that's a pretty poor way to handle it.</p>
|
||||
*
|
||||
* @param dataToDecode Base64-encoded data as a string
|
||||
* @param filename Filename for saving decoded data
|
||||
* @throws java.io.IOException if there is an error
|
||||
* @since 2.1
|
||||
*/
|
||||
public static void decodeToFile(String dataToDecode, String filename)
|
||||
throws java.io.IOException {
|
||||
|
||||
Base64.OutputStream bos = null;
|
||||
try {
|
||||
bos = new Base64.OutputStream(
|
||||
new java.io.FileOutputStream(filename), Base64.DECODE);
|
||||
bos.write(dataToDecode.getBytes(PREFERRED_ENCODING));
|
||||
} // end try
|
||||
catch (java.io.IOException e) {
|
||||
throw e; // Catch and throw to execute finally{} block
|
||||
} // end catch: java.io.IOException
|
||||
finally {
|
||||
try {
|
||||
bos.close();
|
||||
} catch (Exception e) {
|
||||
}
|
||||
} // end finally
|
||||
|
||||
} // end decodeToFile
|
||||
|
||||
|
||||
/**
|
||||
* Convenience method for reading a base64-encoded
|
||||
* file and decoding it.
|
||||
* <p/>
|
||||
* <p>As of v 2.3, if there is a error,
|
||||
* the method will throw an java.io.IOException. <b>This is new to v2.3!</b>
|
||||
* In earlier versions, it just returned false, but
|
||||
* in retrospect that's a pretty poor way to handle it.</p>
|
||||
*
|
||||
* @param filename Filename for reading encoded data
|
||||
* @return decoded byte array
|
||||
* @throws java.io.IOException if there is an error
|
||||
* @since 2.1
|
||||
*/
|
||||
public static byte[] decodeFromFile(String filename)
|
||||
throws java.io.IOException {
|
||||
|
||||
byte[] decodedData = null;
|
||||
Base64.InputStream bis = null;
|
||||
try {
|
||||
// Set up some useful variables
|
||||
java.io.File file = new java.io.File(filename);
|
||||
byte[] buffer = null;
|
||||
int length = 0;
|
||||
int numBytes = 0;
|
||||
|
||||
// Check for size of file
|
||||
if (file.length() > Integer.MAX_VALUE) {
|
||||
throw new java.io.IOException("File is too big for this convenience method (" + file.length() + " bytes).");
|
||||
} // end if: file too big for int index
|
||||
buffer = new byte[(int) file.length()];
|
||||
|
||||
// Open a stream
|
||||
bis = new Base64.InputStream(
|
||||
new java.io.BufferedInputStream(
|
||||
new java.io.FileInputStream(file)), Base64.DECODE);
|
||||
|
||||
// Read until done
|
||||
while ((numBytes = bis.read(buffer, length, 4096)) >= 0) {
|
||||
length += numBytes;
|
||||
} // end while
|
||||
|
||||
// Save in a variable to return
|
||||
decodedData = new byte[length];
|
||||
System.arraycopy(buffer, 0, decodedData, 0, length);
|
||||
|
||||
} // end try
|
||||
catch (java.io.IOException e) {
|
||||
throw e; // Catch and release to execute finally{}
|
||||
} // end catch: java.io.IOException
|
||||
finally {
|
||||
try {
|
||||
bis.close();
|
||||
} catch (Exception e) {
|
||||
}
|
||||
} // end finally
|
||||
|
||||
return decodedData;
|
||||
} // end decodeFromFile
|
||||
|
||||
|
||||
/**
|
||||
* Convenience method for reading a binary file
|
||||
* and base64-encoding it.
|
||||
* <p/>
|
||||
* <p>As of v 2.3, if there is a error,
|
||||
* the method will throw an java.io.IOException. <b>This is new to v2.3!</b>
|
||||
* In earlier versions, it just returned false, but
|
||||
* in retrospect that's a pretty poor way to handle it.</p>
|
||||
*
|
||||
* @param filename Filename for reading binary data
|
||||
* @return base64-encoded string
|
||||
* @throws java.io.IOException if there is an error
|
||||
* @since 2.1
|
||||
*/
|
||||
public static String encodeFromFile(String filename)
|
||||
throws java.io.IOException {
|
||||
|
||||
String encodedData = null;
|
||||
Base64.InputStream bis = null;
|
||||
try {
|
||||
// Set up some useful variables
|
||||
java.io.File file = new java.io.File(filename);
|
||||
byte[] buffer = new byte[Math.max((int) (file.length() * 1.4 + 1), 40)]; // Need max() for math on small files (v2.2.1); Need +1 for a few corner cases (v2.3.5)
|
||||
int length = 0;
|
||||
int numBytes = 0;
|
||||
|
||||
// Open a stream
|
||||
bis = new Base64.InputStream(
|
||||
new java.io.BufferedInputStream(
|
||||
new java.io.FileInputStream(file)), Base64.ENCODE);
|
||||
|
||||
// Read until done
|
||||
while ((numBytes = bis.read(buffer, length, 4096)) >= 0) {
|
||||
length += numBytes;
|
||||
} // end while
|
||||
|
||||
// Save in a variable to return
|
||||
encodedData = new String(buffer, 0, length, Base64.PREFERRED_ENCODING);
|
||||
|
||||
} // end try
|
||||
catch (java.io.IOException e) {
|
||||
throw e; // Catch and release to execute finally{}
|
||||
} // end catch: java.io.IOException
|
||||
finally {
|
||||
try {
|
||||
bis.close();
|
||||
} catch (Exception e) {
|
||||
}
|
||||
} // end finally
|
||||
|
||||
return encodedData;
|
||||
} // end encodeFromFile
|
||||
|
||||
/**
|
||||
* Reads <tt>infile</tt> and encodes it to <tt>outfile</tt>.
|
||||
*
|
||||
* @param infile Input file
|
||||
* @param outfile Output file
|
||||
* @throws java.io.IOException if there is an error
|
||||
* @since 2.2
|
||||
*/
|
||||
public static void encodeFileToFile(String infile, String outfile)
|
||||
throws java.io.IOException {
|
||||
|
||||
String encoded = Base64.encodeFromFile(infile);
|
||||
java.io.OutputStream out = null;
|
||||
try {
|
||||
out = new java.io.BufferedOutputStream(
|
||||
new java.io.FileOutputStream(outfile));
|
||||
out.write(encoded.getBytes("US-ASCII")); // Strict, 7-bit output.
|
||||
} // end try
|
||||
catch (java.io.IOException e) {
|
||||
throw e; // Catch and release to execute finally{}
|
||||
} // end catch
|
||||
finally {
|
||||
try {
|
||||
out.close();
|
||||
} catch (Exception ex) {
|
||||
}
|
||||
} // end finally
|
||||
} // end encodeFileToFile
|
||||
|
||||
|
||||
/**
|
||||
* Reads <tt>infile</tt> and decodes it to <tt>outfile</tt>.
|
||||
*
|
||||
* @param infile Input file
|
||||
* @param outfile Output file
|
||||
* @throws java.io.IOException if there is an error
|
||||
* @since 2.2
|
||||
*/
|
||||
public static void decodeFileToFile(String infile, String outfile)
|
||||
throws java.io.IOException {
|
||||
|
||||
byte[] decoded = Base64.decodeFromFile(infile);
|
||||
java.io.OutputStream out = null;
|
||||
try {
|
||||
out = new java.io.BufferedOutputStream(
|
||||
new java.io.FileOutputStream(outfile));
|
||||
out.write(decoded);
|
||||
} // end try
|
||||
catch (java.io.IOException e) {
|
||||
throw e; // Catch and release to execute finally{}
|
||||
} // end catch
|
||||
finally {
|
||||
try {
|
||||
out.close();
|
||||
} catch (Exception ex) {
|
||||
}
|
||||
} // end finally
|
||||
} // end decodeFileToFile
|
||||
|
||||
|
||||
/* ******** I N N E R C L A S S I N P U T S T R E A M ******** */
|
||||
|
||||
|
||||
|
|
|
@ -25,6 +25,8 @@ import java.io.BufferedReader;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
|
||||
/**
|
||||
|
@ -32,35 +34,25 @@ import java.util.concurrent.ThreadLocalRandom;
|
|||
*/
|
||||
public abstract class Names {
|
||||
|
||||
public static String randomNodeName(URL nodeNames) {
|
||||
BufferedReader reader = null;
|
||||
public static String randomNodeName(Path nodeNames) {
|
||||
try {
|
||||
reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), Charsets.UTF_8));
|
||||
int numberOfNames = 0;
|
||||
while (reader.readLine() != null) {
|
||||
numberOfNames++;
|
||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(Files.newInputStream(nodeNames), Charsets.UTF_8))) {
|
||||
while (reader.readLine() != null) {
|
||||
numberOfNames++;
|
||||
}
|
||||
}
|
||||
reader.close();
|
||||
reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), Charsets.UTF_8));
|
||||
int number = ((ThreadLocalRandom.current().nextInt(numberOfNames)) % numberOfNames);
|
||||
for (int i = 0; i < number; i++) {
|
||||
reader.readLine();
|
||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(Files.newInputStream(nodeNames), Charsets.UTF_8))) {
|
||||
int number = ((ThreadLocalRandom.current().nextInt(numberOfNames)) % numberOfNames);
|
||||
for (int i = 0; i < number; i++) {
|
||||
reader.readLine();
|
||||
}
|
||||
return reader.readLine();
|
||||
}
|
||||
return reader.readLine();
|
||||
} catch (IOException e) {
|
||||
return null;
|
||||
} finally {
|
||||
try {
|
||||
if (reader != null) {
|
||||
reader.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// ignore this exception
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Names() {
|
||||
|
||||
}
|
||||
private Names() {}
|
||||
}
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.io.FileSystemUtils;
|
|||
import java.io.*;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -40,28 +41,25 @@ public class FsBlobContainer extends AbstractBlobContainer {
|
|||
|
||||
protected final FsBlobStore blobStore;
|
||||
|
||||
protected final File path;
|
||||
protected final Path path;
|
||||
|
||||
public FsBlobContainer(FsBlobStore blobStore, BlobPath blobPath, File path) {
|
||||
public FsBlobContainer(FsBlobStore blobStore, BlobPath blobPath, Path path) {
|
||||
super(blobPath);
|
||||
this.blobStore = blobStore;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public File filePath() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
public ImmutableMap<String, BlobMetaData> listBlobs() throws IOException {
|
||||
File[] files = path.listFiles();
|
||||
if (files == null || files.length == 0) {
|
||||
Path[] files = FileSystemUtils.files(path);
|
||||
if (files.length == 0) {
|
||||
return ImmutableMap.of();
|
||||
}
|
||||
// using MapBuilder and not ImmutableMap.Builder as it seems like File#listFiles might return duplicate files!
|
||||
MapBuilder<String, BlobMetaData> builder = MapBuilder.newMapBuilder();
|
||||
for (File file : files) {
|
||||
if (file.isFile()) {
|
||||
builder.put(file.getName(), new PlainBlobMetaData(file.getName(), file.length()));
|
||||
for (Path file : files) {
|
||||
final BasicFileAttributes attrs = Files.readAttributes(file, BasicFileAttributes.class);
|
||||
if (attrs.isRegularFile()) {
|
||||
builder.put(file.getFileName().toString(), new PlainBlobMetaData(file.getFileName().toString(), attrs.size()));
|
||||
}
|
||||
}
|
||||
return builder.immutableMap();
|
||||
|
@ -69,7 +67,7 @@ public class FsBlobContainer extends AbstractBlobContainer {
|
|||
|
||||
@Override
|
||||
public void deleteBlob(String blobName) throws IOException {
|
||||
Path blobPath = new File(path, blobName).toPath();
|
||||
Path blobPath = path.resolve(blobName);
|
||||
if (Files.exists(blobPath)) {
|
||||
Files.delete(blobPath);
|
||||
}
|
||||
|
@ -77,18 +75,18 @@ public class FsBlobContainer extends AbstractBlobContainer {
|
|||
|
||||
@Override
|
||||
public boolean blobExists(String blobName) {
|
||||
return new File(path, blobName).exists();
|
||||
return Files.exists(path.resolve(blobName));
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream openInput(String name) throws IOException {
|
||||
return new BufferedInputStream(new FileInputStream(new File(path, name)), blobStore.bufferSizeInBytes());
|
||||
return new BufferedInputStream(Files.newInputStream(path.resolve(name)), blobStore.bufferSizeInBytes());
|
||||
}
|
||||
|
||||
@Override
|
||||
public OutputStream createOutput(String blobName) throws IOException {
|
||||
final File file = new File(path, blobName);
|
||||
return new BufferedOutputStream(new FilterOutputStream(new FileOutputStream(file)) {
|
||||
final Path file = path.resolve(blobName);
|
||||
return new BufferedOutputStream(new FilterOutputStream(Files.newOutputStream(file)) {
|
||||
|
||||
@Override // FilterOutputStream#write(byte[] b, int off, int len) is trappy writes every single byte
|
||||
public void write(byte[] b, int off, int len) throws IOException { out.write(b, off, len);}
|
||||
|
@ -96,8 +94,8 @@ public class FsBlobContainer extends AbstractBlobContainer {
|
|||
@Override
|
||||
public void close() throws IOException {
|
||||
super.close();
|
||||
IOUtils.fsync(file.toPath(), false);
|
||||
IOUtils.fsync(path.toPath(), true);
|
||||
IOUtils.fsync(file, false);
|
||||
IOUtils.fsync(path, true);
|
||||
}
|
||||
}, blobStore.bufferSizeInBytes());
|
||||
}
|
||||
|
|
|
@ -20,40 +20,35 @@
|
|||
package org.elasticsearch.common.blobstore.fs;
|
||||
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.blobstore.BlobContainer;
|
||||
import org.elasticsearch.common.blobstore.BlobPath;
|
||||
import org.elasticsearch.common.blobstore.BlobStore;
|
||||
import org.elasticsearch.common.blobstore.BlobStoreException;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FsBlobStore extends AbstractComponent implements BlobStore {
|
||||
|
||||
private final File path;
|
||||
private final Path path;
|
||||
|
||||
private final int bufferSizeInBytes;
|
||||
|
||||
public FsBlobStore(Settings settings, File path) {
|
||||
public FsBlobStore(Settings settings, Path path) throws IOException {
|
||||
super(settings);
|
||||
this.path = path;
|
||||
if (!path.exists()) {
|
||||
boolean b = FileSystemUtils.mkdirs(path);
|
||||
if (!b) {
|
||||
throw new BlobStoreException("Failed to create directory at [" + path + "]");
|
||||
}
|
||||
}
|
||||
if (!path.isDirectory()) {
|
||||
throw new BlobStoreException("Path is not a directory at [" + path + "]");
|
||||
}
|
||||
Files.createDirectories(path);
|
||||
this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
|
||||
}
|
||||
|
||||
|
@ -62,7 +57,7 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {
|
|||
return path.toString();
|
||||
}
|
||||
|
||||
public File path() {
|
||||
public Path path() {
|
||||
return path;
|
||||
}
|
||||
|
||||
|
@ -72,12 +67,16 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {
|
|||
|
||||
@Override
|
||||
public BlobContainer blobContainer(BlobPath path) {
|
||||
return new FsBlobContainer(this, path, buildAndCreate(path));
|
||||
try {
|
||||
return new FsBlobContainer(this, path, buildAndCreate(path));
|
||||
} catch (IOException ex) {
|
||||
throw new ElasticsearchException("failed to create blob container", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(BlobPath path) throws IOException {
|
||||
IOUtils.rm(buildPath(path).toPath());
|
||||
IOUtils.rm(buildPath(path));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -85,21 +84,21 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {
|
|||
// nothing to do here...
|
||||
}
|
||||
|
||||
private synchronized File buildAndCreate(BlobPath path) {
|
||||
File f = buildPath(path);
|
||||
FileSystemUtils.mkdirs(f);
|
||||
private synchronized Path buildAndCreate(BlobPath path) throws IOException {
|
||||
Path f = buildPath(path);
|
||||
Files.createDirectories(f);
|
||||
return f;
|
||||
}
|
||||
|
||||
private File buildPath(BlobPath path) {
|
||||
private Path buildPath(BlobPath path) {
|
||||
String[] paths = path.toArray();
|
||||
if (paths.length == 0) {
|
||||
return path();
|
||||
}
|
||||
File blobPath = new File(this.path, paths[0]);
|
||||
Path blobPath = this.path.resolve(paths[0]);
|
||||
if (paths.length > 1) {
|
||||
for (int i = 1; i < paths.length; i++) {
|
||||
blobPath = new File(blobPath, paths[i]);
|
||||
blobPath = blobPath.resolve(paths[i]);
|
||||
}
|
||||
}
|
||||
return blobPath;
|
||||
|
|
|
@ -28,6 +28,9 @@ import java.io.*;
|
|||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.attribute.FileTime;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -37,8 +40,8 @@ public class HttpDownloadHelper {
|
|||
private boolean useTimestamp = false;
|
||||
private boolean skipExisting = false;
|
||||
|
||||
public boolean download(URL source, File dest, @Nullable DownloadProgress progress, TimeValue timeout) throws Exception {
|
||||
if (dest.exists() && skipExisting) {
|
||||
public boolean download(URL source, Path dest, @Nullable DownloadProgress progress, TimeValue timeout) throws Exception {
|
||||
if (Files.exists(dest) && skipExisting) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -51,8 +54,8 @@ public class HttpDownloadHelper {
|
|||
long timestamp = 0;
|
||||
|
||||
boolean hasTimestamp = false;
|
||||
if (useTimestamp && dest.exists()) {
|
||||
timestamp = dest.lastModified();
|
||||
if (useTimestamp && Files.exists(dest) ) {
|
||||
timestamp = Files.getLastModifiedTime(dest).toMillis();
|
||||
hasTimestamp = true;
|
||||
}
|
||||
|
||||
|
@ -182,7 +185,7 @@ public class HttpDownloadHelper {
|
|||
private class GetThread extends Thread {
|
||||
|
||||
private final URL source;
|
||||
private final File dest;
|
||||
private final Path dest;
|
||||
private final boolean hasTimestamp;
|
||||
private final long timestamp;
|
||||
private final DownloadProgress progress;
|
||||
|
@ -194,7 +197,7 @@ public class HttpDownloadHelper {
|
|||
private URLConnection connection;
|
||||
private int redirections = 0;
|
||||
|
||||
GetThread(URL source, File dest, boolean h, long t, DownloadProgress p) {
|
||||
GetThread(URL source, Path dest, boolean h, long t, DownloadProgress p) {
|
||||
this.source = source;
|
||||
this.dest = dest;
|
||||
hasTimestamp = h;
|
||||
|
@ -329,7 +332,7 @@ public class HttpDownloadHelper {
|
|||
throw new IOException("Can't get " + source + " to " + dest, lastEx);
|
||||
}
|
||||
|
||||
os = new FileOutputStream(dest);
|
||||
os = Files.newOutputStream(dest);
|
||||
progress.beginDownload();
|
||||
boolean finished = false;
|
||||
try {
|
||||
|
@ -346,7 +349,7 @@ public class HttpDownloadHelper {
|
|||
// Try to delete the garbage we'd otherwise leave
|
||||
// behind.
|
||||
IOUtils.closeWhileHandlingException(os, is);
|
||||
IOUtils.deleteFilesIgnoringExceptions(dest.toPath());
|
||||
IOUtils.deleteFilesIgnoringExceptions(dest);
|
||||
} else {
|
||||
IOUtils.close(os, is);
|
||||
}
|
||||
|
@ -355,10 +358,10 @@ public class HttpDownloadHelper {
|
|||
return true;
|
||||
}
|
||||
|
||||
private void updateTimeStamp() {
|
||||
private void updateTimeStamp() throws IOException {
|
||||
long remoteTimestamp = connection.getLastModified();
|
||||
if (remoteTimestamp != 0) {
|
||||
dest.setLastModified(remoteTimestamp);
|
||||
Files.setLastModifiedTime(dest, FileTime.fromMillis(remoteTimestamp));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -384,8 +387,8 @@ public class HttpDownloadHelper {
|
|||
IOUtils.close(is, os);
|
||||
} else {
|
||||
IOUtils.closeWhileHandlingException(is, os);
|
||||
if (dest != null && dest.exists()) {
|
||||
IOUtils.deleteFilesIgnoringExceptions(dest.toPath());
|
||||
if (dest != null && Files.exists(dest)) {
|
||||
IOUtils.deleteFilesIgnoringExceptions(dest);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,58 +19,60 @@
|
|||
|
||||
package org.elasticsearch.common.io;
|
||||
|
||||
import com.google.common.collect.Iterators;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.*;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import static java.nio.file.FileVisitResult.CONTINUE;
|
||||
import static java.nio.file.FileVisitResult.SKIP_SUBTREE;
|
||||
|
||||
/**
|
||||
*
|
||||
* Elasticsearch utils to work with {@link java.nio.file.Path}
|
||||
*/
|
||||
public class FileSystemUtils {
|
||||
public final class FileSystemUtils {
|
||||
|
||||
public static boolean mkdirs(File dir) {
|
||||
return dir.mkdirs();
|
||||
}
|
||||
private FileSystemUtils() {} // only static methods
|
||||
|
||||
public static boolean hasExtensions(File root, String... extensions) {
|
||||
if (root != null && root.exists()) {
|
||||
if (root.isDirectory()) {
|
||||
File[] children = root.listFiles();
|
||||
if (children != null) {
|
||||
for (File child : children) {
|
||||
if (child.isDirectory()) {
|
||||
boolean has = hasExtensions(child, extensions);
|
||||
if (has) {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
for (String extension : extensions) {
|
||||
if (child.getName().endsWith(extension)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns <code>true</code> iff a file under the given root has one of the given extensions. This method
|
||||
* will travers directories recursively and will terminate once any of the extensions was found. This
|
||||
* methods will not follow any links.
|
||||
*
|
||||
* @param root the root directory to travers. Must be a directory
|
||||
* @param extensions the file extensions to look for
|
||||
* @return <code>true</code> iff a file under the given root has one of the given extensions, otherwise <code>false</code>
|
||||
* @throws IOException if an IOException occurs or if the given root path is not a directory.
|
||||
*/
|
||||
public static boolean hasExtensions(Path root, final String... extensions) throws IOException {
|
||||
final AtomicBoolean retVal = new AtomicBoolean(false);
|
||||
Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
for (String extension : extensions) {
|
||||
if (file.getFileName().toString().endsWith(extension)) {
|
||||
retVal.set(true);
|
||||
return FileVisitResult.TERMINATE;
|
||||
}
|
||||
}
|
||||
return super.visitFile(file, attrs);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
});
|
||||
return retVal.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if at least one of the files exists.
|
||||
* Returns <code>true</code> iff one of the files exists otherwise <code>false</code>
|
||||
*/
|
||||
public static boolean exists(File... files) {
|
||||
for (File file : files) {
|
||||
if (file.exists()) {
|
||||
public static boolean exists(Path... files) {
|
||||
for (Path file : files) {
|
||||
if (Files.exists(file)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@ -78,17 +80,16 @@ public class FileSystemUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* Returns an array of {@link Path} build from the correspondent element
|
||||
* in the input array using {@link java.io.File#toPath()}
|
||||
* @param files the files to get paths for
|
||||
* Appends the path to the given base and strips N elements off the path if strip is > 0.
|
||||
*/
|
||||
@Deprecated // this is only a transition API
|
||||
public static Path[] toPaths(File... files) {
|
||||
Path[] paths = new Path[files.length];
|
||||
for (int i = 0; i < files.length; i++) {
|
||||
paths[i] = files[i].toPath();
|
||||
public static Path append(Path base, Path path, int strip) {
|
||||
for (Path subPath : path) {
|
||||
if (strip-- > 0) {
|
||||
continue;
|
||||
}
|
||||
base = base.resolve(subPath.toString());
|
||||
}
|
||||
return paths;
|
||||
return base;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -112,26 +113,24 @@ public class FileSystemUtils {
|
|||
* Check that a directory exists, is a directory and is readable
|
||||
* by the current user
|
||||
*/
|
||||
public static boolean isAccessibleDirectory(File directory, ESLogger logger) {
|
||||
public static boolean isAccessibleDirectory(Path directory, ESLogger logger) {
|
||||
assert directory != null && logger != null;
|
||||
|
||||
if (!directory.exists()) {
|
||||
logger.debug("[{}] directory does not exist.", directory.getAbsolutePath());
|
||||
if (!Files.exists(directory)) {
|
||||
logger.debug("[{}] directory does not exist.", directory.toAbsolutePath());
|
||||
return false;
|
||||
}
|
||||
if (!directory.isDirectory()) {
|
||||
logger.debug("[{}] should be a directory but is not.", directory.getAbsolutePath());
|
||||
if (!Files.isDirectory(directory)) {
|
||||
logger.debug("[{}] should be a directory but is not.", directory.toAbsolutePath());
|
||||
return false;
|
||||
}
|
||||
if (!directory.canRead()) {
|
||||
logger.debug("[{}] directory is not readable.", directory.getAbsolutePath());
|
||||
if (!Files.isReadable(directory)) {
|
||||
logger.debug("[{}] directory is not readable.", directory.toAbsolutePath());
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private FileSystemUtils() {}
|
||||
|
||||
/**
|
||||
* This utility copy a full directory content (excluded) under
|
||||
* a new directory but without overwriting existing files.
|
||||
|
@ -144,17 +143,17 @@ public class FileSystemUtils {
|
|||
* @param suffix When not null, files are copied with a suffix appended to the original name (eg: ".new")
|
||||
* When null, files are ignored
|
||||
*/
|
||||
public static void moveFilesWithoutOverwriting(File source, final File destination, final String suffix) throws IOException {
|
||||
public static void moveFilesWithoutOverwriting(Path source, final Path destination, final String suffix) throws IOException {
|
||||
|
||||
// Create destination dir
|
||||
FileSystemUtils.mkdirs(destination);
|
||||
Files.createDirectories(destination);
|
||||
|
||||
final int configPathRootLevel = source.toPath().getNameCount();
|
||||
final int configPathRootLevel = source.getNameCount();
|
||||
|
||||
// We walk through the file tree from
|
||||
Files.walkFileTree(source.toPath(), new SimpleFileVisitor<Path>() {
|
||||
Files.walkFileTree(source, new SimpleFileVisitor<Path>() {
|
||||
private Path buildPath(Path path) {
|
||||
return destination.toPath().resolve(path);
|
||||
return destination.resolve(path);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -168,10 +167,8 @@ public class FileSystemUtils {
|
|||
Path path = buildPath(subpath);
|
||||
if (!Files.exists(path)) {
|
||||
// We just move the structure to new dir
|
||||
if (!dir.toFile().renameTo(path.toFile())) {
|
||||
throw new IOException("Could not move [" + dir + "] to [" + path + "]");
|
||||
}
|
||||
|
||||
// we can't do atomic move here since src / dest might be on different mounts?
|
||||
Files.move(dir, path);
|
||||
// We just ignore sub files from here
|
||||
return FileVisitResult.SKIP_SUBTREE;
|
||||
}
|
||||
|
@ -209,9 +206,8 @@ public class FileSystemUtils {
|
|||
* @param source source dir
|
||||
* @param destination destination dir
|
||||
*/
|
||||
public static void copyDirectoryRecursively(File source, File destination) throws IOException {
|
||||
Files.walkFileTree(source.toPath(),
|
||||
new TreeCopier(source.toPath(), destination.toPath()));
|
||||
public static void copyDirectoryRecursively(Path source, Path destination) throws IOException {
|
||||
Files.walkFileTree(source, new TreeCopier(source, destination));
|
||||
}
|
||||
|
||||
static class TreeCopier extends SimpleFileVisitor<Path> {
|
||||
|
@ -248,4 +244,30 @@ public class FileSystemUtils {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of all files in the given directory matching.
|
||||
*/
|
||||
public static Path[] files(Path from, DirectoryStream.Filter<Path> filter) throws IOException {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(from, filter)) {
|
||||
return Iterators.toArray(stream.iterator(), Path.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of all files in the given directory.
|
||||
*/
|
||||
public static Path[] files(Path directory) throws IOException {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory)) {
|
||||
return Iterators.toArray(stream.iterator(), Path.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of all files in the given directory matching the glob.
|
||||
*/
|
||||
public static Path[] files(Path directory, String glob) throws IOException {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory, glob)) {
|
||||
return Iterators.toArray(stream.iterator(), Path.class);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,6 +26,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
|||
import org.elasticsearch.common.util.Callback;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
|
@ -41,53 +43,6 @@ public abstract class Streams {
|
|||
public static final int BUFFER_SIZE = 1024 * 8;
|
||||
|
||||
|
||||
//---------------------------------------------------------------------
|
||||
// Copy methods for java.io.File
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Copy the contents of the given input File to the given output File.
|
||||
*
|
||||
* @param in the file to copy from
|
||||
* @param out the file to copy to
|
||||
* @return the number of bytes copied
|
||||
* @throws IOException in case of I/O errors
|
||||
*/
|
||||
public static long copy(File in, File out) throws IOException {
|
||||
Preconditions.checkNotNull(in, "No input File specified");
|
||||
Preconditions.checkNotNull(out, "No output File specified");
|
||||
return copy(new BufferedInputStream(new FileInputStream(in)),
|
||||
new BufferedOutputStream(new FileOutputStream(out)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the contents of the given byte array to the given output File.
|
||||
*
|
||||
* @param in the byte array to copy from
|
||||
* @param out the file to copy to
|
||||
* @throws IOException in case of I/O errors
|
||||
*/
|
||||
public static void copy(byte[] in, File out) throws IOException {
|
||||
Preconditions.checkNotNull(in, "No input byte array specified");
|
||||
Preconditions.checkNotNull(out, "No output File specified");
|
||||
ByteArrayInputStream inStream = new ByteArrayInputStream(in);
|
||||
OutputStream outStream = new BufferedOutputStream(new FileOutputStream(out));
|
||||
copy(inStream, outStream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the contents of the given input File into a new byte array.
|
||||
*
|
||||
* @param in the file to copy from
|
||||
* @return the new byte array that has been copied to
|
||||
* @throws IOException in case of I/O errors
|
||||
*/
|
||||
public static byte[] copyToByteArray(File in) throws IOException {
|
||||
Preconditions.checkNotNull(in, "No input File specified");
|
||||
return copyToByteArray(new BufferedInputStream(new FileInputStream(in)));
|
||||
}
|
||||
|
||||
|
||||
//---------------------------------------------------------------------
|
||||
// Copy methods for java.io.InputStream / java.io.OutputStream
|
||||
//---------------------------------------------------------------------
|
||||
|
@ -154,20 +109,6 @@ public abstract class Streams {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the contents of the given InputStream into a new byte array.
|
||||
* Closes the stream when done.
|
||||
*
|
||||
* @param in the stream to copy from
|
||||
* @return the new byte array that has been copied to
|
||||
* @throws IOException in case of I/O errors
|
||||
*/
|
||||
public static byte[] copyToByteArray(InputStream in) throws IOException {
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
copy(in, out);
|
||||
return out.bytes().toBytes();
|
||||
}
|
||||
|
||||
|
||||
//---------------------------------------------------------------------
|
||||
// Copy methods for java.io.Reader / java.io.Writer
|
||||
|
@ -262,11 +203,15 @@ public abstract class Streams {
|
|||
}
|
||||
|
||||
public static byte[] copyToBytesFromClasspath(String path) throws IOException {
|
||||
InputStream is = Streams.class.getResourceAsStream(path);
|
||||
if (is == null) {
|
||||
throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
|
||||
try (InputStream is = Streams.class.getResourceAsStream(path)) {
|
||||
if (is == null) {
|
||||
throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
|
||||
}
|
||||
try (BytesStreamOutput out = new BytesStreamOutput()) {
|
||||
copy(is, out);
|
||||
return out.bytes().toBytes();
|
||||
}
|
||||
}
|
||||
return copyToByteArray(is);
|
||||
}
|
||||
|
||||
public static int readFully(Reader reader, char[] dest) throws IOException {
|
||||
|
|
|
@ -120,7 +120,7 @@ public class LogConfigurator {
|
|||
public static void resolveConfig(Environment env, final ImmutableSettings.Builder settingsBuilder) {
|
||||
|
||||
try {
|
||||
Files.walkFileTree(env.configFile().toPath(), EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
|
||||
Files.walkFileTree(env.configFile(), EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
String fileName = file.getFileName().toString();
|
||||
|
|
|
@ -41,6 +41,8 @@ import java.io.IOException;
|
|||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@ -933,6 +935,18 @@ public class ImmutableSettings implements Settings {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads settings from a url that represents them using the
|
||||
* {@link SettingsLoaderFactory#loaderFromSource(String)}.
|
||||
*/
|
||||
public Builder loadFromPath(Path path) throws SettingsException {
|
||||
try {
|
||||
return loadFromStream(path.getFileName().toString(), Files.newInputStream(path));
|
||||
} catch (IOException e) {
|
||||
throw new SettingsException("Failed to open stream for url [" + path + "]", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads settings from a stream that represents them using the
|
||||
* {@link SettingsLoaderFactory#loaderFromSource(String)}.
|
||||
|
|
|
@ -29,7 +29,12 @@ import java.io.File;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
|
||||
import static org.elasticsearch.common.Strings.cleanPath;
|
||||
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
|
||||
|
@ -41,21 +46,21 @@ public class Environment {
|
|||
|
||||
private final Settings settings;
|
||||
|
||||
private final File homeFile;
|
||||
private final Path homeFile;
|
||||
|
||||
private final File workFile;
|
||||
private final Path workFile;
|
||||
|
||||
private final File workWithClusterFile;
|
||||
private final Path workWithClusterFile;
|
||||
|
||||
private final File[] dataFiles;
|
||||
private final Path[] dataFiles;
|
||||
|
||||
private final File[] dataWithClusterFiles;
|
||||
private final Path[] dataWithClusterFiles;
|
||||
|
||||
private final File configFile;
|
||||
private final Path configFile;
|
||||
|
||||
private final File pluginsFile;
|
||||
private final Path pluginsFile;
|
||||
|
||||
private final File logsFile;
|
||||
private final Path logsFile;
|
||||
|
||||
public Environment() {
|
||||
this(EMPTY_SETTINGS);
|
||||
|
@ -64,47 +69,47 @@ public class Environment {
|
|||
public Environment(Settings settings) {
|
||||
this.settings = settings;
|
||||
if (settings.get("path.home") != null) {
|
||||
homeFile = new File(cleanPath(settings.get("path.home")));
|
||||
homeFile = Paths.get(cleanPath(settings.get("path.home")));
|
||||
} else {
|
||||
homeFile = new File(System.getProperty("user.dir"));
|
||||
homeFile = Paths.get(System.getProperty("user.dir"));
|
||||
}
|
||||
|
||||
if (settings.get("path.conf") != null) {
|
||||
configFile = new File(cleanPath(settings.get("path.conf")));
|
||||
configFile = Paths.get(cleanPath(settings.get("path.conf")));
|
||||
} else {
|
||||
configFile = new File(homeFile, "config");
|
||||
configFile = homeFile.resolve("config");
|
||||
}
|
||||
|
||||
if (settings.get("path.plugins") != null) {
|
||||
pluginsFile = new File(cleanPath(settings.get("path.plugins")));
|
||||
pluginsFile = Paths.get(cleanPath(settings.get("path.plugins")));
|
||||
} else {
|
||||
pluginsFile = new File(homeFile, "plugins");
|
||||
pluginsFile = homeFile.resolve("plugins");
|
||||
}
|
||||
|
||||
if (settings.get("path.work") != null) {
|
||||
workFile = new File(cleanPath(settings.get("path.work")));
|
||||
workFile = Paths.get(cleanPath(settings.get("path.work")));
|
||||
} else {
|
||||
workFile = new File(homeFile, "work");
|
||||
workFile = homeFile.resolve("work");
|
||||
}
|
||||
workWithClusterFile = new File(workFile, ClusterName.clusterNameFromSettings(settings).value());
|
||||
workWithClusterFile = workFile.resolve(ClusterName.clusterNameFromSettings(settings).value());
|
||||
|
||||
String[] dataPaths = settings.getAsArray("path.data");
|
||||
if (dataPaths.length > 0) {
|
||||
dataFiles = new File[dataPaths.length];
|
||||
dataWithClusterFiles = new File[dataPaths.length];
|
||||
dataFiles = new Path[dataPaths.length];
|
||||
dataWithClusterFiles = new Path[dataPaths.length];
|
||||
for (int i = 0; i < dataPaths.length; i++) {
|
||||
dataFiles[i] = new File(dataPaths[i]);
|
||||
dataWithClusterFiles[i] = new File(dataFiles[i], ClusterName.clusterNameFromSettings(settings).value());
|
||||
dataFiles[i] = Paths.get(dataPaths[i]);
|
||||
dataWithClusterFiles[i] = dataFiles[i].resolve(ClusterName.clusterNameFromSettings(settings).value());
|
||||
}
|
||||
} else {
|
||||
dataFiles = new File[]{new File(homeFile, "data")};
|
||||
dataWithClusterFiles = new File[]{new File(new File(homeFile, "data"), ClusterName.clusterNameFromSettings(settings).value())};
|
||||
dataFiles = new Path[]{homeFile.resolve("data")};
|
||||
dataWithClusterFiles = new Path[]{homeFile.resolve("data").resolve(ClusterName.clusterNameFromSettings(settings).value())};
|
||||
}
|
||||
|
||||
if (settings.get("path.logs") != null) {
|
||||
logsFile = new File(cleanPath(settings.get("path.logs")));
|
||||
logsFile = Paths.get(cleanPath(settings.get("path.logs")));
|
||||
} else {
|
||||
logsFile = new File(homeFile, "logs");
|
||||
logsFile = homeFile.resolve("logs");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -118,7 +123,7 @@ public class Environment {
|
|||
/**
|
||||
* The home of the installation.
|
||||
*/
|
||||
public File homeFile() {
|
||||
public Path homeFile() {
|
||||
return homeFile;
|
||||
}
|
||||
|
||||
|
@ -128,7 +133,7 @@ public class Environment {
|
|||
* Note, currently, we don't use it in ES at all, we should strive to see if we can keep it like that,
|
||||
* but if we do, we have the infra for it.
|
||||
*/
|
||||
public File workFile() {
|
||||
public Path workFile() {
|
||||
return workFile;
|
||||
}
|
||||
|
||||
|
@ -138,77 +143,73 @@ public class Environment {
|
|||
* Note, currently, we don't use it in ES at all, we should strive to see if we can keep it like that,
|
||||
* but if we do, we have the infra for it.
|
||||
*/
|
||||
public File workWithClusterFile() {
|
||||
public Path workWithClusterFile() {
|
||||
return workWithClusterFile;
|
||||
}
|
||||
|
||||
/**
|
||||
* The data location.
|
||||
*/
|
||||
public File[] dataFiles() {
|
||||
public Path[] dataFiles() {
|
||||
return dataFiles;
|
||||
}
|
||||
|
||||
/**
|
||||
* The data location with the cluster name as a sub directory.
|
||||
*/
|
||||
public File[] dataWithClusterFiles() {
|
||||
public Path[] dataWithClusterFiles() {
|
||||
return dataWithClusterFiles;
|
||||
}
|
||||
|
||||
/**
|
||||
* The config location.
|
||||
*/
|
||||
public File configFile() {
|
||||
public Path configFile() {
|
||||
return configFile;
|
||||
}
|
||||
|
||||
public File pluginsFile() {
|
||||
public Path pluginsFile() {
|
||||
return pluginsFile;
|
||||
}
|
||||
|
||||
public File logsFile() {
|
||||
public Path logsFile() {
|
||||
return logsFile;
|
||||
}
|
||||
|
||||
public String resolveConfigAndLoadToString(String path) throws FailedToResolveConfigException, IOException {
|
||||
return Streams.copyToString(new InputStreamReader(resolveConfig(path).openStream(), Charsets.UTF_8));
|
||||
return Streams.copyToString(Files.newBufferedReader(resolveConfig(path), Charsets.UTF_8));
|
||||
}
|
||||
|
||||
public URL resolveConfig(String path) throws FailedToResolveConfigException {
|
||||
public Path resolveConfig(String path) throws FailedToResolveConfigException {
|
||||
String origPath = path;
|
||||
// first, try it as a path on the file system
|
||||
File f1 = new File(path);
|
||||
if (f1.exists()) {
|
||||
try {
|
||||
return f1.toURI().toURL();
|
||||
} catch (MalformedURLException e) {
|
||||
throw new FailedToResolveConfigException("Failed to resolve path [" + f1 + "]", e);
|
||||
}
|
||||
Path f1 = Paths.get(path);
|
||||
if (Files.exists(f1)) {
|
||||
return f1;
|
||||
}
|
||||
if (path.startsWith("/")) {
|
||||
path = path.substring(1);
|
||||
}
|
||||
// next, try it relative to the config location
|
||||
File f2 = new File(configFile, path);
|
||||
if (f2.exists()) {
|
||||
try {
|
||||
return f2.toURI().toURL();
|
||||
} catch (MalformedURLException e) {
|
||||
throw new FailedToResolveConfigException("Failed to resolve path [" + f2 + "]", e);
|
||||
}
|
||||
Path f2 = configFile.resolve(path);
|
||||
if (Files.exists(f2)) {
|
||||
return f2;
|
||||
}
|
||||
// try and load it from the classpath directly
|
||||
URL resource = settings.getClassLoader().getResource(path);
|
||||
if (resource != null) {
|
||||
return resource;
|
||||
}
|
||||
// try and load it from the classpath with config/ prefix
|
||||
if (!path.startsWith("config/")) {
|
||||
resource = settings.getClassLoader().getResource("config/" + path);
|
||||
try {
|
||||
// try and load it from the classpath directly
|
||||
URL resource = settings.getClassLoader().getResource(path);
|
||||
if (resource != null) {
|
||||
return resource;
|
||||
return Paths.get(resource.toURI());
|
||||
}
|
||||
// try and load it from the classpath with config/ prefix
|
||||
if (!path.startsWith("config/")) {
|
||||
resource = settings.getClassLoader().getResource("config/" + path);
|
||||
if (resource != null) {
|
||||
return Paths.get(resource.toURI());
|
||||
}
|
||||
}
|
||||
} catch (URISyntaxException ex) {
|
||||
throw new FailedToResolveConfigException("Failed to resolve config path [" + origPath + "], tried file path [" + f1 + "], path file [" + f2 + "], and classpath", ex);
|
||||
}
|
||||
throw new FailedToResolveConfigException("Failed to resolve config path [" + origPath + "], tried file path [" + f1 + "], path file [" + f2 + "], and classpath");
|
||||
}
|
||||
|
|
|
@ -39,7 +39,6 @@ import org.elasticsearch.index.Index;
|
|||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.*;
|
||||
import java.util.*;
|
||||
|
@ -80,7 +79,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable{
|
|||
int maxLocalStorageNodes = settings.getAsInt("node.max_local_storage_nodes", 50);
|
||||
for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) {
|
||||
for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
|
||||
Path dir = environment.dataWithClusterFiles()[dirIndex].toPath().resolve(Paths.get("nodes", Integer.toString(possibleLockId)));
|
||||
Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(Paths.get("nodes", Integer.toString(possibleLockId)));
|
||||
if (Files.exists(dir) == false) {
|
||||
Files.createDirectories(dir);
|
||||
}
|
||||
|
@ -348,33 +347,6 @@ public class NodeEnvironment extends AbstractComponent implements Closeable{
|
|||
return nodePaths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of all of the nodes data locations.
|
||||
* @deprecated use {@link #nodeDataPaths()} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public File[] nodeDataLocations() {
|
||||
return toFiles(nodeDataPaths());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all data paths for the given index.
|
||||
* @deprecated use {@link #indexPaths(org.elasticsearch.index.Index)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public File[] indexLocations(Index index) {
|
||||
return toFiles(indexPaths(index));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all data paths for the given shards ID
|
||||
* @deprecated use {@link #shardPaths(org.elasticsearch.index.shard.ShardId)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public File[] shardLocations(ShardId shardId) {
|
||||
return toFiles(shardPaths(shardId));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all data paths for the given index.
|
||||
*/
|
||||
|
@ -535,21 +507,6 @@ public class NodeEnvironment extends AbstractComponent implements Closeable{
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns an array of {@link File} build from the correspondent element
|
||||
* in the input array using {@link java.nio.file.Path#toFile()} )}
|
||||
* @param files the files to get paths for
|
||||
*/
|
||||
@Deprecated // this is only a transition API
|
||||
private static File[] toFiles(Path... files) {
|
||||
File[] paths = new File[files.length];
|
||||
for (int i = 0; i < files.length; i++) {
|
||||
paths[i] = files[i].toFile();
|
||||
}
|
||||
return paths;
|
||||
}
|
||||
|
||||
Settings getSettings() { // for testing
|
||||
return settings;
|
||||
}
|
||||
|
|
|
@ -199,7 +199,7 @@ public class LocalGateway extends AbstractLifecycleComponent<Gateway> implements
|
|||
@Override
|
||||
public void reset() throws Exception {
|
||||
try {
|
||||
IOUtils.rm(FileSystemUtils.toPaths(nodeEnv.nodeDataLocations()));
|
||||
IOUtils.rm(nodeEnv.nodeDataPaths());
|
||||
} catch (Exception ex) {
|
||||
logger.debug("failed to delete shard locations", ex);
|
||||
}
|
||||
|
|
|
@ -54,8 +54,10 @@ import org.elasticsearch.threadpool.ThreadPool;
|
|||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -479,57 +481,55 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
|
|||
|
||||
private void pre019Upgrade() throws Exception {
|
||||
long index = -1;
|
||||
File metaDataFile = null;
|
||||
Path metaDataFile = null;
|
||||
MetaData metaData = null;
|
||||
long version = -1;
|
||||
for (File dataLocation : nodeEnv.nodeDataLocations()) {
|
||||
File stateLocation = new File(dataLocation, "_state");
|
||||
if (!stateLocation.exists()) {
|
||||
for (Path dataLocation : nodeEnv.nodeDataPaths()) {
|
||||
final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME);
|
||||
if (!Files.exists(stateLocation)) {
|
||||
continue;
|
||||
}
|
||||
File[] stateFiles = stateLocation.listFiles();
|
||||
if (stateFiles == null) {
|
||||
continue;
|
||||
}
|
||||
for (File stateFile : stateFiles) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("[upgrade]: processing [" + stateFile.getName() + "]");
|
||||
}
|
||||
String name = stateFile.getName();
|
||||
if (!name.startsWith("metadata-")) {
|
||||
continue;
|
||||
}
|
||||
long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
|
||||
if (fileIndex >= index) {
|
||||
// try and read the meta data
|
||||
try {
|
||||
byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
|
||||
if (data.length == 0) {
|
||||
continue;
|
||||
}
|
||||
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) {
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != null) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("meta-data".equals(currentFieldName)) {
|
||||
metaData = MetaData.Builder.fromXContent(parser);
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("version".equals(currentFieldName)) {
|
||||
version = parser.longValue();
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation)) {
|
||||
for (Path stateFile : stream) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("[upgrade]: processing [" + stateFile.getFileName() + "]");
|
||||
}
|
||||
String name = stateFile.getFileName().toString();
|
||||
if (!name.startsWith("metadata-")) {
|
||||
continue;
|
||||
}
|
||||
long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
|
||||
if (fileIndex >= index) {
|
||||
// try and read the meta data
|
||||
try {
|
||||
byte[] data = Files.readAllBytes(stateFile);
|
||||
if (data.length == 0) {
|
||||
continue;
|
||||
}
|
||||
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) {
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != null) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("meta-data".equals(currentFieldName)) {
|
||||
metaData = MetaData.Builder.fromXContent(parser);
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("version".equals(currentFieldName)) {
|
||||
version = parser.longValue();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
index = fileIndex;
|
||||
metaDataFile = stateFile;
|
||||
} catch (IOException e) {
|
||||
logger.warn("failed to read pre 0.19 state from [" + name + "], ignoring...", e);
|
||||
}
|
||||
index = fileIndex;
|
||||
metaDataFile = stateFile;
|
||||
} catch (IOException e) {
|
||||
logger.warn("failed to read pre 0.19 state from [" + name + "], ignoring...", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -538,7 +538,7 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
|
|||
return;
|
||||
}
|
||||
|
||||
logger.info("found old metadata state, loading metadata from [{}] and converting to new metadata location and structure...", metaDataFile.getAbsolutePath());
|
||||
logger.info("found old metadata state, loading metadata from [{}] and converting to new metadata location and structure...", metaDataFile.toAbsolutePath());
|
||||
|
||||
writeGlobalState("upgrade", MetaData.builder(metaData).version(version).build());
|
||||
for (IndexMetaData indexMetaData : metaData) {
|
||||
|
@ -549,35 +549,31 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
|
|||
}
|
||||
|
||||
// rename shards state to backup state
|
||||
File backupFile = new File(metaDataFile.getParentFile(), "backup-" + metaDataFile.getName());
|
||||
if (!metaDataFile.renameTo(backupFile)) {
|
||||
throw new IOException("failed to rename old state to backup state [" + metaDataFile.getAbsolutePath() + "]");
|
||||
}
|
||||
Path backupFile = metaDataFile.resolveSibling("backup-" + metaDataFile.getFileName());
|
||||
Files.move(metaDataFile, backupFile, StandardCopyOption.ATOMIC_MOVE);
|
||||
|
||||
// delete all other shards state files
|
||||
for (File dataLocation : nodeEnv.nodeDataLocations()) {
|
||||
File stateLocation = new File(dataLocation, "_state");
|
||||
if (!stateLocation.exists()) {
|
||||
for (Path dataLocation : nodeEnv.nodeDataPaths()) {
|
||||
Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME);
|
||||
if (!Files.exists(stateLocation)) {
|
||||
continue;
|
||||
}
|
||||
File[] stateFiles = stateLocation.listFiles();
|
||||
if (stateFiles == null) {
|
||||
continue;
|
||||
}
|
||||
for (File stateFile : stateFiles) {
|
||||
String name = stateFile.getName();
|
||||
if (!name.startsWith("metadata-")) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
Files.delete(stateFile.toPath());
|
||||
} catch (Exception ex) {
|
||||
logger.debug("failed to delete file " + stateFile, ex);
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation)) {
|
||||
for (Path stateFile : stream) {
|
||||
String name = stateFile.getFileName().toString();
|
||||
if (!name.startsWith("metadata-")) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
Files.delete(stateFile);
|
||||
} catch (Exception ex) {
|
||||
logger.debug("failed to delete file " + stateFile, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("conversion to new metadata location and format done, backup create at [{}]", backupFile.getAbsolutePath());
|
||||
logger.info("conversion to new metadata location and format done, backup create at [{}]", backupFile.toAbsolutePath());
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -264,17 +264,15 @@ public abstract class MetaDataStateFormat<T> {
|
|||
final long version = pathAndVersion.version;
|
||||
final XContentParser parser;
|
||||
if (pathAndVersion.legacy) { // read the legacy format -- plain XContent
|
||||
try (InputStream stream = Files.newInputStream(stateFile)) {
|
||||
final byte[] data = Streams.copyToByteArray(stream);
|
||||
if (data.length == 0) {
|
||||
logger.debug("{}: no data for [{}], ignoring...", stateType, stateFile.toAbsolutePath());
|
||||
continue;
|
||||
}
|
||||
parser = XContentHelper.createParser(data, 0, data.length);
|
||||
state = format.fromXContent(parser);
|
||||
if (state == null) {
|
||||
logger.debug("{}: no data for [{}], ignoring...", stateType, stateFile.toAbsolutePath());
|
||||
}
|
||||
final byte[] data = Files.readAllBytes(stateFile);
|
||||
if (data.length == 0) {
|
||||
logger.debug("{}: no data for [{}], ignoring...", stateType, stateFile.toAbsolutePath());
|
||||
continue;
|
||||
}
|
||||
parser = XContentHelper.createParser(data, 0, data.length);
|
||||
state = format.fromXContent(parser);
|
||||
if (state == null) {
|
||||
logger.debug("{}: no data for [{}], ignoring...", stateType, stateFile.toAbsolutePath());
|
||||
}
|
||||
} else {
|
||||
state = format.read(stateFile, version);
|
||||
|
|
|
@ -37,7 +37,7 @@ import org.elasticsearch.gateway.local.state.meta.MetaDataStateFormat;
|
|||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.*;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -251,37 +251,33 @@ public class LocalGatewayShardsState extends AbstractComponent implements Cluste
|
|||
|
||||
private void pre019Upgrade() throws Exception {
|
||||
long index = -1;
|
||||
File latest = null;
|
||||
for (File dataLocation : nodeEnv.nodeDataLocations()) {
|
||||
File stateLocation = new File(dataLocation, "_state");
|
||||
if (!stateLocation.exists()) {
|
||||
Path latest = null;
|
||||
for (Path dataLocation : nodeEnv.nodeDataPaths()) {
|
||||
final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME);
|
||||
if (!Files.exists(stateLocation)) {
|
||||
continue;
|
||||
}
|
||||
File[] stateFiles = stateLocation.listFiles();
|
||||
if (stateFiles == null) {
|
||||
continue;
|
||||
}
|
||||
for (File stateFile : stateFiles) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("[find_latest_state]: processing [" + stateFile.getName() + "]");
|
||||
}
|
||||
String name = stateFile.getName();
|
||||
if (!name.startsWith("shards-")) {
|
||||
continue;
|
||||
}
|
||||
long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
|
||||
if (fileIndex >= index) {
|
||||
// try and read the meta data
|
||||
try {
|
||||
byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
|
||||
if (data.length == 0) {
|
||||
logger.debug("[upgrade]: not data for [" + name + "], ignoring...");
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation, "shards-*")) {
|
||||
for (Path stateFile : stream) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("[find_latest_state]: processing [" + stateFile.getFileName() + "]");
|
||||
}
|
||||
String name = stateFile.getFileName().toString();
|
||||
assert name.startsWith("shards-");
|
||||
long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
|
||||
if (fileIndex >= index) {
|
||||
// try and read the meta data
|
||||
try {
|
||||
byte[] data = Files.readAllBytes(stateFile);
|
||||
if (data.length == 0) {
|
||||
logger.debug("[upgrade]: not data for [" + name + "], ignoring...");
|
||||
}
|
||||
pre09ReadState(data);
|
||||
index = fileIndex;
|
||||
latest = stateFile;
|
||||
} catch (IOException e) {
|
||||
logger.warn("[upgrade]: failed to read state from [" + name + "], ignoring...", e);
|
||||
}
|
||||
pre09ReadState(data);
|
||||
index = fileIndex;
|
||||
latest = stateFile;
|
||||
} catch (IOException e) {
|
||||
logger.warn("[upgrade]: failed to read state from [" + name + "], ignoring...", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -290,44 +286,36 @@ public class LocalGatewayShardsState extends AbstractComponent implements Cluste
|
|||
return;
|
||||
}
|
||||
|
||||
logger.info("found old shards state, loading started shards from [{}] and converting to new shards state locations...", latest.getAbsolutePath());
|
||||
Map<ShardId, ShardStateInfo> shardsState = pre09ReadState(Streams.copyToByteArray(new FileInputStream(latest)));
|
||||
logger.info("found old shards state, loading started shards from [{}] and converting to new shards state locations...", latest.toAbsolutePath());
|
||||
Map<ShardId, ShardStateInfo> shardsState = pre09ReadState(Files.readAllBytes(latest));
|
||||
|
||||
for (Map.Entry<ShardId, ShardStateInfo> entry : shardsState.entrySet()) {
|
||||
writeShardState("upgrade", entry.getKey(), entry.getValue(), null);
|
||||
}
|
||||
|
||||
// rename shards state to backup state
|
||||
File backupFile = new File(latest.getParentFile(), "backup-" + latest.getName());
|
||||
if (!latest.renameTo(backupFile)) {
|
||||
throw new IOException("failed to rename old state to backup state [" + latest.getAbsolutePath() + "]");
|
||||
}
|
||||
Path backupFile = latest.resolveSibling("backup-" + latest.getFileName());
|
||||
Files.move(latest, backupFile, StandardCopyOption.ATOMIC_MOVE);
|
||||
|
||||
// delete all other shards state files
|
||||
for (File dataLocation : nodeEnv.nodeDataLocations()) {
|
||||
File stateLocation = new File(dataLocation, "_state");
|
||||
if (!stateLocation.exists()) {
|
||||
for (Path dataLocation : nodeEnv.nodeDataPaths()) {
|
||||
final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME);
|
||||
if (!Files.exists(stateLocation)) {
|
||||
continue;
|
||||
}
|
||||
File[] stateFiles = stateLocation.listFiles();
|
||||
if (stateFiles == null) {
|
||||
continue;
|
||||
}
|
||||
for (File stateFile : stateFiles) {
|
||||
String name = stateFile.getName();
|
||||
if (!name.startsWith("shards-")) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
Files.delete(stateFile.toPath());
|
||||
} catch (Exception ex) {
|
||||
logger.debug("Failed to delete state file {}", ex, stateFile);
|
||||
}
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation, "shards-*")) {
|
||||
for (Path stateFile : stream) {
|
||||
try {
|
||||
Files.delete(stateFile);
|
||||
} catch (Exception ex) {
|
||||
logger.debug("Failed to delete state file {}", ex, stateFile);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("conversion to new shards state location and format done, backup create at [{}]", backupFile.getAbsolutePath());
|
||||
logger.info("conversion to new shards state location and format done, backup create at [{}]", backupFile.toAbsolutePath());
|
||||
}
|
||||
|
||||
private Map<ShardId, ShardStateInfo> pre09ReadState(byte[] data) throws IOException {
|
||||
|
|
|
@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableMap;
|
|||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
|
@ -31,6 +32,7 @@ import org.elasticsearch.rest.*;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.*;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
@ -125,12 +127,12 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
|
|||
class PluginSiteFilter extends RestFilter {
|
||||
|
||||
@Override
|
||||
public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) {
|
||||
public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws IOException {
|
||||
handlePluginSite((HttpRequest) request, (HttpChannel) channel);
|
||||
}
|
||||
}
|
||||
|
||||
void handlePluginSite(HttpRequest request, HttpChannel channel) {
|
||||
void handlePluginSite(HttpRequest request, HttpChannel channel) throws IOException {
|
||||
if (disableSites) {
|
||||
channel.sendResponse(new BytesRestResponse(FORBIDDEN));
|
||||
return;
|
||||
|
@ -167,36 +169,36 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
|
|||
if (sitePath.length() == 0) {
|
||||
sitePath = "/index.html";
|
||||
}
|
||||
final Path siteFile = environment.pluginsFile().resolve(pluginName).resolve("_site");
|
||||
|
||||
final String separator = siteFile.getFileSystem().getSeparator();
|
||||
// Convert file separators.
|
||||
sitePath = sitePath.replace('/', File.separatorChar);
|
||||
|
||||
sitePath = sitePath.replace("/", separator);
|
||||
// this is a plugin provided site, serve it as static files from the plugin location
|
||||
File siteFile = new File(new File(environment.pluginsFile(), pluginName), "_site");
|
||||
File file = new File(siteFile, sitePath);
|
||||
if (!file.exists() || file.isHidden()) {
|
||||
Path file = FileSystemUtils.append(siteFile, Paths.get(sitePath), 0);
|
||||
if (!Files.exists(file) || Files.isHidden(file)) {
|
||||
channel.sendResponse(new BytesRestResponse(NOT_FOUND));
|
||||
return;
|
||||
}
|
||||
if (!file.isFile()) {
|
||||
if (!Files.isRegularFile(file)) {
|
||||
// If it's not a dir, we send a 403
|
||||
if (!file.isDirectory()) {
|
||||
if (!Files.isDirectory(file)) {
|
||||
channel.sendResponse(new BytesRestResponse(FORBIDDEN));
|
||||
return;
|
||||
}
|
||||
// We don't serve dir but if index.html exists in dir we should serve it
|
||||
file = new File(file, "index.html");
|
||||
if (!file.exists() || file.isHidden() || !file.isFile()) {
|
||||
file = file.resolve("index.html");
|
||||
if (!Files.exists(file) || Files.isHidden(file) || !Files.isRegularFile(file)) {
|
||||
channel.sendResponse(new BytesRestResponse(FORBIDDEN));
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!file.getAbsolutePath().startsWith(siteFile.getAbsolutePath())) {
|
||||
if (!file.toAbsolutePath().startsWith(siteFile)) {
|
||||
channel.sendResponse(new BytesRestResponse(FORBIDDEN));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
byte[] data = Streams.copyToByteArray(file);
|
||||
byte[] data = Files.readAllBytes(file);
|
||||
channel.sendResponse(new BytesRestResponse(OK, guessMimeType(sitePath), data));
|
||||
} catch (IOException e) {
|
||||
channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR));
|
||||
|
|
|
@ -73,6 +73,8 @@ import java.io.IOException;
|
|||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
|
@ -227,10 +229,10 @@ public class Analysis {
|
|||
}
|
||||
}
|
||||
|
||||
URL wordListFile = env.resolveConfig(wordListPath);
|
||||
Path wordListFile = env.resolveConfig(wordListPath);
|
||||
|
||||
try {
|
||||
return loadWordList(new InputStreamReader(wordListFile.openStream(), Charsets.UTF_8), "#");
|
||||
return loadWordList(Files.newBufferedReader(wordListFile, Charsets.UTF_8), "#");
|
||||
} catch (IOException ioe) {
|
||||
String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
|
||||
throw new ElasticsearchIllegalArgumentException(message);
|
||||
|
@ -274,11 +276,11 @@ public class Analysis {
|
|||
return null;
|
||||
}
|
||||
|
||||
URL fileUrl = env.resolveConfig(filePath);
|
||||
Path fileUrl = env.resolveConfig(filePath);
|
||||
|
||||
Reader reader = null;
|
||||
try {
|
||||
reader = new InputStreamReader(fileUrl.openStream(), Charsets.UTF_8);
|
||||
reader = new InputStreamReader(Files.newInputStream(fileUrl), Charsets.UTF_8);
|
||||
} catch (IOException ioe) {
|
||||
String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
|
||||
throw new ElasticsearchIllegalArgumentException(message);
|
||||
|
|
|
@ -36,6 +36,8 @@ import org.elasticsearch.index.settings.IndexSettings;
|
|||
import org.xml.sax.InputSource;
|
||||
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
/**
|
||||
* Uses the {@link org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter} to decompound tokens based on hyphenation rules.
|
||||
|
@ -56,10 +58,10 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW
|
|||
throw new ElasticsearchIllegalArgumentException("hyphenation_patterns_path is a required setting.");
|
||||
}
|
||||
|
||||
URL hyphenationPatternsFile = env.resolveConfig(hyphenationPatternsPath);
|
||||
Path hyphenationPatternsFile = env.resolveConfig(hyphenationPatternsPath);
|
||||
|
||||
try {
|
||||
hyphenationTree = HyphenationCompoundWordTokenFilter.getHyphenationTree(new InputSource(hyphenationPatternsFile.toExternalForm()));
|
||||
hyphenationTree = HyphenationCompoundWordTokenFilter.getHyphenationTree(new InputSource(Files.newInputStream(hyphenationPatternsFile)));
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchIllegalArgumentException("Exception while reading hyphenation_patterns_path: " + e.getMessage());
|
||||
}
|
||||
|
|
|
@ -69,7 +69,11 @@ import java.io.File;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
|
||||
|
@ -134,7 +138,7 @@ public class MapperService extends AbstractIndexComponent {
|
|||
|
||||
this.dynamic = componentSettings.getAsBoolean("dynamic", true);
|
||||
String defaultMappingLocation = componentSettings.get("default_mapping_location");
|
||||
final URL defaultMappingUrl;
|
||||
final Path defaultMappingUrl;
|
||||
if (index.getName().equals(ScriptService.SCRIPT_INDEX)){
|
||||
defaultMappingUrl = getMappingUrl(indexSettings, environment, defaultMappingLocation,"script-mapping.json","org/elasticsearch/index/mapper/script-mapping.json");
|
||||
} else {
|
||||
|
@ -160,29 +164,25 @@ public class MapperService extends AbstractIndexComponent {
|
|||
}
|
||||
} else {
|
||||
try {
|
||||
defaultMappingSource = Streams.copyToString(new InputStreamReader(defaultMappingUrl.openStream(), Charsets.UTF_8));
|
||||
defaultMappingSource = Streams.copyToString(new InputStreamReader(Files.newInputStream(defaultMappingUrl), Charsets.UTF_8));
|
||||
} catch (IOException e) {
|
||||
throw new MapperException("Failed to load default mapping source from [" + defaultMappingLocation + "]", e);
|
||||
}
|
||||
}
|
||||
|
||||
String percolatorMappingLocation = componentSettings.get("default_percolator_mapping_location");
|
||||
URL percolatorMappingUrl = null;
|
||||
Path percolatorMappingUrl = null;
|
||||
if (percolatorMappingLocation != null) {
|
||||
try {
|
||||
percolatorMappingUrl = environment.resolveConfig(percolatorMappingLocation);
|
||||
} catch (FailedToResolveConfigException e) {
|
||||
// not there, default to the built in one
|
||||
try {
|
||||
percolatorMappingUrl = new File(percolatorMappingLocation).toURI().toURL();
|
||||
} catch (MalformedURLException e1) {
|
||||
throw new FailedToResolveConfigException("Failed to resolve default percolator mapping location [" + percolatorMappingLocation + "]");
|
||||
}
|
||||
percolatorMappingUrl = Paths.get(percolatorMappingLocation);
|
||||
}
|
||||
}
|
||||
if (percolatorMappingUrl != null) {
|
||||
try {
|
||||
defaultPercolatorMappingSource = Streams.copyToString(new InputStreamReader(percolatorMappingUrl.openStream(), Charsets.UTF_8));
|
||||
defaultPercolatorMappingSource = Streams.copyToString(new InputStreamReader(Files.newInputStream(percolatorMappingUrl), Charsets.UTF_8));
|
||||
} catch (IOException e) {
|
||||
throw new MapperException("Failed to load default percolator mapping source from [" + percolatorMappingUrl + "]", e);
|
||||
}
|
||||
|
@ -208,16 +208,20 @@ public class MapperService extends AbstractIndexComponent {
|
|||
}
|
||||
}
|
||||
|
||||
private URL getMappingUrl(Settings indexSettings, Environment environment, String mappingLocation, String configString, String resourceLocation) {
|
||||
URL mappingUrl;
|
||||
private Path getMappingUrl(Settings indexSettings, Environment environment, String mappingLocation, String configString, String resourceLocation) {
|
||||
Path mappingUrl;
|
||||
if (mappingLocation == null) {
|
||||
try {
|
||||
mappingUrl = environment.resolveConfig(configString);
|
||||
} catch (FailedToResolveConfigException e) {
|
||||
// not there, default to the built in one
|
||||
mappingUrl = indexSettings.getClassLoader().getResource(resourceLocation);
|
||||
if (mappingUrl == null) {
|
||||
mappingUrl = MapperService.class.getClassLoader().getResource(resourceLocation);
|
||||
try {
|
||||
// not there, default to the built in one
|
||||
mappingUrl = Paths.get(indexSettings.getClassLoader().getResource(resourceLocation).toURI());
|
||||
if (mappingUrl == null) {
|
||||
mappingUrl = Paths.get(MapperService.class.getClassLoader().getResource(resourceLocation).toURI());
|
||||
}
|
||||
} catch (URISyntaxException e1) {
|
||||
throw new FailedToResolveConfigException("Failed to resolve dynamic mapping location [" + mappingLocation + "]");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -225,11 +229,8 @@ public class MapperService extends AbstractIndexComponent {
|
|||
mappingUrl = environment.resolveConfig(mappingLocation);
|
||||
} catch (FailedToResolveConfigException e) {
|
||||
// not there, default to the built in one
|
||||
try {
|
||||
mappingUrl = new File(mappingLocation).toURI().toURL();
|
||||
} catch (MalformedURLException e1) {
|
||||
throw new FailedToResolveConfigException("Failed to resolve dynamic mapping location [" + mappingLocation + "]");
|
||||
}
|
||||
mappingUrl = Paths.get(mappingLocation);
|
||||
|
||||
}
|
||||
}
|
||||
return mappingUrl;
|
||||
|
|
|
@ -54,6 +54,7 @@ import org.elasticsearch.indices.recovery.RecoveryFailedException;
|
|||
|
||||
import java.io.*;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.locks.ReentrantReadWriteLock;
|
||||
|
@ -380,11 +381,11 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
|
|||
* Reads a MetadataSnapshot from the given index locations or returns an empty snapshot if it can't be read.
|
||||
* @throws IOException if the index we try to read is corrupted
|
||||
*/
|
||||
public static MetadataSnapshot readMetadataSnapshot(File[] indexLocations, ESLogger logger) throws IOException {
|
||||
public static MetadataSnapshot readMetadataSnapshot(Path[] indexLocations, ESLogger logger) throws IOException {
|
||||
final Directory[] dirs = new Directory[indexLocations.length];
|
||||
try {
|
||||
for (int i=0; i< indexLocations.length; i++) {
|
||||
dirs[i] = new SimpleFSDirectory(indexLocations[i].toPath());
|
||||
dirs[i] = new SimpleFSDirectory(indexLocations[i]);
|
||||
}
|
||||
DistributorDirectory dir = new DistributorDirectory(dirs);
|
||||
failIfCorrupted(dir, new ShardId("", 1));
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.store.support;
|
|||
import org.apache.lucene.store.StoreRateLimiting;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchIllegalStateException;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
|
@ -34,9 +35,7 @@ import org.elasticsearch.index.shard.ShardId;
|
|||
import org.elasticsearch.index.store.IndexStore;
|
||||
import org.elasticsearch.indices.store.IndicesStore;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
/**
|
||||
|
@ -74,7 +73,7 @@ public abstract class AbstractIndexStore extends AbstractIndexComponent implemen
|
|||
}
|
||||
private final NodeEnvironment nodeEnv;
|
||||
|
||||
private final File[] locations;
|
||||
private final Path[] locations;
|
||||
|
||||
protected final IndexService indexService;
|
||||
|
||||
|
@ -108,7 +107,7 @@ public abstract class AbstractIndexStore extends AbstractIndexComponent implemen
|
|||
indexService.settingsService().addListener(applySettings);
|
||||
this.nodeEnv = nodeEnv;
|
||||
if (nodeEnv.hasNodeFile()) {
|
||||
this.locations = nodeEnv.indexLocations(index);
|
||||
this.locations = nodeEnv.indexPaths(index);
|
||||
} else {
|
||||
this.locations = null;
|
||||
}
|
||||
|
@ -133,12 +132,7 @@ public abstract class AbstractIndexStore extends AbstractIndexComponent implemen
|
|||
if (indexService.hasShard(shardId.id())) {
|
||||
return false;
|
||||
}
|
||||
for (Path location : nodeEnv.shardPaths(shardId)) {
|
||||
if (Files.exists(location)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
return FileSystemUtils.exists(nodeEnv.shardPaths(shardId));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -25,12 +25,17 @@ import org.apache.lucene.analysis.hunspell.Dictionary;
|
|||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
|
||||
import java.io.*;
|
||||
import java.net.MalformedURLException;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
|
@ -64,8 +69,6 @@ import java.util.*;
|
|||
*/
|
||||
public class HunspellService extends AbstractComponent {
|
||||
|
||||
private final static DictionaryFileFilter DIC_FILE_FILTER = new DictionaryFileFilter();
|
||||
private final static AffixFileFilter AFFIX_FILE_FILTER = new AffixFileFilter();
|
||||
public final static String HUNSPELL_LAZY_LOAD = "indices.analysis.hunspell.dictionary.lazy";
|
||||
public final static String HUNSPELL_IGNORE_CASE = "indices.analysis.hunspell.dictionary.ignore_case";
|
||||
public final static String HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location";
|
||||
|
@ -73,14 +76,14 @@ public class HunspellService extends AbstractComponent {
|
|||
private final Map<String, Dictionary> knownDictionaries;
|
||||
|
||||
private final boolean defaultIgnoreCase;
|
||||
private final File hunspellDir;
|
||||
private final Path hunspellDir;
|
||||
|
||||
public HunspellService(final Settings settings, final Environment env) {
|
||||
public HunspellService(final Settings settings, final Environment env) throws IOException {
|
||||
this(settings, env, Collections.<String, Dictionary>emptyMap());
|
||||
}
|
||||
|
||||
@Inject
|
||||
public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries) {
|
||||
public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries) throws IOException {
|
||||
super(settings);
|
||||
this.knownDictionaries = knownDictionaries;
|
||||
this.hunspellDir = resolveHunspellDirectory(settings, env);
|
||||
|
@ -109,23 +112,27 @@ public class HunspellService extends AbstractComponent {
|
|||
return dictionaries.getUnchecked(locale);
|
||||
}
|
||||
|
||||
private File resolveHunspellDirectory(Settings settings, Environment env) {
|
||||
private Path resolveHunspellDirectory(Settings settings, Environment env) {
|
||||
String location = settings.get(HUNSPELL_LOCATION, null);
|
||||
if (location != null) {
|
||||
return new File(location);
|
||||
return Paths.get(location);
|
||||
}
|
||||
return new File(env.configFile(), "hunspell");
|
||||
return env.configFile().resolve( "hunspell");
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the hunspell directory and loads all found dictionaries
|
||||
*/
|
||||
private void scanAndLoadDictionaries() {
|
||||
if (hunspellDir.exists() && hunspellDir.isDirectory()) {
|
||||
for (File file : hunspellDir.listFiles()) {
|
||||
if (file.isDirectory()) {
|
||||
if (file.list(DIC_FILE_FILTER).length > 0) { // just making sure it's indeed a dictionary dir
|
||||
dictionaries.getUnchecked(file.getName());
|
||||
private void scanAndLoadDictionaries() throws IOException {
|
||||
if (Files.isDirectory(hunspellDir)) {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(hunspellDir)) {
|
||||
for (Path file : stream) {
|
||||
if (Files.isDirectory(file)) {
|
||||
try (DirectoryStream<Path> inner = Files.newDirectoryStream(hunspellDir.resolve(file), "*.dic")) {
|
||||
if (inner.iterator().hasNext()) { // just making sure it's indeed a dictionary dir
|
||||
dictionaries.getUnchecked(file.getFileName().toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -138,7 +145,6 @@ public class HunspellService extends AbstractComponent {
|
|||
* @param locale The locale of the hunspell dictionary to be loaded.
|
||||
* @param nodeSettings The node level settings
|
||||
* @param env The node environment (from which the conf path will be resolved)
|
||||
* @param version The lucene version
|
||||
* @return The loaded Hunspell dictionary
|
||||
* @throws Exception when loading fails (due to IO errors or malformed dictionary files)
|
||||
*/
|
||||
|
@ -146,8 +152,8 @@ public class HunspellService extends AbstractComponent {
|
|||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Loading hunspell dictionary [{}]...", locale);
|
||||
}
|
||||
File dicDir = new File(hunspellDir, locale);
|
||||
if (!dicDir.exists() || !dicDir.isDirectory()) {
|
||||
Path dicDir = hunspellDir.resolve(locale);
|
||||
if (FileSystemUtils.isAccessibleDirectory(dicDir, logger) == false) {
|
||||
throw new ElasticsearchException(String.format(Locale.ROOT, "Could not find hunspell dictionary [%s]", locale));
|
||||
}
|
||||
|
||||
|
@ -156,7 +162,7 @@ public class HunspellService extends AbstractComponent {
|
|||
|
||||
boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase);
|
||||
|
||||
File[] affixFiles = dicDir.listFiles(AFFIX_FILE_FILTER);
|
||||
Path[] affixFiles = FileSystemUtils.files(dicDir, "*.aff");
|
||||
if (affixFiles.length == 0) {
|
||||
throw new ElasticsearchException(String.format(Locale.ROOT, "Missing affix file for hunspell dictionary [%s]", locale));
|
||||
}
|
||||
|
@ -165,15 +171,15 @@ public class HunspellService extends AbstractComponent {
|
|||
}
|
||||
InputStream affixStream = null;
|
||||
|
||||
File[] dicFiles = dicDir.listFiles(DIC_FILE_FILTER);
|
||||
Path[] dicFiles = FileSystemUtils.files(dicDir, "*.dic");
|
||||
List<InputStream> dicStreams = new ArrayList<>(dicFiles.length);
|
||||
try {
|
||||
|
||||
for (int i = 0; i < dicFiles.length; i++) {
|
||||
dicStreams.add(new FileInputStream(dicFiles[i]));
|
||||
dicStreams.add(Files.newInputStream(dicFiles[i]));
|
||||
}
|
||||
|
||||
affixStream = new FileInputStream(affixFiles[0]);
|
||||
affixStream = Files.newInputStream(affixFiles[0]);
|
||||
|
||||
return new Dictionary(affixStream, dicStreams, ignoreCase);
|
||||
|
||||
|
@ -208,47 +214,18 @@ public class HunspellService extends AbstractComponent {
|
|||
* @param defaults The default settings for this dictionary
|
||||
* @return The resolved settings.
|
||||
*/
|
||||
private static Settings loadDictionarySettings(File dir, Settings defaults) {
|
||||
File file = new File(dir, "settings.yml");
|
||||
if (file.exists()) {
|
||||
try {
|
||||
return ImmutableSettings.settingsBuilder().loadFromUrl(file.toURI().toURL()).put(defaults).build();
|
||||
} catch (MalformedURLException e) {
|
||||
throw new ElasticsearchException(String.format(Locale.ROOT, "Could not load hunspell dictionary settings from [%s]", file.getAbsolutePath()), e);
|
||||
}
|
||||
private static Settings loadDictionarySettings(Path dir, Settings defaults) {
|
||||
Path file = dir.resolve("settings.yml");
|
||||
if (Files.exists(file)) {
|
||||
return ImmutableSettings.settingsBuilder().loadFromPath(file).put(defaults).build();
|
||||
}
|
||||
|
||||
file = new File(dir, "settings.json");
|
||||
if (file.exists()) {
|
||||
try {
|
||||
return ImmutableSettings.settingsBuilder().loadFromUrl(file.toURI().toURL()).put(defaults).build();
|
||||
} catch (MalformedURLException e) {
|
||||
throw new ElasticsearchException(String.format(Locale.ROOT, "Could not load hunspell dictionary settings from [%s]", file.getAbsolutePath()), e);
|
||||
}
|
||||
file = dir.resolve("settings.json");
|
||||
if (Files.exists(file)) {
|
||||
return ImmutableSettings.settingsBuilder().loadFromPath(file).put(defaults).build();
|
||||
}
|
||||
|
||||
return defaults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Only accepts {@code *.dic} files
|
||||
*/
|
||||
static class DictionaryFileFilter implements FilenameFilter {
|
||||
@Override
|
||||
public boolean accept(File dir, String name) {
|
||||
return name.toLowerCase(Locale.ROOT).endsWith(".dic");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Only accepts {@code *.aff} files
|
||||
*/
|
||||
static class AffixFileFilter implements FilenameFilter {
|
||||
@Override
|
||||
public boolean accept(File dir, String name) {
|
||||
return name.toLowerCase(Locale.ROOT).endsWith(".aff");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -48,6 +48,7 @@ import org.elasticsearch.transport.*;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
@ -161,7 +162,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
|
|||
IndexService indexService = indicesService.indexService(shardId.getIndex());
|
||||
if (indexService == null) {
|
||||
if (nodeEnv.hasNodeFile()) {
|
||||
File[] shardLocations = nodeEnv.shardLocations(shardId);
|
||||
Path[] shardLocations = nodeEnv.shardPaths(shardId);
|
||||
if (FileSystemUtils.exists(shardLocations)) {
|
||||
deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable);
|
||||
}
|
||||
|
@ -320,7 +321,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
|
|||
if (indexService == null) {
|
||||
// not physical allocation of the index, delete it from the file system if applicable
|
||||
if (nodeEnv.hasNodeFile()) {
|
||||
File[] shardLocations = nodeEnv.shardLocations(shardId);
|
||||
Path[] shardLocations = nodeEnv.shardPaths(shardId);
|
||||
if (FileSystemUtils.exists(shardLocations)) {
|
||||
logger.debug("{} deleting shard that is no longer used", shardId);
|
||||
try {
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
|
|||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
|
@ -50,6 +51,7 @@ import org.elasticsearch.transport.TransportService;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
|
@ -164,18 +166,12 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio
|
|||
if (!storeType.contains("fs")) {
|
||||
return new StoreFilesMetaData(false, shardId, ImmutableMap.<String, StoreFileMetaData>of());
|
||||
}
|
||||
File[] shardLocations = nodeEnv.shardLocations(shardId);
|
||||
File[] shardIndexLocations = new File[shardLocations.length];
|
||||
Path[] shardLocations = nodeEnv.shardPaths(shardId);
|
||||
Path[] shardIndexLocations = new Path[shardLocations.length];
|
||||
for (int i = 0; i < shardLocations.length; i++) {
|
||||
shardIndexLocations[i] = new File(shardLocations[i], "index");
|
||||
}
|
||||
boolean exists = false;
|
||||
for (File shardIndexLocation : shardIndexLocations) {
|
||||
if (shardIndexLocation.exists()) {
|
||||
exists = true;
|
||||
break;
|
||||
}
|
||||
shardIndexLocations[i] = shardLocations[i].resolve("index");
|
||||
}
|
||||
final boolean exists = FileSystemUtils.exists(shardIndexLocations);
|
||||
if (!exists) {
|
||||
return new StoreFilesMetaData(false, shardId, ImmutableMap.<String, StoreFileMetaData>of());
|
||||
}
|
||||
|
|
|
@ -19,9 +19,11 @@
|
|||
|
||||
package org.elasticsearch.monitor.fs;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
public interface FsProbe {
|
||||
|
||||
FsStats stats();
|
||||
FsStats stats() throws IOException;
|
||||
}
|
||||
|
|
|
@ -24,6 +24,8 @@ import org.elasticsearch.common.inject.Inject;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class FsService extends AbstractComponent {
|
||||
|
@ -35,7 +37,7 @@ public class FsService extends AbstractComponent {
|
|||
private FsStats cachedStats;
|
||||
|
||||
@Inject
|
||||
public FsService(Settings settings, FsProbe probe) {
|
||||
public FsService(Settings settings, FsProbe probe) throws IOException {
|
||||
super(settings);
|
||||
this.probe = probe;
|
||||
this.cachedStats = probe.stats();
|
||||
|
@ -46,8 +48,13 @@ public class FsService extends AbstractComponent {
|
|||
}
|
||||
|
||||
public synchronized FsStats stats() {
|
||||
if ((System.currentTimeMillis() - cachedStats.getTimestamp()) > refreshInterval.millis()) {
|
||||
cachedStats = probe.stats();
|
||||
try {
|
||||
if ((System.currentTimeMillis() - cachedStats.getTimestamp()) > refreshInterval.millis()) {
|
||||
cachedStats = probe.stats();
|
||||
}
|
||||
return cachedStats;
|
||||
} catch (IOException ex) {
|
||||
logger.warn("can't fetch fs stats", ex);
|
||||
}
|
||||
return cachedStats;
|
||||
}
|
||||
|
|
|
@ -25,6 +25,10 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.env.NodeEnvironment;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.FileStore;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -39,19 +43,20 @@ public class JmxFsProbe extends AbstractComponent implements FsProbe {
|
|||
}
|
||||
|
||||
@Override
|
||||
public FsStats stats() {
|
||||
public FsStats stats() throws IOException {
|
||||
if (!nodeEnv.hasNodeFile()) {
|
||||
return new FsStats(System.currentTimeMillis(), new FsStats.Info[0]);
|
||||
}
|
||||
File[] dataLocations = nodeEnv.nodeDataLocations();
|
||||
Path[] dataLocations = nodeEnv.nodeDataPaths();
|
||||
FsStats.Info[] infos = new FsStats.Info[dataLocations.length];
|
||||
for (int i = 0; i < dataLocations.length; i++) {
|
||||
File dataLocation = dataLocations[i];
|
||||
Path dataLocation = dataLocations[i];
|
||||
FsStats.Info info = new FsStats.Info();
|
||||
info.path = dataLocation.getAbsolutePath();
|
||||
info.total = dataLocation.getTotalSpace();
|
||||
info.free = dataLocation.getFreeSpace();
|
||||
info.available = dataLocation.getUsableSpace();
|
||||
FileStore fileStore = Files.getFileStore(dataLocation);
|
||||
info.path = dataLocation.toAbsolutePath().toString();
|
||||
info.total = fileStore.getTotalSpace();
|
||||
info.free = fileStore.getUnallocatedSpace();
|
||||
info.available = fileStore.getUsableSpace();
|
||||
infos[i] = info;
|
||||
}
|
||||
return new FsStats(System.currentTimeMillis(), infos);
|
||||
|
|
|
@ -32,6 +32,8 @@ import org.hyperic.sigar.Sigar;
|
|||
import org.hyperic.sigar.SigarException;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
|
@ -42,7 +44,7 @@ public class SigarFsProbe extends AbstractComponent implements FsProbe {
|
|||
|
||||
private final SigarService sigarService;
|
||||
|
||||
private Map<File, FileSystem> fileSystems = Maps.newHashMap();
|
||||
private Map<Path, FileSystem> fileSystems = Maps.newHashMap();
|
||||
|
||||
@Inject
|
||||
public SigarFsProbe(Settings settings, NodeEnvironment nodeEnv, SigarService sigarService) {
|
||||
|
@ -56,13 +58,13 @@ public class SigarFsProbe extends AbstractComponent implements FsProbe {
|
|||
if (!nodeEnv.hasNodeFile()) {
|
||||
return new FsStats(System.currentTimeMillis(), new FsStats.Info[0]);
|
||||
}
|
||||
File[] dataLocations = nodeEnv.nodeDataLocations();
|
||||
Path[] dataLocations = nodeEnv.nodeDataPaths();
|
||||
FsStats.Info[] infos = new FsStats.Info[dataLocations.length];
|
||||
for (int i = 0; i < dataLocations.length; i++) {
|
||||
File dataLocation = dataLocations[i];
|
||||
Path dataLocation = dataLocations[i];
|
||||
|
||||
FsStats.Info info = new FsStats.Info();
|
||||
info.path = dataLocation.getAbsolutePath();
|
||||
info.path = dataLocation.toAbsolutePath().toString();
|
||||
|
||||
try {
|
||||
FileSystem fileSystem = fileSystems.get(dataLocation);
|
||||
|
@ -70,7 +72,7 @@ public class SigarFsProbe extends AbstractComponent implements FsProbe {
|
|||
if (fileSystem == null) {
|
||||
FileSystemMap fileSystemMap = sigar.getFileSystemMap();
|
||||
if (fileSystemMap != null) {
|
||||
fileSystem = fileSystemMap.getMountPoint(dataLocation.getPath());
|
||||
fileSystem = fileSystemMap.getMountPoint(dataLocation.toAbsolutePath().toString());
|
||||
fileSystems.put(dataLocation, fileSystem);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,33 +60,33 @@ public class InternalSettingsPreparer {
|
|||
// if its default, then load it, but also load form env
|
||||
if (Strings.hasText(System.getProperty("es.default.config"))) {
|
||||
loadFromEnv = true;
|
||||
settingsBuilder.loadFromUrl(environment.resolveConfig(System.getProperty("es.default.config")));
|
||||
settingsBuilder.loadFromPath(environment.resolveConfig(System.getProperty("es.default.config")));
|
||||
}
|
||||
// if explicit, just load it and don't load from env
|
||||
if (Strings.hasText(System.getProperty("es.config"))) {
|
||||
loadFromEnv = false;
|
||||
settingsBuilder.loadFromUrl(environment.resolveConfig(System.getProperty("es.config")));
|
||||
settingsBuilder.loadFromPath(environment.resolveConfig(System.getProperty("es.config")));
|
||||
}
|
||||
if (Strings.hasText(System.getProperty("elasticsearch.config"))) {
|
||||
loadFromEnv = false;
|
||||
settingsBuilder.loadFromUrl(environment.resolveConfig(System.getProperty("elasticsearch.config")));
|
||||
settingsBuilder.loadFromPath(environment.resolveConfig(System.getProperty("elasticsearch.config")));
|
||||
}
|
||||
}
|
||||
if (loadFromEnv) {
|
||||
try {
|
||||
settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch.yml"));
|
||||
settingsBuilder.loadFromPath(environment.resolveConfig("elasticsearch.yml"));
|
||||
} catch (FailedToResolveConfigException e) {
|
||||
// ignore
|
||||
} catch (NoClassDefFoundError e) {
|
||||
// ignore, no yaml
|
||||
}
|
||||
try {
|
||||
settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch.json"));
|
||||
settingsBuilder.loadFromPath(environment.resolveConfig("elasticsearch.json"));
|
||||
} catch (FailedToResolveConfigException e) {
|
||||
// ignore
|
||||
}
|
||||
try {
|
||||
settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch.properties"));
|
||||
settingsBuilder.loadFromPath(environment.resolveConfig("elasticsearch.properties"));
|
||||
} catch (FailedToResolveConfigException e) {
|
||||
// ignore
|
||||
}
|
||||
|
@ -136,7 +136,7 @@ public class InternalSettingsPreparer {
|
|||
// put back the env settings
|
||||
settingsBuilder = settingsBuilder().put(v1);
|
||||
// we put back the path.logs so we can use it in the logging configuration file
|
||||
settingsBuilder.put("path.logs", cleanPath(environment.logsFile().getAbsolutePath()));
|
||||
settingsBuilder.put("path.logs", cleanPath(environment.logsFile().toAbsolutePath().toString()));
|
||||
|
||||
v1 = settingsBuilder.build();
|
||||
|
||||
|
|
|
@ -39,6 +39,8 @@ import org.elasticsearch.plugins.PluginsService;
|
|||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class NodeService extends AbstractComponent {
|
||||
|
@ -132,7 +134,7 @@ public class NodeService extends AbstractComponent {
|
|||
);
|
||||
}
|
||||
|
||||
public NodeStats stats() {
|
||||
public NodeStats stats() throws IOException {
|
||||
// for indices stats we want to include previous allocated shards stats as well (it will
|
||||
// only be applied to the sensible ones to use, like refresh/merge/flush/indexing stats)
|
||||
return new NodeStats(discovery.localNode(), System.currentTimeMillis(),
|
||||
|
|
|
@ -21,11 +21,10 @@ package org.elasticsearch.plugins;
|
|||
|
||||
import com.google.common.base.Strings;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.Iterables;
|
||||
import com.google.common.collect.Iterators;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.ElasticsearchIllegalArgumentException;
|
||||
import org.elasticsearch.ElasticsearchIllegalStateException;
|
||||
import org.elasticsearch.ElasticsearchTimeoutException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.*;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.http.client.HttpDownloadHelper;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
|
@ -43,13 +42,13 @@ import javax.net.ssl.X509TrustManager;
|
|||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.FileVisitResult;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.SimpleFileVisitor;
|
||||
import java.nio.file.*;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.nio.file.attribute.PosixFileAttributes;
|
||||
import java.nio.file.attribute.PosixFilePermission;
|
||||
import java.util.*;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
|
@ -84,15 +83,7 @@ public class PluginManager {
|
|||
"plugin.bat",
|
||||
"service.bat").build();
|
||||
|
||||
// Valid directory names for plugin ZIP files when it has only one single dir
|
||||
private static final ImmutableSet<Object> VALID_TOP_LEVEL_PLUGIN_DIRS = ImmutableSet.builder()
|
||||
.add("_site",
|
||||
"bin",
|
||||
"config",
|
||||
"_dict").build();
|
||||
|
||||
private final Environment environment;
|
||||
|
||||
private String url;
|
||||
private OutputMode outputMode;
|
||||
private TimeValue timeout;
|
||||
|
@ -125,7 +116,7 @@ public class PluginManager {
|
|||
sc.init(null, trustAllCerts, new java.security.SecureRandom());
|
||||
HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
throw new ElasticsearchException("Failed to install all-trusting trust manager", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -139,22 +130,21 @@ public class PluginManager {
|
|||
if (outputMode == OutputMode.SILENT) {
|
||||
progress = new HttpDownloadHelper.NullProgress();
|
||||
} else {
|
||||
progress = new HttpDownloadHelper.VerboseProgress(System.out);
|
||||
progress = new HttpDownloadHelper.VerboseProgress(SysOut.getOut());
|
||||
}
|
||||
|
||||
if (!environment.pluginsFile().canWrite()) {
|
||||
System.err.println();
|
||||
if (!Files.isWritable(environment.pluginsFile())) {
|
||||
throw new IOException("plugin directory " + environment.pluginsFile() + " is read only");
|
||||
}
|
||||
|
||||
PluginHandle pluginHandle = PluginHandle.parse(name);
|
||||
checkForForbiddenName(pluginHandle.name);
|
||||
|
||||
File pluginFile = pluginHandle.distroFile(environment);
|
||||
Path pluginFile = pluginHandle.distroFile(environment);
|
||||
// extract the plugin
|
||||
File extractLocation = pluginHandle.extractedDir(environment);
|
||||
if (extractLocation.exists()) {
|
||||
throw new IOException("plugin directory " + extractLocation.getAbsolutePath() + " already exists. To update the plugin, uninstall it first using --remove " + name + " command");
|
||||
final Path extractLocation = pluginHandle.extractedDir(environment);
|
||||
if (Files.exists(extractLocation)) {
|
||||
throw new IOException("plugin directory " + extractLocation.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using --remove " + name + " command");
|
||||
}
|
||||
|
||||
// first, try directly from the URL provided
|
||||
|
@ -191,41 +181,45 @@ public class PluginManager {
|
|||
if (!downloaded) {
|
||||
throw new IOException("failed to download out of all possible locations..., use --verbose to get detailed information");
|
||||
}
|
||||
try (FileSystem zipFile = FileSystems.newFileSystem(pluginFile, null)) {
|
||||
for (final Path root : zipFile.getRootDirectories() ) {
|
||||
final Path[] topLevelFiles = FileSystemUtils.files(root);
|
||||
//we check whether we need to remove the top-level folder while extracting
|
||||
//sometimes (e.g. github) the downloaded archive contains a top-level folder which needs to be removed
|
||||
final boolean stripTopLevelDirectory;
|
||||
if (topLevelFiles.length == 1 && Files.isDirectory(topLevelFiles[0])) {
|
||||
// valid names if the zip has only one top level directory
|
||||
switch (topLevelFiles[0].getFileName().toString()) {
|
||||
case "_site/":
|
||||
case "bin/":
|
||||
case "config/":
|
||||
case "_dict/":
|
||||
stripTopLevelDirectory = false;
|
||||
break;
|
||||
default:
|
||||
stripTopLevelDirectory = true;
|
||||
}
|
||||
} else {
|
||||
stripTopLevelDirectory = false;
|
||||
}
|
||||
Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
Path target = FileSystemUtils.append(extractLocation, file, stripTopLevelDirectory ? 1 : 0);
|
||||
Files.createDirectories(target);
|
||||
Files.copy(file, target, StandardCopyOption.REPLACE_EXISTING);
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
ZipFile zipFile = null;
|
||||
try {
|
||||
zipFile = new ZipFile(pluginFile);
|
||||
//we check whether we need to remove the top-level folder while extracting
|
||||
//sometimes (e.g. github) the downloaded archive contains a top-level folder which needs to be removed
|
||||
boolean removeTopLevelDir = topLevelDirInExcess(zipFile);
|
||||
Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
|
||||
while (zipEntries.hasMoreElements()) {
|
||||
ZipEntry zipEntry = zipEntries.nextElement();
|
||||
if (zipEntry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
String zipEntryName = zipEntry.getName().replace('\\', '/');
|
||||
if (removeTopLevelDir) {
|
||||
zipEntryName = zipEntryName.substring(zipEntryName.indexOf('/'));
|
||||
}
|
||||
File target = new File(extractLocation, zipEntryName);
|
||||
FileSystemUtils.mkdirs(target.getParentFile());
|
||||
Streams.copy(zipFile.getInputStream(zipEntry), new FileOutputStream(target));
|
||||
});
|
||||
}
|
||||
log("Installed " + name + " into " + extractLocation.getAbsolutePath());
|
||||
log("Installed " + name + " into " + extractLocation.toAbsolutePath());
|
||||
} catch (Exception e) {
|
||||
log("failed to extract plugin [" + pluginFile + "]: " + ExceptionsHelper.detailedMessage(e));
|
||||
return;
|
||||
} finally {
|
||||
if (zipFile != null) {
|
||||
try {
|
||||
zipFile.close();
|
||||
} catch (IOException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
try {
|
||||
Files.delete(pluginFile.toPath());
|
||||
Files.delete(pluginFile);
|
||||
} catch (Exception ex) {
|
||||
log("Failed to delete plugin file" + pluginFile + " " + ex);
|
||||
}
|
||||
|
@ -234,7 +228,7 @@ public class PluginManager {
|
|||
if (FileSystemUtils.hasExtensions(extractLocation, ".java")) {
|
||||
debug("Plugin installation assumed to be site plugin, but contains source code, aborting installation...");
|
||||
try {
|
||||
IOUtils.rm(extractLocation.toPath());
|
||||
IOUtils.rm(extractLocation);
|
||||
} catch(Exception ex) {
|
||||
debug("Failed to remove site plugin from path " + extractLocation + " - " + ex.getMessage());
|
||||
}
|
||||
|
@ -243,54 +237,52 @@ public class PluginManager {
|
|||
|
||||
// It could potentially be a non explicit _site plugin
|
||||
boolean potentialSitePlugin = true;
|
||||
File binFile = new File(extractLocation, "bin");
|
||||
if (binFile.exists() && binFile.isDirectory()) {
|
||||
File toLocation = pluginHandle.binDir(environment);
|
||||
debug("Found bin, moving to " + toLocation.getAbsolutePath());
|
||||
if (toLocation.exists()) {
|
||||
IOUtils.rm(toLocation.toPath());
|
||||
Path binFile = extractLocation.resolve("bin");
|
||||
if (Files.exists(binFile) && Files.isDirectory(binFile)) {
|
||||
Path toLocation = pluginHandle.binDir(environment);
|
||||
debug("Found bin, moving to " + toLocation.toAbsolutePath());
|
||||
if (Files.exists(toLocation)) {
|
||||
IOUtils.rm(toLocation);
|
||||
}
|
||||
if (!binFile.renameTo(toLocation)) {
|
||||
throw new IOException("Could not move ["+ binFile.getAbsolutePath() + "] to [" + toLocation.getAbsolutePath() + "]");
|
||||
}
|
||||
// Make everything in bin/ executable
|
||||
Files.walkFileTree(toLocation.toPath(), new SimpleFileVisitor<Path>() {
|
||||
Files.move(binFile, toLocation, StandardCopyOption.ATOMIC_MOVE);
|
||||
final Set<PosixFilePermission> perms = new HashSet<PosixFilePermission>();
|
||||
perms.add(PosixFilePermission.OWNER_EXECUTE);
|
||||
perms.add(PosixFilePermission.GROUP_EXECUTE);
|
||||
perms.add(PosixFilePermission.OTHERS_EXECUTE);
|
||||
Files.walkFileTree(toLocation, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
if (attrs.isRegularFile()) {
|
||||
file.toFile().setExecutable(true);
|
||||
|
||||
Files.setPosixFilePermissions(file, perms);
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
});
|
||||
debug("Installed " + name + " into " + toLocation.getAbsolutePath());
|
||||
debug("Installed " + name + " into " + toLocation.toAbsolutePath());
|
||||
potentialSitePlugin = false;
|
||||
}
|
||||
|
||||
File configFile = new File(extractLocation, "config");
|
||||
if (configFile.exists() && configFile.isDirectory()) {
|
||||
File configDestLocation = pluginHandle.configDir(environment);
|
||||
debug("Found config, moving to " + configDestLocation.getAbsolutePath());
|
||||
Path configFile = extractLocation.resolve("config");
|
||||
if (Files.exists(configFile) && Files.isDirectory(configFile)) {
|
||||
Path configDestLocation = pluginHandle.configDir(environment);
|
||||
debug("Found config, moving to " + configDestLocation.toAbsolutePath());
|
||||
moveFilesWithoutOverwriting(configFile, configDestLocation, ".new");
|
||||
debug("Installed " + name + " into " + configDestLocation.getAbsolutePath());
|
||||
debug("Installed " + name + " into " + configDestLocation.toAbsolutePath());
|
||||
potentialSitePlugin = false;
|
||||
}
|
||||
|
||||
// try and identify the plugin type, see if it has no .class or .jar files in it
|
||||
// so its probably a _site, and it it does not have a _site in it, move everything to _site
|
||||
if (!new File(extractLocation, "_site").exists()) {
|
||||
if (!Files.exists(extractLocation.resolve("_site"))) {
|
||||
if (potentialSitePlugin && !FileSystemUtils.hasExtensions(extractLocation, ".class", ".jar")) {
|
||||
log("Identified as a _site plugin, moving to _site structure ...");
|
||||
File site = new File(extractLocation, "_site");
|
||||
File tmpLocation = new File(environment.pluginsFile(), extractLocation.getName() + ".tmp");
|
||||
if (!extractLocation.renameTo(tmpLocation)) {
|
||||
throw new IOException("failed to rename in order to copy to _site (rename to " + tmpLocation.getAbsolutePath() + "");
|
||||
}
|
||||
FileSystemUtils.mkdirs(extractLocation);
|
||||
if (!tmpLocation.renameTo(site)) {
|
||||
throw new IOException("failed to rename in order to copy to _site (rename to " + site.getAbsolutePath() + "");
|
||||
}
|
||||
debug("Installed " + name + " into " + site.getAbsolutePath());
|
||||
Path site = extractLocation.resolve("_site");
|
||||
Path tmpLocation = environment.pluginsFile().resolve(extractLocation.getFileName() + ".tmp");
|
||||
Files.move(extractLocation, tmpLocation);
|
||||
Files.createDirectories(extractLocation);
|
||||
Files.move(tmpLocation, site);
|
||||
debug("Installed " + name + " into " + site.toAbsolutePath());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -303,11 +295,11 @@ public class PluginManager {
|
|||
boolean removed = false;
|
||||
|
||||
checkForForbiddenName(pluginHandle.name);
|
||||
File pluginToDelete = pluginHandle.extractedDir(environment);
|
||||
if (pluginToDelete.exists()) {
|
||||
debug("Removing: " + pluginToDelete.getPath());
|
||||
Path pluginToDelete = pluginHandle.extractedDir(environment);
|
||||
if (Files.exists(pluginToDelete)) {
|
||||
debug("Removing: " + pluginToDelete);
|
||||
try {
|
||||
IOUtils.rm(pluginToDelete.toPath());
|
||||
IOUtils.rm(pluginToDelete);
|
||||
} catch (IOException ex){
|
||||
throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
|
||||
pluginToDelete.toString(), ex);
|
||||
|
@ -315,21 +307,21 @@ public class PluginManager {
|
|||
removed = true;
|
||||
}
|
||||
pluginToDelete = pluginHandle.distroFile(environment);
|
||||
if (pluginToDelete.exists()) {
|
||||
debug("Removing: " + pluginToDelete.getPath());
|
||||
if (Files.exists(pluginToDelete)) {
|
||||
debug("Removing: " + pluginToDelete);
|
||||
try {
|
||||
Files.delete(pluginToDelete.toPath());
|
||||
Files.delete(pluginToDelete);
|
||||
} catch (Exception ex) {
|
||||
throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
|
||||
pluginToDelete.toString(), ex);
|
||||
}
|
||||
removed = true;
|
||||
}
|
||||
File binLocation = pluginHandle.binDir(environment);
|
||||
if (binLocation.exists()) {
|
||||
debug("Removing: " + binLocation.getPath());
|
||||
Path binLocation = pluginHandle.binDir(environment);
|
||||
if (Files.exists(binLocation)) {
|
||||
debug("Removing: " + binLocation);
|
||||
try {
|
||||
IOUtils.rm(binLocation.toPath());
|
||||
IOUtils.rm(binLocation);
|
||||
} catch (IOException ex){
|
||||
throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
|
||||
binLocation.toString(), ex);
|
||||
|
@ -350,52 +342,24 @@ public class PluginManager {
|
|||
}
|
||||
}
|
||||
|
||||
public File[] getListInstalledPlugins() {
|
||||
File[] plugins = environment.pluginsFile().listFiles();
|
||||
return plugins;
|
||||
public Path[] getListInstalledPlugins() throws IOException {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
|
||||
return Iterators.toArray(stream.iterator(), Path.class);
|
||||
}
|
||||
}
|
||||
|
||||
public void listInstalledPlugins() {
|
||||
File[] plugins = getListInstalledPlugins();
|
||||
log("Installed plugins:");
|
||||
public void listInstalledPlugins() throws IOException {
|
||||
Path[] plugins = getListInstalledPlugins();
|
||||
log("Installed plugins in " + environment.pluginsFile().toAbsolutePath() + ":");
|
||||
if (plugins == null || plugins.length == 0) {
|
||||
log(" - No plugin detected in " + environment.pluginsFile().getAbsolutePath());
|
||||
log(" - No plugin detected");
|
||||
} else {
|
||||
for (int i = 0; i < plugins.length; i++) {
|
||||
log(" - " + plugins[i].getName());
|
||||
log(" - " + plugins[i].getFileName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean topLevelDirInExcess(ZipFile zipFile) {
|
||||
//We don't rely on ZipEntry#isDirectory because it might be that there is no explicit dir
|
||||
//but the files path do contain dirs, thus they are going to be extracted on sub-folders anyway
|
||||
Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
|
||||
Set<String> topLevelDirNames = new HashSet<>();
|
||||
while (zipEntries.hasMoreElements()) {
|
||||
ZipEntry zipEntry = zipEntries.nextElement();
|
||||
String zipEntryName = zipEntry.getName().replace('\\', '/');
|
||||
|
||||
int slash = zipEntryName.indexOf('/');
|
||||
//if there isn't a slash in the entry name it means that we have a file in the top-level
|
||||
if (slash == -1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
topLevelDirNames.add(zipEntryName.substring(0, slash));
|
||||
//if we have more than one top-level folder
|
||||
if (topLevelDirNames.size() > 1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (topLevelDirNames.size() == 1) {
|
||||
return !VALID_TOP_LEVEL_PLUGIN_DIRS.contains(topLevelDirNames.iterator().next());
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private static final int EXIT_CODE_OK = 0;
|
||||
private static final int EXIT_CODE_CMD_USAGE = 64;
|
||||
private static final int EXIT_CODE_IO_ERROR = 74;
|
||||
|
@ -404,8 +368,13 @@ public class PluginManager {
|
|||
public static void main(String[] args) {
|
||||
Tuple<Settings, Environment> initialSettings = InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true);
|
||||
|
||||
if (!initialSettings.v2().pluginsFile().exists()) {
|
||||
FileSystemUtils.mkdirs(initialSettings.v2().pluginsFile());
|
||||
try {
|
||||
if (!Files.exists(initialSettings.v2().pluginsFile())) {
|
||||
Files.createDirectories(initialSettings.v2().pluginsFile());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
displayHelp("Unable to create plugins dir: " + initialSettings.v2().pluginsFile());
|
||||
System.exit(EXIT_CODE_ERROR);
|
||||
}
|
||||
|
||||
String url = null;
|
||||
|
@ -576,34 +545,48 @@ public class PluginManager {
|
|||
}
|
||||
|
||||
private static void displayHelp(String message) {
|
||||
System.out.println("Usage:");
|
||||
System.out.println(" -u, --url [plugin location] : Set exact URL to download the plugin from");
|
||||
System.out.println(" -i, --install [plugin name] : Downloads and installs listed plugins [*]");
|
||||
System.out.println(" -t, --timeout [duration] : Timeout setting: 30s, 1m, 1h... (infinite by default)");
|
||||
System.out.println(" -r, --remove [plugin name] : Removes listed plugins");
|
||||
System.out.println(" -l, --list : List installed plugins");
|
||||
System.out.println(" -v, --verbose : Prints verbose messages");
|
||||
System.out.println(" -s, --silent : Run in silent mode");
|
||||
System.out.println(" -h, --help : Prints this help message");
|
||||
System.out.println();
|
||||
System.out.println(" [*] Plugin name could be:");
|
||||
System.out.println(" elasticsearch/plugin/version for official elasticsearch plugins (download from download.elasticsearch.org)");
|
||||
System.out.println(" groupId/artifactId/version for community plugins (download from maven central or oss sonatype)");
|
||||
System.out.println(" username/repository for site plugins (download from github master)");
|
||||
SysOut.println("Usage:");
|
||||
SysOut.println(" -u, --url [plugin location] : Set exact URL to download the plugin from");
|
||||
SysOut.println(" -i, --install [plugin name] : Downloads and installs listed plugins [*]");
|
||||
SysOut.println(" -t, --timeout [duration] : Timeout setting: 30s, 1m, 1h... (infinite by default)");
|
||||
SysOut.println(" -r, --remove [plugin name] : Removes listed plugins");
|
||||
SysOut.println(" -l, --list : List installed plugins");
|
||||
SysOut.println(" -v, --verbose : Prints verbose messages");
|
||||
SysOut.println(" -s, --silent : Run in silent mode");
|
||||
SysOut.println(" -h, --help : Prints this help message");
|
||||
SysOut.newline();
|
||||
SysOut.println(" [*] Plugin name could be:");
|
||||
SysOut.println(" elasticsearch/plugin/version for official elasticsearch plugins (download from download.elasticsearch.org)");
|
||||
SysOut.println(" groupId/artifactId/version for community plugins (download from maven central or oss sonatype)");
|
||||
SysOut.println(" username/repository for site plugins (download from github master)");
|
||||
|
||||
if (message != null) {
|
||||
System.out.println();
|
||||
System.out.println("Message:");
|
||||
System.out.println(" " + message);
|
||||
SysOut.newline();
|
||||
SysOut.println("Message:");
|
||||
SysOut.println(" " + message);
|
||||
}
|
||||
}
|
||||
|
||||
private void debug(String line) {
|
||||
if (outputMode == OutputMode.VERBOSE) System.out.println(line);
|
||||
if (outputMode == OutputMode.VERBOSE) SysOut.println(line);
|
||||
}
|
||||
|
||||
private void log(String line) {
|
||||
if (outputMode != OutputMode.SILENT) System.out.println(line);
|
||||
if (outputMode != OutputMode.SILENT) SysOut.println(line);
|
||||
}
|
||||
|
||||
static class SysOut {
|
||||
|
||||
public static void newline() {
|
||||
System.out.println();
|
||||
}
|
||||
public static void println(String msg) {
|
||||
System.out.println(msg);
|
||||
}
|
||||
|
||||
public static PrintStream getOut() {
|
||||
return System.out;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -649,20 +632,20 @@ public class PluginManager {
|
|||
}
|
||||
}
|
||||
|
||||
File distroFile(Environment env) {
|
||||
return new File(env.pluginsFile(), name + ".zip");
|
||||
Path distroFile(Environment env) {
|
||||
return env.pluginsFile().resolve(name + ".zip");
|
||||
}
|
||||
|
||||
File extractedDir(Environment env) {
|
||||
return new File(env.pluginsFile(), name);
|
||||
Path extractedDir(Environment env) {
|
||||
return env.pluginsFile().resolve(name);
|
||||
}
|
||||
|
||||
File binDir(Environment env) {
|
||||
return new File(new File(env.homeFile(), "bin"), name);
|
||||
Path binDir(Environment env) {
|
||||
return env.homeFile().resolve("bin").resolve(name);
|
||||
}
|
||||
|
||||
File configDir(Environment env) {
|
||||
return new File(env.configFile(), name);
|
||||
Path configDir(Environment env) {
|
||||
return env.configFile().resolve(name);
|
||||
}
|
||||
|
||||
static PluginHandle parse(String name) {
|
||||
|
|
|
@ -19,10 +19,13 @@
|
|||
|
||||
package org.elasticsearch.plugins;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.*;
|
||||
import com.google.common.primitives.Chars;
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchIllegalStateException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.PluginInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
|
||||
|
@ -42,6 +45,10 @@ import org.elasticsearch.env.Environment;
|
|||
import java.io.*;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
|
||||
import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
|
||||
|
@ -106,7 +113,11 @@ public class PluginsService extends AbstractComponent {
|
|||
}
|
||||
|
||||
// now, find all the ones that are in the classpath
|
||||
loadPluginsIntoClassLoader();
|
||||
try {
|
||||
loadPluginsIntoClassLoader();
|
||||
} catch (IOException ex) {
|
||||
throw new ElasticsearchIllegalStateException("Can't load plugins into classloader", ex);
|
||||
}
|
||||
if (loadClasspathPlugins) {
|
||||
tupleBuilder.addAll(loadPluginsFromClasspath(settings));
|
||||
}
|
||||
|
@ -122,11 +133,14 @@ public class PluginsService extends AbstractComponent {
|
|||
sitePlugins.add(tuple.v1().getName());
|
||||
}
|
||||
}
|
||||
|
||||
// we load site plugins
|
||||
ImmutableList<Tuple<PluginInfo, Plugin>> tuples = loadSitePlugins();
|
||||
for (Tuple<PluginInfo, Plugin> tuple : tuples) {
|
||||
sitePlugins.add(tuple.v1().getName());
|
||||
try {
|
||||
// we load site plugins
|
||||
ImmutableList<Tuple<PluginInfo, Plugin>> tuples = loadSitePlugins();
|
||||
for (Tuple<PluginInfo, Plugin> tuple : tuples) {
|
||||
sitePlugins.add(tuple.v1().getName());
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new ElasticsearchIllegalStateException("Can't load site plugins", ex);
|
||||
}
|
||||
|
||||
// Checking expected plugins
|
||||
|
@ -315,19 +329,23 @@ public class PluginsService extends AbstractComponent {
|
|||
cachedPluginsInfo.add(plugin.v1());
|
||||
}
|
||||
|
||||
// We reload site plugins (in case of some changes)
|
||||
for (Tuple<PluginInfo, Plugin> plugin : loadSitePlugins()) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("adding site plugin [{}]", plugin.v1());
|
||||
try {
|
||||
// We reload site plugins (in case of some changes)
|
||||
for (Tuple<PluginInfo, Plugin> plugin : loadSitePlugins()) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("adding site plugin [{}]", plugin.v1());
|
||||
}
|
||||
cachedPluginsInfo.add(plugin.v1());
|
||||
}
|
||||
cachedPluginsInfo.add(plugin.v1());
|
||||
} catch (IOException ex) {
|
||||
logger.warn("can load site plugins info", ex);
|
||||
}
|
||||
|
||||
return cachedPluginsInfo;
|
||||
}
|
||||
|
||||
private void loadPluginsIntoClassLoader() {
|
||||
File pluginsDirectory = environment.pluginsFile();
|
||||
private void loadPluginsIntoClassLoader() throws IOException {
|
||||
Path pluginsDirectory = environment.pluginsFile();
|
||||
if (!isAccessibleDirectory(pluginsDirectory, logger)) {
|
||||
return;
|
||||
}
|
||||
|
@ -349,41 +367,46 @@ public class PluginsService extends AbstractComponent {
|
|||
logger.debug("failed to find addURL method on classLoader [" + classLoader + "] to add methods");
|
||||
return;
|
||||
}
|
||||
|
||||
for (File plugin : pluginsDirectory.listFiles()) {
|
||||
// We check that subdirs are directories and readable
|
||||
if (!isAccessibleDirectory(plugin, logger)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
logger.trace("--- adding plugin [{}]", plugin.getAbsolutePath());
|
||||
|
||||
try {
|
||||
// add the root
|
||||
addURL.invoke(classLoader, plugin.toURI().toURL());
|
||||
// gather files to add
|
||||
List<File> libFiles = Lists.newArrayList();
|
||||
if (plugin.listFiles() != null) {
|
||||
libFiles.addAll(Arrays.asList(plugin.listFiles()));
|
||||
}
|
||||
File libLocation = new File(plugin, "lib");
|
||||
if (libLocation.exists() && libLocation.isDirectory() && libLocation.listFiles() != null) {
|
||||
libFiles.addAll(Arrays.asList(libLocation.listFiles()));
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsDirectory)) {
|
||||
for (Path plugin : stream) {
|
||||
// We check that subdirs are directories and readable
|
||||
if (!isAccessibleDirectory(plugin, logger)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// if there are jars in it, add it as well
|
||||
for (File libFile : libFiles) {
|
||||
if (!(libFile.getName().endsWith(".jar") || libFile.getName().endsWith(".zip"))) {
|
||||
continue;
|
||||
logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath());
|
||||
|
||||
try {
|
||||
// add the root
|
||||
addURL.invoke(classLoader, plugin.toUri().toURL());
|
||||
// gather files to add
|
||||
List<Path> libFiles = Lists.newArrayList();
|
||||
libFiles.addAll(Arrays.asList(files(plugin)));
|
||||
Path libLocation = plugin.resolve("lib");
|
||||
if (Files.exists(libLocation) && Files.isDirectory(libLocation)) {
|
||||
libFiles.addAll(Arrays.asList(files(libLocation)));
|
||||
}
|
||||
addURL.invoke(classLoader, libFile.toURI().toURL());
|
||||
|
||||
// if there are jars in it, add it as well
|
||||
for (Path libFile : libFiles) {
|
||||
if (!(libFile.getFileName().endsWith(".jar") || libFile.getFileName().endsWith(".zip"))) {
|
||||
continue;
|
||||
}
|
||||
addURL.invoke(classLoader, libFile.toUri().toURL());
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
logger.warn("failed to add plugin [" + plugin + "]", e);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
logger.warn("failed to add plugin [" + plugin + "]", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Path[] files(Path from) throws IOException {
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(from)) {
|
||||
return Iterators.toArray(stream.iterator(), Path.class);
|
||||
}
|
||||
}
|
||||
|
||||
private ImmutableList<Tuple<PluginInfo,Plugin>> loadPluginsFromClasspath(Settings settings) {
|
||||
ImmutableList.Builder<Tuple<PluginInfo, Plugin>> plugins = ImmutableList.builder();
|
||||
|
||||
|
@ -402,7 +425,7 @@ public class PluginsService extends AbstractComponent {
|
|||
Plugin plugin = loadPlugin(pluginClassName, settings);
|
||||
|
||||
// Is it a site plugin as well? Does it have also an embedded _site structure
|
||||
File siteFile = new File(new File(environment.pluginsFile(), plugin.name()), "_site");
|
||||
Path siteFile = environment.pluginsFile().resolve(plugin.name()).resolve("_site");
|
||||
boolean isSite = isAccessibleDirectory(siteFile, logger);
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("found a jvm plugin [{}], [{}]{}",
|
||||
|
@ -425,7 +448,7 @@ public class PluginsService extends AbstractComponent {
|
|||
return plugins.build();
|
||||
}
|
||||
|
||||
private ImmutableList<Tuple<PluginInfo,Plugin>> loadSitePlugins() {
|
||||
private ImmutableList<Tuple<PluginInfo,Plugin>> loadSitePlugins() throws IOException {
|
||||
ImmutableList.Builder<Tuple<PluginInfo, Plugin>> sitePlugins = ImmutableList.builder();
|
||||
List<String> loadedJvmPlugins = new ArrayList<>();
|
||||
|
||||
|
@ -437,49 +460,46 @@ public class PluginsService extends AbstractComponent {
|
|||
}
|
||||
|
||||
// Let's try to find all _site plugins we did not already found
|
||||
File pluginsFile = environment.pluginsFile();
|
||||
Path pluginsFile = environment.pluginsFile();
|
||||
|
||||
if (!pluginsFile.exists() || !pluginsFile.isDirectory()) {
|
||||
if (!Files.exists(pluginsFile) || !Files.isDirectory(pluginsFile)) {
|
||||
return sitePlugins.build();
|
||||
}
|
||||
|
||||
for (File pluginFile : pluginsFile.listFiles()) {
|
||||
if (!loadedJvmPlugins.contains(pluginFile.getName())) {
|
||||
File sitePluginDir = new File(pluginFile, "_site");
|
||||
if (isAccessibleDirectory(sitePluginDir, logger)) {
|
||||
// We have a _site plugin. Let's try to get more information on it
|
||||
String name = pluginFile.getName();
|
||||
String version = PluginInfo.VERSION_NOT_AVAILABLE;
|
||||
String description = PluginInfo.DESCRIPTION_NOT_AVAILABLE;
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsFile)) {
|
||||
for (Path pluginFile : stream) {
|
||||
if (!loadedJvmPlugins.contains(pluginFile.getFileName().toString())) {
|
||||
Path sitePluginDir = pluginFile.resolve("_site");
|
||||
if (isAccessibleDirectory(sitePluginDir, logger)) {
|
||||
// We have a _site plugin. Let's try to get more information on it
|
||||
String name = pluginFile.getFileName().toString();
|
||||
String version = PluginInfo.VERSION_NOT_AVAILABLE;
|
||||
String description = PluginInfo.DESCRIPTION_NOT_AVAILABLE;
|
||||
|
||||
// We check if es-plugin.properties exists in plugin/_site dir
|
||||
File pluginPropFile = new File(sitePluginDir, esPluginPropertiesFile);
|
||||
if (pluginPropFile.exists()) {
|
||||
// We check if es-plugin.properties exists in plugin/_site dir
|
||||
final Path pluginPropFile = sitePluginDir.resolve(esPluginPropertiesFile);
|
||||
if (Files.exists(pluginPropFile)) {
|
||||
|
||||
Properties pluginProps = new Properties();
|
||||
InputStream is = null;
|
||||
try {
|
||||
is = new FileInputStream(pluginPropFile.getAbsolutePath());
|
||||
pluginProps.load(is);
|
||||
description = pluginProps.getProperty("description", PluginInfo.DESCRIPTION_NOT_AVAILABLE);
|
||||
version = pluginProps.getProperty("version", PluginInfo.VERSION_NOT_AVAILABLE);
|
||||
} catch (Exception e) {
|
||||
// Can not load properties for this site plugin. Ignoring.
|
||||
logger.debug("can not load {} file.", e, esPluginPropertiesFile);
|
||||
} finally {
|
||||
IOUtils.closeWhileHandlingException(is);
|
||||
final Properties pluginProps = new Properties();
|
||||
try (final BufferedReader reader = Files.newBufferedReader(pluginPropFile, Charsets.UTF_8)) {
|
||||
pluginProps.load(reader);
|
||||
description = pluginProps.getProperty("description", PluginInfo.DESCRIPTION_NOT_AVAILABLE);
|
||||
version = pluginProps.getProperty("version", PluginInfo.VERSION_NOT_AVAILABLE);
|
||||
} catch (Exception e) {
|
||||
// Can not load properties for this site plugin. Ignoring.
|
||||
logger.debug("can not load {} file.", e, esPluginPropertiesFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("found a site plugin name [{}], version [{}], description [{}]",
|
||||
name, version, description);
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("found a site plugin name [{}], version [{}], description [{}]",
|
||||
name, version, description);
|
||||
}
|
||||
sitePlugins.add(new Tuple<PluginInfo, Plugin>(new PluginInfo(name, description, true, false, version), null));
|
||||
}
|
||||
sitePlugins.add(new Tuple<PluginInfo, Plugin>(new PluginInfo(name, description, true, false, version), null));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sitePlugins.build();
|
||||
}
|
||||
|
||||
|
@ -489,13 +509,13 @@ public class PluginsService extends AbstractComponent {
|
|||
*/
|
||||
private boolean hasSite(String name) {
|
||||
// Let's try to find all _site plugins we did not already found
|
||||
File pluginsFile = environment.pluginsFile();
|
||||
Path pluginsFile = environment.pluginsFile();
|
||||
|
||||
if (!pluginsFile.exists() || !pluginsFile.isDirectory()) {
|
||||
if (!Files.exists(pluginsFile) || !Files.isDirectory(pluginsFile)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
File sitePluginDir = new File(pluginsFile, name + "/_site");
|
||||
Path sitePluginDir = pluginsFile.resolve(name).resolve("_site");
|
||||
return isAccessibleDirectory(sitePluginDir, logger);
|
||||
}
|
||||
|
||||
|
|
|
@ -27,6 +27,8 @@ import org.elasticsearch.common.io.stream.Streamable;
|
|||
import org.elasticsearch.snapshots.Snapshot;
|
||||
import org.elasticsearch.snapshots.SnapshotShardFailure;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Snapshot repository interface.
|
||||
* <p/>
|
||||
|
@ -62,7 +64,7 @@ public interface Repository extends LifecycleComponent<Repository> {
|
|||
* @param indices list of indices
|
||||
* @return information about snapshot
|
||||
*/
|
||||
MetaData readSnapshotMetaData(SnapshotId snapshotId, ImmutableList<String> indices);
|
||||
MetaData readSnapshotMetaData(SnapshotId snapshotId, ImmutableList<String> indices) throws IOException;
|
||||
|
||||
/**
|
||||
* Returns the list of snapshots currently stored in the repository
|
||||
|
|
|
@ -26,6 +26,7 @@ import com.google.common.collect.Maps;
|
|||
import com.google.common.io.ByteStreams;
|
||||
import org.apache.lucene.store.RateLimiter;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchIllegalStateException;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
|
@ -159,6 +160,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
|
|||
*/
|
||||
@Override
|
||||
protected void doStart() throws ElasticsearchException {
|
||||
|
||||
this.snapshotsBlobContainer = blobStore().blobContainer(basePath());
|
||||
indexShardRepository.initialize(blobStore(), basePath(), chunkSize(), snapshotRateLimiter, restoreRateLimiter, this);
|
||||
}
|
||||
|
@ -270,7 +272,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
|
|||
MetaData metaData = null;
|
||||
try {
|
||||
metaData = readSnapshotMetaData(snapshotId, snapshot.indices(), true);
|
||||
} catch (SnapshotException ex) {
|
||||
} catch (IOException | SnapshotException ex) {
|
||||
logger.warn("cannot read metadata for snapshot [{}]", ex, snapshotId);
|
||||
}
|
||||
try {
|
||||
|
@ -388,7 +390,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
|
|||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public MetaData readSnapshotMetaData(SnapshotId snapshotId, ImmutableList<String> indices) {
|
||||
public MetaData readSnapshotMetaData(SnapshotId snapshotId, ImmutableList<String> indices) throws IOException {
|
||||
return readSnapshotMetaData(snapshotId, indices, false);
|
||||
}
|
||||
|
||||
|
@ -429,7 +431,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
|
|||
}
|
||||
}
|
||||
|
||||
private MetaData readSnapshotMetaData(SnapshotId snapshotId, ImmutableList<String> indices, boolean ignoreIndexErrors) {
|
||||
private MetaData readSnapshotMetaData(SnapshotId snapshotId, ImmutableList<String> indices, boolean ignoreIndexErrors) throws IOException {
|
||||
MetaData metaData;
|
||||
try (InputStream blob = snapshotsBlobContainer.openInput(metaDataBlobName(snapshotId))) {
|
||||
byte[] data = ByteStreams.toByteArray(blob);
|
||||
|
|
|
@ -33,6 +33,8 @@ import org.elasticsearch.threadpool.ThreadPool;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
|
||||
/**
|
||||
* Shared file system implementation of the BlobStoreRepository
|
||||
|
@ -68,13 +70,13 @@ public class FsRepository extends BlobStoreRepository {
|
|||
@Inject
|
||||
public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
|
||||
super(name.getName(), repositorySettings, indexShardRepository);
|
||||
File locationFile;
|
||||
Path locationFile;
|
||||
String location = repositorySettings.settings().get("location", componentSettings.get("location"));
|
||||
if (location == null) {
|
||||
logger.warn("using local fs location for gateway, should be changed to be a shared location across nodes");
|
||||
throw new RepositoryException(name.name(), "missing location");
|
||||
} else {
|
||||
locationFile = new File(location);
|
||||
locationFile = Paths.get(location);
|
||||
}
|
||||
blobStore = new FsBlobStore(componentSettings, locationFile);
|
||||
this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", componentSettings.getAsBytesSize("chunk_size", null));
|
||||
|
|
|
@ -66,6 +66,9 @@ import java.io.File;
|
|||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.file.FileSystems;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -92,7 +95,7 @@ public class ScriptService extends AbstractComponent {
|
|||
private final ConcurrentMap<String, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();
|
||||
|
||||
private final Cache<CacheKey, CompiledScript> cache;
|
||||
private final File scriptsDirectory;
|
||||
private final Path scriptsDirectory;
|
||||
|
||||
private final DynamicScriptDisabling dynamicScriptingDisabled;
|
||||
|
||||
|
@ -205,7 +208,7 @@ public class ScriptService extends AbstractComponent {
|
|||
|
||||
@Inject
|
||||
public ScriptService(Settings settings, Environment env, Set<ScriptEngineService> scriptEngines,
|
||||
ResourceWatcherService resourceWatcherService) {
|
||||
ResourceWatcherService resourceWatcherService) throws IOException {
|
||||
super(settings);
|
||||
|
||||
int cacheMaxSize = settings.getAsInt(SCRIPT_CACHE_SIZE_SETTING, 100);
|
||||
|
@ -234,7 +237,7 @@ public class ScriptService extends AbstractComponent {
|
|||
this.scriptEngines = builder.build();
|
||||
|
||||
// add file watcher for static scripts
|
||||
scriptsDirectory = new File(env.configFile(), "scripts");
|
||||
scriptsDirectory = env.configFile().resolve("scripts");
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("Using scripts directory [{}] ", scriptsDirectory);
|
||||
}
|
||||
|
@ -512,12 +515,12 @@ public class ScriptService extends AbstractComponent {
|
|||
|
||||
private class ScriptChangesListener extends FileChangesListener {
|
||||
|
||||
private Tuple<String, String> scriptNameExt(File file) {
|
||||
String scriptPath = scriptsDirectory.toURI().relativize(file.toURI()).getPath();
|
||||
int extIndex = scriptPath.lastIndexOf('.');
|
||||
private Tuple<String, String> scriptNameExt(Path file) {
|
||||
Path scriptPath = scriptsDirectory.relativize(file);
|
||||
int extIndex = scriptPath.toString().lastIndexOf('.');
|
||||
if (extIndex != -1) {
|
||||
String ext = scriptPath.substring(extIndex + 1);
|
||||
String scriptName = scriptPath.substring(0, extIndex).replace(File.separatorChar, '_');
|
||||
String ext = scriptPath.toString().substring(extIndex + 1);
|
||||
String scriptName = scriptPath.toString().substring(0, extIndex).replace(scriptPath.getFileSystem().getSeparator(), "_");
|
||||
return new Tuple<>(scriptName, ext);
|
||||
} else {
|
||||
return null;
|
||||
|
@ -525,7 +528,7 @@ public class ScriptService extends AbstractComponent {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFileInit(File file) {
|
||||
public void onFileInit(Path file) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("Loading script file : [{}]", file);
|
||||
}
|
||||
|
@ -537,8 +540,8 @@ public class ScriptService extends AbstractComponent {
|
|||
if (s.equals(scriptNameExt.v2())) {
|
||||
found = true;
|
||||
try {
|
||||
logger.info("compiling script file [{}]", file.getAbsolutePath());
|
||||
String script = Streams.copyToString(new InputStreamReader(new FileInputStream(file), Charsets.UTF_8));
|
||||
logger.info("compiling script file [{}]", file.toAbsolutePath());
|
||||
String script = Streams.copyToString(new InputStreamReader(Files.newInputStream(file), Charsets.UTF_8));
|
||||
staticCache.put(scriptNameExt.v1(), new CompiledScript(engineService.types()[0], engineService.compile(script)));
|
||||
} catch (Throwable e) {
|
||||
logger.warn("failed to load/compile script [{}]", e, scriptNameExt.v1());
|
||||
|
@ -557,21 +560,21 @@ public class ScriptService extends AbstractComponent {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFileCreated(File file) {
|
||||
public void onFileCreated(Path file) {
|
||||
onFileInit(file);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileDeleted(File file) {
|
||||
public void onFileDeleted(Path file) {
|
||||
Tuple<String, String> scriptNameExt = scriptNameExt(file);
|
||||
if (scriptNameExt != null) {
|
||||
logger.info("removing script file [{}]", file.getAbsolutePath());
|
||||
logger.info("removing script file [{}]", file.toAbsolutePath());
|
||||
staticCache.remove(scriptNameExt.v1());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileChanged(File file) {
|
||||
public void onFileChanged(Path file) {
|
||||
onFileInit(file);
|
||||
}
|
||||
|
||||
|
|
|
@ -447,7 +447,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic
|
|||
* @param snapshotId snapshot id
|
||||
* @return map of shard id to snapshot status
|
||||
*/
|
||||
public ImmutableMap<ShardId, IndexShardSnapshotStatus> snapshotShards(SnapshotId snapshotId) {
|
||||
public ImmutableMap<ShardId, IndexShardSnapshotStatus> snapshotShards(SnapshotId snapshotId) throws IOException {
|
||||
ImmutableMap.Builder<ShardId, IndexShardSnapshotStatus> shardStatusBuilder = ImmutableMap.builder();
|
||||
Repository repository = repositoriesService.repository(snapshotId.getRepository());
|
||||
IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(snapshotId.getRepository());
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.watcher;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
|
||||
|
@ -30,7 +31,7 @@ public abstract class AbstractResourceWatcher<Listener> implements ResourceWatch
|
|||
private boolean initialized = false;
|
||||
|
||||
@Override
|
||||
public void init() {
|
||||
public void init() throws IOException {
|
||||
if (!initialized) {
|
||||
doInit();
|
||||
initialized = true;
|
||||
|
@ -38,7 +39,7 @@ public abstract class AbstractResourceWatcher<Listener> implements ResourceWatch
|
|||
}
|
||||
|
||||
@Override
|
||||
public void checkAndNotify() {
|
||||
public void checkAndNotify() throws IOException {
|
||||
init();
|
||||
doCheckAndNotify();
|
||||
}
|
||||
|
@ -67,13 +68,13 @@ public abstract class AbstractResourceWatcher<Listener> implements ResourceWatch
|
|||
/**
|
||||
* Will be called once on initialization
|
||||
*/
|
||||
protected abstract void doInit();
|
||||
protected abstract void doInit() throws IOException;
|
||||
|
||||
/**
|
||||
* Will be called periodically
|
||||
* <p/>
|
||||
* Implementing watcher should check resource and notify all {@link #listeners()}.
|
||||
*/
|
||||
protected abstract void doCheckAndNotify();
|
||||
protected abstract void doCheckAndNotify() throws IOException;
|
||||
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.watcher;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
|
||||
/**
|
||||
* Callback interface that file changes File Watcher is using to notify listeners about changes.
|
||||
|
@ -27,49 +27,49 @@ public class FileChangesListener {
|
|||
/**
|
||||
* Called for every file found in the watched directory during initialization
|
||||
*/
|
||||
public void onFileInit(File file) {
|
||||
public void onFileInit(Path file) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called for every subdirectory found in the watched directory during initialization
|
||||
*/
|
||||
public void onDirectoryInit(File file) {
|
||||
public void onDirectoryInit(Path file) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called for every new file found in the watched directory
|
||||
*/
|
||||
public void onFileCreated(File file) {
|
||||
public void onFileCreated(Path file) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called for every file that disappeared in the watched directory
|
||||
*/
|
||||
public void onFileDeleted(File file) {
|
||||
public void onFileDeleted(Path file) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called for every file that was changed in the watched directory
|
||||
*/
|
||||
public void onFileChanged(File file) {
|
||||
public void onFileChanged(Path file) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called for every new subdirectory found in the watched directory
|
||||
*/
|
||||
public void onDirectoryCreated(File file) {
|
||||
public void onDirectoryCreated(Path file) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called for every file that disappeared in the watched directory
|
||||
*/
|
||||
public void onDirectoryDeleted(File file) {
|
||||
public void onDirectoryDeleted(Path file) {
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,10 +18,16 @@
|
|||
*/
|
||||
package org.elasticsearch.watcher;
|
||||
|
||||
import com.google.common.collect.Iterators;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
|
@ -38,50 +44,50 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {
|
|||
/**
|
||||
* Creates new file watcher on the given directory
|
||||
*/
|
||||
public FileWatcher(File file) {
|
||||
public FileWatcher(Path file) {
|
||||
rootFileObserver = new FileObserver(file);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doInit() {
|
||||
protected void doInit() throws IOException {
|
||||
rootFileObserver.init(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doCheckAndNotify() {
|
||||
protected void doCheckAndNotify() throws IOException {
|
||||
rootFileObserver.checkAndNotify();
|
||||
}
|
||||
|
||||
private static FileObserver[] EMPTY_DIRECTORY = new FileObserver[0];
|
||||
|
||||
private class FileObserver {
|
||||
private File file;
|
||||
private Path file;
|
||||
private boolean exists;
|
||||
private long length;
|
||||
private long lastModified;
|
||||
private boolean isDirectory;
|
||||
private FileObserver[] children;
|
||||
|
||||
public FileObserver(File file) {
|
||||
public FileObserver(Path file) {
|
||||
this.file = file;
|
||||
}
|
||||
|
||||
public void checkAndNotify() {
|
||||
public void checkAndNotify() throws IOException {
|
||||
boolean prevExists = exists;
|
||||
boolean prevIsDirectory = isDirectory;
|
||||
long prevLength = length;
|
||||
long prevLastModified = lastModified;
|
||||
|
||||
exists = file.exists();
|
||||
|
||||
exists = Files.exists(file);
|
||||
// TODO we might use the new NIO2 API to get real notification?
|
||||
if (exists) {
|
||||
isDirectory = file.isDirectory();
|
||||
isDirectory = Files.isDirectory(file);
|
||||
if (isDirectory) {
|
||||
length = 0;
|
||||
lastModified = 0;
|
||||
} else {
|
||||
length = file.length();
|
||||
lastModified = file.lastModified();
|
||||
length = Files.size(file);
|
||||
lastModified = Files.getLastModifiedTime(file).toMillis();
|
||||
}
|
||||
} else {
|
||||
isDirectory = false;
|
||||
|
@ -134,36 +140,34 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {
|
|||
|
||||
}
|
||||
|
||||
private void init(boolean initial) {
|
||||
exists = file.exists();
|
||||
private void init(boolean initial) throws IOException {
|
||||
exists = Files.exists(file);
|
||||
if (exists) {
|
||||
isDirectory = file.isDirectory();
|
||||
isDirectory =Files.isDirectory(file);
|
||||
if (isDirectory) {
|
||||
onDirectoryCreated(initial);
|
||||
} else {
|
||||
length = file.length();
|
||||
lastModified = file.lastModified();
|
||||
length = Files.size(file);
|
||||
lastModified = Files.getLastModifiedTime(file).toMillis();
|
||||
onFileCreated(initial);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private FileObserver createChild(File file, boolean initial) {
|
||||
private FileObserver createChild(Path file, boolean initial) throws IOException {
|
||||
FileObserver child = new FileObserver(file);
|
||||
child.init(initial);
|
||||
return child;
|
||||
}
|
||||
|
||||
private File[] listFiles() {
|
||||
File[] files = file.listFiles();
|
||||
if (files != null) {
|
||||
Arrays.sort(files);
|
||||
}
|
||||
private Path[] listFiles() throws IOException {
|
||||
final Path[] files = FileSystemUtils.files(file);
|
||||
Arrays.sort(files);
|
||||
return files;
|
||||
}
|
||||
|
||||
private FileObserver[] listChildren(boolean initial) {
|
||||
File[] files = listFiles();
|
||||
private FileObserver[] listChildren(boolean initial) throws IOException {
|
||||
Path[] files = listFiles();
|
||||
if (files != null && files.length > 0) {
|
||||
FileObserver[] children = new FileObserver[files.length];
|
||||
for (int i = 0; i < files.length; i++) {
|
||||
|
@ -175,8 +179,8 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {
|
|||
}
|
||||
}
|
||||
|
||||
private void updateChildren() {
|
||||
File[] files = listFiles();
|
||||
private void updateChildren() throws IOException {
|
||||
Path[] files = listFiles();
|
||||
if (files != null && files.length > 0) {
|
||||
FileObserver[] newChildren = new FileObserver[files.length];
|
||||
int child = 0;
|
||||
|
@ -266,7 +270,7 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {
|
|||
}
|
||||
}
|
||||
|
||||
private void onDirectoryCreated(boolean initial) {
|
||||
private void onDirectoryCreated(boolean initial) throws IOException {
|
||||
for (FileChangesListener listener : listeners()) {
|
||||
try {
|
||||
if (initial) {
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.watcher;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Abstract resource watcher interface.
|
||||
* <p/>
|
||||
|
@ -28,10 +30,10 @@ public interface ResourceWatcher {
|
|||
/**
|
||||
* Called once when the resource watcher is added to {@link ResourceWatcherService}
|
||||
*/
|
||||
void init();
|
||||
void init() throws IOException;
|
||||
|
||||
/**
|
||||
* Called periodically by {@link ResourceWatcherService} so resource watcher can check the resource
|
||||
*/
|
||||
void checkAndNotify();
|
||||
void checkAndNotify() throws IOException;
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.util.concurrent.FutureUtils;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CopyOnWriteArraySet;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
|
@ -117,14 +118,14 @@ public class ResourceWatcherService extends AbstractLifecycleComponent<ResourceW
|
|||
/**
|
||||
* Register new resource watcher that will be checked in default {@link Frequency#MEDIUM MEDIUM} frequency
|
||||
*/
|
||||
public <W extends ResourceWatcher> WatcherHandle<W> add(W watcher) {
|
||||
public <W extends ResourceWatcher> WatcherHandle<W> add(W watcher) throws IOException {
|
||||
return add(watcher, Frequency.MEDIUM);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register new resource watcher that will be checked in the given frequency
|
||||
*/
|
||||
public <W extends ResourceWatcher> WatcherHandle<W> add(W watcher, Frequency frequency) {
|
||||
public <W extends ResourceWatcher> WatcherHandle<W> add(W watcher, Frequency frequency) throws IOException {
|
||||
watcher.init();
|
||||
switch (frequency) {
|
||||
case LOW:
|
||||
|
@ -158,7 +159,7 @@ public class ResourceWatcherService extends AbstractLifecycleComponent<ResourceW
|
|||
}
|
||||
}
|
||||
|
||||
static class ResourceMonitor implements Runnable {
|
||||
class ResourceMonitor implements Runnable {
|
||||
|
||||
final TimeValue interval;
|
||||
final Frequency frequency;
|
||||
|
@ -178,7 +179,11 @@ public class ResourceWatcherService extends AbstractLifecycleComponent<ResourceW
|
|||
@Override
|
||||
public synchronized void run() {
|
||||
for(ResourceWatcher watcher : watchers) {
|
||||
watcher.checkAndNotify();
|
||||
try {
|
||||
watcher.checkAndNotify();
|
||||
} catch (IOException e) {
|
||||
logger.trace("failed to check resource watcher", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,8 +45,12 @@ import org.junit.runner.RunWith;
|
|||
|
||||
import java.io.Closeable;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.lang.annotation.*;
|
||||
import java.lang.reflect.Method;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.logging.Logger;
|
||||
|
@ -180,7 +184,7 @@ public abstract class AbstractRandomizedTest extends RandomizedTest {
|
|||
/**
|
||||
* Create indexes in this directory, optimally use a subdir, named after the test
|
||||
*/
|
||||
public static final File TEMP_DIR;
|
||||
public static final Path TEMP_DIR;
|
||||
|
||||
public static final int TESTS_PROCESSORS;
|
||||
|
||||
|
@ -188,8 +192,12 @@ public abstract class AbstractRandomizedTest extends RandomizedTest {
|
|||
String s = System.getProperty("tempDir", System.getProperty("java.io.tmpdir"));
|
||||
if (s == null)
|
||||
throw new RuntimeException("To run tests, you need to define system property 'tempDir' or 'java.io.tmpdir'.");
|
||||
TEMP_DIR = new File(s);
|
||||
TEMP_DIR.mkdirs();
|
||||
TEMP_DIR = Paths.get(s);
|
||||
try {
|
||||
Files.createDirectories(TEMP_DIR);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
String processors = System.getProperty(SYSPROP_PROCESSORS, ""); // mvn sets "" as default
|
||||
if (processors == null || processors.isEmpty()) {
|
||||
|
|
|
@ -43,7 +43,7 @@ import org.elasticsearch.test.ElasticsearchIntegrationTest;
|
|||
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
|
@ -191,13 +191,13 @@ public class ClusterRerouteTests extends ElasticsearchIntegrationTest {
|
|||
client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet();
|
||||
|
||||
logger.info("--> closing all nodes");
|
||||
File[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).shardLocations(new ShardId("test", 0));
|
||||
Path[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).shardPaths(new ShardId("test", 0));
|
||||
assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // make sure the data is there!
|
||||
internalCluster().closeNonSharedNodes(false); // don't wipe data directories the index needs to be there!
|
||||
|
||||
logger.info("--> deleting the shard data [{}] ", Arrays.toString(shardLocation));
|
||||
assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // verify again after cluster was shut down
|
||||
IOUtils.rm(FileSystemUtils.toPaths(shardLocation));
|
||||
IOUtils.rm(shardLocation);
|
||||
|
||||
logger.info("--> starting nodes back, will not allocate the shard since it has no data, but the index will be there");
|
||||
node_1 = internalCluster().startNode(commonSettings);
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.elasticsearch.test.ElasticsearchTestCase;
|
|||
import org.junit.Test;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class BlobStoreTest extends ElasticsearchTestCase {
|
||||
|
@ -60,8 +61,8 @@ public class BlobStoreTest extends ElasticsearchTestCase {
|
|||
store.close();
|
||||
}
|
||||
|
||||
protected BlobStore newBlobStore() {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
protected BlobStore newBlobStore() throws IOException {
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
Settings settings = randomBoolean() ? ImmutableSettings.EMPTY : ImmutableSettings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
|
||||
FsBlobStore store = new FsBlobStore(settings, tempDir);
|
||||
return store;
|
||||
|
|
|
@ -40,6 +40,7 @@ import java.io.EOFException;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.nio.file.Files;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class PagedBytesReferenceTest extends ElasticsearchTestCase {
|
||||
|
@ -263,7 +264,7 @@ public class PagedBytesReferenceTest extends ElasticsearchTestCase {
|
|||
try (RandomAccessFile file = new RandomAccessFile(tFile, "rw")) {
|
||||
pbr.writeTo(file.getChannel());
|
||||
assertEquals(pbr.length(), file.length());
|
||||
assertArrayEquals(pbr.toBytes(), Streams.copyToByteArray(tFile));
|
||||
assertArrayEquals(pbr.toBytes(), Files.readAllBytes(tFile.toPath()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -290,7 +291,7 @@ public class PagedBytesReferenceTest extends ElasticsearchTestCase {
|
|||
try (RandomAccessFile file = new RandomAccessFile(tFile, "rw")) {
|
||||
slice.writeTo(file.getChannel());
|
||||
assertEquals(slice.length(), file.length());
|
||||
assertArrayEquals(slice.toBytes(), Streams.copyToByteArray(tFile));
|
||||
assertArrayEquals(slice.toBytes(), Files.readAllBytes(tFile.toPath()));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -27,6 +27,9 @@ import org.junit.Test;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Properties;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists;
|
||||
|
@ -39,16 +42,16 @@ import static org.hamcrest.CoreMatchers.is;
|
|||
*/
|
||||
public class FileSystemUtilsTests extends ElasticsearchTestCase {
|
||||
|
||||
File src;
|
||||
File dst;
|
||||
Path src;
|
||||
Path dst;
|
||||
|
||||
@Before
|
||||
public void copySourceFilesToTarget() throws IOException {
|
||||
File globalTempDir = globalTempDir();
|
||||
src = new File(globalTempDir, "iocopyappend-src");
|
||||
dst = new File(globalTempDir, "iocopyappend-dst");
|
||||
FileSystemUtils.mkdirs(src);
|
||||
FileSystemUtils.mkdirs(dst);
|
||||
Path globalTempDir = globalTempDir().toPath();
|
||||
src = globalTempDir.resolve("iocopyappend-src");
|
||||
dst = globalTempDir.resolve("iocopyappend-dst");
|
||||
Files.createDirectories(src);
|
||||
Files.createDirectories(dst);
|
||||
|
||||
// We first copy sources test files from src/test/resources
|
||||
// Because after when the test runs, src files are moved to their destination
|
||||
|
@ -57,17 +60,17 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
|
|||
props.load(is);
|
||||
}
|
||||
|
||||
FileSystemUtils.copyDirectoryRecursively(new File(props.getProperty("copyappend.root.dir")), src);
|
||||
FileSystemUtils.copyDirectoryRecursively(Paths.get(props.getProperty("copyappend.root.dir")), src);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMoveOverExistingFileAndAppend() throws IOException {
|
||||
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(new File(src, "v1"), dst, ".new");
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v1"), dst, ".new");
|
||||
assertFileContent(dst, "file1.txt", "version1");
|
||||
assertFileContent(dst, "dir/file2.txt", "version1");
|
||||
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(new File(src, "v2"), dst, ".new");
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v2"), dst, ".new");
|
||||
assertFileContent(dst, "file1.txt", "version1");
|
||||
assertFileContent(dst, "dir/file2.txt", "version1");
|
||||
assertFileContent(dst, "file1.txt.new", "version2");
|
||||
|
@ -75,7 +78,7 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
|
|||
assertFileContent(dst, "file3.txt", "version1");
|
||||
assertFileContent(dst, "dir/subdir/file4.txt", "version1");
|
||||
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(new File(src, "v3"), dst, ".new");
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v3"), dst, ".new");
|
||||
assertFileContent(dst, "file1.txt", "version1");
|
||||
assertFileContent(dst, "dir/file2.txt", "version1");
|
||||
assertFileContent(dst, "file1.txt.new", "version3");
|
||||
|
@ -89,13 +92,13 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testMoveOverExistingFileAndIgnore() throws IOException {
|
||||
File dest = globalTempDir();
|
||||
Path dest = globalTempDir().toPath();
|
||||
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(new File(src, "v1"), dest, null);
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v1"), dest, null);
|
||||
assertFileContent(dest, "file1.txt", "version1");
|
||||
assertFileContent(dest, "dir/file2.txt", "version1");
|
||||
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(new File(src, "v2"), dest, null);
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v2"), dest, null);
|
||||
assertFileContent(dest, "file1.txt", "version1");
|
||||
assertFileContent(dest, "dir/file2.txt", "version1");
|
||||
assertFileContent(dest, "file1.txt.new", null);
|
||||
|
@ -103,7 +106,7 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
|
|||
assertFileContent(dest, "file3.txt", "version1");
|
||||
assertFileContent(dest, "dir/subdir/file4.txt", "version1");
|
||||
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(new File(src, "v3"), dest, null);
|
||||
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v3"), dest, null);
|
||||
assertFileContent(dest, "file1.txt", "version1");
|
||||
assertFileContent(dest, "dir/file2.txt", "version1");
|
||||
assertFileContent(dest, "file1.txt.new", null);
|
||||
|
@ -122,16 +125,28 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
|
|||
* @param filename relative path from root dir to file
|
||||
* @param expected expected content (if null, we don't expect any file)
|
||||
*/
|
||||
public static void assertFileContent(File dir, String filename, String expected) throws IOException {
|
||||
Assert.assertThat(dir.exists(), is(true));
|
||||
File file = dir.toPath().resolve(filename).toFile();
|
||||
public static void assertFileContent(Path dir, String filename, String expected) throws IOException {
|
||||
Assert.assertThat(Files.exists(dir), is(true));
|
||||
Path file = dir.resolve(filename);
|
||||
if (expected == null) {
|
||||
Assert.assertThat("file [" + file + "] should not exist.", file.exists(), is(false));
|
||||
Assert.assertThat("file [" + file + "] should not exist.", Files.exists(file), is(false));
|
||||
} else {
|
||||
assertFileExists(file);
|
||||
String fileContent = com.google.common.io.Files.toString(file, UTF8);
|
||||
String fileContent = new String(Files.readAllBytes(file), UTF8);
|
||||
// trim the string content to prevent different handling on windows vs. unix and CR chars...
|
||||
Assert.assertThat(fileContent.trim(), equalTo(expected.trim()));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAppend() {
|
||||
assertEquals(FileSystemUtils.append(Paths.get("/foo/bar"), Paths.get("/hello/world/this_is/awesome"), 0),
|
||||
Paths.get("/foo/bar/hello/world/this_is/awesome"));
|
||||
|
||||
assertEquals(FileSystemUtils.append(Paths.get("/foo/bar"), Paths.get("/hello/world/this_is/awesome"), 2),
|
||||
Paths.get("/foo/bar/this_is/awesome"));
|
||||
|
||||
assertEquals(FileSystemUtils.append(Paths.get("/foo/bar"), Paths.get("/hello/world/this_is/awesome"), 1),
|
||||
Paths.get("/foo/bar/world/this_is/awesome"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -56,14 +56,6 @@ public class StreamsTests extends ElasticsearchTestCase {
|
|||
assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCopyToByteArray() throws IOException {
|
||||
byte[] content = "content".getBytes(Charsets.UTF_8);
|
||||
ByteArrayInputStream in = new ByteArrayInputStream(content);
|
||||
byte[] result = copyToByteArray(in);
|
||||
assertThat(Arrays.equals(content, result), equalTo(true));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCopyFromReader() throws IOException {
|
||||
String content = "content";
|
||||
|
|
|
@ -39,7 +39,8 @@ import org.elasticsearch.test.InternalTestCluster;
|
|||
import org.elasticsearch.test.junit.annotations.TestLogging;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
|
||||
|
@ -67,8 +68,8 @@ public class IndicesStoreIntegrationTests extends ElasticsearchIntegrationTest {
|
|||
ensureGreen("test");
|
||||
|
||||
logger.info("--> making sure that shard and its replica are allocated on node_1 and node_2");
|
||||
assertThat(shardDirectory(node_1, "test", 0).exists(), equalTo(true));
|
||||
assertThat(shardDirectory(node_2, "test", 0).exists(), equalTo(true));
|
||||
assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true));
|
||||
assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true));
|
||||
|
||||
logger.info("--> starting node server3");
|
||||
String node_3 = internalCluster().startNode(SETTINGS);
|
||||
|
@ -82,7 +83,7 @@ public class IndicesStoreIntegrationTests extends ElasticsearchIntegrationTest {
|
|||
logger.info("--> making sure that shard is not allocated on server3");
|
||||
assertThat(waitForShardDeletion(node_3, "test", 0), equalTo(false));
|
||||
|
||||
File server2Shard = shardDirectory(node_2, "test", 0);
|
||||
Path server2Shard = shardDirectory(node_2, "test", 0);
|
||||
logger.info("--> stopping node " + node_2);
|
||||
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2));
|
||||
|
||||
|
@ -95,12 +96,12 @@ public class IndicesStoreIntegrationTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(clusterHealth.isTimedOut(), equalTo(false));
|
||||
logger.info("--> done cluster_health, status " + clusterHealth.getStatus());
|
||||
|
||||
assertThat(server2Shard.exists(), equalTo(true));
|
||||
assertThat(Files.exists(server2Shard), equalTo(true));
|
||||
|
||||
logger.info("--> making sure that shard and its replica exist on server1, server2 and server3");
|
||||
assertThat(shardDirectory(node_1, "test", 0).exists(), equalTo(true));
|
||||
assertThat(server2Shard.exists(), equalTo(true));
|
||||
assertThat(shardDirectory(node_3, "test", 0).exists(), equalTo(true));
|
||||
assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true));
|
||||
assertThat(Files.exists(server2Shard), equalTo(true));
|
||||
assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true));
|
||||
|
||||
logger.info("--> starting node node_4");
|
||||
final String node_4 = internalCluster().startNode(SETTINGS);
|
||||
|
@ -109,8 +110,8 @@ public class IndicesStoreIntegrationTests extends ElasticsearchIntegrationTest {
|
|||
ensureGreen();
|
||||
|
||||
logger.info("--> making sure that shard and its replica are allocated on server1 and server3 but not on server2");
|
||||
assertThat(shardDirectory(node_1, "test", 0).exists(), equalTo(true));
|
||||
assertThat(shardDirectory(node_3, "test", 0).exists(), equalTo(true));
|
||||
assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true));
|
||||
assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true));
|
||||
assertThat(waitForShardDeletion(node_4, "test", 0), equalTo(false));
|
||||
}
|
||||
|
||||
|
@ -163,18 +164,18 @@ public class IndicesStoreIntegrationTests extends ElasticsearchIntegrationTest {
|
|||
}
|
||||
}
|
||||
|
||||
private File shardDirectory(String server, String index, int shard) {
|
||||
private Path shardDirectory(String server, String index, int shard) {
|
||||
NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
|
||||
return env.shardLocations(new ShardId(index, shard))[0];
|
||||
return env.shardPaths(new ShardId(index, shard))[0];
|
||||
}
|
||||
|
||||
private boolean waitForShardDeletion(final String server, final String index, final int shard) throws InterruptedException {
|
||||
awaitBusy(new Predicate<Object>() {
|
||||
public boolean apply(Object o) {
|
||||
return !shardDirectory(server, index, shard).exists();
|
||||
return !Files.exists(shardDirectory(server, index, shard));
|
||||
}
|
||||
});
|
||||
return shardDirectory(server, index, shard).exists();
|
||||
return Files.exists(shardDirectory(server, index, shard));
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.junit.Test;
|
|||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
|
||||
|
@ -53,10 +54,10 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
createIndexWithStoreType("test", IndexStoreModule.Type.NIOFS, "least_used");
|
||||
String storeString = getStoreDirectory("test", 0).toString();
|
||||
logger.info(storeString);
|
||||
File[] dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(niofs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
Path[] dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(niofs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
if (dataPaths.length > 1) {
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(niofs(" + dataPaths[1].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(niofs(" + dataPaths[1].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
assertThat(storeString, endsWith(", type=MERGE, rate=20.0)])"));
|
||||
|
||||
|
@ -64,9 +65,9 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
storeString = getStoreDirectory("test", 0).toString();
|
||||
logger.info(storeString);
|
||||
dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(random[rate_limited(niofs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(random[rate_limited(niofs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
if (dataPaths.length > 1) {
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(niofs(" + dataPaths[1].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(niofs(" + dataPaths[1].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
assertThat(storeString, endsWith(", type=MERGE, rate=20.0)])"));
|
||||
|
||||
|
@ -74,9 +75,9 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
storeString = getStoreDirectory("test", 0).toString();
|
||||
logger.info(storeString);
|
||||
dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(mmapfs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(mmapfs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
if (dataPaths.length > 1) {
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(mmapfs(" + dataPaths[1].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(mmapfs(" + dataPaths[1].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
assertThat(storeString, endsWith(", type=MERGE, rate=20.0)])"));
|
||||
|
||||
|
@ -84,9 +85,9 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
storeString = getStoreDirectory("test", 0).toString();
|
||||
logger.info(storeString);
|
||||
dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(simplefs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(simplefs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
if (dataPaths.length > 1) {
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(simplefs(" + dataPaths[1].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(simplefs(" + dataPaths[1].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
assertThat(storeString, endsWith(", type=MERGE, rate=20.0)])"));
|
||||
|
||||
|
@ -94,11 +95,11 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
storeString = getStoreDirectory("test", 0).toString();
|
||||
logger.info(storeString);
|
||||
dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(default(mmapfs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("),niofs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[rate_limited(default(mmapfs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("),niofs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
|
||||
if (dataPaths.length > 1) {
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(default(mmapfs(" + dataPaths[1].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), rate_limited(default(mmapfs(" + dataPaths[1].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
assertThat(storeString, endsWith(", type=MERGE, rate=20.0)])"));
|
||||
|
||||
|
@ -106,9 +107,9 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
storeString = getStoreDirectory("test", 0).toString();
|
||||
logger.info(storeString);
|
||||
dataPaths = dataPaths();
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[niofs(" + dataPaths[0].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), startsWith("store(least_used[niofs(" + dataPaths[0].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
if (dataPaths.length > 1) {
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), niofs(" + dataPaths[1].getAbsolutePath().toLowerCase(Locale.ROOT)));
|
||||
assertThat(storeString.toLowerCase(Locale.ROOT), containsString("), niofs(" + dataPaths[1].toAbsolutePath().toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
assertThat(storeString, endsWith(")])"));
|
||||
}
|
||||
|
@ -141,11 +142,11 @@ public class SimpleDistributorTests extends ElasticsearchIntegrationTest {
|
|||
}
|
||||
|
||||
|
||||
private File[] dataPaths() {
|
||||
private Path[] dataPaths() {
|
||||
Set<String> nodes = internalCluster().nodesInclude("test");
|
||||
assertThat(nodes.isEmpty(), equalTo(false));
|
||||
NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, nodes.iterator().next());
|
||||
return env.nodeDataLocations();
|
||||
return env.nodeDataPaths();
|
||||
}
|
||||
|
||||
private Directory getStoreDirectory(String index, int shardId) {
|
||||
|
|
|
@ -47,6 +47,8 @@ import org.junit.Test;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@ -99,33 +101,33 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
Tuple<Settings, Environment> initialSettings = InternalSettingsPreparer.prepareSettings(
|
||||
ImmutableSettings.settingsBuilder().build(), false);
|
||||
Environment env = initialSettings.v2();
|
||||
File binDir = new File(env.homeFile(), "bin");
|
||||
if (!binDir.exists() && !FileSystemUtils.mkdirs(binDir)) {
|
||||
throw new IOException("Could not create bin directory [" + binDir.getAbsolutePath() + "]");
|
||||
Path binDir = env.homeFile().resolve("bin");
|
||||
if (!Files.exists(binDir)) {
|
||||
Files.createDirectories(binDir);
|
||||
}
|
||||
File pluginBinDir = new File(binDir, pluginName);
|
||||
File configDir = env.configFile();
|
||||
if (!configDir.exists() && !FileSystemUtils.mkdirs(configDir)) {
|
||||
throw new IOException("Could not create config directory [" + configDir.getAbsolutePath() + "]");
|
||||
Path pluginBinDir = binDir.resolve(pluginName);
|
||||
Path configDir = env.configFile();
|
||||
if (!Files.exists(configDir)) {
|
||||
Files.createDirectories(configDir);
|
||||
}
|
||||
File pluginConfigDir = new File(configDir, pluginName);
|
||||
Path pluginConfigDir =configDir.resolve(pluginName);
|
||||
try {
|
||||
|
||||
PluginManager pluginManager = pluginManager(getPluginUrlForResource("plugin_with_bin_and_config.zip"), initialSettings);
|
||||
|
||||
pluginManager.downloadAndExtract(pluginName);
|
||||
|
||||
File[] plugins = pluginManager.getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager.getListInstalledPlugins();
|
||||
|
||||
assertThat(plugins, arrayWithSize(1));
|
||||
assertDirectoryExists(pluginBinDir);
|
||||
assertDirectoryExists(pluginConfigDir);
|
||||
File toolFile = new File(pluginBinDir, "tool");
|
||||
Path toolFile = pluginBinDir.resolve("tool");
|
||||
assertFileExists(toolFile);
|
||||
assertThat(toolFile.canExecute(), is(true));
|
||||
assertThat(Files.isExecutable(toolFile), is(true));
|
||||
} finally {
|
||||
// we need to clean up the copied dirs
|
||||
IOUtils.rm(pluginBinDir.toPath(), pluginConfigDir.toPath());
|
||||
IOUtils.rm(pluginBinDir, pluginConfigDir);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -139,17 +141,17 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
ImmutableSettings.settingsBuilder().build(), false);
|
||||
Environment env = initialSettings.v2();
|
||||
|
||||
File configDir = env.configFile();
|
||||
if (!configDir.exists() && !FileSystemUtils.mkdirs(configDir)) {
|
||||
throw new IOException("Could not create config directory [" + configDir.getAbsolutePath() + "]");
|
||||
Path configDir = env.configFile();
|
||||
if (!Files.exists(configDir)) {
|
||||
Files.createDirectories(configDir);
|
||||
}
|
||||
File pluginConfigDir = new File(configDir, pluginName);
|
||||
Path pluginConfigDir = configDir.resolve(pluginName);
|
||||
|
||||
try {
|
||||
PluginManager pluginManager = pluginManager(getPluginUrlForResource("plugin_with_config_v1.zip"), initialSettings);
|
||||
pluginManager.downloadAndExtract(pluginName);
|
||||
|
||||
File[] plugins = pluginManager.getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager.getListInstalledPlugins();
|
||||
assertThat(plugins, arrayWithSize(1));
|
||||
|
||||
/*
|
||||
|
@ -207,7 +209,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
assertFileContent(pluginConfigDir, "dir/subdir/testsubdir.txt.new", "version2\n");
|
||||
} finally {
|
||||
// we need to clean up the copied dirs
|
||||
IOUtils.rm(pluginConfigDir.toPath());
|
||||
IOUtils.rm(pluginConfigDir);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -218,20 +220,20 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
Tuple<Settings, Environment> initialSettings = InternalSettingsPreparer.prepareSettings(
|
||||
ImmutableSettings.settingsBuilder().build(), false);
|
||||
Environment env = initialSettings.v2();
|
||||
File binDir = new File(env.homeFile(), "bin");
|
||||
if (!binDir.exists() && !FileSystemUtils.mkdirs(binDir)) {
|
||||
throw new IOException("Could not create bin directory [" + binDir.getAbsolutePath() + "]");
|
||||
Path binDir = env.homeFile().resolve("bin");
|
||||
if (!Files.exists(binDir)) {
|
||||
Files.createDirectories(binDir);
|
||||
}
|
||||
File pluginBinDir = new File(binDir, pluginName);
|
||||
Path pluginBinDir = binDir.resolve(pluginName);
|
||||
try {
|
||||
PluginManager pluginManager = pluginManager(getPluginUrlForResource("plugin_with_bin_only.zip"), initialSettings);
|
||||
pluginManager.downloadAndExtract(pluginName);
|
||||
File[] plugins = pluginManager.getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager.getListInstalledPlugins();
|
||||
assertThat(plugins.length, is(1));
|
||||
assertDirectoryExists(pluginBinDir);
|
||||
} finally {
|
||||
// we need to clean up the copied dirs
|
||||
IOUtils.rm(pluginBinDir.toPath());
|
||||
IOUtils.rm(pluginBinDir);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -278,7 +280,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
downloadAndExtract(pluginName, getPluginUrlForResource("plugin_with_sourcefiles.zip"));
|
||||
}
|
||||
|
||||
private static PluginManager pluginManager(String pluginUrl) {
|
||||
private static PluginManager pluginManager(String pluginUrl) throws IOException {
|
||||
Tuple<Settings, Environment> initialSettings = InternalSettingsPreparer.prepareSettings(
|
||||
ImmutableSettings.settingsBuilder().build(), false);
|
||||
return pluginManager(pluginUrl, initialSettings);
|
||||
|
@ -288,9 +290,9 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
* We build a plugin manager instance which wait only for 30 seconds before
|
||||
* raising an ElasticsearchTimeoutException
|
||||
*/
|
||||
private static PluginManager pluginManager(String pluginUrl, Tuple<Settings, Environment> initialSettings) {
|
||||
if (!initialSettings.v2().pluginsFile().exists()) {
|
||||
FileSystemUtils.mkdirs(initialSettings.v2().pluginsFile());
|
||||
private static PluginManager pluginManager(String pluginUrl, Tuple<Settings, Environment> initialSettings) throws IOException {
|
||||
if (!Files.exists(initialSettings.v2().pluginsFile())) {
|
||||
Files.createDirectories(initialSettings.v2().pluginsFile());
|
||||
}
|
||||
return new PluginManager(initialSettings.v2(), pluginUrl, PluginManager.OutputMode.SILENT, TimeValue.timeValueSeconds(30));
|
||||
}
|
||||
|
@ -343,7 +345,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
//checking now that the plugin is available
|
||||
HttpResponse response = getHttpRequestBuilder().method("GET").path("/_plugin/" + pluginName + "/").execute();
|
||||
assertThat(response, notNullValue());
|
||||
assertThat(response.getStatusCode(), equalTo(RestStatus.OK.getStatus()));
|
||||
assertThat(response.getReasonPhrase(), response.getStatusCode(), equalTo(RestStatus.OK.getStatus()));
|
||||
}
|
||||
|
||||
private HttpRequestBuilder getHttpRequestBuilder() {
|
||||
|
@ -352,7 +354,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
@Test
|
||||
public void testListInstalledEmpty() throws IOException {
|
||||
File[] plugins = pluginManager(null).getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager(null).getListInstalledPlugins();
|
||||
assertThat(plugins, notNullValue());
|
||||
assertThat(plugins.length, is(0));
|
||||
}
|
||||
|
@ -368,7 +370,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
PluginManager pluginManager = pluginManager(getPluginUrlForResource("plugin_with_classfile.zip"));
|
||||
|
||||
pluginManager.downloadAndExtract("plugin-classfile");
|
||||
File[] plugins = pluginManager.getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager.getListInstalledPlugins();
|
||||
assertThat(plugins, notNullValue());
|
||||
assertThat(plugins.length, is(1));
|
||||
}
|
||||
|
@ -378,7 +380,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
PluginManager pluginManager = pluginManager(getPluginUrlForResource("plugin_without_folders.zip"));
|
||||
|
||||
pluginManager.downloadAndExtract("plugin-site");
|
||||
File[] plugins = pluginManager.getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager.getListInstalledPlugins();
|
||||
assertThat(plugins, notNullValue());
|
||||
assertThat(plugins.length, is(1));
|
||||
|
||||
|
@ -393,7 +395,7 @@ public class PluginManagerTests extends ElasticsearchIntegrationTest {
|
|||
PluginManager pluginManager = pluginManager(pluginCoordinates);
|
||||
try {
|
||||
pluginManager.downloadAndExtract(pluginShortName);
|
||||
File[] plugins = pluginManager.getListInstalledPlugins();
|
||||
Path[] plugins = pluginManager.getListInstalledPlugins();
|
||||
assertThat(plugins, notNullValue());
|
||||
assertThat(plugins.length, is(1));
|
||||
|
||||
|
|
|
@ -49,8 +49,8 @@ public class PluginManagerUnitTests extends ElasticsearchTestCase {
|
|||
Environment environment = new Environment(settings);
|
||||
|
||||
PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user", "repo");
|
||||
String configDirPath = Files.simplifyPath(pluginHandle.configDir(environment).getCanonicalPath());
|
||||
String expectedDirPath = Files.simplifyPath(new File(genericConfigFolder, pluginName).getCanonicalPath());
|
||||
String configDirPath = Files.simplifyPath(pluginHandle.configDir(environment).normalize().toString());
|
||||
String expectedDirPath = Files.simplifyPath(new File(genericConfigFolder, pluginName).toPath().normalize().toString());
|
||||
|
||||
assertThat(configDirPath, is(expectedDirPath));
|
||||
}
|
||||
|
|
|
@ -66,7 +66,6 @@ import org.elasticsearch.test.transport.MockTransportService;
|
|||
import org.elasticsearch.transport.*;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.FileVisitResult;
|
||||
import java.nio.file.Files;
|
||||
|
@ -501,12 +500,12 @@ public class RelocationTests extends ElasticsearchIntegrationTest {
|
|||
logger.info("--> verifying no temporary recoveries are left");
|
||||
for (String node : internalCluster().getNodeNames()) {
|
||||
NodeEnvironment nodeEnvironment = internalCluster().getInstance(NodeEnvironment.class, node);
|
||||
for (final File shardLoc : nodeEnvironment.shardLocations(new ShardId(indexName, 0))) {
|
||||
for (final Path shardLoc : nodeEnvironment.shardPaths(new ShardId(indexName, 0))) {
|
||||
assertBusy(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
Files.walkFileTree(shardLoc.toPath(), new SimpleFileVisitor<Path>() {
|
||||
Files.walkFileTree(shardLoc, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), not(startsWith("recovery.")));
|
||||
|
@ -514,7 +513,7 @@ public class RelocationTests extends ElasticsearchIntegrationTest {
|
|||
}
|
||||
});
|
||||
} catch (IOException e) {
|
||||
throw new AssertionError("failed to walk file tree starting at [" + shardLoc.toPath() + "]", e);
|
||||
throw new AssertionError("failed to walk file tree starting at [" + shardLoc + "]", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.junit.Test;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
|
||||
|
@ -45,8 +46,8 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testScriptsWithoutExtensions() throws IOException {
|
||||
File homeFolder = newTempDir();
|
||||
File genericConfigFolder = newTempDir();
|
||||
Path homeFolder = newTempDir().toPath();
|
||||
Path genericConfigFolder = newTempDir().toPath();
|
||||
|
||||
Settings settings = settingsBuilder()
|
||||
.put("path.conf", genericConfigFolder)
|
||||
|
@ -58,15 +59,15 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
|
|||
|
||||
logger.info("--> setup script service");
|
||||
ScriptService scriptService = new ScriptService(settings, environment, ImmutableSet.of(new TestEngineService()), resourceWatcherService);
|
||||
File scriptsFile = new File(genericConfigFolder, "scripts");
|
||||
assertThat(scriptsFile.mkdir(), equalTo(true));
|
||||
Path scriptsFile = genericConfigFolder.resolve("scripts");
|
||||
Files.createDirectories(scriptsFile);
|
||||
resourceWatcherService.notifyNow();
|
||||
|
||||
logger.info("--> setup two test files one with extension and another without");
|
||||
File testFileNoExt = new File(scriptsFile, "test_no_ext");
|
||||
File testFileWithExt = new File(scriptsFile, "test_script.tst");
|
||||
Streams.copy("test_file_no_ext".getBytes("UTF-8"), testFileNoExt);
|
||||
Streams.copy("test_file".getBytes("UTF-8"), testFileWithExt);
|
||||
Path testFileNoExt = scriptsFile.resolve("test_no_ext");
|
||||
Path testFileWithExt = scriptsFile.resolve("test_script.tst");
|
||||
Streams.copy("test_file_no_ext".getBytes("UTF-8"), Files.newOutputStream(testFileNoExt));
|
||||
Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testFileWithExt));
|
||||
resourceWatcherService.notifyNow();
|
||||
|
||||
logger.info("--> verify that file with extension was correctly processed");
|
||||
|
@ -74,8 +75,8 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
|
|||
assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file"));
|
||||
|
||||
logger.info("--> delete both files");
|
||||
Files.delete(testFileNoExt.toPath());
|
||||
Files.delete(testFileWithExt.toPath());
|
||||
Files.delete(testFileNoExt);
|
||||
Files.delete(testFileWithExt);
|
||||
resourceWatcherService.notifyNow();
|
||||
|
||||
logger.info("--> verify that file with extension was correctly removed");
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.action.count.CountResponse;
|
|||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
|
@ -39,7 +38,7 @@ import org.elasticsearch.node.Node;
|
|||
import org.elasticsearch.node.NodeBuilder;
|
||||
import org.elasticsearch.node.internal.InternalNode;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
@ -198,11 +197,11 @@ public class FullRestartStressTest {
|
|||
|
||||
client.close();
|
||||
for (Node node : nodes) {
|
||||
File[] nodeDatas = ((InternalNode) node).injector().getInstance(NodeEnvironment.class).nodeDataLocations();
|
||||
Path[] nodeDatas = ((InternalNode) node).injector().getInstance(NodeEnvironment.class).nodeDataPaths();
|
||||
node.close();
|
||||
if (clearNodeWork && !settings.get("gateway.type").equals("local")) {
|
||||
try {
|
||||
IOUtils.rm(FileSystemUtils.toPaths(nodeDatas));
|
||||
IOUtils.rm(nodeDatas);
|
||||
} catch (Exception ex) {
|
||||
logger.debug("failed to remove node data locations", ex);
|
||||
}
|
||||
|
|
|
@ -40,7 +40,7 @@ import org.elasticsearch.node.NodeBuilder;
|
|||
import org.elasticsearch.node.internal.InternalNode;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
|
@ -168,11 +168,11 @@ public class RollingRestartStressTest {
|
|||
// start doing the rolling restart
|
||||
int nodeIndex = 0;
|
||||
while (true) {
|
||||
File[] nodeData = ((InternalNode) nodes[nodeIndex]).injector().getInstance(NodeEnvironment.class).nodeDataLocations();
|
||||
Path[] nodeData = ((InternalNode) nodes[nodeIndex]).injector().getInstance(NodeEnvironment.class).nodeDataPaths();
|
||||
nodes[nodeIndex].close();
|
||||
if (clearNodeData) {
|
||||
try {
|
||||
IOUtils.rm(FileSystemUtils.toPaths(nodeData));
|
||||
IOUtils.rm(nodeData);
|
||||
} catch (Exception ex) {
|
||||
logger.debug("Failed to delete node data directories", ex);
|
||||
|
||||
|
|
|
@ -41,50 +41,50 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
private class RecordingChangeListener extends FileChangesListener {
|
||||
|
||||
private File rootDir;
|
||||
private Path rootDir;
|
||||
|
||||
private RecordingChangeListener(File rootDir) {
|
||||
private RecordingChangeListener(Path rootDir) {
|
||||
this.rootDir = rootDir;
|
||||
}
|
||||
|
||||
private String getRelativeFileName(File file) {
|
||||
return rootDir.toURI().relativize(file.toURI()).getPath();
|
||||
private String getRelativeFileName(Path file) {
|
||||
return rootDir.toUri().relativize(file.toUri()).getPath();
|
||||
}
|
||||
|
||||
private List<String> notifications = newArrayList();
|
||||
|
||||
@Override
|
||||
public void onFileInit(File file) {
|
||||
public void onFileInit(Path file) {
|
||||
notifications.add("onFileInit: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDirectoryInit(File file) {
|
||||
public void onDirectoryInit(Path file) {
|
||||
notifications.add("onDirectoryInit: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileCreated(File file) {
|
||||
public void onFileCreated(Path file) {
|
||||
notifications.add("onFileCreated: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileDeleted(File file) {
|
||||
public void onFileDeleted(Path file) {
|
||||
notifications.add("onFileDeleted: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileChanged(File file) {
|
||||
public void onFileChanged(Path file) {
|
||||
notifications.add("onFileChanged: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDirectoryCreated(File file) {
|
||||
public void onDirectoryCreated(Path file) {
|
||||
notifications.add("onDirectoryCreated: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDirectoryDeleted(File file) {
|
||||
public void onDirectoryDeleted(Path file) {
|
||||
notifications.add("onDirectoryDeleted: " + getRelativeFileName(file));
|
||||
}
|
||||
|
||||
|
@ -95,10 +95,10 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testSimpleFileOperations() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testFile = new File(tempDir, "test.txt");
|
||||
touch(testFile);
|
||||
Path testFile = tempDir.resolve("test.txt");
|
||||
touch(testFile.toFile()); // TODO fix this to use path
|
||||
FileWatcher fileWatcher = new FileWatcher(testFile);
|
||||
fileWatcher.addListener(changes);
|
||||
fileWatcher.init();
|
||||
|
@ -108,7 +108,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
append("Test", testFile, Charset.defaultCharset());
|
||||
append("Test", testFile.toFile(), Charset.defaultCharset());
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(equalTo("onFileChanged: test.txt")));
|
||||
|
||||
|
@ -116,7 +116,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
Files.delete(testFile.toPath());
|
||||
Files.delete(testFile);
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(equalTo("onFileDeleted: test.txt")));
|
||||
|
||||
|
@ -124,12 +124,12 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testSimpleDirectoryOperations() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testDir = new File(tempDir, "test-dir");
|
||||
testDir.mkdir();
|
||||
touch(new File(testDir, "test.txt"));
|
||||
touch(new File(testDir, "test0.txt"));
|
||||
Path testDir = tempDir.resolve("test-dir");
|
||||
Files.createDirectories(testDir);
|
||||
touch(testDir.resolve("test.txt").toFile());
|
||||
touch(testDir.resolve("test0.txt").toFile());
|
||||
|
||||
FileWatcher fileWatcher = new FileWatcher(testDir);
|
||||
fileWatcher.addListener(changes);
|
||||
|
@ -145,10 +145,10 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
for (int i = 0; i < 4; i++) {
|
||||
touch(new File(testDir, "test" + i + ".txt"));
|
||||
touch(testDir.resolve("test" + i + ".txt").toFile());
|
||||
}
|
||||
// Make sure that first file is modified
|
||||
append("Test", new File(testDir, "test0.txt"), Charset.defaultCharset());
|
||||
append("Test", testDir.resolve("test0.txt").toFile(), Charset.defaultCharset());
|
||||
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
|
@ -162,8 +162,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
Files.delete(new File(testDir, "test1.txt").toPath());
|
||||
Files.delete(new File(testDir, "test2.txt").toPath());
|
||||
Files.delete(testDir.resolve("test1.txt"));
|
||||
Files.delete(testDir.resolve("test2.txt"));
|
||||
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
|
@ -175,9 +175,9 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
Files.delete(new File(testDir, "test0.txt").toPath());
|
||||
touch(new File(testDir, "test2.txt"));
|
||||
touch(new File(testDir, "test4.txt"));
|
||||
Files.delete(testDir.resolve("test0.txt"));
|
||||
touch(testDir.resolve("test2.txt").toFile());
|
||||
touch(testDir.resolve("test4.txt").toFile());
|
||||
fileWatcher.checkAndNotify();
|
||||
|
||||
assertThat(changes.notifications(), contains(
|
||||
|
@ -189,8 +189,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
changes.notifications().clear();
|
||||
|
||||
Files.delete(new File(testDir, "test3.txt").toPath());
|
||||
Files.delete(new File(testDir, "test4.txt").toPath());
|
||||
Files.delete(testDir.resolve("test3.txt"));
|
||||
Files.delete(testDir.resolve("test4.txt"));
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
equalTo("onFileDeleted: test-dir/test3.txt"),
|
||||
|
@ -199,8 +199,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
|
||||
changes.notifications().clear();
|
||||
if (testDir.exists()) {
|
||||
IOUtils.rm(testDir.toPath());
|
||||
if (Files.exists(testDir)) {
|
||||
IOUtils.rm(testDir);
|
||||
}
|
||||
fileWatcher.checkAndNotify();
|
||||
|
||||
|
@ -214,13 +214,13 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testNestedDirectoryOperations() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testDir = new File(tempDir, "test-dir");
|
||||
testDir.mkdir();
|
||||
touch(new File(testDir, "test.txt"));
|
||||
new File(testDir, "sub-dir").mkdir();
|
||||
touch(new File(testDir, "sub-dir/test0.txt"));
|
||||
Path testDir = tempDir.resolve("test-dir");
|
||||
Files.createDirectories(testDir);
|
||||
touch(testDir.resolve("test.txt").toFile());
|
||||
Files.createDirectories(testDir.resolve("sub-dir"));
|
||||
touch(testDir.resolve("sub-dir/test0.txt").toFile());
|
||||
|
||||
FileWatcher fileWatcher = new FileWatcher(testDir);
|
||||
fileWatcher.addListener(changes);
|
||||
|
@ -237,7 +237,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
// Create new file in subdirectory
|
||||
touch(new File(testDir, "sub-dir/test1.txt"));
|
||||
touch(testDir.resolve("sub-dir/test1.txt").toFile());
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
equalTo("onFileCreated: test-dir/sub-dir/test1.txt")
|
||||
|
@ -248,10 +248,10 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
// Create new subdirectory in subdirectory
|
||||
new File(testDir, "first-level").mkdir();
|
||||
touch(new File(testDir, "first-level/file1.txt"));
|
||||
new File(testDir, "first-level/second-level").mkdir();
|
||||
touch(new File(testDir, "first-level/second-level/file2.txt"));
|
||||
Files.createDirectories(testDir.resolve("first-level"));
|
||||
touch(testDir.resolve("first-level/file1.txt").toFile());
|
||||
Files.createDirectories(testDir.resolve("first-level/second-level"));
|
||||
touch(testDir.resolve("first-level/second-level/file2.txt").toFile());
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
equalTo("onDirectoryCreated: test-dir/first-level/"),
|
||||
|
@ -265,7 +265,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
assertThat(changes.notifications(), hasSize(0));
|
||||
|
||||
// Delete a directory, check notifications for
|
||||
Path path = new File(testDir, "first-level").toPath();
|
||||
Path path = testDir.resolve("first-level");
|
||||
if (Files.exists(path)) {
|
||||
IOUtils.rm(path);
|
||||
}
|
||||
|
@ -280,14 +280,14 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testFileReplacingDirectory() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testDir = new File(tempDir, "test-dir");
|
||||
testDir.mkdir();
|
||||
File subDir = new File(testDir, "sub-dir");
|
||||
subDir.mkdir();
|
||||
touch(new File(subDir, "test0.txt"));
|
||||
touch(new File(subDir, "test1.txt"));
|
||||
Path testDir = tempDir.resolve("test-dir");
|
||||
Files.createDirectories(testDir);
|
||||
Path subDir = testDir.resolve("sub-dir");
|
||||
Files.createDirectories(subDir);
|
||||
touch(subDir.resolve("test0.txt").toFile());
|
||||
touch(subDir.resolve("test1.txt").toFile());
|
||||
|
||||
FileWatcher fileWatcher = new FileWatcher(testDir);
|
||||
fileWatcher.addListener(changes);
|
||||
|
@ -301,10 +301,10 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
changes.notifications().clear();
|
||||
|
||||
if (subDir.exists()) {
|
||||
IOUtils.rm(subDir.toPath());
|
||||
if (Files.exists(subDir)) {
|
||||
IOUtils.rm(subDir);
|
||||
}
|
||||
touch(subDir);
|
||||
touch(subDir.toFile());
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
equalTo("onFileDeleted: test-dir/sub-dir/test0.txt"),
|
||||
|
@ -315,8 +315,8 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
changes.notifications().clear();
|
||||
|
||||
Files.delete(subDir.toPath());
|
||||
subDir.mkdir();
|
||||
Files.delete(subDir);
|
||||
Files.createDirectories(subDir);
|
||||
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
|
@ -327,20 +327,20 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testEmptyDirectory() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testDir = new File(tempDir, "test-dir");
|
||||
testDir.mkdir();
|
||||
touch(new File(testDir, "test0.txt"));
|
||||
touch(new File(testDir, "test1.txt"));
|
||||
Path testDir = tempDir.resolve("test-dir");
|
||||
Files.createDirectories(testDir);
|
||||
touch(testDir.resolve("test0.txt").toFile());
|
||||
touch(testDir.resolve("test1.txt").toFile());
|
||||
|
||||
FileWatcher fileWatcher = new FileWatcher(testDir);
|
||||
fileWatcher.addListener(changes);
|
||||
fileWatcher.init();
|
||||
changes.notifications().clear();
|
||||
|
||||
Files.delete(new File(testDir, "test0.txt").toPath());
|
||||
Files.delete(new File(testDir, "test1.txt").toPath());
|
||||
Files.delete(testDir.resolve("test0.txt"));
|
||||
Files.delete(testDir.resolve("test1.txt"));
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
equalTo("onFileDeleted: test-dir/test0.txt"),
|
||||
|
@ -350,9 +350,9 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testNoDirectoryOnInit() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testDir = new File(tempDir, "test-dir");
|
||||
Path testDir = tempDir.resolve("test-dir");
|
||||
|
||||
FileWatcher fileWatcher = new FileWatcher(testDir);
|
||||
fileWatcher.addListener(changes);
|
||||
|
@ -360,9 +360,9 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
assertThat(changes.notifications(), hasSize(0));
|
||||
changes.notifications().clear();
|
||||
|
||||
testDir.mkdir();
|
||||
touch(new File(testDir, "test0.txt"));
|
||||
touch(new File(testDir, "test1.txt"));
|
||||
Files.createDirectories(testDir);
|
||||
touch(testDir.resolve("test0.txt").toFile());
|
||||
touch(testDir.resolve("test1.txt").toFile());
|
||||
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
|
@ -374,9 +374,9 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testNoFileOnInit() throws IOException {
|
||||
File tempDir = newTempDir(LifecycleScope.TEST);
|
||||
Path tempDir = newTempDir(LifecycleScope.TEST).toPath();
|
||||
RecordingChangeListener changes = new RecordingChangeListener(tempDir);
|
||||
File testFile = new File(tempDir, "testfile.txt");
|
||||
Path testFile = tempDir.resolve("testfile.txt");
|
||||
|
||||
FileWatcher fileWatcher = new FileWatcher(testFile);
|
||||
fileWatcher.addListener(changes);
|
||||
|
@ -384,7 +384,7 @@ public class FileWatcherTest extends ElasticsearchTestCase {
|
|||
assertThat(changes.notifications(), hasSize(0));
|
||||
changes.notifications().clear();
|
||||
|
||||
touch(testFile);
|
||||
touch(testFile.toFile());
|
||||
|
||||
fileWatcher.checkAndNotify();
|
||||
assertThat(changes.notifications(), contains(
|
||||
|
|
Loading…
Reference in New Issue