Merge remote-tracking branch 'es/master' into feature/ingest

This commit is contained in:
Martijn van Groningen 2015-12-24 15:34:20 +01:00
commit 4a0ec0da26
79 changed files with 3127 additions and 1368 deletions

View File

@ -198,6 +198,10 @@ class BuildPlugin implements Plugin<Project> {
* to iterate the transitive dependencies and add excludes.
*/
static void configureConfigurations(Project project) {
// we are not shipping these jars, we act like dumb consumers of these things
if (project.path.startsWith(':test:fixtures')) {
return
}
// fail on any conflicting dependency versions
project.configurations.all({ Configuration configuration ->
if (configuration.name.startsWith('_transitive_')) {
@ -205,6 +209,10 @@ class BuildPlugin implements Plugin<Project> {
// we just have them to find *what* transitive deps exist
return
}
if (configuration.name.endsWith('Fixture')) {
// just a self contained test-fixture configuration, likely transitive and hellacious
return
}
configuration.resolutionStrategy.failOnVersionConflict()
})

View File

@ -23,6 +23,8 @@ import org.elasticsearch.common.SuppressForbidden;
import java.net.SocketPermission;
import java.net.URL;
import java.io.FilePermission;
import java.io.IOException;
import java.security.CodeSource;
import java.security.Permission;
import java.security.PermissionCollection;
@ -81,10 +83,39 @@ final class ESPolicy extends Policy {
}
}
// Special handling for broken Hadoop code: "let me execute or my classes will not load"
// yeah right, REMOVE THIS when hadoop is fixed
if (permission instanceof FilePermission && "<<ALL FILES>>".equals(permission.getName())) {
for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
if ("org.apache.hadoop.util.Shell".equals(element.getClassName()) &&
"runCommand".equals(element.getMethodName())) {
// we found the horrible method: the hack begins!
// force the hadoop code to back down, by throwing an exception that it catches.
rethrow(new IOException("no hadoop, you cannot do this."));
}
}
}
// otherwise defer to template + dynamic file permissions
return template.implies(domain, permission) || dynamic.implies(permission) || system.implies(domain, permission);
}
/**
* Classy puzzler to rethrow any checked exception as an unchecked one.
*/
// Exploits generic type erasure: the unchecked cast to T is erased at
// runtime, so any Throwable — including checked exceptions such as
// IOException — can be thrown without the compiler forcing callers to
// declare or catch it. Instantiated as Rethrower<Error> so the throws
// clause is effectively unchecked.
private static class Rethrower<T extends Throwable> {
// Rethrows t as-is; the "throws T" declaration is a compile-time fiction.
private void rethrow(Throwable t) throws T {
throw (T) t;
}
}
/**
* Rethrows <code>t</code> (identical object).
*/
private void rethrow(Throwable t) {
new Rethrower<Error>().rethrow(t);
}
@Override
public PermissionCollection getPermissions(CodeSource codesource) {
// code should not rely on this method, or at least use it correctly:

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import java.io.BufferedInputStream;
@ -97,6 +96,7 @@ public class FsBlobContainer extends AbstractBlobContainer {
@Override
public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
final Path file = path.resolve(blobName);
// TODO: why is this not specifying CREATE_NEW? Do we really need to be able to truncate existing files?
try (OutputStream outputStream = Files.newOutputStream(file)) {
Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]);
}
@ -104,16 +104,6 @@ public class FsBlobContainer extends AbstractBlobContainer {
IOUtils.fsync(path, true);
}
@Override
// Writes the full contents of {@code data} to a file named {@code blobName}
// under this container's path, then fsyncs the file and its parent directory
// so the write survives a crash.
public void writeBlob(String blobName, BytesReference data) throws IOException {
final Path file = path.resolve(blobName);
// NOTE(review): Files.newOutputStream with default options truncates an
// existing file rather than failing (no CREATE_NEW) — presumably
// overwriting an existing blob is intended here; confirm.
try (OutputStream outputStream = Files.newOutputStream(file)) {
data.writeTo(outputStream);
}
// fsync the blob file itself, then the containing directory (metaData=true)
// so the directory entry is durable as well.
IOUtils.fsync(file, false);
IOUtils.fsync(path, true);
}
@Override
public void move(String source, String target) throws IOException {
Path sourcePath = path.resolve(source);

View File

@ -22,8 +22,10 @@ package org.elasticsearch.common.blobstore.support;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.bytes.BytesReference;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Map;
@ -57,4 +59,11 @@ public abstract class AbstractBlobContainer implements BlobContainer {
deleteBlob(blob);
}
}
@Override
// Default adapter: exposes the BytesReference as an InputStream and
// delegates to the stream-based writeBlob overload that concrete
// containers implement. The stream is closed via try-with-resources.
public void writeBlob(String blobName, BytesReference bytes) throws IOException {
try (InputStream stream = bytes.streamInput()) {
writeBlob(blobName, stream, bytes.length());
}
}
}

View File

@ -596,40 +596,22 @@ class DocumentParser implements Closeable {
if (dynamic == ObjectMapper.Dynamic.FALSE) {
return null;
}
final String path = context.path().pathAsText(currentFieldName);
final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().pathAsText(currentFieldName));
final MappedFieldType existingFieldType = context.mapperService().fullName(path);
Mapper.Builder builder = null;
if (existingFieldType != null) {
// create a builder of the same type
builder = createBuilderFromFieldType(context, existingFieldType, currentFieldName);
if (builder != null) {
// best-effort to not introduce a conflict
if (builder instanceof StringFieldMapper.Builder) {
StringFieldMapper.Builder stringBuilder = (StringFieldMapper.Builder) builder;
stringBuilder.fieldDataSettings(existingFieldType.fieldDataType().getSettings());
stringBuilder.store(existingFieldType.stored());
stringBuilder.indexOptions(existingFieldType.indexOptions());
stringBuilder.tokenized(existingFieldType.tokenized());
stringBuilder.omitNorms(existingFieldType.omitNorms());
stringBuilder.docValues(existingFieldType.hasDocValues());
stringBuilder.indexAnalyzer(existingFieldType.indexAnalyzer());
stringBuilder.searchAnalyzer(existingFieldType.searchAnalyzer());
} else if (builder instanceof NumberFieldMapper.Builder) {
NumberFieldMapper.Builder<?,?> numberBuilder = (NumberFieldMapper.Builder<?, ?>) builder;
numberBuilder.fieldDataSettings(existingFieldType.fieldDataType().getSettings());
numberBuilder.store(existingFieldType.stored());
numberBuilder.indexOptions(existingFieldType.indexOptions());
numberBuilder.tokenized(existingFieldType.tokenized());
numberBuilder.omitNorms(existingFieldType.omitNorms());
numberBuilder.docValues(existingFieldType.hasDocValues());
numberBuilder.precisionStep(existingFieldType.numericPrecisionStep());
}
}
}
if (builder == null) {
builder = createBuilderFromDynamicValue(context, token, currentFieldName);
}
Mapper mapper = builder.build(builderContext);
if (existingFieldType != null) {
// try to not introduce a conflict
mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType));
}
mapper = parseAndMergeUpdate(mapper, context);

View File

@ -363,6 +363,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name());
if (newFieldType == null) {
throw new IllegalStateException();
} else if (fieldType.getClass() != newFieldType.getClass()) {
throw new IllegalStateException("Mixing up field types: " + fieldType.getClass() + " != " + newFieldType.getClass());
}
MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType);
if (fieldType == newFieldType && multiFields == updatedMultiFields) {

View File

@ -43,6 +43,7 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.booleanField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
/**
* A field mapper for boolean fields.
@ -107,6 +108,8 @@ public class BooleanFieldMapper extends FieldMapper {
}
builder.nullValue(nodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
return builder;

View File

@ -133,8 +133,9 @@ public class CidrsTests extends ESTestCase {
public void testValidCombinations() {
for (long i = 0; i < (1 << 16); i++) {
String octetsString = Cidrs.octetsToString(Cidrs.longToOctets(i << 16));
for (int mask = 16; mask <= 32; mask++) {
String test = Cidrs.octetsToCIDR(Cidrs.longToOctets(i << 16), mask);
String test = octetsString + "/" + mask;
long[] actual = Cidrs.cidrMaskToMinMax(test);
assertNotNull(test, actual);
assertEquals(test, 2, actual.length);

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -30,8 +31,13 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -367,17 +373,52 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
}
public void testReuseExistingMappings() throws IOException, Exception {
IndexService indexService = createIndex("test", Settings.EMPTY, "type", "my_field1", "type=string,store=yes", "my_field2", "type=integer,precision_step=10");
IndexService indexService = createIndex("test", Settings.EMPTY, "type",
"my_field1", "type=string,store=yes",
"my_field2", "type=integer,precision_step=10",
"my_field3", "type=long,doc_values=false",
"my_field4", "type=float,index_options=freqs",
"my_field5", "type=double,precision_step=14",
"my_field6", "type=date,doc_values=false");
// Even if the dynamic type of our new field is long, we already have a mapping for the same field
// of type string so it should be mapped as a string
DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").getDocumentMapper();
Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(),
XContentFactory.jsonBuilder().startObject().field("my_field1", 42).endObject());
XContentFactory.jsonBuilder().startObject()
.field("my_field1", 42)
.field("my_field2", 43)
.field("my_field3", 44)
.field("my_field4", 45)
.field("my_field5", 46)
.field("my_field6", 47)
.endObject());
Mapper myField1Mapper = null;
Mapper myField2Mapper = null;
Mapper myField3Mapper = null;
Mapper myField4Mapper = null;
Mapper myField5Mapper = null;
Mapper myField6Mapper = null;
for (Mapper m : update) {
if (m.name().equals("my_field1")) {
switch (m.name()) {
case "my_field1":
myField1Mapper = m;
break;
case "my_field2":
myField2Mapper = m;
break;
case "my_field3":
myField3Mapper = m;
break;
case "my_field4":
myField4Mapper = m;
break;
case "my_field5":
myField5Mapper = m;
break;
case "my_field6":
myField6Mapper = m;
break;
}
}
assertNotNull(myField1Mapper);
@ -388,20 +429,28 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
// Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
// since we already have a mapping of type integer
update = parse(newMapper, indexService.mapperService().documentMapperParser(),
XContentFactory.jsonBuilder().startObject().field("my_field2", 42).endObject());
Mapper myField2Mapper = null;
for (Mapper m : update) {
if (m.name().equals("my_field2")) {
myField2Mapper = m;
}
}
assertNotNull(myField2Mapper);
// same type
assertTrue(myField2Mapper instanceof IntegerFieldMapper);
// and same option
assertEquals(10, ((IntegerFieldMapper) myField2Mapper).fieldType().numericPrecisionStep());
assertNotNull(myField3Mapper);
assertTrue(myField3Mapper instanceof LongFieldMapper);
assertFalse(((LongFieldType) ((LongFieldMapper) myField3Mapper).fieldType()).hasDocValues());
assertNotNull(myField4Mapper);
assertTrue(myField4Mapper instanceof FloatFieldMapper);
assertEquals(IndexOptions.DOCS_AND_FREQS, ((FieldMapper) myField4Mapper).fieldType().indexOptions());
assertNotNull(myField5Mapper);
assertTrue(myField5Mapper instanceof DoubleFieldMapper);
assertEquals(14, ((DoubleFieldMapper) myField5Mapper).fieldType().numericPrecisionStep());
assertNotNull(myField6Mapper);
assertTrue(myField6Mapper instanceof DateFieldMapper);
assertFalse(((DateFieldType) ((DateFieldMapper) myField6Mapper).fieldType()).hasDocValues());
// This can't work
try {
parse(newMapper, indexService.mapperService().documentMapperParser(),

View File

@ -28,6 +28,7 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -110,4 +111,27 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
builder.endObject();
assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", builder.string());
}
// Verifies that a boolean field with a string multi-field ("field.as_string")
// round-trips through the mapper unchanged, and that parsing a boolean value
// also populates the multi-field in the resulting Lucene document.
public void testMultiFields() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "boolean")
.startObject("fields")
.startObject("as_string")
.field("type", "string")
.field("index", "not_analyzed")
.endObject()
.endObject()
.endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false);
// The serialized mapping must equal the input mapping exactly.
assertEquals(mapping, mapper.mappingSource().toString());
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", false)
.endObject().bytes();
ParsedDocument doc = mapper.parse("test", "type", "1", source);
// Parsing the boolean must have produced the "as_string" sub-field too.
assertNotNull(doc.rootDoc().getField("field.as_string"));
}
}

View File

@ -8,29 +8,25 @@ The HDFS repository plugin adds support for using HDFS File System as a reposito
[float]
==== Installation
This plugin can be installed using the plugin manager using _one_ of the following packages:
This plugin can be installed through the plugin manager:
[source,sh]
----------------------------------------------------------------
sudo bin/plugin install repository-hdfs
sudo bin/plugin install repository-hdfs-hadoop2
sudo bin/plugin install repository-hdfs-lite
----------------------------------------------------------------
The chosen plugin must be installed on every node in the cluster, and each node must
The plugin must be installed on _every_ node in the cluster, and each node must
be restarted after installation.
[[repository-hdfs-remove]]
[float]
==== Removal
The plugin can be removed by specifying the _installed_ package using _one_ of the following commands:
The plugin can be removed by specifying the _installed_ package:
[source,sh]
----------------------------------------------------------------
sudo bin/plugin remove repository-hdfs
sudo bin/plugin remove repository-hdfs-hadoop2
sudo bin/plugin remove repository-hdfs-lite
----------------------------------------------------------------
The node must be stopped before removing the plugin.
@ -38,49 +34,14 @@ The node must be stopped before removing the plugin.
[[repository-hdfs-usage]]
==== Getting started with HDFS
The HDFS snapshot/restore plugin comes in three _flavors_:
The HDFS snapshot/restore plugin is built against the latest Apache Hadoop 2.x (currently 2.7.1). If the distro you are using is not protocol
compatible with Apache Hadoop, consider replacing the Hadoop libraries inside the plugin folder with your own (you might have to adjust the security permissions required).
* Default / Hadoop 1.x::
The default version contains the plugin jar alongside Apache Hadoop 1.x (stable) dependencies.
* YARN / Hadoop 2.x::
The `hadoop2` version contains the plugin jar plus the Apache Hadoop 2.x (also known as YARN) dependencies.
* Lite::
The `lite` version contains just the plugin jar, without any Hadoop dependencies. The user should provide these (read below).
Even if Hadoop is already installed on the Elasticsearch nodes, for security reasons, the required libraries need to be placed under the plugin folder. Note that in most cases, if the distro is compatible, one simply needs to configure the repository with the appropriate Hadoop configuration files (see below).
[[repository-hdfs-flavor]]
===== What version to use?
It depends on whether Hadoop is locally installed or not and if not, whether it is compatible with Apache Hadoop clients.
* Are you using Apache Hadoop (or a _compatible_ distro) and do not have it installed on the Elasticsearch nodes?::
+
If the answer is yes, for Apache Hadoop 1 use the default `repository-hdfs` or `repository-hdfs-hadoop2` for Apache Hadoop 2.
+
* If you have Hadoop installed locally on the Elasticsearch nodes or are using a certain distro::
+
Use the `lite` version and place your Hadoop _client_ jars and their dependencies in the plugin folder under `hadoop-libs`.
For large deployments, it is recommended to package the libraries in the plugin zip and deploy it manually across nodes
(and thus avoiding having to do the libraries setup on each node).
[[repository-hdfs-security]]
==== Handling JVM Security and Permissions
Out of the box, Elasticsearch runs in a JVM with the security manager turned _on_ to make sure that unsafe or sensitive actions
are allowed only from trusted code. Hadoop however is not really designed to run under one; it does not rely on privileged blocks
to execute sensitive code, of which it uses plenty.
The `repository-hdfs` plugin provides the necessary permissions for both Apache Hadoop 1.x and 2.x (latest versions) to successfully
run in a secured JVM as one can tell from the number of permissions required when installing the plugin.
However, using a certain Hadoop File-System (outside DFS), a certain distro or operating system (in particular Windows), might require
additional permissions which are not provided by the plugin.
In this case there are several workarounds:
* add the permission into `plugin-security.policy` (available in the plugin folder)
* disable the security manager through `es.security.manager.enabled=false` configurations setting - NOT RECOMMENDED
If you find yourself in such a situation, please let us know what Hadoop distro version and OS you are using and what permission is missing
by raising an issue. Thank you!
Windows Users::
Using Apache Hadoop on Windows is problematic and thus it is not recommended. For those _really_ wanting to use it, make sure you place the elusive `winutils.exe` under the
plugin folder and point `HADOOP_HOME` variable to it; this should minimize the amount of permissions Hadoop requires (though one would still have to add some more).
[[repository-hdfs-config]]
==== Configuration Properties
@ -92,8 +53,8 @@ Once installed, define the configuration for the `hdfs` repository through `elas
----
repositories
hdfs:
uri: "hdfs://<host>:<port>/" \# optional - Hadoop file-system URI
path: "some/path" \# required - path with the file-system where data is stored/loaded
uri: "hdfs://<host>:<port>/" \# required - HDFS address only
path: "some/path" \# required - path within the file-system where data is stored/loaded
load_defaults: "true" \# optional - whether to load the default Hadoop configuration (default) or not
conf_location: "extra-cfg.xml" \# optional - Hadoop configuration XML to be loaded (use commas for multi values)
conf.<key> : "<value>" \# optional - 'inlined' key=value added to the Hadoop configuration
@ -102,17 +63,3 @@ repositories
chunk_size: "10mb" \# optional - chunk size (disabled by default)
----
NOTE: Be careful when including a path within the `uri` setting; some implementations ignore it completely while
others consider them. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead.
[[repository-hdfs-other-fs]]
==== Plugging other file-systems
Any HDFS-compatible file-systems (like Amazon `s3://` or Google `gs://`) can be used as long as the proper Hadoop
configuration is passed to the Elasticsearch plugin. In practice, this means making sure the correct Hadoop configuration
files (`core-site.xml` and `hdfs-site.xml`) and its jars are available in plugin classpath, just as you would with any
other Hadoop client or job.
Otherwise, the plugin will only read the _default_, vanilla configuration of Hadoop and will not be able to recognize
the plugged-in file-system.

View File

@ -3,7 +3,7 @@
Character filters are used to preprocess the string of
characters before it is passed to the <<analysis-tokenizers,tokenizer>>.
A character filter may be used to strip out HTML markup, , or to convert
A character filter may be used to strip out HTML markup, or to convert
`"&"` characters to the word `"and"`.
Elasticsearch has built in characters filters which can be

View File

@ -11,6 +11,6 @@ filter type:
|Setting |Description
|`min_gram` |Defaults to `1`.
|`max_gram` |Defaults to `2`.
|`side` |Either `front` or `back`. Defaults to `front`.
|`side` |deprecated. Either `front` or `back`. Defaults to `front`.
|======================================================

View File

@ -19,188 +19,137 @@
//apply plugin: 'nebula.provided-base'
import org.apache.tools.ant.taskdefs.condition.Os
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
esplugin {
description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin'
}
configurations {
hadoop1
hadoop2
classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}
versions << [
'hadoop1': '1.2.1',
'hadoop2': '2.7.1'
]
configurations {
hdfsFixture
}
dependencies {
provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}"
provided "org.apache.hadoop:hadoop-core:${versions.hadoop1}"
compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
compile 'org.apache.htrace:htrace-core:3.1.0-incubating'
compile 'com.google.guava:guava:16.0.1'
compile 'com.google.protobuf:protobuf-java:2.5.0'
compile 'commons-logging:commons-logging:1.1.3'
compile 'commons-collections:commons-collections:3.2.2'
compile 'commons-configuration:commons-configuration:1.6'
compile 'commons-io:commons-io:2.4'
compile 'commons-lang:commons-lang:2.6'
compile 'javax.servlet:servlet-api:2.5'
compile "org.slf4j:slf4j-api:${versions.slf4j}"
// use Hadoop1 to compile and test things (a subset of Hadoop2)
testCompile "org.apache.hadoop:hadoop-core:${versions.hadoop1}"
testCompile "org.apache.hadoop:hadoop-test:${versions.hadoop1}"
// Hadoop dependencies
testCompile "commons-configuration:commons-configuration:1.6"
testCompile "commons-lang:commons-lang:${versions.commonslang}"
testCompile "commons-collections:commons-collections:3.2.2"
testCompile "commons-net:commons-net:1.4.1"
testCompile "org.mortbay.jetty:jetty:6.1.26"
testCompile "org.mortbay.jetty:jetty-util:6.1.26"
testCompile "org.mortbay.jetty:servlet-api:2.5-20081211"
testCompile "com.sun.jersey:jersey-core:1.8"
hadoop1("org.apache.hadoop:hadoop-core:${versions.hadoop1}") {
exclude module: "commons-cli"
exclude group: "com.sun.jersey"
exclude group: "org.mortbay.jetty"
exclude group: "tomcat"
exclude module: "commons-el"
exclude module: "hsqldb"
exclude group: "org.eclipse.jdt"
exclude module: "commons-beanutils"
exclude module: "commons-beanutils-core"
exclude module: "junit"
// provided by ES itself
exclude group: "log4j"
}
hadoop2("org.apache.hadoop:hadoop-client:${versions.hadoop2}") {
exclude module: "commons-cli"
exclude group: "com.sun.jersey"
exclude group: "com.sun.jersey.contribs"
exclude group: "com.sun.jersey.jersey-test-framework"
exclude module: "guice"
exclude group: "org.mortbay.jetty"
exclude group: "tomcat"
exclude module: "commons-el"
exclude module: "hsqldb"
exclude group: "org.eclipse.jdt"
exclude module: "commons-beanutils"
exclude module: "commons-beanutils-core"
exclude module: "javax.servlet"
exclude module: "junit"
// provided by ES itself
exclude group: "log4j"
}
hadoop2("org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}") {
exclude module: "guava"
exclude module: "junit"
// provided by ES itself
exclude group: "log4j"
}
hdfsFixture project(':test:fixtures:hdfs-fixture')
}
configurations.all {
resolutionStrategy {
force "commons-codec:commons-codec:${versions.commonscodec}"
force "commons-logging:commons-logging:${versions.commonslogging}"
force "commons-lang:commons-lang:2.6"
force "commons-httpclient:commons-httpclient:3.0.1"
force "org.codehaus.jackson:jackson-core-asl:1.8.8"
force "org.codehaus.jackson:jackson-mapper-asl:1.8.8"
force "com.google.code.findbugs:jsr305:3.0.0"
force "com.google.guava:guava:16.0.1"
force "org.slf4j:slf4j-api:1.7.10"
force "org.slf4j:slf4j-log4j12:1.7.10"
}
}
dependencyLicenses {
mapping from: /hadoop-core.*/, to: 'hadoop-1'
mapping from: /hadoop-.*/, to: 'hadoop-2'
mapping from: /hadoop-.*/, to: 'hadoop'
}
task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) {
dependsOn project.configurations.hdfsFixture
executable = new File(project.javaHome, 'bin/java')
env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
args 'hdfs.MiniHDFS',
baseDir
}
integTest {
boolean fixtureSupported = false;
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
// hdfs fixture will not start without hadoop native libraries on windows
String nativePath = System.getenv("HADOOP_HOME")
if (nativePath != null) {
Path path = Paths.get(nativePath);
if (Files.isDirectory(path) &&
Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
fixtureSupported = true
} else {
throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in $HADOOP_HOME/bin");
}
}
} else {
fixtureSupported = true
}
if (fixtureSupported) {
dependsOn hdfsFixture
} else {
logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
// just tests that the plugin loads
systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
}
}
compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes'
// main jar includes just the plugin classes
jar {
include "org/elasticsearch/plugin/hadoop/hdfs/*"
}
// hadoop jar (which actually depend on Hadoop)
task hadoopLinkedJar(type: Jar, dependsOn:jar) {
appendix "internal"
from sourceSets.main.output.classesDir
// exclude plugin
exclude "org/elasticsearch/plugin/hadoop/hdfs/*"
}
bundlePlugin.dependsOn hadoopLinkedJar
// configure 'bundle' as being w/o Hadoop deps
bundlePlugin {
into ("internal-libs") {
from hadoopLinkedJar.archivePath
}
into ("hadoop-libs") {
from configurations.hadoop2.allArtifacts.files
from configurations.hadoop2
}
}
task distZipHadoop1(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask ->
from (zipTree(bundlePlugin.archivePath)) {
include "*"
include "internal-libs/**"
}
description = "Builds archive (with Hadoop1 dependencies) suitable for download page."
classifier = "hadoop1"
into ("hadoop-libs") {
from configurations.hadoop1.allArtifacts.files
from configurations.hadoop1
}
}
task distZipHadoop2(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask ->
from (zipTree(bundlePlugin.archivePath)) {
include "*"
include "internal-libs/**"
}
description = "Builds archive (with Hadoop2/YARN dependencies) suitable for download page."
classifier = "hadoop2"
into ("hadoop-libs") {
from configurations.hadoop2.allArtifacts.files
from configurations.hadoop2
}
}
task distZipNoHadoop(type: Zip, dependsOn: [hadoopLinkedJar, jar]) { zipTask ->
from (zipTree(bundlePlugin.archivePath)) {
exclude "hadoop-libs/**"
}
from sourceSets.main.output.resourcesDir
description = "Builds archive (without any Hadoop dependencies) suitable for download page."
classifier = "lite"
}
artifacts {
archives bundlePlugin
'default' bundlePlugin
archives distZipHadoop1
archives distZipHadoop2
archives distZipNoHadoop
}
integTest {
cluster {
plugin(pluginProperties.extension.name, zipTree(distZipHadoop2.archivePath))
}
}
// classes are missing, e.g. org.mockito.Mockito
thirdPartyAudit.missingClasses = true
thirdPartyAudit.excludes = [
// note: the jersey ones may be bogus, see my bug report at forbidden-apis!
// internal java api: com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable
// internal java api: com.sun.jersey.api.core.HttpContext
// internal java api: com.sun.jersey.core.spi.component.ComponentScope
// internal java api: com.sun.jersey.spi.inject.Injectable
// internal java api: com.sun.jersey.core.spi.component.ComponentContext
'org.apache.hadoop.hdfs.web.resources.UserProvider',
// internal java api: com.sun.jersey.spi.container.ResourceFilters
'org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods',
// internal java api: com.sun.jersey.spi.container.servlet.ServletContainer
'org.apache.hadoop.http.HttpServer',
'org.apache.hadoop.http.HttpServer2',
// internal java api: com.sun.jersey.api.ParamException
'org.apache.hadoop.hdfs.web.resources.ExceptionHandler',
'org.apache.hadoop.hdfs.server.datanode.web.webhdfs.ExceptionHandler',
'org.apache.hadoop.hdfs.web.ParamFilter',
// internal java api: com.sun.jersey.spi.container.ContainerRequestFilter
// internal java api: com.sun.jersey.spi.container.ContainerRequest
'org.apache.hadoop.hdfs.web.ParamFilter',
'org.apache.hadoop.hdfs.web.ParamFilter$1',
// internal java api: com.sun.jndi.ldap.LdapCtxFactory
'org.apache.hadoop.security.LdapGroupsMapping',
// internal java api: sun.net.dns.ResolverConfiguration
// internal java api: sun.net.util.IPAddressUtil
'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',
// internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
'org.apache.hadoop.io.nativeio.NativeIO',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
// internal java api: sun.nio.ch.DirectBuffer
// internal java api: sun.misc.Cleaner
'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
'org.apache.hadoop.crypto.CryptoStreamUtils',
// internal java api: sun.misc.SignalHandler
'org.apache.hadoop.util.SignalLogger$Handler',
]

View File

@ -0,0 +1 @@
8ad72fe39fa8c91eaaf12aadb21e0c3661fe26d5

View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,5 @@
Apache Commons Collections
Copyright 2001-2015 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

View File

@@ -0,0 +1 @@
32cadde23955d7681b0d94a2715846d20b425235

View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,5 @@
Apache Commons Configuration
Copyright 2001-2015 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

View File

@@ -0,0 +1 @@
b1b6ea3b7e4aa4f492509a4952029cd8e48019ad

View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,6 @@
Apache Commons IO
Copyright 2002-2014 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

View File

@@ -0,0 +1 @@
0ce1edb914c94ebc388f086c6827e8bdeec71ac2

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,9 @@
Apache Commons Lang
Copyright 2001-2015 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
This product includes software from the Spring Framework,
under the Apache License 2.0 (see: StringUtils.containsWhitespace())

View File

@ -0,0 +1 @@
f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,6 @@
Apache Commons Logging
Copyright 2003-2014 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

View File

@ -0,0 +1 @@
5fa98cd1a63c99a44dd8d3b77e4762b066a5d0c5

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,2 @@

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,2 @@
This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).

View File

@ -0,0 +1 @@
2a77fe74ee056bf45598cf7e20cd624e8388e627

View File

@ -0,0 +1 @@
2515f339f97f1d7ba850485e06e395a58586bc2e

View File

@ -0,0 +1 @@
dbc2faacd210e6a1e3eb7def6e42065c7457d960

View File

@ -0,0 +1 @@
50580f5ebab60b1b318ad157f668d8e40a1cc0da

View File

@ -0,0 +1 @@
11681de93a4cd76c841e352b7094f839b072a21f

View File

@ -0,0 +1 @@
f73606e7c9ede5802335c290bf47490ad6d51df3

View File

@ -0,0 +1,242 @@
Apache HTrace (incubating) is Apache 2.0 Licensed. See below for licensing
of dependencies that are NOT Apache Licensed.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The HTrace Owl logo is from http://www.clker.com/clipart-13653.html. It is
public domain.
D3, a javascript library for manipulating data, used by htrace-hbase
is Copyright 2010-2014, Michael Bostock and BSD licensed:
https://github.com/mbostock/d3/blob/master/LICENSE
Bootstrap, an html, css, and javascript framework, is
Copyright (c) 2011-2015 Twitter, Inc and MIT licensed:
https://github.com/twbs/bootstrap/blob/master/LICENSE
underscore, a javascript library of functional programming helpers, is
(c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters
& Editors and an MIT license:
https://github.com/jashkenas/underscore/blob/master/LICENSE
jquery, a javascript library, is Copyright jQuery Foundation and other
contributors, https://jquery.org/. The software consists of
voluntary contributions made by many individuals. For exact
contribution history, see the revision history
available at https://github.com/jquery/jquery
It is MIT licensed:
https://github.com/jquery/jquery/blob/master/LICENSE.txt
backbone, is a javascript library, that is Copyright (c) 2010-2014
Jeremy Ashkenas, DocumentCloud. It is MIT licensed:
https://github.com/jashkenas/backbone/blob/master/LICENSE
moment.js is a front end time conversion project.
It is (c) 2011-2014 Tim Wood, Iskren Chernev, Moment.js contributors
and shared under the MIT license:
https://github.com/moment/moment/blob/develop/LICENSE
CMP is an implementation of the MessagePack serialization format in
C. It is licensed under the MIT license:
https://github.com/camgunz/cmp/blob/master/LICENSE
See ./htrace-c/src/util/cmp.c and ./htrace-c/src/util/cmp.h.

View File

@ -0,0 +1,13 @@
Apache HTrace
Copyright 2015 The Apache Software Foundation
This product includes software developed at The Apache Software
Foundation (http://www.apache.org/).
In addition, this product includes software dependencies. See
the accompanying LICENSE.txt for a listing of dependencies
that are NOT Apache licensed (with pointers to their licensing)
Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin
is a distributed tracing system that is Apache 2.0 Licensed.
Copyright 2012 Twitter, Inc.

View File

@ -0,0 +1 @@
a10732c76bfacdbd633a7eb0f7968b1059a65dfa

View File

@ -0,0 +1,10 @@
Copyright (c) <YEAR>, <OWNER>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,2 @@

View File

@ -0,0 +1 @@
5959582d97d8b61f4d154ca9e495aafd16726e34

View File

@ -0,0 +1,93 @@
COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 1.
Definitions.
1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications.
1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor.
1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof.
1.4. Executable means the Covered Software in any form other than Source Code.
1.5. Initial Developer means the individual or entity that first makes Original Software available under this License.
1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License.
1.7. License means this document.
1.8. Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein.
1.9. Modifications means the Source Code and Executable form of any of the following: A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; B. Any new file that contains any part of the Original Software or previous Modification; or C. Any new file that is contributed or otherwise made available under the terms of this License.
1.10. Original Software means the Source Code and Executable form of computer software code that is originally released under this License.
1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor.
1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code.
1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity.
2. License Grants.
2.1. The Initial Developer Grant. Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license:
(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and
(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof);
(c) The licenses granted in Sections 2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License;
(d) Notwithstanding Section 2.1(b) above, no patent license is granted: (1) for code that You delete from the Original Software, or (2) for infringements caused by: (i) the modification of the Original Software, or (ii) the combination of the Original Software with other software or devices.
2.2. Contributor Grant. Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license:
(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and
(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1) Modifications made by that Contributor (or portions thereof); and (2) the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination).
(c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party.
(d) Notwithstanding Section 2.2(b) above, no patent license is granted: (1) for any code that Contributor has deleted from the Contributor Version; (2) for infringements caused by: (i) third party modifications of Contributor Version, or (ii) the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3) under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor.
3. Distribution Obligations.
3.1. Availability of Source Code. Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange.
3.2. Modifications. The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License.
3.3. Required Notices. You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer.
3.4. Application of Additional Terms. You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients' rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer.
3.5. Distribution of Executable Versions. You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer.
3.6. Larger Works. You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software.
4. Versions of the License.
4.1. New Versions. Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License.
4.2. Effect of New Versions. You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward.
4.3. Modified Versions. When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License.
5. DISCLAIMER OF WARRANTY. COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
6. TERMINATION.
6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive.
6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant.
6.3. In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination.
7. LIMITATION OF LIABILITY. UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
8. U.S. GOVERNMENT END USERS. The Covered Software is a commercial item, as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. 252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License.
9. MISCELLANEOUS. This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdiction's conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys' fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software.
10. RESPONSIBILITY FOR CLAIMS. As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability.
NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California.

View File

@ -0,0 +1,2 @@

View File

@ -1,173 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugin.hadoop.hdfs;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.repositories.Repository;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
//
// Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode.
// This poses two problems:
// - Hadoop itself comes with tons of jars, many providing the same classes across packages. In particular Hadoop 2 provides package annotations in the same
// package across jars which trips JarHell. Thus, to allow Hadoop jars to load, the plugin uses a dedicated CL which picks them up from the hadoop-libs folder.
// - The issue though with using a different CL is that it picks up the jars from a different location / codeBase and thus it does not fall under the plugin
// permissions. In other words, the plugin permissions don't apply to the hadoop libraries.
// There are different approaches here:
// - implement a custom classloader that loads the jars but 'lies' about the codesource. It is doable but since URLClassLoader is locked down, one would
// would have to implement the whole jar opening and loading from it. Not impossible but still fairly low-level.
// Further more, even if the code has the proper credentials, it needs to use the proper Privileged blocks to use its full permissions which does not
// happen in the Hadoop code base.
// - use a different Policy. Works but the Policy is JVM wide and thus the code needs to be quite efficient - quite a bit impact to cover just some plugin
// libraries
// - use a DomainCombiner. This doesn't change the semantics (it's clear where the code is loaded from, etc..) however it gives us a scoped, fine-grained
// callback on handling the permission intersection for secured calls. Note that DC works only in the current PAC call - the moment another PA is used,
// the domain combiner is going to be ignored (unless the caller specifically uses it). Due to its scoped impact and official Java support, this approach
// was used.
// ClassLoading info
// - package plugin.hadoop.hdfs is part of the plugin
// - all the other packages are assumed to be in the nested Hadoop CL.
// Code
public class HdfsPlugin extends Plugin {

    /** Plugin name as registered with Elasticsearch. */
    @Override
    public String name() {
        return "repository-hdfs";
    }

    /** Human-readable plugin description. */
    @Override
    public String description() {
        return "HDFS Repository Plugin";
    }

    /**
     * Registers the "hdfs" repository type. The actual repository implementation
     * is loaded through a dedicated child ClassLoader built over the plugin's
     * hadoop-libs / internal-libs folders (see the class-level notes above) so
     * that the Hadoop jars do not trip JarHell in the main ClassLoader.
     */
    @SuppressWarnings("unchecked")
    public void onModule(RepositoriesModule repositoriesModule) {
        String baseLib = Utils.detectLibFolder();
        List<URL> cp = getHadoopClassLoaderPath(baseLib);

        // nested CL: parent is this plugin's CL so plugin classes stay visible
        ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), getClass().getClassLoader());

        Class<? extends Repository> repository = null;
        try {
            // loaded by name since HdfsRepository lives in the nested Hadoop CL,
            // not in the plugin's own code base
            repository = (Class<? extends Repository>) hadoopCL.loadClass("org.elasticsearch.repositories.hdfs.HdfsRepository");
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalStateException("Cannot load plugin class; is the plugin class setup correctly?", cnfe);
        }

        repositoriesModule.registerRepository("hdfs", repository, BlobStoreIndexShardRepository.class);
        Loggers.getLogger(HdfsPlugin.class).info("Loaded Hadoop [{}] libraries from {}", getHadoopVersion(hadoopCL), baseLib);
    }

    /**
     * Builds the classpath for the nested Hadoop ClassLoader from the plugin's
     * lib folders under {@code baseLib}.
     *
     * @param baseLib base URL (string) of the plugin folder, as detected by
     *                {@link Utils#detectLibFolder()}
     * @return URLs of every jar discovered
     */
    protected List<URL> getHadoopClassLoaderPath(String baseLib) {
        List<URL> cp = new ArrayList<>();
        // add plugin internal jar
        // NOTE(review): internal-libs is non-optional (throws if unreadable) while
        // hadoop-libs below is optional — confirm this asymmetry is intended
        discoverJars(createURI(baseLib, "internal-libs"), cp, false);
        // add Hadoop jars
        discoverJars(createURI(baseLib, "hadoop-libs"), cp, true);
        return cp;
    }

    /**
     * Resolves the Hadoop version by calling into the nested ClassLoader under a
     * privileged block restricted by {@link Utils#hadoopACC()}.
     */
    private String getHadoopVersion(ClassLoader hadoopCL) {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // unprivileged code such as scripts do not have SpecialPermission
            sm.checkPermission(new SpecialPermission());
        }

        return AccessController.doPrivileged(new PrivilegedAction<String>() {
            @Override
            public String run() {
                // Hadoop 2 relies on TCCL to determine the version
                ClassLoader tccl = Thread.currentThread().getContextClassLoader();
                try {
                    Thread.currentThread().setContextClassLoader(hadoopCL);
                    return doGetHadoopVersion(hadoopCL);
                } finally {
                    // always restore the original context ClassLoader
                    Thread.currentThread().setContextClassLoader(tccl);
                }
            }
        }, Utils.hadoopACC());
    }

    /**
     * Best-effort reflective lookup of
     * {@code org.apache.hadoop.util.VersionInfo.getVersion()}; returns
     * "Unknown" if the class or method is unavailable.
     */
    private String doGetHadoopVersion(ClassLoader hadoopCL) {
        String version = "Unknown";

        Class<?> clz = null;
        try {
            clz = hadoopCL.loadClass("org.apache.hadoop.util.VersionInfo");
        } catch (ClassNotFoundException cnfe) {
            // unknown
        }

        if (clz != null) {
            try {
                Method method = clz.getMethod("getVersion");
                version = method.invoke(null).toString();
            } catch (Exception ex) {
                // class has changed, ignore
            }
        }
        return version;
    }

    /**
     * Concatenates {@code base + suffix} into a URI.
     *
     * @throws IllegalStateException if the resulting string is not a valid URI
     */
    private URI createURI(String base, String suffix) {
        String location = base + suffix;
        try {
            return new URI(location);
        } catch (URISyntaxException ex) {
            throw new IllegalStateException(String.format(Locale.ROOT, "Cannot detect plugin folder; [%s] seems invalid", location), ex);
        }
    }

    /**
     * Adds every {@code *.jar} found directly under {@code libPath} to {@code cp}.
     *
     * @param optional when true, I/O errors (e.g. missing folder) are silently
     *                 ignored instead of failing plugin startup
     * @throws IllegalStateException if the folder cannot be read and
     *                               {@code optional} is false
     */
    @SuppressForbidden(reason = "discover nested jar")
    private void discoverJars(URI libPath, List<URL> cp, boolean optional) {
        try {
            Path[] jars = FileSystemUtils.files(PathUtils.get(libPath), "*.jar");

            for (Path path : jars) {
                cp.add(path.toUri().toURL());
            }
        } catch (IOException ex) {
            if (!optional) {
                throw new IllegalStateException("Cannot compute plugin classpath", ex);
            }
        }
    }
}

View File

@ -1,103 +0,0 @@
package org.elasticsearch.plugin.hadoop.hdfs;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.SpecialPermission;
import java.net.URL;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.DomainCombiner;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
public abstract class Utils {

    /**
     * Returns an {@link AccessControlContext} whose {@link DomainCombiner}
     * scopes privileged Hadoop calls to code originating from the plugin's
     * own lib folder. Requires {@code SpecialPermission} when a
     * {@link SecurityManager} is installed.
     */
    protected static AccessControlContext hadoopACC() {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // unprivileged code such as scripts do not have SpecialPermission
            sm.checkPermission(new SpecialPermission());
        }

        return AccessController.doPrivileged(new PrivilegedAction<AccessControlContext>() {
            @Override
            public AccessControlContext run() {
                return new AccessControlContext(AccessController.getContext(), new HadoopDomainCombiner());
            }
        });
    }

    /**
     * Combiner that keeps the assigned domains intact when any of them was
     * loaded from the plugin's lib folder; otherwise falls back to the
     * current domains.
     */
    private static class HadoopDomainCombiner implements DomainCombiner {

        // final: resolved once at class initialization; detectLibFolder() throws
        // if the plugin cannot locate itself, so a null/partial value is impossible
        private static final String BASE_LIB = detectLibFolder();

        @Override
        public ProtectionDomain[] combine(ProtectionDomain[] currentDomains, ProtectionDomain[] assignedDomains) {
            for (ProtectionDomain pd : assignedDomains) {
                // getCodeSource() and getLocation() are documented as nullable
                // (e.g. for system domains) — the previous code would NPE here
                if (pd.getCodeSource() != null
                        && pd.getCodeSource().getLocation() != null
                        && pd.getCodeSource().getLocation().toString().startsWith(BASE_LIB)) {
                    return assignedDomains;
                }
            }

            return currentDomains;
        }
    }

    /**
     * Detects the base URL (as a string, always ending with '/') of the folder
     * containing this plugin, handling both jar and exploded-classes layouts.
     *
     * @return base folder URL string, with a trailing '/'
     * @throws IllegalStateException if the plugin class cannot be resolved
     *         through its own ClassLoader
     */
    static String detectLibFolder() {
        ClassLoader cl = Utils.class.getClassLoader();

        // we could get the URL from the URLClassloader directly
        // but that can create issues when running the tests from the IDE
        // we could detect that by loading resources but that as well relies on
        // the JAR URL
        String classToLookFor = HdfsPlugin.class.getName().replace(".", "/").concat(".class");
        URL classURL = cl.getResource(classToLookFor);
        if (classURL == null) {
            throw new IllegalStateException("Cannot detect itself; something is wrong with this ClassLoader " + cl);
        }

        String base = classURL.toString();

        // extract root
        // typically a JAR URL ("jar:file:/...!/path/To.class")
        int index = base.indexOf("!/");
        if (index > 0) {
            base = base.substring(0, index);
            // remove its prefix (jar:)
            base = base.substring(4);
            // remove the trailing jar
            index = base.lastIndexOf("/");
            base = base.substring(0, index + 1);
        }
        // not a jar - something else, do a best effort here
        else {
            // remove the class searched
            base = base.substring(0, base.length() - classToLookFor.length());
        }

        // append /
        if (!base.endsWith("/")) {
            base = base.concat("/");
        }

        return base;
    }
}

View File

@ -1,28 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.hdfs;
import org.apache.hadoop.fs.FileSystem;
import java.io.IOException;
/**
 * Factory abstraction for obtaining the Hadoop {@link FileSystem} the HDFS
 * repository operates against, decoupling callers from how/when the file
 * system is created.
 */
interface FileSystemFactory {

    /**
     * Returns the Hadoop file system to use.
     * NOTE(review): whether implementations cache the instance or create a new
     * one per call is not visible here — confirm against the implementing classes.
     *
     * @throws IOException if the file system cannot be obtained
     */
    FileSystem getFileSystem() throws IOException;
}

View File

@ -18,8 +18,11 @@
*/
package org.elasticsearch.repositories.hdfs;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.elasticsearch.common.Nullable;
@ -27,35 +30,35 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
public class HdfsBlobContainer extends AbstractBlobContainer {
final class HdfsBlobContainer extends AbstractBlobContainer {
private final HdfsBlobStore store;
private final Path path;
private final int bufferSize;
protected final HdfsBlobStore blobStore;
protected final Path path;
public HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) {
HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore store, Path path, int bufferSize) {
super(blobPath);
this.blobStore = blobStore;
this.store = store;
this.path = path;
this.bufferSize = bufferSize;
}
@Override
public boolean blobExists(String blobName) {
try {
return SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Boolean>() {
return store.execute(new Operation<Boolean>() {
@Override
public Boolean doInHdfs(FileSystem fs) throws IOException {
return fs.exists(new Path(path, blobName));
public Boolean run(FileContext fileContext) throws IOException {
return fileContext.util().exists(new Path(path, blobName));
}
});
} catch (Exception e) {
@ -65,46 +68,62 @@ public class HdfsBlobContainer extends AbstractBlobContainer {
@Override
public void deleteBlob(String blobName) throws IOException {
SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Boolean>() {
@Override
public Boolean doInHdfs(FileSystem fs) throws IOException {
return fs.delete(new Path(path, blobName), true);
}
});
try {
store.execute(new Operation<Boolean>() {
@Override
public Boolean run(FileContext fileContext) throws IOException {
return fileContext.delete(new Path(path, blobName), true);
}
});
} catch (FileNotFoundException ok) {
// behaves like Files.deleteIfExists
}
}
@Override
public void move(String sourceBlobName, String targetBlobName) throws IOException {
boolean rename = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Boolean>() {
store.execute(new Operation<Void>() {
@Override
public Boolean doInHdfs(FileSystem fs) throws IOException {
return fs.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName));
public Void run(FileContext fileContext) throws IOException {
fileContext.rename(new Path(path, sourceBlobName), new Path(path, targetBlobName));
return null;
}
});
if (!rename) {
throw new IOException(String.format(Locale.ROOT, "can not move blob from [%s] to [%s]", sourceBlobName, targetBlobName));
}
}
@Override
public InputStream readBlob(String blobName) throws IOException {
// FSDataInputStream does buffering internally
return SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<InputStream>() {
return store.execute(new Operation<InputStream>() {
@Override
public InputStream doInHdfs(FileSystem fs) throws IOException {
return fs.open(new Path(path, blobName), blobStore.bufferSizeInBytes());
public InputStream run(FileContext fileContext) throws IOException {
return fileContext.open(new Path(path, blobName), bufferSize);
}
});
}
@Override
public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Void>() {
store.execute(new Operation<Void>() {
@Override
public Void doInHdfs(FileSystem fs) throws IOException {
try (OutputStream stream = createOutput(blobName)) {
Streams.copy(inputStream, stream);
public Void run(FileContext fileContext) throws IOException {
Path blob = new Path(path, blobName);
// we pass CREATE, which means it fails if a blob already exists.
// NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING
// that should be fixed there, no need to bring truncation into this, give the user an error.
EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK);
CreateOpts[] opts = { CreateOpts.bufferSize(bufferSize) };
try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) {
int bytesRead;
byte[] buffer = new byte[bufferSize];
while ((bytesRead = inputStream.read(buffer)) != -1) {
stream.write(buffer, 0, bytesRead);
// For safety we also hsync each write as well, because of its docs:
// SYNC_BLOCK - to force closed blocks to the disk device
// "In addition Syncable.hsync() should be called after each write,
// if true synchronous behavior is required"
stream.hsync();
}
}
return null;
}
@ -112,40 +131,18 @@ public class HdfsBlobContainer extends AbstractBlobContainer {
}
@Override
public void writeBlob(String blobName, BytesReference bytes) throws IOException {
SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<Void>() {
public Map<String, BlobMetaData> listBlobsByPrefix(final @Nullable String prefix) throws IOException {
FileStatus[] files = store.execute(new Operation<FileStatus[]>() {
@Override
public Void doInHdfs(FileSystem fs) throws IOException {
try (OutputStream stream = createOutput(blobName)) {
bytes.writeTo(stream);
}
return null;
}
});
}
private OutputStream createOutput(String blobName) throws IOException {
Path file = new Path(path, blobName);
// FSDataOutputStream does buffering internally
return blobStore.fileSystemFactory().getFileSystem().create(file, true, blobStore.bufferSizeInBytes());
}
@Override
public Map<String, BlobMetaData> listBlobsByPrefix(final @Nullable String blobNamePrefix) throws IOException {
FileStatus[] files = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<FileStatus[]>() {
@Override
public FileStatus[] doInHdfs(FileSystem fs) throws IOException {
return fs.listStatus(path, new PathFilter() {
public FileStatus[] run(FileContext fileContext) throws IOException {
return (fileContext.util().listStatus(path, new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith(blobNamePrefix);
return prefix == null || path.getName().startsWith(prefix);
}
});
}));
}
});
if (files == null || files.length == 0) {
return Collections.emptyMap();
}
Map<String, BlobMetaData> map = new LinkedHashMap<String, BlobMetaData>();
for (FileStatus file : files) {
map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen()));
@ -155,19 +152,6 @@ public class HdfsBlobContainer extends AbstractBlobContainer {
@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
FileStatus[] files = SecurityUtils.execute(blobStore.fileSystemFactory(), new FsCallback<FileStatus[]>() {
@Override
public FileStatus[] doInHdfs(FileSystem fs) throws IOException {
return fs.listStatus(path);
}
});
if (files == null || files.length == 0) {
return Collections.emptyMap();
}
Map<String, BlobMetaData> map = new LinkedHashMap<String, BlobMetaData>();
for (FileStatus file : files) {
map.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen()));
}
return Collections.unmodifiableMap(map);
return listBlobsByPrefix(null);
}
}

View File

@ -18,46 +18,63 @@
*/
package org.elasticsearch.repositories.hdfs;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.util.concurrent.Executor;
import java.lang.reflect.ReflectPermission;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
public class HdfsBlobStore extends AbstractComponent implements BlobStore {
import javax.security.auth.AuthPermission;
private final FileSystemFactory ffs;
private final Path rootHdfsPath;
private final ThreadPool threadPool;
private final int bufferSizeInBytes;
final class HdfsBlobStore implements BlobStore {
public HdfsBlobStore(Settings settings, FileSystemFactory ffs, Path path, ThreadPool threadPool) throws IOException {
super(settings);
this.ffs = ffs;
this.rootHdfsPath = path;
this.threadPool = threadPool;
private final Path root;
private final FileContext fileContext;
private final int bufferSize;
private volatile boolean closed;
this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
mkdirs(path);
HdfsBlobStore(FileContext fileContext, String path, int bufferSize) throws IOException {
this.fileContext = fileContext;
this.bufferSize = bufferSize;
this.root = execute(new Operation<Path>() {
@Override
public Path run(FileContext fileContext) throws IOException {
return fileContext.makeQualified(new Path(path));
}
});
try {
mkdirs(root);
} catch (FileAlreadyExistsException ok) {
// behaves like Files.createDirectories
}
}
private void mkdirs(Path path) throws IOException {
SecurityUtils.execute(ffs, new FsCallback<Void>() {
execute(new Operation<Void>() {
@Override
public Void doInHdfs(FileSystem fs) throws IOException {
if (!fs.exists(path)) {
fs.mkdirs(path);
}
public Void run(FileContext fileContext) throws IOException {
fileContext.mkdir(path, null, true);
return null;
}
});
}
@Override
public void delete(BlobPath path) throws IOException {
execute(new Operation<Void>() {
@Override
public Void run(FileContext fc) throws IOException {
fc.delete(translateToHdfsPath(path), true);
return null;
}
});
@ -65,45 +82,20 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore {
@Override
public String toString() {
return rootHdfsPath.toUri().toString();
}
public FileSystemFactory fileSystemFactory() {
return ffs;
}
public Path path() {
return rootHdfsPath;
}
public Executor executor() {
return threadPool.executor(ThreadPool.Names.SNAPSHOT);
}
public int bufferSizeInBytes() {
return bufferSizeInBytes;
return root.toUri().toString();
}
@Override
public BlobContainer blobContainer(BlobPath path) {
return new HdfsBlobContainer(path, this, buildHdfsPath(path));
}
@Override
public void delete(BlobPath path) throws IOException {
SecurityUtils.execute(ffs, new FsCallback<Void>() {
@Override
public Void doInHdfs(FileSystem fs) throws IOException {
fs.delete(translateToHdfsPath(path), true);
return null;
}
});
return new HdfsBlobContainer(path, this, buildHdfsPath(path), bufferSize);
}
private Path buildHdfsPath(BlobPath blobPath) {
final Path path = translateToHdfsPath(blobPath);
try {
mkdirs(path);
} catch (FileAlreadyExistsException ok) {
// behaves like Files.createDirectories
} catch (IOException ex) {
throw new ElasticsearchException("failed to create blob container", ex);
}
@ -111,15 +103,47 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore {
}
private Path translateToHdfsPath(BlobPath blobPath) {
Path path = path();
Path path = root;
for (String p : blobPath) {
path = new Path(path, p);
}
return path;
}
interface Operation<V> {
V run(FileContext fileContext) throws IOException;
}
/**
* Executes the provided operation against this store
*/
// we can do FS ops with only two elevated permissions:
// 1) hadoop dynamic proxy is messy with access rules
// 2) allow hadoop to add credentials to our Subject
<V> V execute(Operation<V> operation) throws IOException {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
// unprivileged code such as scripts do not have SpecialPermission
sm.checkPermission(new SpecialPermission());
}
if (closed) {
throw new AlreadyClosedException("HdfsBlobStore is closed: " + this);
}
try {
return AccessController.doPrivileged(new PrivilegedExceptionAction<V>() {
@Override
public V run() throws IOException {
return operation.run(fileContext);
}
}, null, new ReflectPermission("suppressAccessChecks"),
new AuthPermission("modifyPrivateCredentials"));
} catch (PrivilegedActionException pae) {
throw (IOException) pae.getException();
}
}
@Override
public void close() {
//
closed = true;
}
}

View File

@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.hdfs;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoriesModule;
// Code
/**
 * Plugin that registers the "hdfs" snapshot/restore repository type.
 */
public final class HdfsPlugin extends Plugin {
    // initialize some problematic classes with elevated privileges
    static {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // unprivileged code such as scripts must hold SpecialPermission before we elevate
            sm.checkPermission(new SpecialPermission());
        }
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                return evilHadoopInit();
            }
        });
    }

    @SuppressForbidden(reason = "Needs a security hack for hadoop on windows, until HADOOP-XXXX is fixed")
    private static Void evilHadoopInit() {
        // hack: on Windows, Shell's clinit has a similar problem as on unix,
        // but here we can workaround it for now by setting hadoop home
        // on unix: we still want to set this to something we control, because
        // if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom
        // TODO: remove THIS when hadoop is fixed
        Path hadoopHome = null;
        String oldValue = null;
        try {
            // point hadoop.home.dir at a throwaway temp dir so Shell's clinit succeeds
            hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
            oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
            // force-load classes whose static initializers need these elevated privileges
            Class.forName("org.apache.hadoop.security.UserGroupInformation");
            Class.forName("org.apache.hadoop.util.StringUtils");
            Class.forName("org.apache.hadoop.util.ShutdownHookManager");
            Class.forName("org.apache.hadoop.conf.Configuration");
            Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
            Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
        } catch (ClassNotFoundException | IOException e) {
            throw new RuntimeException(e);
        } finally {
            // try to clean up the hack: restore (or clear) the system property we clobbered
            if (oldValue == null) {
                System.clearProperty("hadoop.home.dir");
            } else {
                System.setProperty("hadoop.home.dir", oldValue);
            }
            try {
                // try to clean up our temp dir too if we can
                // (succeeds only if hadoop left it empty; best effort by design)
                if (hadoopHome != null) {
                    Files.delete(hadoopHome);
                }
            } catch (IOException thisIsBestEffort) {}
        }
        return null;
    }

    @Override
    public String name() {
        return "repository-hdfs";
    }

    @Override
    public String description() {
        return "HDFS Repository Plugin";
    }

    /** Registers the "hdfs" repository type with the repositories module. */
    public void onModule(RepositoriesModule repositoriesModule) {
        repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class);
    }
}

View File

@ -18,12 +18,23 @@
*/
package org.elasticsearch.repositories.hdfs;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URI;
import java.security.AccessController;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import javax.security.auth.Subject;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.elasticsearch.ElasticsearchException;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Strings;
@ -31,202 +42,118 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.repositories.RepositoryName;
import org.elasticsearch.repositories.RepositorySettings;
import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
public final class HdfsRepository extends BlobStoreRepository {
public class HdfsRepository extends BlobStoreRepository implements FileSystemFactory {
public final static String TYPE = "hdfs";
private final HdfsBlobStore blobStore;
private final BlobPath basePath;
private final BlobPath basePath = BlobPath.cleanPath();
private final RepositorySettings repositorySettings;
private final ByteSizeValue chunkSize;
private final boolean compress;
private final RepositorySettings repositorySettings;
private FileSystem fs;
private HdfsBlobStore blobStore;
// buffer size passed to HDFS read/write methods
// TODO: why 100KB?
private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB);
@Inject
public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException {
public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
super(name.getName(), repositorySettings, indexShardRepository);
this.repositorySettings = repositorySettings;
String path = repositorySettings.settings().get("path", settings.get("path"));
if (path == null) {
throw new IllegalArgumentException("no 'path' defined for hdfs snapshot/restore");
}
// get configuration
fs = getFileSystem();
Path hdfsPath = SecurityUtils.execute(fs, new FsCallback<Path>() {
@Override
public Path doInHdfs(FileSystem fs) throws IOException {
return fs.makeQualified(new Path(path));
}
});
this.basePath = BlobPath.cleanPath();
logger.debug("Using file-system [{}] for URI [{}], path [{}]", fs, fs.getUri(), hdfsPath);
blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null));
this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false));
this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null);
this.compress = repositorySettings.settings().getAsBoolean("compress", false);
}
// as the FileSystem is long-lived and might go away, make sure to check it before it's being used.
@Override
public FileSystem getFileSystem() throws IOException {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
// unprivileged code such as scripts do not have SpecialPermission
sm.checkPermission(new SpecialPermission());
protected void doStart() {
String uriSetting = repositorySettings.settings().get("uri");
if (Strings.hasText(uriSetting) == false) {
throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore");
}
URI uri = URI.create(uriSetting);
if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) {
throw new IllegalArgumentException(
String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting));
}
if (Strings.hasLength(uri.getPath()) && uri.getPath().equals("/") == false) {
throw new IllegalArgumentException(String.format(Locale.ROOT,
"Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting));
}
String pathSetting = repositorySettings.settings().get("path");
// get configuration
if (pathSetting == null) {
throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore");
}
int bufferSize = repositorySettings.settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt();
try {
return AccessController.doPrivileged(new PrivilegedExceptionAction<FileSystem>() {
// initialize our filecontext
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new SpecialPermission());
}
FileContext fileContext = AccessController.doPrivileged(new PrivilegedAction<FileContext>() {
@Override
public FileSystem run() throws IOException {
return doGetFileSystem();
public FileContext run() {
return createContext(uri, repositorySettings);
}
}, SecurityUtils.AccBridge.acc());
} catch (PrivilegedActionException pae) {
Throwable th = pae.getCause();
if (th instanceof Error) {
throw (Error) th;
}
if (th instanceof RuntimeException) {
throw (RuntimeException) th;
}
if (th instanceof IOException) {
throw (IOException) th;
}
throw new ElasticsearchException(pae);
});
blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize);
logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), pathSetting);
} catch (IOException e) {
throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", uri), e);
}
super.doStart();
}
private FileSystem doGetFileSystem() throws IOException {
// check if the fs is still alive
// make a cheap call that triggers little to no security checks
if (fs != null) {
try {
fs.isFile(fs.getWorkingDirectory());
} catch (IOException ex) {
if (ex.getMessage().contains("Filesystem closed")) {
fs = null;
}
else {
throw ex;
}
}
}
if (fs == null) {
Thread th = Thread.currentThread();
ClassLoader oldCL = th.getContextClassLoader();
try {
th.setContextClassLoader(getClass().getClassLoader());
return initFileSystem(repositorySettings);
} catch (IOException ex) {
throw ex;
} finally {
th.setContextClassLoader(oldCL);
}
}
return fs;
}
private FileSystem initFileSystem(RepositorySettings repositorySettings) throws IOException {
Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", settings.getAsBoolean("load_defaults", true)));
cfg.setClassLoader(this.getClass().getClassLoader());
// create hadoop filecontext
@SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
private static FileContext createContext(URI uri, RepositorySettings repositorySettings) {
Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", true));
cfg.setClassLoader(HdfsRepository.class.getClassLoader());
cfg.reloadConfiguration();
String confLocation = repositorySettings.settings().get("conf_location", settings.get("conf_location"));
if (Strings.hasText(confLocation)) {
for (String entry : Strings.commaDelimitedListToStringArray(confLocation)) {
addConfigLocation(cfg, entry.trim());
}
}
Map<String, String> map = repositorySettings.settings().getByPrefix("conf.").getAsMap();
for (Entry<String, String> entry : map.entrySet()) {
cfg.set(entry.getKey(), entry.getValue());
}
// create a hadoop user. if we want some auth, it must be done different anyway, and tested.
Subject subject;
try {
UserGroupInformation.setConfiguration(cfg);
} catch (Throwable th) {
throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot initialize Hadoop"), th);
Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
Constructor<?> ctor = clazz.getConstructor(String.class);
ctor.setAccessible(true);
Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));
subject = new Subject(false, Collections.singleton(principal), Collections.emptySet(), Collections.emptySet());
} catch (ReflectiveOperationException e) {
throw new RuntimeException(e);
}
String uri = repositorySettings.settings().get("uri", settings.get("uri"));
URI actualUri = (uri != null ? URI.create(uri) : FileSystem.getDefaultUri(cfg));
String user = repositorySettings.settings().get("user", settings.get("user"));
// disable FS cache
cfg.setBoolean("fs.hdfs.impl.disable.cache", true);
try {
// disable FS cache
String disableFsCache = String.format(Locale.ROOT, "fs.%s.impl.disable.cache", actualUri.getScheme());
cfg.setBoolean(disableFsCache, true);
return (user != null ? FileSystem.get(actualUri, cfg, user) : FileSystem.get(actualUri, cfg));
} catch (Exception ex) {
throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create Hdfs file-system for uri [%s]", actualUri), ex);
}
}
@SuppressForbidden(reason = "pick up Hadoop config (which can be on HDFS)")
private void addConfigLocation(Configuration cfg, String confLocation) {
URL cfgURL = null;
// it's an URL
if (!confLocation.contains(":")) {
cfgURL = cfg.getClassLoader().getResource(confLocation);
// fall back to file
if (cfgURL == null) {
java.nio.file.Path path = PathUtils.get(confLocation);
if (!Files.isReadable(path)) {
throw new IllegalArgumentException(
String.format(Locale.ROOT,
"Cannot find classpath resource or file 'conf_location' [%s] defined for hdfs snapshot/restore",
confLocation));
// create the filecontext with our user
return Subject.doAs(subject, new PrivilegedAction<FileContext>() {
@Override
public FileContext run() {
try {
AbstractFileSystem fs = AbstractFileSystem.get(uri, cfg);
return FileContext.getFileContext(fs, cfg);
} catch (UnsupportedFileSystemException e) {
throw new RuntimeException(e);
}
String pathLocation = path.toUri().toString();
logger.debug("Adding path [{}] as file [{}]", confLocation, pathLocation);
confLocation = pathLocation;
}
else {
logger.debug("Resolving path [{}] to classpath [{}]", confLocation, cfgURL);
}
}
else {
logger.debug("Adding path [{}] as URL", confLocation);
}
if (cfgURL == null) {
try {
cfgURL = new URL(confLocation);
} catch (MalformedURLException ex) {
throw new IllegalArgumentException(String.format(Locale.ROOT,
"Invalid 'conf_location' URL [%s] defined for hdfs snapshot/restore", confLocation), ex);
}
}
cfg.addResource(cfgURL);
});
}
@Override
@ -248,12 +175,4 @@ public class HdfsRepository extends BlobStoreRepository implements FileSystemFac
protected ByteSizeValue chunkSize() {
return chunkSize;
}
@Override
protected void doClose() throws ElasticsearchException {
super.doClose();
IOUtils.closeStream(fs);
fs = null;
}
}

View File

@ -1,73 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.hdfs;
import org.apache.hadoop.fs.FileSystem;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.plugin.hadoop.hdfs.Utils;
import java.io.IOException;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
/**
 * Helpers that run HDFS filesystem callbacks inside a privileged block
 * using the plugin's dedicated AccessControlContext.
 */
class SecurityUtils {
    // bridge into Utils (package-private there) to obtain the hadoop AccessControlContext
    abstract static class AccBridge extends Utils {
        static AccessControlContext acc() {
            return Utils.hadoopACC();
        }
    }

    /** Resolves the FileSystem from the factory, then runs the callback privileged. */
    static <V> V execute(FileSystemFactory ffs, FsCallback<V> callback) throws IOException {
        return execute(ffs.getFileSystem(), callback);
    }

    /**
     * Runs {@code callback} against {@code fs} under doPrivileged with the
     * hadoop access-control context, rethrowing the underlying cause of any
     * PrivilegedActionException as precisely as possible.
     */
    static <V> V execute(FileSystem fs, FsCallback<V> callback) throws IOException {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // unprivileged code such as scripts do not have SpecialPermission
            sm.checkPermission(new SpecialPermission());
        }
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<V>() {
                @Override
                public V run() throws IOException {
                    return callback.doInHdfs(fs);
                }
            }, AccBridge.acc());
        } catch (PrivilegedActionException pae) {
            // unwrap: rethrow unchecked/IO causes as-is so callers see the real failure
            Throwable th = pae.getCause();
            if (th instanceof Error) {
                throw (Error) th;
            }
            if (th instanceof RuntimeException) {
                throw (RuntimeException) th;
            }
            if (th instanceof IOException) {
                throw (IOException) th;
            }
            // unexpected checked cause: wrap with the full action exception attached
            throw new ElasticsearchException(pae);
        }
    }
}

View File

@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.hdfs;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.elasticsearch.common.SuppressForbidden;
import java.io.File;
import java.io.IOException;
/**
 * Extends LFS to improve some operations to keep the security permissions at
 * bay. In particular mkdir is smarter and doesn't have to walk all the file
 * hierarchy but rather only limits itself to the parent/working dir and creates
 * a file only when necessary.
 */
public class TestingFs extends LocalFileSystem {
    private static class ImprovedRawLocalFileSystem extends RawLocalFileSystem {
        @Override
        @SuppressForbidden(reason = "the Hadoop API depends on java.io.File")
        public boolean mkdirs(Path f) throws IOException {
            File workingDir = pathToFile(getWorkingDirectory());
            File target = pathToFile(f);
            // stop recursing once we reach the working dir or an already-existing directory
            if (workingDir.equals(target) || target.exists()) {
                return true;
            }
            // ensure the parent chain exists first, then create only this directory
            return mkdirs(f.getParent()) && target.mkdir();
        }
    }

    public TestingFs() {
        super(new ImprovedRawLocalFileSystem());
        // use the build path instead of the starting dir as that one has read permissions
        //setWorkingDirectory(new Path(getClass().getProtectionDomain().getCodeSource().getLocation().toString()));
        setWorkingDirectory(new Path(System.getProperty("java.io.tmpdir")));
    }
}

View File

@ -18,50 +18,21 @@
*/
grant {
// used by the plugin to get the TCCL to properly initialize all of Hadoop components
// Hadoop UserGroupInformation, HdfsConstants, PipelineAck clinit
permission java.lang.RuntimePermission "getClassLoader";
// used for DomainCombiner
permission java.security.SecurityPermission "createAccessControlContext";
// set TCCL used for bootstrapping Hadoop Configuration and JAAS
permission java.lang.RuntimePermission "setContextClassLoader";
//
// Hadoop 1
//
// UserGroupInformation (UGI)
// UserGroupInformation (UGI) Metrics clinit
permission java.lang.RuntimePermission "accessDeclaredMembers";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
// UGI triggers JAAS
permission javax.security.auth.AuthPermission "getSubject";
// JAAS libraries are not loaded with the proper context in Hadoop, hence why the permission is needed here
permission java.lang.RuntimePermission "loadLibrary.jaas_nt";
// which triggers the use of the Kerberos library
permission java.lang.RuntimePermission "accessClassInPackage.sun.security.krb5";
// plus LoginContext
permission javax.security.auth.AuthPermission "modifyPrincipals";
permission javax.security.auth.AuthPermission "modifyPublicCredentials";
permission javax.security.auth.AuthPermission "modifyPrivateCredentials";
//
// Hadoop 2
//
// UGI (Ugi Metrics)
permission java.lang.RuntimePermission "accessDeclaredMembers";
// Shell initialization - reading system props
// org.apache.hadoop.util.StringUtils clinit
permission java.util.PropertyPermission "*", "read,write";
permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials \"*\"", "read";
// org.apache.hadoop.util.ShutdownHookManager clinit
permission java.lang.RuntimePermission "shutdownHooks";
// HftpFileSystem (all present FS are loaded and initialized at startup ...)
permission java.lang.RuntimePermission "setFactory";
// JAAS is used always, we use a fake subject, hurts nobody
permission javax.security.auth.AuthPermission "getSubject";
permission javax.security.auth.AuthPermission "doAs";
permission javax.security.auth.AuthPermission "modifyPrivateCredentials";
};

View File

@ -1 +0,0 @@
Folder containing the required Hadoop client libraries and dependencies.

View File

@ -1,32 +0,0 @@
package org.elasticsearch.plugin.hadoop.hdfs;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.net.URL;
import java.util.Collections;
import java.util.List;
/** Test variant of {@link HdfsPlugin} that contributes no extra Hadoop jars to the classloader path. */
public class HdfsTestPlugin extends HdfsPlugin {
    @Override
    protected List<URL> getHadoopClassLoaderPath(String baseLib) {
        // tests already have hadoop on the classpath; no additional URLs needed
        return Collections.emptyList();
    }
}

View File

@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugin.hadoop.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.elasticsearch.common.SuppressForbidden;
import java.io.File;
/** Standalone launcher that spins up a single-datanode MiniDFSCluster for manual testing. */
public class MiniHDFSCluster {
    @SuppressForbidden(reason = "Hadoop is messy")
    public static void main(String[] args) throws Exception {
        // wipe any data left behind by a previous run
        FileUtil.fullyDelete(new File(System.getProperty("test.build.data", "build/test/data"), "dfs/"));
        // MiniHadoopClusterManager.main(new String[] { "-nomr" });
        Configuration cfg = new Configuration();
        cfg.set(DataNode.DATA_DIR_PERMISSION_KEY, "666");
        cfg.set("dfs.replication", "0");
        // one datanode, formatting the filesystem on startup
        MiniDFSCluster dfsCluster = new MiniDFSCluster(cfg, 1, true, null);
        FileSystem fs = dfsCluster.getFileSystem();
        // print connection details so a developer can point a repository at this cluster
        System.out.println(fs.getClass());
        System.out.println(fs.getUri());
        System.out.println(dfsCluster.getHftpFileSystem().getClass());
        // dfsCluster.shutdown();
    }
}

View File

@ -1,30 +0,0 @@
package org.elasticsearch.plugin.hadoop.hdfs;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.test.ESTestCase;
/** Unit tests for {@code Utils}. */
public class UtilsTests extends ESTestCase {
    public void testDetectLibFolder() {
        // the detected lib folder should match this plugin's own code-source location
        String location = HdfsPlugin.class.getProtectionDomain().getCodeSource().getLocation().toString();
        assertEquals(location, Utils.detectLibFolder());
    }
}

View File

@ -1,5 +1,3 @@
package org.elasticsearch.plugin.hadoop.hdfs;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
@ -18,17 +16,20 @@ package org.elasticsearch.plugin.hadoop.hdfs;
* specific language governing permissions and limitations
* under the License.
*/
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;
package org.elasticsearch.repositories.hdfs;
import java.io.IOException;
import java.util.Collection;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.hdfs.HdfsPlugin;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;
public class HdfsRepositoryRestIT extends ESRestTestCase {
@Override

View File

@ -16,7 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugin.hadoop.hdfs;
package org.elasticsearch.repositories.hdfs;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import java.util.Collection;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
@ -26,94 +31,43 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoryException;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.repositories.hdfs.TestingFs;
import org.elasticsearch.repositories.hdfs.HdfsPlugin;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.ESIntegTestCase.ThirdParty;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.junit.After;
import org.junit.Before;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Collection;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
/**
* You must specify {@code -Dtests.thirdparty=true}
*/
@ThirdParty
@ClusterScope(scope = Scope.SUITE, numDataNodes = 1, transportClientRatio = 0.0)
public class HdfsTests extends ESIntegTestCase {
public class HdfsTests extends ESSingleNodeTestCase {
@Override
public Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false)
.put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false)
.build();
}
@Override
protected Settings nodeSettings(int ordinal) {
Settings.Builder settings = Settings.builder()
.put(super.nodeSettings(ordinal))
.put("path.home", createTempDir())
.put("path.repo", "")
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false)
.put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false);
return settings.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(HdfsTestPlugin.class);
}
private String path;
@Before
public final void wipeBefore() throws Exception {
wipeRepositories();
path = "build/data/repo-" + randomInt();
}
@After
public final void wipeAfter() throws Exception {
wipeRepositories();
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(HdfsPlugin.class);
}
public void testSimpleWorkflow() {
Client client = client();
logger.info("--> creating hdfs repository with path [{}]", path);
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.settingsBuilder()
//.put("uri", "hdfs://127.0.0.1:51227")
.put("conf.fs.es-hdfs.impl", TestingFs.class.getName())
.put("uri", "es-hdfs://./build/")
.put("path", path)
.put("conf", "additional-cfg.xml, conf-2.xml")
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean())
).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
createIndex("test-idx-1", "test-idx-2", "test-idx-3");
createIndex("test-idx-1");
createIndex("test-idx-2");
createIndex("test-idx-3");
ensureGreen();
logger.info("--> indexing some data");
for (int i = 0; i < 100; i++) {
index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i);
index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i);
client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get();
client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get();
client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get();
}
refresh();
client().admin().indices().prepareRefresh().get();
assertThat(count(client, "test-idx-1"), equalTo(100L));
assertThat(count(client, "test-idx-2"), equalTo(100L));
assertThat(count(client, "test-idx-3"), equalTo(100L));
@ -135,7 +89,7 @@ public class HdfsTests extends ESIntegTestCase {
for (int i = 0; i < 100; i += 2) {
client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get();
}
refresh();
client().admin().indices().prepareRefresh().get();
assertThat(count(client, "test-idx-1"), equalTo(50L));
assertThat(count(client, "test-idx-2"), equalTo(50L));
assertThat(count(client, "test-idx-3"), equalTo(50L));
@ -154,7 +108,7 @@ public class HdfsTests extends ESIntegTestCase {
// Test restore after index deletion
logger.info("--> delete indices");
wipeIndices("test-idx-1", "test-idx-2");
client().admin().indices().prepareDelete("test-idx-1", "test-idx-2").get();
logger.info("--> restore one index after deletion");
restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
@ -165,50 +119,67 @@ public class HdfsTests extends ESIntegTestCase {
assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false));
}
private void wipeIndices(String... indices) {
cluster().wipeIndices(indices);
}
// RepositoryVerificationException.class
public void testWrongPath() {
Client client = client();
logger.info("--> creating hdfs repository with path [{}]", path);
public void testMissingUri() {
try {
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.settingsBuilder()
// .put("uri", "hdfs://127.0.0.1:51227/")
.put("conf.fs.es-hdfs.impl", TestingFs.class.getName())
.put("uri", "es-hdfs:///")
.put("path", path + "a@b$c#11:22")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean()))
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
createIndex("test-idx-1", "test-idx-2", "test-idx-3");
ensureGreen();
fail("Path name is invalid");
} catch (RepositoryException re) {
// expected
client().admin().cluster().preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.EMPTY).get();
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
assertTrue(e.getCause().getMessage().contains("No 'uri' defined for hdfs"));
}
}
/**
* Deletes repositories, supports wildcard notation.
*/
public static void wipeRepositories(String... repositories) {
// if nothing is provided, delete all
if (repositories.length == 0) {
repositories = new String[]{"*"};
public void testEmptyUri() {
try {
client().admin().cluster().preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder()
.put("uri", "/path").build()).get();
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
assertTrue(e.getCause().getMessage(), e.getCause().getMessage().contains("Invalid scheme [null] specified in uri [/path]"));
}
for (String repository : repositories) {
try {
client().admin().cluster().prepareDeleteRepository(repository).execute().actionGet();
} catch (RepositoryMissingException ex) {
// ignore
}
}
/**
 * Creating an hdfs repository with a non-hdfs scheme (here {@code file://})
 * must be rejected: the plugin wraps the validation failure in a
 * {@link RepositoryException} whose cause is an {@link IllegalArgumentException}.
 */
public void testNonHdfsUri() {
    Settings repoSettings = Settings.builder().put("uri", "file:///").build();
    try {
        client().admin().cluster().preparePutRepository("test-repo")
            .setType("hdfs")
            .setSettings(repoSettings).get();
        fail();
    } catch (RepositoryException e) {
        Throwable cause = e.getCause();
        assertTrue(cause instanceof IllegalArgumentException);
        assertTrue(cause.getMessage().contains("Invalid scheme [file] specified in uri [file:///]"));
    }
}
/**
 * A path embedded in the hdfs uri itself (e.g. {@code hdfs:///some/path}) is
 * not allowed; users must supply the separate 'path' setting instead. The
 * failure surfaces as a {@link RepositoryException} caused by an
 * {@link IllegalArgumentException} pointing at the 'path' option.
 */
public void testPathSpecifiedInHdfs() {
    Settings repoSettings = Settings.builder().put("uri", "hdfs:///some/path").build();
    try {
        client().admin().cluster().preparePutRepository("test-repo")
            .setType("hdfs")
            .setSettings(repoSettings).get();
        fail();
    } catch (RepositoryException e) {
        Throwable cause = e.getCause();
        assertTrue(cause instanceof IllegalArgumentException);
        assertTrue(cause.getMessage().contains("Use 'path' option to specify a path [/some/path]"));
    }
}
/**
 * The 'path' setting is mandatory for hdfs repositories: registering one with
 * only a uri must fail with a {@link RepositoryException} whose cause is an
 * {@link IllegalArgumentException} naming the missing setting.
 */
public void testMissingPath() {
    Settings repoSettings = Settings.builder().put("uri", "hdfs:///").build();
    try {
        client().admin().cluster().preparePutRepository("test-repo")
            .setType("hdfs")
            .setSettings(repoSettings).get();
        fail();
    } catch (RepositoryException e) {
        Throwable cause = e.getCause();
        assertTrue(cause instanceof IllegalArgumentException);
        assertTrue(cause.getMessage().contains("No 'path' defined for hdfs"));
    }
}

View File

@ -0,0 +1,117 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DelegateToFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.lucene.util.LuceneTestCase;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.spi.FileSystemProvider;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
/**
* Extends LFS to improve some operations to keep the security permissions at
* bay. In particular it never tries to execute!
*/
public class TestingFs extends DelegateToFileSystem {

    /**
     * Wraps Hadoop's {@link RawLocalFileSystem} so that it behaves safely under
     * the test security manager: no permission changes, no symlink handling,
     * and file status read via NIO instead of forking external processes.
     *
     * @param base NIO path used as the working directory of the wrapped filesystem
     */
    // wrap hadoop rawlocalfilesystem to behave less crazy
    static RawLocalFileSystem wrap(final Path base) {
        final FileSystemProvider baseProvider = base.getFileSystem().provider();
        return new RawLocalFileSystem() {

            // convert a java.nio Path into a hadoop Path (via its URI)
            private org.apache.hadoop.fs.Path box(Path path) {
                return new org.apache.hadoop.fs.Path(path.toUri());
            }

            // convert a hadoop Path back into a java.nio Path of the base provider
            private Path unbox(org.apache.hadoop.fs.Path path) {
                return baseProvider.getPath(path.toUri());
            }

            @Override
            protected org.apache.hadoop.fs.Path getInitialWorkingDirectory() {
                // confine the filesystem to the supplied base directory
                return box(base);
            }

            @Override
            public void setPermission(org.apache.hadoop.fs.Path path, FsPermission permission) {
                // deliberately a no-op: chmod would require execute permissions
                // no execution, thank you very much!
            }

            // pretend we don't support symlinks (which causes hadoop to want to do crazy things),
            // returning the boolean does not seem to really help, link-related operations are still called.
            @Override
            public boolean supportsSymlinks() {
                return false;
            }

            @Override
            public FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path path) throws IOException {
                // no symlinks: link status is just the plain file status
                return getFileStatus(path);
            }

            @Override
            public org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path path) throws IOException {
                // no symlinks: every path is its own "target"
                return path;
            }

            /**
             * Reads file status via NIO attributes rather than Hadoop's native/exec
             * based implementation, so no external process is ever spawned.
             *
             * @throws FileNotFoundException if the path does not exist (Hadoop
             *         callers expect this exact exception type)
             */
            @Override
            public FileStatus getFileStatus(org.apache.hadoop.fs.Path path) throws IOException {
                BasicFileAttributes attributes;
                try {
                    attributes = Files.readAttributes(unbox(path), BasicFileAttributes.class);
                } catch (NoSuchFileException e) {
                    // unfortunately, specific exceptions are not guaranteed. don't wrap hadoop over a zip filesystem or something.
                    FileNotFoundException fnfe = new FileNotFoundException("File " + path + " does not exist");
                    fnfe.initCause(e);
                    throw fnfe;
                }

                // we set similar values to raw local filesystem, except we are never a symlink
                long length = attributes.size();
                boolean isDir = attributes.isDirectory();
                int blockReplication = 1;
                long blockSize = getDefaultBlockSize(path);
                // NOTE(review): creationTime (not lastModifiedTime) is reported as
                // the modification time here — presumably intentional for tests,
                // but confirm if mtime-sensitive behavior is ever exercised.
                long modificationTime = attributes.creationTime().toMillis();
                return new FileStatus(length, isDir, blockReplication, blockSize, modificationTime, path);
            }
        };
    }

    /**
     * Creates the testing filesystem rooted at a fresh Lucene temp directory.
     * The supplied {@code uri} is ignored; the delegate is always registered
     * under the {@code file} scheme.
     */
    public TestingFs(URI uri, Configuration configuration) throws URISyntaxException, IOException {
        super(URI.create("file:///"), wrap(LuceneTestCase.createTempDir()), configuration, "file", false);
    }

    @Override
    public void checkPath(org.apache.hadoop.fs.Path path) {
        // skip Hadoop's scheme/authority validation entirely:
        // we do evil stuff, we admit it.
    }
}

View File

@ -1,12 +0,0 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>foo</name>
<value>foo</value>
</property>
<property>
<name>paradise</name>
<value>lost</value>
</property>
</configuration>

View File

@ -1,12 +0,0 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>foo</name>
<value>foo</value>
</property>
<property>
<name>paradise</name>
<value>lost</value>
</property>
</configuration>

View File

@ -2,7 +2,7 @@
#
# Check plugin is installed
#
"HDFS Repository loaded":
"Plugin loaded":
- do:
cluster.state: {}
@ -14,3 +14,18 @@
- match: { nodes.$master.plugins.0.name: repository-hdfs }
- match: { nodes.$master.plugins.0.jvm: true }
---
#
# Check that we can't use file:// repositories or anything like that
# We only test this plugin against hdfs://
#
"HDFS only":
- do:
catch: /Invalid scheme/
snapshot.create_repository:
repository: misconfigured_repository
body:
type: hdfs
settings:
uri: "file://bogus"
path: "foo/bar"

View File

@ -1,25 +0,0 @@
# Integration tests for HDFS Repository plugin
#
# Check plugin is installed
#
"HDFS Repository Config":
- do:
snapshot.create_repository:
repository: test_repo_hdfs_1
verify: false
body:
type: hdfs
settings:
# local HDFS implementation
conf.fs.es-hdfs.impl: "org.elasticsearch.repositories.hdfs.TestingFs"
uri: "es-hdfs://./build/"
path: "build/data/repo-hdfs"
# Get repository
- do:
snapshot.get_repository:
repository: test_repo_hdfs_1
- is_true: test_repo_hdfs_1
- is_true: test_repo_hdfs_1.settings.uri
- match: {test_repo_hdfs_1.settings.path : "build/data/repo-hdfs"}

View File

@ -0,0 +1,27 @@
# Integration tests for HDFS Repository plugin
#
# Tests creating a repository
#
"HDFS Repository Creation":
# Create repository
- do:
snapshot.create_repository:
repository: test_repository_create
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "test/repository_create"
# Get repository
- do:
snapshot.get_repository:
repository: test_repository_create
- is_true: test_repository_create
- match: {test_repository_create.settings.path : "test/repository_create"}
# Remove our repository
- do:
snapshot.delete_repository:
repository: test_repository_create

View File

@ -0,0 +1,50 @@
# Integration tests for HDFS Repository plugin
#
# Tests creating a repository, then deleting it and creating it again.
#
"HDFS Delete Repository":
# Create repository
- do:
snapshot.create_repository:
repository: test_repo_hdfs_1
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "foo/bar"
# Get repository
- do:
snapshot.get_repository:
repository: test_repo_hdfs_1
- is_true: test_repo_hdfs_1
- match: {test_repo_hdfs_1.settings.path : "foo/bar"}
# Delete repository
- do:
snapshot.delete_repository:
repository: test_repo_hdfs_1
# Get repository: It should be gone
- do:
catch: /repository_missing_exception/
snapshot.get_repository:
repository: test_repo_hdfs_1
# Create it again
- do:
snapshot.create_repository:
repository: test_repo_hdfs_1
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "foo/bar"
# Get repository again
- do:
snapshot.get_repository:
repository: test_repo_hdfs_1
- is_true: test_repo_hdfs_1

View File

@ -0,0 +1,23 @@
# Integration tests for HDFS Repository plugin
#
# Tests explicit verify
#
"HDFS Repository Verify":
- do:
snapshot.create_repository:
repository: test_repository_verify
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "test/repository_verify"
# Verify repository
- do:
snapshot.verify_repository:
repository: test_repository_verify
# Remove our repository
- do:
snapshot.delete_repository:
repository: test_repository_verify

View File

@ -0,0 +1,53 @@
# Integration tests for HDFS Repository plugin
#
# Actually perform a snapshot to hdfs
#
---
"take snapshot":
# Create repository
- do:
snapshot.create_repository:
repository: test_snapshot_repository
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "test/snapshot"
# Create index
- do:
indices.create:
index: test_index
body:
settings:
number_of_shards: 1
number_of_replicas: 1
# Wait for yellow
- do:
cluster.health:
wait_for_status: yellow
# Create snapshot
- do:
snapshot.create:
repository: test_snapshot_repository
snapshot: test_snapshot
wait_for_completion: true
- match: { snapshot.snapshot: test_snapshot }
- match: { snapshot.state : SUCCESS }
- match: { snapshot.shards.successful: 1 }
- match: { snapshot.shards.failed : 0 }
# Remove our snapshot
- do:
snapshot.delete:
repository: test_snapshot_repository
snapshot: test_snapshot
# Remove our repository
- do:
snapshot.delete_repository:
repository: test_snapshot_repository

View File

@ -0,0 +1,70 @@
# Integration tests for HDFS Repository plugin
#
# Tests retrieving information about snapshot
#
---
"Get a snapshot":
# Create repository
- do:
snapshot.create_repository:
repository: test_snapshot_get_repository
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "test/snapshot_get"
# Create index
- do:
indices.create:
index: test_index
body:
settings:
number_of_shards: 1
number_of_replicas: 0
# Wait for green
- do:
cluster.health:
wait_for_status: green
# Create snapshot
- do:
snapshot.create:
repository: test_snapshot_get_repository
snapshot: test_snapshot_get
wait_for_completion: true
- match: { snapshot.snapshot: test_snapshot_get }
- match: { snapshot.state : SUCCESS }
- match: { snapshot.shards.successful: 1 }
- match: { snapshot.shards.failed : 0 }
# Get snapshot info
- do:
snapshot.get:
repository: test_snapshot_get_repository
snapshot: test_snapshot_get
- length: { snapshots: 1 }
- match: { snapshots.0.snapshot : test_snapshot_get }
# List snapshot info
- do:
snapshot.get:
repository: test_snapshot_get_repository
snapshot: "*"
- length: { snapshots: 1 }
- match: { snapshots.0.snapshot : test_snapshot_get }
# Remove our snapshot
- do:
snapshot.delete:
repository: test_snapshot_get_repository
snapshot: test_snapshot_get
# Remove our repository
- do:
snapshot.delete_repository:
repository: test_snapshot_get_repository

View File

@ -0,0 +1,79 @@
# Integration tests for HDFS Repository plugin
#
# Actually perform a snapshot to hdfs, then restore it
#
---
"Create a snapshot and then restore it":
# Create repository
- do:
snapshot.create_repository:
repository: test_restore_repository
body:
type: hdfs
settings:
uri: "hdfs://localhost:9999"
path: "test/restore"
# Create index
- do:
indices.create:
index: test_index
body:
settings:
number_of_shards: 1
number_of_replicas: 0
# Wait for green
- do:
cluster.health:
wait_for_status: green
# Take snapshot
- do:
snapshot.create:
repository: test_restore_repository
snapshot: test_restore
wait_for_completion: true
- match: { snapshot.snapshot: test_restore }
- match: { snapshot.state : SUCCESS }
- match: { snapshot.shards.successful: 1 }
- match: { snapshot.shards.failed : 0 }
- is_true: snapshot.version
- gt: { snapshot.version_id: 0}
# Close index
- do:
indices.close:
index : test_index
# Restore index
- do:
snapshot.restore:
repository: test_restore_repository
snapshot: test_restore
wait_for_completion: true
# Check recovery stats
- do:
indices.recovery:
index: test_index
- match: { test_index.shards.0.type: SNAPSHOT }
- match: { test_index.shards.0.stage: DONE }
- match: { test_index.shards.0.index.files.recovered: 1}
- gt: { test_index.shards.0.index.size.recovered_in_bytes: 0}
- match: { test_index.shards.0.index.files.reused: 0}
- match: { test_index.shards.0.index.size.reused_in_bytes: 0}
# Remove our snapshot
- do:
snapshot.delete:
repository: test_restore_repository
snapshot: test_restore
# Remove our repository
- do:
snapshot.delete_repository:
repository: test_restore_repository

View File

@ -10,6 +10,7 @@ List projects = [
'distribution:rpm',
'test:framework',
'test:fixtures:example-fixture',
'test:fixtures:hdfs-fixture',
'modules:lang-expression',
'modules:lang-groovy',
'modules:lang-mustache',

View File

@ -17,13 +17,26 @@
* under the License.
*/
package org.elasticsearch.repositories.hdfs;
apply plugin: 'elasticsearch.build'
import org.apache.hadoop.fs.FileSystem;
versions << [
'hadoop2': '2.7.1'
]
import java.io.IOException;
interface FsCallback<V> {
V doInHdfs(FileSystem fs) throws IOException;
// we create MiniHdfsCluster with the hadoop artifact
dependencies {
compile "org.apache.hadoop:hadoop-minicluster:${versions.hadoop2}"
}
// for testing, until fixtures are actually debuggable.
// gradle hides *EVERYTHING* so you have no clue what went wrong.
task hdfs(type: JavaExec) {
classpath = sourceSets.test.compileClasspath + sourceSets.test.output
main = "hdfs.MiniHDFS"
args = [ 'build/fixtures/hdfsFixture' ]
}
// just a test fixture: we aren't using jars in releases
thirdPartyAudit.enabled = false
// TODO: add a simple HDFS client test for this fixture
test.enabled = false

View File

@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Locale;
import java.lang.management.ManagementFactory;
/**
* MiniHDFS test fixture. There is a CLI tool, but here we can
* easily properly setup logging, avoid parsing JSON, etc.
*/
public class MiniHDFS {

    // file (relative to the base directory) where the namenode port is published
    private static String PORT_FILE_NAME = "ports";
    // file (relative to the base directory) where this JVM's pid is published
    private static String PID_FILE_NAME = "pid";

    /**
     * Starts a single-node MiniDFSCluster rooted under the given base directory,
     * then publishes this process's pid and the namenode port as files so the
     * build can manage the fixture. Runs until externally terminated.
     *
     * @param args exactly one argument: the base directory for all fixture state
     * @throws IllegalArgumentException if the argument count is wrong
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 1) {
            throw new IllegalArgumentException("MiniHDFS <baseDirectory>");
        }

        // configure Paths
        Path baseDir = Paths.get(args[0]);
        // hadoop-home/, so logs will not complain
        if (System.getenv("HADOOP_HOME") == null) {
            Path hadoopHome = baseDir.resolve("hadoop-home");
            Files.createDirectories(hadoopHome);
            System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
        }
        // hdfs-data/, where any data is going
        Path hdfsHome = baseDir.resolve("hdfs-data");

        // start cluster
        Configuration cfg = new Configuration();
        cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString());
        // lower default permission: TODO: needed?
        cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766");
        // TODO: remove hardcoded port!
        MiniDFSCluster dfs = new MiniDFSCluster.Builder(cfg).nameNodePort(9999).build();

        // write our PID file: written to a temp file first and atomically moved
        // into place so a watcher never observes a partially-written file
        Path tmp = Files.createTempFile(baseDir, null, null);
        String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
        Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8));
        Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);

        // write our port file (same temp-file + atomic-move dance as the pid file)
        tmp = Files.createTempFile(baseDir, null, null);
        Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8));
        Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);
    }
}

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.test;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
@ -37,15 +38,22 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@ -59,13 +67,13 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
private static Node NODE = null;
private static void reset() {
private void reset() {
assert NODE != null;
stopNode();
startNode();
}
private static void startNode() {
private void startNode() {
assert NODE == null;
NODE = newNode();
// we must wait for the node to actually be up and running. otherwise the node might have started, elected itself master but might not yet have removed the
@ -80,7 +88,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
Releasables.close(node);
}
static void cleanup(boolean resetNode) {
private void cleanup(boolean resetNode) {
assertAcked(client().admin().indices().prepareDelete("*").get());
if (resetNode) {
reset();
@ -92,7 +100,19 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
metaData.transientSettings().getAsMap().size(), equalTo(0));
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
// Create the node lazily, on the first test. This is ok because we do not randomize any settings,
// only the cluster name. This allows us to have overriden properties for plugins and the version to use.
if (NODE == null) {
startNode();
}
}
@After
@Override
public void tearDown() throws Exception {
logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName());
super.tearDown();
@ -102,7 +122,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
@BeforeClass
public static void setUpClass() throws Exception {
stopNode();
startNode();
}
@AfterClass
@ -119,25 +138,42 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
return false;
}
private static Node newNode() {
Node build = new Node(Settings.builder()
.put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong()))
.put("path.home", createTempDir())
// TODO: use a consistent data path for custom paths
// This needs to tie into the ESIntegTestCase#indexSettings() method
.put("path.shared_data", createTempDir().getParent())
.put("node.name", nodeName())
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put("script.inline", "on")
.put("script.indexed", "on")
.put(EsExecutors.PROCESSORS, 1) // limit the number of threads created
.put("http.enabled", false)
.put("node.local", true)
.put("node.data", true)
.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :)
.build()
);
/** The version of elasticsearch the node should act like. */
protected Version getVersion() {
return Version.CURRENT;
}
/** The plugin classes that should be added to the node. */
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.emptyList();
}
/** Helper method to create list of plugins without specifying generic types. */
@SafeVarargs
@SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which causes this warning
protected final Collection<Class<? extends Plugin>> pluginList(Class<? extends Plugin>... plugins) {
return Arrays.asList(plugins);
}
private Node newNode() {
Settings settings = Settings.builder()
.put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong()))
.put("path.home", createTempDir())
// TODO: use a consistent data path for custom paths
// This needs to tie into the ESIntegTestCase#indexSettings() method
.put("path.shared_data", createTempDir().getParent())
.put("node.name", nodeName())
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put("script.inline", "on")
.put("script.indexed", "on")
.put(EsExecutors.PROCESSORS, 1) // limit the number of threads created
.put("http.enabled", false)
.put("node.local", true)
.put("node.data", true)
.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :)
.build();
Node build = new MockNode(settings, getVersion(), getPlugins());
build.start();
assertThat(DiscoveryNode.localNode(build.settings()), is(true));
return build;