allocationMultibinder = Multibinder.newSetBinder(binder(), AllocationDecider.class);
- for (Class extends AllocationDecider> allocation : allocationDeciders) {
- allocationMultibinder.addBinding().to(allocation).asEagerSingleton();
- }
+ allocationDeciders.bind(binder());
bind(GatewayAllocator.class).asEagerSingleton();
- bind(AllocationDeciders.class).asEagerSingleton();
bind(AllocationService.class).asEagerSingleton();
}
+
}
diff --git a/core/src/main/java/org/elasticsearch/common/Classes.java b/core/src/main/java/org/elasticsearch/common/Classes.java
index b9a508e94bd..4a73c0d8ae4 100644
--- a/core/src/main/java/org/elasticsearch/common/Classes.java
+++ b/core/src/main/java/org/elasticsearch/common/Classes.java
@@ -19,17 +19,7 @@
package org.elasticsearch.common;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.bootstrap.Elasticsearch;
-import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.settings.NoClassSettingsException;
-
import java.lang.reflect.Modifier;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.Locale;
-
-import static org.elasticsearch.common.Strings.toCamelCase;
/**
*
@@ -41,34 +31,6 @@ public class Classes {
*/
private static final char PACKAGE_SEPARATOR = '.';
- /**
- * Return the default ClassLoader to use: typically the thread context
- * ClassLoader, if available; the ClassLoader that loaded the ClassUtils
- * class will be used as fallback.
- *
- * Call this method if you intend to use the thread context ClassLoader
- * in a scenario where you absolutely need a non-null ClassLoader reference:
- * for example, for class path resource loading (but not necessarily for
- * Class.forName
, which accepts a null
ClassLoader
- * reference as well).
- *
- * @return the default ClassLoader (never null
)
- * @see java.lang.Thread#getContextClassLoader()
- */
- public static ClassLoader getDefaultClassLoader() {
- ClassLoader cl = null;
- try {
- cl = Thread.currentThread().getContextClassLoader();
- } catch (Throwable ex) {
- // Cannot access thread context ClassLoader - falling back to system class loader...
- }
- if (cl == null) {
- // No thread context class loader -> use class loader of this class.
- cl = Classes.class.getClassLoader();
- }
- return cl;
- }
-
/**
* Determine the name of the package of the given class:
* e.g. "java.lang" for the java.lang.String
class.
@@ -93,13 +55,5 @@ public class Classes {
return !clazz.isInterface() && !Modifier.isAbstract(modifiers);
}
- public static Class extends T> loadClass(ClassLoader classLoader, String className) {
- try {
- return (Class extends T>) classLoader.loadClass(className);
- } catch (ClassNotFoundException|NoClassDefFoundError e) {
- throw new ElasticsearchException("failed to load class [" + className + "]", e);
- }
- }
-
private Classes() {}
}
diff --git a/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java b/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java
index 882bfcbf718..fce18337728 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java
@@ -31,7 +31,7 @@ public class ShapesAvailability {
static {
boolean xSPATIAL4J_AVAILABLE;
try {
- Classes.getDefaultClassLoader().loadClass("com.spatial4j.core.shape.impl.PointImpl");
+ Class.forName("com.spatial4j.core.shape.impl.PointImpl");
xSPATIAL4J_AVAILABLE = true;
} catch (Throwable t) {
xSPATIAL4J_AVAILABLE = false;
@@ -40,7 +40,7 @@ public class ShapesAvailability {
boolean xJTS_AVAILABLE;
try {
- Classes.getDefaultClassLoader().loadClass("com.vividsolutions.jts.geom.GeometryFactory");
+ Class.forName("com.vividsolutions.jts.geom.GeometryFactory");
xJTS_AVAILABLE = true;
} catch (Throwable t) {
xJTS_AVAILABLE = false;
diff --git a/core/src/main/java/org/elasticsearch/common/inject/Modules.java b/core/src/main/java/org/elasticsearch/common/inject/Modules.java
index 4e5ae23e05a..edb08dd183a 100644
--- a/core/src/main/java/org/elasticsearch/common/inject/Modules.java
+++ b/core/src/main/java/org/elasticsearch/common/inject/Modules.java
@@ -30,10 +30,6 @@ import java.lang.reflect.Constructor;
*/
public class Modules {
- public static Module createModule(String moduleClass, Settings settings) throws ClassNotFoundException {
- return createModule((Class extends Module>) settings.getClassLoader().loadClass(moduleClass), settings);
- }
-
public static Module createModule(Class extends Module> moduleClass, @Nullable Settings settings) {
Constructor extends Module> constructor;
try {
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
index 2671874f86f..16eca5c8dfc 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
@@ -79,9 +79,8 @@ public final class Settings implements ToXContent {
private ImmutableMap settings;
private final ImmutableMap forcedUnderscoreSettings;
- private transient ClassLoader classLoader;
-    Settings(Map<String, String> settings, ClassLoader classLoader) {
+    Settings(Map<String, String> settings) {
// we use a sorted map for consistent serialization when using getAsMap()
// TODO: use Collections.unmodifiableMap with a TreeMap
this.settings = ImmutableSortedMap.copyOf(settings);
@@ -96,22 +95,6 @@ public final class Settings implements ToXContent {
}
}
this.forcedUnderscoreSettings = forcedUnderscoreSettings == null ? ImmutableMap.of() : ImmutableMap.copyOf(forcedUnderscoreSettings);
- this.classLoader = classLoader;
- }
-
- /**
- * The class loader associated with this settings, or {@link org.elasticsearch.common.Classes#getDefaultClassLoader()}
- * if not set.
- */
- public ClassLoader getClassLoader() {
- return this.classLoader == null ? Classes.getDefaultClassLoader() : classLoader;
- }
-
- /**
- * The class loader associated with this settings, but only if explicitly set, otherwise null.
- */
- public ClassLoader getClassLoaderIfSet() {
- return this.classLoader;
}
/**
@@ -227,7 +210,6 @@ public final class Settings implements ToXContent {
builder.put(entry.getKey().substring(prefix.length()), entry.getValue());
}
}
- builder.classLoader(classLoader);
return builder.build();
}
@@ -648,7 +630,7 @@ public final class Settings implements ToXContent {
}
Map retVal = new LinkedHashMap<>();
for (Map.Entry> entry : map.entrySet()) {
- retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue()), classLoader));
+ retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue())));
}
return Collections.unmodifiableMap(retVal);
}
@@ -701,17 +683,13 @@ public final class Settings implements ToXContent {
if (o == null || getClass() != o.getClass()) return false;
Settings that = (Settings) o;
-
- if (classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) return false;
if (settings != null ? !settings.equals(that.settings) : that.settings != null) return false;
-
return true;
}
@Override
public int hashCode() {
int result = settings != null ? settings.hashCode() : 0;
- result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0);
return result;
}
@@ -769,8 +747,6 @@ public final class Settings implements ToXContent {
private final Map map = new LinkedHashMap<>();
- private ClassLoader classLoader;
-
private Builder() {
}
@@ -998,7 +974,6 @@ public final class Settings implements ToXContent {
public Builder put(Settings settings) {
removeNonArraysFieldsIfNewSettingsContainsFieldAsArray(settings.getAsMap());
map.putAll(settings.getAsMap());
- classLoader = settings.getClassLoaderIfSet();
return this;
}
@@ -1118,31 +1093,6 @@ public final class Settings implements ToXContent {
return this;
}
- /**
- * Loads settings from classpath that represents them using the
- * {@link SettingsLoaderFactory#loaderFromSource(String)}.
- */
- public Builder loadFromClasspath(String resourceName) throws SettingsException {
- ClassLoader classLoader = this.classLoader;
- if (classLoader == null) {
- classLoader = Classes.getDefaultClassLoader();
- }
- InputStream is = classLoader.getResourceAsStream(resourceName);
- if (is == null) {
- return this;
- }
-
- return loadFromStream(resourceName, is);
- }
-
- /**
- * Sets the class loader associated with the settings built.
- */
- public Builder classLoader(ClassLoader classLoader) {
- this.classLoader = classLoader;
- return this;
- }
-
/**
* Puts all the properties with keys starting with the provided prefix.
*
@@ -1270,7 +1220,7 @@ public final class Settings implements ToXContent {
* set on this builder.
*/
public Settings build() {
- return new Settings(Collections.unmodifiableMap(map), classLoader);
+ return new Settings(Collections.unmodifiableMap(map));
}
}
diff --git a/core/src/main/java/org/elasticsearch/common/util/BigArraysModule.java b/core/src/main/java/org/elasticsearch/common/util/BigArraysModule.java
deleted file mode 100644
index 8d863910332..00000000000
--- a/core/src/main/java/org/elasticsearch/common/util/BigArraysModule.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.util;
-
-import org.elasticsearch.common.Classes;
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.settings.Settings;
-
-import static org.elasticsearch.common.inject.Modules.createModule;
-
-/**
- */
-public class BigArraysModule extends AbstractModule {
-
- public static final String IMPL = "common.util.big_arrays_impl";
-
- private final Settings settings;
-
- public BigArraysModule(Settings settings) {
- this.settings = settings;
- }
-
- @Override
- protected void configure() {
- String impl = settings.get(IMPL);
- if (impl == null) {
- bind(BigArrays.class).asEagerSingleton();
- } else {
- Class extends BigArrays> implClass = Classes.loadClass(getClass().getClassLoader(), impl);
- bind(BigArrays.class).to(implClass).asEagerSingleton();
- }
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java
new file mode 100644
index 00000000000..435c3ae4066
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java
@@ -0,0 +1,194 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util;
+
+import org.elasticsearch.common.inject.Binder;
+import org.elasticsearch.common.inject.multibindings.MapBinder;
+import org.elasticsearch.common.inject.multibindings.Multibinder;
+import org.elasticsearch.common.settings.Settings;
+
+import java.util.*;
+
+/**
+ * This class defines an official elasticsearch extension point. It registers
+ * all extensions by a single name and ensures that extensions are not registered
+ * more than once.
+ */
+public abstract class ExtensionPoint<T> {
+    protected final String name;
+    protected final Class<T> extensionClass;
+    protected final Class<?>[] singletons;
+
+ /**
+ * Creates a new extension point
+ *
+     * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
+ * @param extensionClass the base class that should be extended
+ * @param singletons a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
+ */
+    public ExtensionPoint(String name, Class<T> extensionClass, Class<?>... singletons) {
+ this.name = name;
+ this.extensionClass = extensionClass;
+ this.singletons = singletons;
+ }
+
+ /**
+ * Binds the extension as well as the singletons to the given guice binder.
+ *
+ * @param binder the binder to use
+ */
+ public final void bind(Binder binder) {
+ if (singletons == null || singletons.length == 0) {
+ throw new IllegalStateException("Can't bind empty or null singletons");
+ }
+        for (Class<?> c : singletons) {
+ binder.bind(c).asEagerSingleton();
+ }
+ bindExtensions(binder);
+ }
+
+ /**
+     * Subclasses can bind their type, map or set extensions here.
+ */
+ protected abstract void bindExtensions(Binder binder);
+
+ /**
+ * A map based extension point which allows to register keyed implementations ie. parsers or some kind of strategies.
+ */
+    public static class MapExtensionPoint<T> extends ExtensionPoint<T> {
+        private final Map<String, Class<? extends T>> extensions = new HashMap<>();
+        private final Set<String> reservedKeys;
+
+ /**
+ * Creates a new {@link org.elasticsearch.common.util.ExtensionPoint.MapExtensionPoint}
+ *
+         * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
+ * @param extensionClass the base class that should be extended
+ * @param singletons a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
+ * @param reservedKeys a set of reserved keys by internal implementations
+ */
+        public MapExtensionPoint(String name, Class<T> extensionClass, Set<String> reservedKeys, Class<?>... singletons) {
+ super(name, extensionClass, singletons);
+ this.reservedKeys = reservedKeys;
+
+ }
+
+ /**
+         * Returns the extension for the given key or <code>null</code> if no extension is registered for the key
+         */
+        public Class<? extends T> getExtension(String type) {
+ return extensions.get(type);
+ }
+
+ /**
+         * Registers an extension class for a given key. This method throws an {@link IllegalArgumentException} if the key is already taken or reserved.
+ *
+ * @param key the extensions key
+ * @param extension the extension
+ * @throws IllegalArgumentException iff the key is already registered or if the key is a reserved key for an internal implementation
+ */
+        public final void registerExtension(String key, Class<? extends T> extension) {
+ if (extensions.containsKey(key) || reservedKeys.contains(key)) {
+ throw new IllegalArgumentException("Can't register the same [" + this.name + "] more than once for [" + key + "]");
+ }
+ extensions.put(key, extension);
+ }
+
+ @Override
+ protected final void bindExtensions(Binder binder) {
+            MapBinder<String, T> parserMapBinder = MapBinder.newMapBinder(binder, String.class, extensionClass);
+            for (Map.Entry<String, Class<? extends T>> clazz : extensions.entrySet()) {
+ parserMapBinder.addBinding(clazz.getKey()).to(clazz.getValue());
+ }
+ }
+ }
+
+ /**
+     * A type extension point which allows registering keyed extensions like {@link org.elasticsearch.common.util.ExtensionPoint.MapExtensionPoint}
+     * but doesn't instantiate and bind all the registered key value pairs; instead it replaces a singleton based on a given setting via {@link #bindType(Binder, Settings, String, String)}.
+ * Note: {@link #bind(Binder)} is not supported by this class
+ */
+    public static final class TypeExtensionPoint<T> extends MapExtensionPoint<T> {
+
+        public TypeExtensionPoint(String name, Class<T> extensionClass) {
+ super(name, extensionClass, Collections.EMPTY_SET);
+ }
+
+ /**
+         * Binds the extension class to the class that is registered for the value configured for the settings key in
+ * the settings object.
+ *
+ * @param binder the binder to use
+         * @param settings     the settings to look up the key to find the implementation to bind
+         * @param settingsKey  the key to use with the settings
+         * @param defaultValue the default value if the settings don't contain the key
+ * @return the actual bound type key
+ */
+ public String bindType(Binder binder, Settings settings, String settingsKey, String defaultValue) {
+ final String type = settings.get(settingsKey, defaultValue);
+            final Class<? extends T> instance = getExtension(type);
+ if (instance == null) {
+ throw new IllegalArgumentException("Unknown [" + this.name + "] type [" + type + "]");
+ }
+ binder.bind(extensionClass).to(instance).asEagerSingleton();
+ return type;
+ }
+
+ }
+
+ /**
+ * A set based extension point which allows to register extended classes that might be used to chain additional functionality etc.
+ */
+    public final static class SetExtensionPoint<T> extends ExtensionPoint<T> {
+        private final Set<Class<? extends T>> extensions = new HashSet<>();
+
+ /**
+ * Creates a new {@link org.elasticsearch.common.util.ExtensionPoint.SetExtensionPoint}
+ *
+         * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
+ * @param extensionClass the base class that should be extended
+ * @param singletons a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
+ */
+        public SetExtensionPoint(String name, Class<T> extensionClass, Class<?>... singletons) {
+ super(name, extensionClass, singletons);
+ }
+
+ /**
+ * Registers a new extension
+ *
+ * @param extension the extension to register
+ * @throws IllegalArgumentException iff the class is already registered
+ */
+        public final void registerExtension(Class<? extends T> extension) {
+ if (extensions.contains(extension)) {
+ throw new IllegalArgumentException("Can't register the same [" + this.name + "] more than once for [" + extension.getName() + "]");
+ }
+ extensions.add(extension);
+ }
+
+ @Override
+ protected final void bindExtensions(Binder binder) {
+            Multibinder<T> allocationMultibinder = Multibinder.newSetBinder(binder, extensionClass);
+            for (Class<? extends T> clazz : extensions) {
+ allocationMultibinder.addBinding().to(clazz);
+ }
+ }
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java
index c0e3bf4be7d..8ec9a7e802b 100644
--- a/core/src/main/java/org/elasticsearch/env/Environment.java
+++ b/core/src/main/java/org/elasticsearch/env/Environment.java
@@ -319,13 +319,14 @@ public class Environment {
}
}
// try and load it from the classpath directly
- URL resource = settings.getClassLoader().getResource(path);
+ // TODO: remove this, callers can look up their own config on classpath
+ URL resource = getClass().getClassLoader().getResource(path);
if (resource != null) {
return resource;
}
// try and load it from the classpath with config/ prefix
if (!path.startsWith("config/")) {
- resource = settings.getClassLoader().getResource("config/" + path);
+ resource = getClass().getClassLoader().getResource("config/" + path);
if (resource != null) {
return resource;
}
diff --git a/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java b/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java
index 43ddbf7cef1..fc0b4d08e1f 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java
@@ -20,15 +20,21 @@
package org.elasticsearch.index.cache;
import org.elasticsearch.common.inject.AbstractModule;
+import org.elasticsearch.common.inject.Scopes;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCacheModule;
-import org.elasticsearch.index.cache.query.QueryCacheModule;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.cache.query.QueryCache;
+import org.elasticsearch.index.cache.query.index.IndexQueryCache;
+import org.elasticsearch.index.cache.query.none.NoneQueryCache;
-/**
- *
- */
public class IndexCacheModule extends AbstractModule {
+ public static final String INDEX_QUERY_CACHE = "index";
+ public static final String NONE_QUERY_CACHE = "none";
+ public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
+ // for test purposes only
+ public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
+
private final Settings settings;
public IndexCacheModule(Settings settings) {
@@ -37,9 +43,17 @@ public class IndexCacheModule extends AbstractModule {
@Override
protected void configure() {
- new QueryCacheModule(settings).configure(binder());
- new BitsetFilterCacheModule(settings).configure(binder());
-
+ String queryCacheType = settings.get(QUERY_CACHE_TYPE, INDEX_QUERY_CACHE);
+        Class<? extends QueryCache> queryCacheImpl;
+ if (queryCacheType.equals(INDEX_QUERY_CACHE)) {
+ queryCacheImpl = IndexQueryCache.class;
+ } else if (queryCacheType.equals(NONE_QUERY_CACHE)) {
+ queryCacheImpl = NoneQueryCache.class;
+ } else {
+ throw new IllegalArgumentException("Unknown QueryCache type [" + queryCacheType + "]");
+ }
+ bind(QueryCache.class).to(queryCacheImpl).in(Scopes.SINGLETON);
+ bind(BitsetFilterCache.class).asEagerSingleton();
bind(IndexCache.class).asEagerSingleton();
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheModule.java b/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheModule.java
deleted file mode 100644
index f5465c9c6ed..00000000000
--- a/core/src/main/java/org/elasticsearch/index/cache/query/QueryCacheModule.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.cache.query;
-
-import org.elasticsearch.cluster.metadata.AliasOrIndex;
-import org.elasticsearch.common.Classes;
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.inject.Scopes;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.cache.query.index.IndexQueryCache;
-
-/**
- *
- */
-public class QueryCacheModule extends AbstractModule {
-
- public static final class QueryCacheSettings {
- public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
- // for test purposes only
- public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
- }
-
- private final Settings settings;
-
- public QueryCacheModule(Settings settings) {
- this.settings = settings;
- }
-
- @Override
- protected void configure() {
- Class extends IndexQueryCache> queryCacheClass = IndexQueryCache.class;
- String customQueryCache = settings.get(QueryCacheSettings.QUERY_CACHE_TYPE);
- if (customQueryCache != null) {
- // TODO: make this only useable from tests
- queryCacheClass = Classes.loadClass(getClass().getClassLoader(), customQueryCache);
- }
- bind(QueryCache.class).to(queryCacheClass).in(Scopes.SINGLETON);
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java
index 29e83f69272..52930e8fac6 100644
--- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java
+++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.percolator;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
@@ -261,7 +262,9 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
try (Engine.Searcher searcher = shard.engine().acquireSearcher("percolator_load_queries")) {
Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME));
QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService);
- searcher.searcher().search(query, queryCollector);
+ IndexSearcher indexSearcher = new IndexSearcher(searcher.reader());
+ indexSearcher.setQueryCache(null);
+ indexSearcher.search(query, queryCollector);
Map queries = queryCollector.queries();
for (Map.Entry entry : queries.entrySet()) {
Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue());
diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java
index 7e82f1f88bd..a074e2adcda 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java
@@ -34,5 +34,5 @@ public class TermsLookupQueryBuilder extends TermsQueryBuilder {
@Override
public String getWriteableName() {
return TermsQueryBuilder.NAME;
- }
+ }
}
\ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
index 987e97a4605..24644e98a27 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
@@ -36,6 +36,10 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
private final Object values;
+ private String minimumShouldMatch;
+
+ private Boolean disableCoord;
+
private String execution;
private String lookupIndex;
@@ -43,7 +47,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
private String lookupId;
private String lookupRouting;
private String lookupPath;
- private Boolean lookupCache;
/**
* A filter for a field based on several terms matching on any of them.
@@ -133,7 +136,31 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
}
/**
+     * Sets the minimum number of matches across the provided terms. Defaults to 1.
+     * @deprecated use [bool] query instead
+     */
+    @Deprecated
+    public TermsQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
+        this.minimumShouldMatch = minimumShouldMatch;
+        return this;
+    }
+
+    /**
+     * Disables Similarity#coord(int,int) in scoring. Defaults to false.
+     * @deprecated use [bool] query instead
+     */
+    @Deprecated
+    public TermsQueryBuilder disableCoord(boolean disableCoord) {
+        this.disableCoord = disableCoord;
+        return this;
+    }
+
+    /**
      * Sets the index name to lookup the terms from.
*/
public TermsQueryBuilder lookupIndex(String lookupIndex) {
this.lookupIndex = lookupIndex;
@@ -169,11 +196,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
return this;
}
- public TermsQueryBuilder lookupCache(boolean lookupCache) {
- this.lookupCache = lookupCache;
- return this;
- }
-
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
@@ -187,9 +209,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
if (lookupRouting != null) {
builder.field("routing", lookupRouting);
}
- if (lookupCache != null) {
- builder.field("cache", lookupCache);
- }
builder.field("path", lookupPath);
builder.endObject();
} else {
@@ -199,7 +218,16 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
builder.field("execution", execution);
}
+ if (minimumShouldMatch != null) {
+ builder.field("minimum_should_match", minimumShouldMatch);
+ }
+
+ if (disableCoord != null) {
+ builder.field("disable_coord", disableCoord);
+ }
+
printBoostAndQueryName(builder);
+
builder.endObject();
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
index 9d58c19be73..0dc450e4144 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
@@ -50,6 +50,7 @@ import java.util.List;
public class TermsQueryParser extends BaseQueryParserTemp {
private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match").withAllDeprecated("Use [bool] query instead");
+ private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead");
private Client client;
@Deprecated
@@ -149,7 +150,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
minShouldMatch = parser.textOrNull();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
- } else if (("disable_coord").equals(currentFieldName) || ("disableCoord").equals(currentFieldName)) {
+ } else if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
disableCoord = parser.booleanValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java
index 4fd5233889a..837837a92b2 100644
--- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java
@@ -19,7 +19,6 @@
package org.elasticsearch.index.query.functionscore;
-import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index bb681586781..dc1207afaad 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -58,8 +58,8 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.cache.IndexCache;
+import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache;
-import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.codec.CodecService;
@@ -249,7 +249,7 @@ public class IndexShard extends AbstractIndexShardComponent {
final QueryCachingPolicy cachingPolicy;
// the query cache is a node-level thing, however we want the most popular filters
// to be computed on a per-shard basis
- if (indexSettings.getAsBoolean(QueryCacheSettings.QUERY_CACHE_EVERYTHING, false)) {
+ if (indexSettings.getAsBoolean(IndexCacheModule.QUERY_CACHE_EVERYTHING, false)) {
cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE;
} else {
cachingPolicy = new UsageTrackingQueryCachingPolicy();
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java
index 28a59734dac..0851b1c0e18 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java
@@ -40,12 +40,13 @@ import org.elasticsearch.index.translog.TranslogService;
*/
public class IndexShardModule extends AbstractModule {
- public static final String ENGINE_FACTORY = "index.engine.factory";
-
private final ShardId shardId;
private final Settings settings;
private final boolean primary;
+ // pkg private so tests can mock
+ Class extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class;
+
public IndexShardModule(ShardId shardId, boolean primary, Settings settings) {
this.settings = settings;
this.shardId = shardId;
@@ -70,13 +71,7 @@ public class IndexShardModule extends AbstractModule {
bind(TranslogService.class).asEagerSingleton();
}
- Class extends InternalEngineFactory> engineFactoryClass = InternalEngineFactory.class;
- String customEngineFactory = settings.get(ENGINE_FACTORY);
- if (customEngineFactory != null) {
- // TODO: make this only useable from tests
- engineFactoryClass = Classes.loadClass(getClass().getClassLoader(), customEngineFactory);
- }
- bind(EngineFactory.class).to(engineFactoryClass);
+ bind(EngineFactory.class).to(engineFactoryImpl);
bind(StoreRecoveryService.class).asEagerSingleton();
bind(ShardPercolateService.class).asEagerSingleton();
bind(ShardTermVectorsService.class).asEagerSingleton();
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index d207e4b6618..022fbd997b6 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -300,7 +300,6 @@ public class IndicesService extends AbstractLifecycleComponent i
Settings indexSettings = settingsBuilder()
.put(this.settings)
.put(settings)
- .classLoader(settings.getClassLoader())
.build();
ModulesBuilder modules = new ModulesBuilder();
diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java
index f1db13e9cee..7b9ed73bd3d 100644
--- a/core/src/main/java/org/elasticsearch/node/Node.java
+++ b/core/src/main/java/org/elasticsearch/node/Node.java
@@ -23,7 +23,6 @@ import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.cache.recycler.PageCacheRecyclerModule;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClientModule;
import org.elasticsearch.cluster.ClusterModule;
@@ -44,7 +43,6 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
-import org.elasticsearch.common.util.BigArraysModule;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.discovery.DiscoveryService;
@@ -71,7 +69,6 @@ import org.elasticsearch.monitor.MonitorModule;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
-import org.elasticsearch.node.internal.NodeModule;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.percolator.PercolatorModule;
import org.elasticsearch.percolator.PercolatorService;
@@ -161,9 +158,7 @@ public class Node implements Releasable {
try {
ModulesBuilder modules = new ModulesBuilder();
modules.add(new Version.Module(version));
- modules.add(new PageCacheRecyclerModule(settings));
modules.add(new CircuitBreakerModule(settings));
- modules.add(new BigArraysModule(settings));
modules.add(new PluginsModule(settings, pluginsService));
modules.add(new SettingsModule(settings));
modules.add(new NodeModule(this));
diff --git a/core/src/main/java/org/elasticsearch/node/internal/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java
similarity index 63%
rename from core/src/main/java/org/elasticsearch/node/internal/NodeModule.java
rename to core/src/main/java/org/elasticsearch/node/NodeModule.java
index 7ce0f4ed56d..befba85af09 100644
--- a/core/src/main/java/org/elasticsearch/node/internal/NodeModule.java
+++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java
@@ -17,9 +17,11 @@
* under the License.
*/
-package org.elasticsearch.node.internal;
+package org.elasticsearch.node;
+import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.AbstractModule;
+import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.node.settings.NodeSettingsService;
@@ -31,12 +33,27 @@ public class NodeModule extends AbstractModule {
private final Node node;
+ // pkg private so tests can mock
+ Class extends PageCacheRecycler> pageCacheRecyclerImpl = PageCacheRecycler.class;
+ Class extends BigArrays> bigArraysImpl = BigArrays.class;
+
public NodeModule(Node node) {
this.node = node;
}
@Override
protected void configure() {
+ if (pageCacheRecyclerImpl == PageCacheRecycler.class) {
+ bind(PageCacheRecycler.class).asEagerSingleton();
+ } else {
+ bind(PageCacheRecycler.class).to(pageCacheRecyclerImpl).asEagerSingleton();
+ }
+ if (bigArraysImpl == BigArrays.class) {
+ bind(BigArrays.class).asEagerSingleton();
+ } else {
+ bind(BigArrays.class).to(bigArraysImpl).asEagerSingleton();
+ }
+
bind(Node.class).toInstance(node);
bind(NodeSettingsService.class).asEagerSingleton();
bind(NodeService.class).asEagerSingleton();
diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java
index 09de3a118aa..7a824cd1ed0 100644
--- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java
+++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java
@@ -180,7 +180,7 @@ public class InternalSettingsPreparer {
static Settings replacePromptPlaceholders(Settings settings, Terminal terminal) {
UnmodifiableIterator> iter = settings.getAsMap().entrySet().iterator();
- Settings.Builder builder = Settings.builder().classLoader(settings.getClassLoaderIfSet());
+ Settings.Builder builder = Settings.builder();
while (iter.hasNext()) {
Map.Entry entry = iter.next();
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java
index 858a6ca1843..1e7cbb780e7 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java
@@ -56,6 +56,8 @@ import static org.elasticsearch.common.io.FileSystemUtils.moveFilesWithoutOverwr
*/
public class PluginManager {
+ public static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";
+
public enum OutputMode {
DEFAULT, SILENT, VERBOSE
}
@@ -430,8 +432,8 @@ public class PluginManager {
// Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0
if (user == null) {
// TODO Update to https
- if (Version.CURRENT.snapshot()) {
- addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip", repo, version, repo, version));
+ if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
+ addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip", repo, version, repo, version));
}
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", repo, version, repo, version));
} else {
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
index c895bf96e18..762d497e832 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
@@ -95,7 +95,7 @@ public class PluginsService extends AbstractComponent {
// this is a hack for what is between unit and integration tests...
String[] defaultPluginsClasses = settings.getAsArray("plugin.types");
for (String pluginClass : defaultPluginsClasses) {
- Plugin plugin = loadPlugin(pluginClass, settings);
+ Plugin plugin = loadPlugin(pluginClass, settings, getClass().getClassLoader());
PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), false, "NA", true, pluginClass, false);
if (logger.isTraceEnabled()) {
logger.trace("plugin loaded from settings [{}]", pluginInfo);
@@ -347,7 +347,7 @@ public class PluginsService extends AbstractComponent {
// pluginmanager does it, but we do it again, in case lusers mess with jar files manually
try {
final List jars = new ArrayList<>();
- ClassLoader parentLoader = settings.getClassLoader();
+ ClassLoader parentLoader = getClass().getClassLoader();
if (parentLoader instanceof URLClassLoader) {
for (URL url : ((URLClassLoader) parentLoader).getURLs()) {
jars.add(url);
@@ -360,16 +360,11 @@ public class PluginsService extends AbstractComponent {
}
// create a child to load the plugins in this bundle
- ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), settings.getClassLoader());
- Settings settings = Settings.builder()
- .put(this.settings)
- .classLoader(loader)
- .build();
-
+ ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
for (PluginInfo pluginInfo : bundle.plugins) {
final Plugin plugin;
if (pluginInfo.isJvm()) {
- plugin = loadPlugin(pluginInfo.getClassname(), settings);
+ plugin = loadPlugin(pluginInfo.getClassname(), settings, loader);
} else {
plugin = new SitePlugin(pluginInfo.getName(), pluginInfo.getDescription());
}
@@ -380,9 +375,9 @@ public class PluginsService extends AbstractComponent {
return plugins.build();
}
- private Plugin loadPlugin(String className, Settings settings) {
+ private Plugin loadPlugin(String className, Settings settings, ClassLoader loader) {
try {
- Class extends Plugin> pluginClass = settings.getClassLoader().loadClass(className).asSubclass(Plugin.class);
+ Class extends Plugin> pluginClass = loader.loadClass(className).asSubclass(Plugin.class);
try {
return pluginClass.getConstructor(Settings.class).newInstance(settings);
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java
index d6c52a6b130..f5de92f67d7 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -79,21 +79,21 @@ public class ScriptModule extends AbstractModule {
multibinder.addBinding().to(NativeScriptEngineService.class);
try {
- settings.getClassLoader().loadClass("groovy.lang.GroovyClassLoader");
+ Class.forName("groovy.lang.GroovyClassLoader");
multibinder.addBinding().to(GroovyScriptEngineService.class).asEagerSingleton();
} catch (Throwable t) {
Loggers.getLogger(ScriptService.class, settings).debug("failed to load groovy", t);
}
try {
- settings.getClassLoader().loadClass("com.github.mustachejava.Mustache");
+ Class.forName("com.github.mustachejava.Mustache");
multibinder.addBinding().to(MustacheScriptEngineService.class).asEagerSingleton();
} catch (Throwable t) {
Loggers.getLogger(ScriptService.class, settings).debug("failed to load mustache", t);
}
try {
- settings.getClassLoader().loadClass("org.apache.lucene.expressions.Expression");
+ Class.forName("org.apache.lucene.expressions.Expression");
multibinder.addBinding().to(ExpressionScriptEngineService.class).asEagerSingleton();
} catch (Throwable t) {
Loggers.getLogger(ScriptService.class, settings).debug("failed to load lucene expressions", t);
diff --git a/core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
index 93f196f448e..a74cdf94946 100644
--- a/core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
+++ b/core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
@@ -70,7 +70,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
config.addCompilationCustomizers(imports);
// Add BigDecimal -> Double transformer
config.addCompilationCustomizers(new GroovyBigDecimalTransformer(CompilePhase.CONVERSION));
- this.loader = new GroovyClassLoader(settings.getClassLoader(), config);
+ this.loader = new GroovyClassLoader(getClass().getClassLoader(), config);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java
index b4d15b35d16..49acd1163fc 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -19,8 +19,6 @@
package org.elasticsearch.search;
-import com.google.common.collect.Lists;
-
import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
@@ -150,24 +148,25 @@ import org.elasticsearch.search.suggest.SuggestPhase;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.Suggesters;
-import java.util.List;
+import java.util.*;
/**
*
*/
public class SearchModule extends AbstractModule {
- public static final String SEARCH_SERVICE_IMPL = "search.service_impl";
-
private final Settings settings;
- private final List> aggParsers = Lists.newArrayList();
- private final List> pipelineAggParsers = Lists.newArrayList();
- private final List> highlighters = Lists.newArrayList();
- private final List> suggesters = Lists.newArrayList();
- private final List> functionScoreParsers = Lists.newArrayList();
- private final List> fetchSubPhases = Lists.newArrayList();
- private final List> heuristicParsers = Lists.newArrayList();
- private final List> modelParsers = Lists.newArrayList();
+ private final Set> aggParsers = new HashSet<>();
+ private final Set> pipelineAggParsers = new HashSet<>();
+ private final Highlighters highlighters = new Highlighters();
+ private final Suggesters suggesters = new Suggesters();
+ private final Set> functionScoreParsers = new HashSet<>();
+ private final Set> fetchSubPhases = new HashSet<>();
+ private final Set> heuristicParsers = new HashSet<>();
+ private final Set> modelParsers = new HashSet<>();
+
+ // pkg private so tests can mock
+ Class extends SearchService> searchServiceImpl = SearchService.class;
public SearchModule(Settings settings) {
this.settings = settings;
@@ -182,12 +181,12 @@ public class SearchModule extends AbstractModule {
MovAvgModelStreams.registerStream(stream);
}
- public void registerHighlighter(Class extends Highlighter> clazz) {
- highlighters.add(clazz);
+ public void registerHighlighter(String key, Class extends Highlighter> clazz) {
+ highlighters.registerExtension(key, clazz);
}
- public void registerSuggester(Class extends Suggester> suggester) {
- suggesters.add(suggester);
+ public void registerSuggester(String key, Class extends Suggester> suggester) {
+ suggesters.registerExtension(key, suggester);
}
public void registerFunctionScoreParser(Class extends ScoreFunctionParser> parser) {
@@ -245,14 +244,7 @@ public class SearchModule extends AbstractModule {
}
protected void configureSuggesters() {
- Multibinder suggesterMultibinder = Multibinder.newSetBinder(binder(), Suggester.class);
- for (Class extends Suggester> clazz : suggesters) {
- suggesterMultibinder.addBinding().to(clazz);
- }
-
- bind(SuggestParseElement.class).asEagerSingleton();
- bind(SuggestPhase.class).asEagerSingleton();
- bind(Suggesters.class).asEagerSingleton();
+ suggesters.bind(binder());
}
protected void configureFunctionScore() {
@@ -264,11 +256,7 @@ public class SearchModule extends AbstractModule {
}
protected void configureHighlighters() {
- Multibinder multibinder = Multibinder.newSetBinder(binder(), Highlighter.class);
- for (Class extends Highlighter> highlighter : highlighters) {
- multibinder.addBinding().to(highlighter);
- }
- bind(Highlighters.class).asEagerSingleton();
+ highlighters.bind(binder());
}
protected void configureAggs() {
@@ -347,13 +335,10 @@ public class SearchModule extends AbstractModule {
bind(SearchServiceTransportAction.class).asEagerSingleton();
bind(MoreLikeThisFetchService.class).asEagerSingleton();
- // search service -- testing only!
- String impl = settings.get(SEARCH_SERVICE_IMPL);
- if (impl == null) {
+ if (searchServiceImpl == SearchService.class) {
bind(SearchService.class).asEagerSingleton();
} else {
- Class extends SearchService> implClass = Classes.loadClass(getClass().getClassLoader(), impl);
- bind(SearchService.class).to(implClass).asEagerSingleton();
+ bind(SearchService.class).to(searchServiceImpl).asEagerSingleton();
}
}
@@ -414,4 +399,5 @@ public class SearchModule extends AbstractModule {
BucketSelectorPipelineAggregator.registerStreams();
SerialDiffPipelineAggregator.registerStreams();
}
+
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java
index b37378652b9..6b7305db9e6 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java
@@ -40,7 +40,6 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder map = new HashMap<>();
map.putAll(STREAMS);
map.put(stream.getName(), stream);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java
index d12b0cdfef4..faee8a9f75b 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java
@@ -64,6 +64,9 @@ public class MovAvgModelStreams {
* @param stream The stream to register
*/
public static synchronized void registerStream(Stream stream) {
+ if (STREAMS.containsKey(stream.getName())) {
+ throw new IllegalArgumentException("Can't register stream with name [" + stream.getName() + "] more than once");
+ }
HashMap map = new HashMap<>();
map.putAll(STREAMS);
map.put(stream.getName(), stream);
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java
index 67b42a5d866..de73a898334 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java
@@ -49,11 +49,6 @@ public class FastVectorHighlighter implements Highlighter {
this.termVectorMultiValue = settings.getAsBoolean("search.highlight.term_vector_multi_value", true);
}
- @Override
- public String[] names() {
- return new String[]{"fvh", "fast-vector-highlighter"};
- }
-
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java
index 26c3dc0bf21..af4801f3633 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java
@@ -25,8 +25,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
*/
public interface Highlighter {
- String[] names();
-
HighlightField highlight(HighlighterContext highlighterContext);
boolean canHighlight(FieldMapper fieldMapper);
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java
index 9f14b0f7ed1..349227f5c57 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java
@@ -18,44 +18,74 @@
*/
package org.elasticsearch.search.highlight;
-import com.google.common.collect.ImmutableMap;
-import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.ExtensionPoint;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
/**
- *
+ * An extension point and registry for all the highlighters a node supports.
*/
-public class Highlighters {
+public class Highlighters extends ExtensionPoint.MapExtensionPoint {
+
+ @Deprecated // remove in 3.0
+ private static final String FAST_VECTOR_HIGHLIGHTER = "fast-vector-highlighter";
+ private static final String FVH = "fvh";
+ @Deprecated // remove in 3.0
+ private static final String HIGHLIGHTER = "highlighter";
+ private static final String PLAIN = "plain";
+ @Deprecated // remove in 3.0
+ private static final String POSTINGS_HIGHLIGHTER = "postings-highlighter";
+ private static final String POSTINGS = "postings";
+
private final Map parsers;
+ private final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Highlighters.class.getName()));
+
+ public Highlighters(){
+ this(Collections.EMPTY_MAP);
+ }
+
+ private Highlighters(Map parsers) {
+ super("highlighter", Highlighter.class, new HashSet<>(Arrays.asList(FVH, FAST_VECTOR_HIGHLIGHTER, PLAIN, HIGHLIGHTER, POSTINGS, POSTINGS_HIGHLIGHTER)),
+ Highlighters.class);
+ this.parsers = Collections.unmodifiableMap(parsers);
+ }
@Inject
- public Highlighters(Settings settings, Set parsers) {
+ public Highlighters(Settings settings, Map parsers) {
+ this(addBuiltIns(settings, parsers));
+ }
+
+ private static Map addBuiltIns(Settings settings, Map parsers) {
// build in highlighers
Map map = new HashMap<>();
- add(map, new FastVectorHighlighter(settings));
- add(map, new PlainHighlighter());
- add(map, new PostingsHighlighter());
- for (Highlighter highlighter : parsers) {
- add(map, highlighter);
- }
- this.parsers = Collections.unmodifiableMap(map);
+ map.put(FVH, new FastVectorHighlighter(settings));
+ map.put(FAST_VECTOR_HIGHLIGHTER, map.get(FVH));
+ map.put(PLAIN, new PlainHighlighter());
+ map.put(HIGHLIGHTER, map.get(PLAIN));
+ map.put(POSTINGS, new PostingsHighlighter());
+ map.put(POSTINGS_HIGHLIGHTER, map.get(POSTINGS));
+ map.putAll(parsers);
+ return map;
}
public Highlighter get(String type) {
+ switch (type) {
+ case FAST_VECTOR_HIGHLIGHTER:
+ deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", FAST_VECTOR_HIGHLIGHTER, FVH);
+ break;
+ case HIGHLIGHTER:
+ deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", HIGHLIGHTER, PLAIN);
+ break;
+ case POSTINGS_HIGHLIGHTER:
+ deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", POSTINGS_HIGHLIGHTER, POSTINGS);
+ break;
+ }
return parsers.get(type);
}
- private void add(Map map, Highlighter highlighter) {
- for (String type : highlighter.names()) {
- map.put(type, highlighter);
- }
- }
-
}
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java
index a9094f9ceaf..27f439a5c33 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java
@@ -47,11 +47,6 @@ public class PlainHighlighter implements Highlighter {
private static final String CACHE_KEY = "highlight-plain";
- @Override
- public String[] names() {
- return new String[] { "plain", "highlighter" };
- }
-
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java
index 35f6560899e..270401a9108 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java
@@ -40,11 +40,6 @@ public class PostingsHighlighter implements Highlighter {
private static final String CACHE_KEY = "highlight-postings";
- @Override
- public String[] names() {
- return new String[]{"postings", "postings-highlighter"};
- }
-
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java
index 51f5f21b460..7b3f7bdb89f 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java
@@ -29,8 +29,6 @@ public abstract class Suggester>
innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException;
- public abstract String[] names();
-
public abstract SuggestContextParser getContextParser();
public Suggest.Suggestion extends Suggest.Suggestion.Entry extends Suggest.Suggestion.Entry.Option>>
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java
index 264720b8b90..1be80b57502 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java
@@ -18,45 +18,46 @@
*/
package org.elasticsearch.search.suggest;
-import com.google.common.collect.ImmutableMap;
-import org.elasticsearch.common.collect.MapBuilder;
+import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.term.TermSuggester;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
/**
*
*/
-public class Suggesters {
+public final class Suggesters extends ExtensionPoint.MapExtensionPoint {
private final Map parsers;
+ public Suggesters() {
+ this(Collections.EMPTY_MAP);
+ }
+
+ public Suggesters(Map suggesters) {
+ super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class);
+ this.parsers = Collections.unmodifiableMap(suggesters);
+ }
+
@Inject
- public Suggesters(Set suggesters, ScriptService scriptService) {
+ public Suggesters(Map suggesters, ScriptService scriptService) {
+ this(addBuildIns(suggesters, scriptService));
+ }
+
+ private static Map addBuildIns(Map suggesters, ScriptService scriptService) {
final Map map = new HashMap<>();
- add(map, new PhraseSuggester(scriptService));
- add(map, new TermSuggester());
- add(map, new CompletionSuggester());
- for (Suggester suggester : suggesters) {
- add(map, suggester);
- }
- this.parsers = Collections.unmodifiableMap(map);
+ map.put("phrase", new PhraseSuggester(scriptService));
+ map.put("term", new TermSuggester());
+ map.put("completion", new CompletionSuggester());
+ map.putAll(suggesters);
+ return map;
}
public Suggester get(String type) {
return parsers.get(type);
}
-
- private void add(Map map, Suggester suggester) {
- for (String type : suggester.names()) {
- map.put(type, suggester);
- }
- }
}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java
index 4af360fa05f..4a1d5d1d28c 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java
@@ -101,11 +101,6 @@ public class CompletionSuggester extends Suggester
return completionSuggestion;
}
- @Override
- public String[] names() {
- return new String[] { "completion" };
- }
-
@Override
public SuggestContextParser getContextParser() {
return new CompletionSuggestParser(this);
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java
index 30c1b63de21..e7d0eb378c3 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java
@@ -150,11 +150,6 @@ public final class PhraseSuggester extends Suggester {
return scriptService;
}
- @Override
- public String[] names() {
- return new String[] {"phrase"};
- }
-
@Override
public SuggestContextParser getContextParser() {
return new PhraseSuggestParser(this);
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java
index 70dfefe9522..4c1b176c990 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java
@@ -65,11 +65,6 @@ public final class TermSuggester extends Suggester {
return response;
}
- @Override
- public String[] names() {
- return new String[] {"term"};
- }
-
@Override
public SuggestContextParser getContextParser() {
return new TermSuggestParser(this);
diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
index 9078cdc2ad6..fd8cbec0755 100644
--- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
@@ -167,7 +167,7 @@ public class ExceptionSerializationTests extends ESTestCase {
pkg.append(p.getFileName().toString()).append(".");
}
pkg.append(filename.substring(0, filename.length() - 6));
- return Thread.currentThread().getContextClassLoader().loadClass(pkg.toString());
+ return getClass().getClassLoader().loadClass(pkg.toString());
}
@Override
diff --git a/core/src/test/java/org/elasticsearch/test/cache/recycler/MockPageCacheRecycler.java b/core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java
similarity index 97%
rename from core/src/test/java/org/elasticsearch/test/cache/recycler/MockPageCacheRecycler.java
rename to core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java
index 482575d7a23..1f8ec84e193 100644
--- a/core/src/test/java/org/elasticsearch/test/cache/recycler/MockPageCacheRecycler.java
+++ b/core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java
@@ -17,15 +17,16 @@
* under the License.
*/
-package org.elasticsearch.test.cache.recycler;
+package org.elasticsearch.cache.recycler;
import com.google.common.base.Predicate;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
-import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.recycler.Recycler.V;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.node.NodeModule;
+import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.threadpool.ThreadPool;
diff --git a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
new file mode 100644
index 00000000000..93318346ac2
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.cluster;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
+import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
+import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
+import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
+import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
+import org.elasticsearch.cluster.routing.allocation.decider.MockDiskUsagesIT;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.node.settings.NodeSettingsService;
+import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Fake ClusterInfoService class that allows updating the nodes stats disk
+ * usage with fake values
+ */
+public class MockInternalClusterInfoService extends InternalClusterInfoService {
+
+ public static class Plugin extends AbstractPlugin {
+ @Override
+ public String name() {
+ return "mock-cluster-info-service";
+ }
+ @Override
+ public String description() {
+ return "a mock cluster info service for testing";
+ }
+ public void onModule(ClusterModule module) {
+ module.clusterInfoServiceImpl = MockInternalClusterInfoService.class;
+ }
+ }
+
+ private final ClusterName clusterName;
+ private volatile NodeStats[] stats = new NodeStats[3];
+
+ @Inject
+ public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
+ TransportNodesStatsAction transportNodesStatsAction,
+ TransportIndicesStatsAction transportIndicesStatsAction,
+ ClusterService clusterService, ThreadPool threadPool) {
+ super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool);
+ this.clusterName = ClusterName.clusterNameFromSettings(settings);
+ stats[0] = MockDiskUsagesIT.makeStats("node_t1", new DiskUsage("node_t1", "n1", 100, 100));
+ stats[1] = MockDiskUsagesIT.makeStats("node_t2", new DiskUsage("node_t2", "n2", 100, 100));
+ stats[2] = MockDiskUsagesIT.makeStats("node_t3", new DiskUsage("node_t3", "n3", 100, 100));
+ }
+
+ public void setN1Usage(String nodeName, DiskUsage newUsage) {
+ stats[0] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
+ }
+
+ public void setN2Usage(String nodeName, DiskUsage newUsage) {
+ stats[1] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
+ }
+
+ public void setN3Usage(String nodeName, DiskUsage newUsage) {
+ stats[2] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
+ }
+
+ @Override
+ public CountDownLatch updateNodeStats(final ActionListener listener) {
+ NodesStatsResponse response = new NodesStatsResponse(clusterName, stats);
+ listener.onResponse(response);
+ return new CountDownLatch(0);
+ }
+
+ @Override
+ public CountDownLatch updateIndicesStats(final ActionListener listener) {
+ // Not used, so noop
+ return new CountDownLatch(0);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationModuleTests.java
index 678bfa3b0e5..7b57ef07190 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationModuleTests.java
@@ -59,8 +59,7 @@ public class AllocationModuleTests extends ModuleTestCase {
try {
module.registerAllocationDecider(EnableAllocationDecider.class);
} catch (IllegalArgumentException e) {
- assertTrue(e.getMessage().contains("Cannot register AllocationDecider"));
- assertTrue(e.getMessage().contains("twice"));
+ assertEquals(e.getMessage(), "Can't register the same [allocation_decider] more than once for [" + EnableAllocationDecider.class.getName() + "]");
}
}
@@ -82,14 +81,14 @@ public class AllocationModuleTests extends ModuleTestCase {
try {
module.registerShardAllocator(AllocationModule.BALANCED_ALLOCATOR, FakeShardsAllocator.class);
} catch (IllegalArgumentException e) {
- assertTrue(e.getMessage().contains("already registered"));
+ assertEquals(e.getMessage(), "Can't register the same [shards_allocator] more than once for [balanced]");
}
}
public void testUnknownShardsAllocator() {
Settings settings = Settings.builder().put(AllocationModule.SHARDS_ALLOCATOR_TYPE_KEY, "dne").build();
AllocationModule module = new AllocationModule(settings);
- assertBindingFailure(module, "Unknown ShardsAllocator");
+ assertBindingFailure(module, "Unknown [shards_allocator]");
}
public void testEvenShardsAllocatorBackcompat() {
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java
index a24e980f236..79612d07b0e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java
@@ -20,28 +20,20 @@
package org.elasticsearch.cluster.routing.allocation.decider;
import org.elasticsearch.Version;
-import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
-import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
-import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
-import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.fs.FsInfo;
-import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Test;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.CountDownLatch;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
@@ -57,8 +49,8 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
- // Use the mock internal cluster info service, which has fake-able disk usages
- .put(ClusterModule.CLUSTER_SERVICE_IMPL, MockInternalClusterInfoService.class.getName())
+ // Use the mock internal cluster info service, which has fake-able disk usages
+ .extendArray("plugin.types", MockInternalClusterInfoService.Plugin.class.getName())
// Update more frequently
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s")
.build();
@@ -183,50 +175,4 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
null);
}
- /**
- * Fake ClusterInfoService class that allows updating the nodes stats disk
- * usage with fake values
- */
- public static class MockInternalClusterInfoService extends InternalClusterInfoService {
-
- private final ClusterName clusterName;
- private volatile NodeStats[] stats = new NodeStats[3];
-
- @Inject
- public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
- TransportNodesStatsAction transportNodesStatsAction,
- TransportIndicesStatsAction transportIndicesStatsAction,
- ClusterService clusterService, ThreadPool threadPool) {
- super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool);
- this.clusterName = ClusterName.clusterNameFromSettings(settings);
- stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", 100, 100));
- stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", 100, 100));
- stats[2] = makeStats("node_t3", new DiskUsage("node_t3", "n3", 100, 100));
- }
-
- public void setN1Usage(String nodeName, DiskUsage newUsage) {
- stats[0] = makeStats(nodeName, newUsage);
- }
-
- public void setN2Usage(String nodeName, DiskUsage newUsage) {
- stats[1] = makeStats(nodeName, newUsage);
- }
-
- public void setN3Usage(String nodeName, DiskUsage newUsage) {
- stats[2] = makeStats(nodeName, newUsage);
- }
-
- @Override
- public CountDownLatch updateNodeStats(final ActionListener listener) {
- NodesStatsResponse response = new NodesStatsResponse(clusterName, stats);
- listener.onResponse(response);
- return new CountDownLatch(0);
- }
-
- @Override
- public CountDownLatch updateIndicesStats(final ActionListener listener) {
- // Not used, so noop
- return new CountDownLatch(0);
- }
- }
}
diff --git a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java
index d96b89d382c..60c3ca126d5 100644
--- a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java
@@ -72,6 +72,37 @@ public abstract class ModuleTestCase extends ESTestCase {
}
}
+ /**
+ * Configures the module and checks a Map of the "to" class
+ * is bound to "theClas".
+ */
+ public void assertMapMultiBinding(Module module, Class to, Class theClass) {
+ List elements = Elements.getElements(module);
+ Set bindings = new HashSet<>();
+ boolean providerFound = false;
+ for (Element element : elements) {
+ if (element instanceof LinkedKeyBinding) {
+ LinkedKeyBinding binding = (LinkedKeyBinding)element;
+ if (to.equals(binding.getKey().getTypeLiteral().getType())) {
+ bindings.add(binding.getLinkedKey().getTypeLiteral().getType());
+ }
+ } else if (element instanceof ProviderInstanceBinding) {
+ ProviderInstanceBinding binding = (ProviderInstanceBinding)element;
+ String setType = binding.getKey().getTypeLiteral().getType().toString();
+ if (setType.equals("java.util.Map")) {
+ providerFound = true;
+ }
+ }
+ }
+
+ if (bindings.contains(theClass) == false) {
+ fail("Expected to find " + theClass.getName() + " as binding to " + to.getName() + ", found these classes:\n" + bindings);
+ }
+ assertTrue("Did not find provider for map of " + to.getName(), providerFound);
+ }
+
+
+
/**
* Configures the module and checks a Set of the "to" class
* is bound to "classes". There may be more classes bound
diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java
index f4604d28d1a..142d60871aa 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java
@@ -34,8 +34,9 @@ public class JsonSettingsLoaderTests extends ESTestCase {
@Test
public void testSimpleJsonSettings() throws Exception {
+ String json = "/org/elasticsearch/common/settings/loader/test-settings.json";
Settings settings = settingsBuilder()
- .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.build();
assertThat(settings.get("test1.value1"), equalTo("value1"));
diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java
index e43f5e7f2aa..49b5444a52b 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java
@@ -34,8 +34,9 @@ public class YamlSettingsLoaderTests extends ESTestCase {
@Test
public void testSimpleYamlSettings() throws Exception {
+ String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml";
Settings settings = settingsBuilder()
- .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.yml")
+ .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.build();
assertThat(settings.get("test1.value1"), equalTo("value1"));
@@ -52,15 +53,17 @@ public class YamlSettingsLoaderTests extends ESTestCase {
@Test(expected = SettingsException.class)
public void testIndentation() {
+ String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml";
settingsBuilder()
- .loadFromClasspath("org/elasticsearch/common/settings/loader/indentation-settings.yml")
- .build();
+ .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
+ .build();
}
@Test(expected = SettingsException.class)
public void testIndentationWithExplicitDocumentStart() {
+ String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml";
settingsBuilder()
- .loadFromClasspath("org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml")
+ .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.build();
}
}
\ No newline at end of file
diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
index 01fae517c3d..7d36c09ee19 100644
--- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
@@ -29,7 +29,6 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESSingleNodeTestCase;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.junit.Before;
import java.lang.reflect.InvocationTargetException;
diff --git a/core/src/test/java/org/elasticsearch/test/cache/recycler/MockBigArrays.java b/core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java
similarity index 97%
rename from core/src/test/java/org/elasticsearch/test/cache/recycler/MockBigArrays.java
rename to core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java
index 4c67fbcd08b..4eb4a376962 100644
--- a/core/src/test/java/org/elasticsearch/test/cache/recycler/MockBigArrays.java
+++ b/core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.test.cache.recycler;
+package org.elasticsearch.common.util;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.SeedUtils;
@@ -30,15 +30,8 @@ import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.util.BigArray;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.ByteArray;
-import org.elasticsearch.common.util.DoubleArray;
-import org.elasticsearch.common.util.FloatArray;
-import org.elasticsearch.common.util.IntArray;
-import org.elasticsearch.common.util.LongArray;
-import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
+import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase;
import java.util.Collection;
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java
index a2eb9565f7f..0a4b057dd3d 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java
@@ -27,8 +27,8 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
-import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java
index 07678d95982..de923f5d3f9 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java
@@ -29,8 +29,8 @@ import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent;
import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
-import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
index e667afaf5a3..8a81705684d 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
@@ -79,7 +79,7 @@ public class AnalysisModuleTests extends ESTestCase {
}
private Settings loadFromClasspath(String path) {
- return settingsBuilder().loadFromClasspath(path)
+ return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.build();
@@ -88,13 +88,13 @@ public class AnalysisModuleTests extends ESTestCase {
@Test
public void testSimpleConfigurationJson() {
- Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.json");
+ Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.json");
testSimpleConfiguration(settings);
}
@Test
public void testSimpleConfigurationYaml() {
- Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.yml");
+ Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.yml");
testSimpleConfiguration(settings);
}
@@ -107,8 +107,9 @@ public class AnalysisModuleTests extends ESTestCase {
@Test
public void testVersionedAnalyzers() throws Exception {
+ String yaml = "/org/elasticsearch/index/analysis/test1.yml";
Settings settings2 = settingsBuilder()
- .loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
+ .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
index 6f7581768f3..74ff95d4a14 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
@@ -39,7 +39,7 @@ public class AnalysisTestsHelper {
public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) {
Settings settings = Settings.settingsBuilder()
- .loadFromClasspath(resource)
+ .loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource))
.put("path.home", baseDir.toString())
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java
index 418cd2b30a2..98ed9d28703 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java
@@ -29,7 +29,7 @@ import java.io.StringReader;
public class CJKFilterFactoryTests extends ESTokenStreamTestCase {
- private static final String RESOURCE = "org/elasticsearch/index/analysis/cjk_analysis.json";
+ private static final String RESOURCE = "/org/elasticsearch/index/analysis/cjk_analysis.json";
@Test
public void testDefault() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
index a259dc0d19e..ad81450c336 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
@@ -115,16 +115,18 @@ public class CompoundAnalysisTests extends ESTestCase {
}
private Settings getJsonSettings() {
+ String json = "/org/elasticsearch/index/analysis/test1.json";
return settingsBuilder()
- .loadFromClasspath("org/elasticsearch/index/analysis/test1.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.build();
}
private Settings getYamlSettings() {
+ String yaml = "/org/elasticsearch/index/analysis/test1.yml";
return settingsBuilder()
- .loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
+ .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
index 14f3b6fe0d3..33cb31f751e 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.instanceOf;
public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
- private static final String RESOURCE = "org/elasticsearch/index/analysis/keep_analysis.json";
+ private static final String RESOURCE = "/org/elasticsearch/index/analysis/keep_analysis.json";
@Test
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
index 0a1d7e5e196..2796367f07f 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
@@ -41,10 +41,11 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
@Test
public void testPatternCaptureTokenFilter() throws Exception {
+ String json = "/org/elasticsearch/index/analysis/pattern_capture.json";
Index index = new Index("test");
Settings settings = settingsBuilder()
.put("path.home", createTempDir())
- .loadFromClasspath("org/elasticsearch/index/analysis/pattern_capture.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
index d8b6905d777..26883f562f5 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
@@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.instanceOf;
@ThreadLeakScope(Scope.NONE)
public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {
- private static final String RESOURCE = "org/elasticsearch/index/analysis/shingle_analysis.json";
+ private static final String RESOURCE = "/org/elasticsearch/index/analysis/shingle_analysis.json";
@Test
public void testDefault() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
index e520fd1c557..5ec0178cea0 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
@@ -41,9 +41,10 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
@Test
public void testDefaultsCompoundAnalysis() throws Exception {
+ String json = "/org/elasticsearch/index/analysis/stop.json";
Index index = new Index("test");
Settings settings = settingsBuilder()
- .loadFromClasspath("org/elasticsearch/index/analysis/stop.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
index 6aadf4d1377..7391b045f6d 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
@@ -134,8 +134,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
@Test
public void testCommonGramsAnalysis() throws IOException {
+ String json = "/org/elasticsearch/index/analysis/commongrams/commongrams.json";
Settings settings = Settings.settingsBuilder()
- .loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.build();
{
@@ -218,8 +219,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
@Test
public void testQueryModeCommonGramsAnalysis() throws IOException {
+ String json = "/org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json";
Settings settings = Settings.settingsBuilder()
- .loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.build();
{
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTest.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTest.java
index beb5ae3b51a..78349a012bb 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTest.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTest.java
@@ -59,8 +59,9 @@ public class SynonymsAnalysisTest extends ESTestCase {
@Test
public void testSynonymsAnalysis() throws IOException {
+ String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json";
Settings settings = settingsBuilder().
- loadFromClasspath("org/elasticsearch/index/analysis/synonyms/synonyms.json")
+ loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index 3ff77a39278..361fd156bfa 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -20,9 +20,11 @@ package org.elasticsearch.index.shard;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
@@ -30,6 +32,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.IndexService;
@@ -48,6 +51,7 @@ import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutionException;
@@ -56,6 +60,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
@@ -434,4 +439,89 @@ public class IndexShardTests extends ESSingleNodeTestCase {
response = client().prepareSearch("test").get();
assertHitCount(response, 0l);
}
+
+ public void testIndexDirIsDeletedWhenShardRemoved() throws Exception {
+ Environment env = getInstanceFromNode(Environment.class);
+ Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10));
+ logger.info("--> idxPath: [{}]", idxPath);
+ Settings idxSettings = Settings.builder()
+ .put(IndexMetaData.SETTING_DATA_PATH, idxPath)
+ .build();
+ createIndex("test", idxSettings);
+ ensureGreen("test");
+ client().prepareIndex("test", "bar", "1").setSource("{}").setRefresh(true).get();
+ SearchResponse response = client().prepareSearch("test").get();
+ assertHitCount(response, 1l);
+ client().admin().indices().prepareDelete("test").get();
+ assertPathHasBeenCleared(idxPath);
+ }
+
+ public void testIndexCanChangeCustomDataPath() throws Exception {
+ Environment env = getInstanceFromNode(Environment.class);
+ Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10));
+ final String INDEX = "idx";
+ Path startDir = idxPath.resolve("start-" + randomAsciiOfLength(10));
+ Path endDir = idxPath.resolve("end-" + randomAsciiOfLength(10));
+ logger.info("--> start dir: [{}]", startDir.toAbsolutePath().toString());
+ logger.info("--> end dir: [{}]", endDir.toAbsolutePath().toString());
+ // temp dirs are automatically created, but the end dir is what
+ // startDir is going to be renamed as, so it needs to be deleted
+ // otherwise we get all sorts of errors about the directory
+ // already existing
+ IOUtils.rm(endDir);
+
+ Settings sb = Settings.builder()
+ .put(IndexMetaData.SETTING_DATA_PATH, startDir.toAbsolutePath().toString())
+ .build();
+ Settings sb2 = Settings.builder()
+ .put(IndexMetaData.SETTING_DATA_PATH, endDir.toAbsolutePath().toString())
+ .build();
+
+ logger.info("--> creating an index with data_path [{}]", startDir.toAbsolutePath().toString());
+ createIndex(INDEX, sb);
+ ensureGreen(INDEX);
+ client().prepareIndex(INDEX, "bar", "1").setSource("{}").setRefresh(true).get();
+
+ SearchResponse resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
+ assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
+
+ logger.info("--> closing the index [{}]", INDEX);
+ client().admin().indices().prepareClose(INDEX).get();
+ logger.info("--> index closed, re-opening...");
+ client().admin().indices().prepareOpen(INDEX).get();
+ logger.info("--> index re-opened");
+ ensureGreen(INDEX);
+
+ resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
+ assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
+
+ // Now, try closing and changing the settings
+
+ logger.info("--> closing the index [{}]", INDEX);
+ client().admin().indices().prepareClose(INDEX).get();
+
+ logger.info("--> moving data on disk [{}] to [{}]", startDir.getFileName(), endDir.getFileName());
+ assert Files.exists(endDir) == false : "end directory should not exist!";
+ Files.move(startDir, endDir, StandardCopyOption.REPLACE_EXISTING);
+
+ logger.info("--> updating settings...");
+ client().admin().indices().prepareUpdateSettings(INDEX)
+ .setSettings(sb2)
+ .setIndicesOptions(IndicesOptions.fromOptions(true, false, true, true))
+ .get();
+
+ assert Files.exists(startDir) == false : "start dir shouldn't exist";
+
+ logger.info("--> settings updated and files moved, re-opening index");
+ client().admin().indices().prepareOpen(INDEX).get();
+ logger.info("--> index re-opened");
+ ensureGreen(INDEX);
+
+ resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
+ assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
+
+ assertAcked(client().admin().indices().prepareDelete(INDEX));
+ assertPathHasBeenCleared(startDir.toAbsolutePath().toString());
+ assertPathHasBeenCleared(endDir.toAbsolutePath().toString());
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/index/shard/MockEngineFactoryPlugin.java b/core/src/test/java/org/elasticsearch/index/shard/MockEngineFactoryPlugin.java
new file mode 100644
index 00000000000..8ed60609683
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/shard/MockEngineFactoryPlugin.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.shard;
+
+import org.elasticsearch.common.inject.Module;
+import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.test.engine.MockEngineFactory;
+import org.elasticsearch.test.engine.MockEngineSupportModule;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+// this must exist in the same package as IndexShardModule to allow access to setting the impl
+public class MockEngineFactoryPlugin extends AbstractPlugin {
+ @Override
+ public String name() {
+ return "mock-engine-factory";
+ }
+ @Override
+ public String description() {
+ return "a mock engine factory for testing";
+ }
+ @Override
+    public Collection<Class<? extends Module>> indexModules() {
+        List<Class<? extends Module>> modules = new ArrayList<>();
+ modules.add(MockEngineSupportModule.class);
+ return modules;
+ }
+ public void onModule(IndexShardModule module) {
+ module.engineFactoryImpl = MockEngineFactory.class;
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesCustomDataPathIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesCustomDataPathIT.java
deleted file mode 100644
index 46d6f126f81..00000000000
--- a/core/src/test/java/org/elasticsearch/indices/IndicesCustomDataPathIT.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.indices;
-
-import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.junit.annotations.TestLogging;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.equalTo;
-
-/**
- * Tests for custom data path locations and templates
- */
-@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
-public class IndicesCustomDataPathIT extends ESIntegTestCase {
-
- private String path;
-
- private Settings nodeSettings(Path dataPath) {
- return nodeSettings(dataPath.toString());
- }
-
- private Settings nodeSettings(String dataPath) {
- return Settings.builder()
- .put("node.add_id_to_custom_path", false)
- .put("path.shared_data", dataPath)
- .put("index.store.fs.fs_lock", randomFrom("native", "simple"))
- .build();
- }
-
- @Before
- public void setup() {
- path = createTempDir().toAbsolutePath().toString();
- }
-
- @After
- public void teardown() throws Exception {
- IOUtils.deleteFilesIgnoringExceptions(PathUtils.get(path));
- }
-
- @Test
- @TestLogging("_root:DEBUG,index:TRACE")
- @AwaitsFix(bugUrl = "path shenanigans, Lee is looking into it")
- public void testDataPathCanBeChanged() throws Exception {
- final String INDEX = "idx";
- Path root = createTempDir();
- internalCluster().startNodesAsync(1, nodeSettings(root));
- Path startDir = root.resolve("start");
- Path endDir = root.resolve("end");
- logger.info("--> start dir: [{}]", startDir.toAbsolutePath().toString());
- logger.info("--> end dir: [{}]", endDir.toAbsolutePath().toString());
- // temp dirs are automatically created, but the end dir is what
- // startDir is going to be renamed as, so it needs to be deleted
- // otherwise we get all sorts of errors about the directory
- // already existing
- IOUtils.rm(endDir);
-
- Settings.Builder sb = Settings.builder().put(IndexMetaData.SETTING_DATA_PATH,
- startDir.toAbsolutePath().toString());
- Settings.Builder sb2 = Settings.builder().put(IndexMetaData.SETTING_DATA_PATH,
- endDir.toAbsolutePath().toString());
-
- logger.info("--> creating an index with data_path [{}]", startDir.toAbsolutePath().toString());
- client().admin().indices().prepareCreate(INDEX).setSettings(sb).get();
- ensureGreen(INDEX);
-
- indexRandom(true, client().prepareIndex(INDEX, "doc", "1").setSource("{\"body\": \"foo\"}"));
-
- SearchResponse resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
- assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
-
- logger.info("--> closing the index [{}]", INDEX);
- client().admin().indices().prepareClose(INDEX).get();
- logger.info("--> index closed, re-opening...");
- client().admin().indices().prepareOpen(INDEX).get();
- logger.info("--> index re-opened");
- ensureGreen(INDEX);
-
- resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
- assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
-
- // Now, try closing and changing the settings
-
- logger.info("--> closing the index [{}]", INDEX);
- client().admin().indices().prepareClose(INDEX).get();
-
- logger.info("--> moving data on disk [{}] to [{}]", startDir.getFileName(), endDir.getFileName());
- assert Files.exists(endDir) == false : "end directory should not exist!";
- Files.move(startDir, endDir, StandardCopyOption.REPLACE_EXISTING);
-
- logger.info("--> updating settings...");
- client().admin().indices().prepareUpdateSettings(INDEX)
- .setSettings(sb2)
- .setIndicesOptions(IndicesOptions.fromOptions(true, false, true, true))
- .get();
-
- assert Files.exists(startDir) == false : "start dir shouldn't exist";
-
- logger.info("--> settings updated and files moved, re-opening index");
- client().admin().indices().prepareOpen(INDEX).get();
- logger.info("--> index re-opened");
- ensureGreen(INDEX);
-
- resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
- assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
-
- assertAcked(client().admin().indices().prepareDelete(INDEX));
- assertPathHasBeenCleared(startDir.toAbsolutePath().toString());
- assertPathHasBeenCleared(endDir.toAbsolutePath().toString());
- }
-
- @Test
- @AwaitsFix(bugUrl = "path shenanigans, Lee is looking into it")
- public void testIndexCreatedWithCustomPathAndTemplate() throws Exception {
- final String INDEX = "myindex2";
- internalCluster().startNodesAsync(1, nodeSettings(path));
-
- logger.info("--> creating an index with data_path [{}]", path);
- Settings.Builder sb = Settings.builder()
- .put(IndexMetaData.SETTING_DATA_PATH, path)
- .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0);
-
- client().admin().indices().prepareCreate(INDEX).setSettings(sb).get();
- ensureGreen(INDEX);
-
- indexRandom(true, client().prepareIndex(INDEX, "doc", "1").setSource("{\"body\": \"foo\"}"));
-
- SearchResponse resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
- assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
- assertAcked(client().admin().indices().prepareDelete(INDEX));
- assertPathHasBeenCleared(path);
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index 053b2391536..2e8c1658270 100644
--- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -35,9 +35,11 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
+import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
+import org.elasticsearch.test.engine.MockEngineSupportModule;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;
import org.junit.Test;
@@ -105,7 +107,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
Settings.Builder settings = settingsBuilder()
.put(indexSettings())
- .put(MockEngineSupport.READER_WRAPPER_TYPE, RandomExceptionDirectoryReaderWrapper.class.getName())
+ .extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.Plugin.class.getName())
.put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate)
.put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
.put(MockEngineSupport.WRAP_READER_RATIO, 1.0d);
@@ -199,6 +201,21 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
// TODO: Generalize this class and add it as a utility
public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {
+
+ public static class Plugin extends AbstractPlugin {
+ @Override
+ public String name() {
+ return "random-exception-reader-wrapper";
+ }
+ @Override
+ public String description() {
+ return "a mock reader wrapper that throws random exceptions for testing";
+ }
+ public void onModule(MockEngineSupportModule module) {
+ module.wrapperImpl = RandomExceptionDirectoryReaderWrapper.class;
+ }
+ }
+
private final Settings settings;
static class ThrowingSubReaderWrapper extends SubReaderWrapper implements ThrowingLeafReaderWrapper.Thrower {
diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
index 8aeb6f65b9b..b01ec7865f8 100644
--- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.stats;
+import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.shard.MergeSchedulerConfig;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.Version;
@@ -39,9 +40,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.cache.query.QueryCacheModule;
import org.elasticsearch.index.cache.query.QueryCacheStats;
-import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.MergePolicyConfig;
@@ -79,8 +78,8 @@ public class IndexStatsIT extends ESIntegTestCase {
//Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. Thread.sleep for 60s is bad
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
.put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL, "1ms")
- .put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, true)
- .put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, IndexQueryCache.class)
+ .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
+ .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/node/NodeMocksPlugin.java b/core/src/test/java/org/elasticsearch/node/NodeMocksPlugin.java
new file mode 100644
index 00000000000..8eed1e16713
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/node/NodeMocksPlugin.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.node;
+
+import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.plugins.AbstractPlugin;
+
+public class NodeMocksPlugin extends AbstractPlugin {
+
+ @Override
+ public String name() {
+ return "node-mocks";
+ }
+
+ @Override
+ public String description() {
+ return "a plugin to setup mocks for node level classes";
+ }
+
+ public void onModule(NodeModule module) {
+ module.pageCacheRecyclerImpl = MockPageCacheRecycler.class;
+ module.bigArraysImpl = MockBigArrays.class;
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
index a299be41093..e77a1410a91 100644
--- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
+++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
@@ -222,19 +222,4 @@ public class InternalSettingsPreparerTests extends ESTestCase {
assertThat(settings.get("name"), is("prompted name 0"));
assertThat(settings.get("node.name"), is("prompted name 0"));
}
-
- @Test
- public void testPreserveSettingsClassloader() {
- final ClassLoader classLoader = URLClassLoader.newInstance(new URL[0]);
- Settings settings = settingsBuilder()
- .put("foo", "bar")
- .put("path.home", createTempDir())
- .classLoader(classLoader)
- .build();
-
- Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settings, randomBoolean());
-
- Settings preparedSettings = tuple.v1();
- assertThat(preparedSettings.getClassLoaderIfSet(), is(classLoader));
- }
}
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java
index 519c003ac69..40f90c908b6 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java
@@ -24,6 +24,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
+import org.junit.After;
import org.junit.Test;
import java.io.IOException;
@@ -33,7 +34,6 @@ import java.util.Iterator;
import java.util.Locale;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
@@ -42,6 +42,11 @@ import static org.hamcrest.Matchers.is;
*/
public class PluginManagerUnitTests extends ESTestCase {
+ @After
+ public void cleanSystemProperty() {
+ System.clearProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS);
+ }
+
@Test
public void testThatConfigDirectoryCanBeOutsideOfElasticsearchHomeDirectory() throws IOException {
String pluginName = randomAsciiOfLength(10);
@@ -66,19 +71,24 @@ public class PluginManagerUnitTests extends ESTestCase {
String pluginName = randomAsciiOfLength(10);
PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(pluginName);
- assertThat(handle.urls(), hasSize(Version.CURRENT.snapshot() ? 2 : 1));
+ boolean supportStagingUrls = randomBoolean();
+ if (supportStagingUrls) {
+ System.setProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS, "true");
+ }
Iterator<URL> iterator = handle.urls().iterator();
- if (Version.CURRENT.snapshot()) {
- String expectedSnapshotUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip",
+ if (supportStagingUrls) {
+ String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
- assertThat(iterator.next(), is(new URL(expectedSnapshotUrl)));
+ assertThat(iterator.next(), is(new URL(expectedStagingURL)));
}
URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/" + pluginName + "/" + Version.CURRENT.number() + "/" +
- pluginName + "-" + Version.CURRENT.number() + ".zip");
+ pluginName + "-" + Version.CURRENT.number() + ".zip");
assertThat(iterator.next(), is(expected));
+
+ assertThat(iterator.hasNext(), is(false));
}
@Test
@@ -87,18 +97,24 @@ public class PluginManagerUnitTests extends ESTestCase {
PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName);
assertThat(handle.name, is(randomPluginName.replaceAll("^elasticsearch-", "")));
- assertThat(handle.urls(), hasSize(Version.CURRENT.snapshot() ? 2 : 1));
+ boolean supportStagingUrls = randomBoolean();
+ if (supportStagingUrls) {
+ System.setProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS, "true");
+ }
+
Iterator<URL> iterator = handle.urls().iterator();
- if (Version.CURRENT.snapshot()) {
- String expectedSnapshotUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip",
+ if (supportStagingUrls) {
+ String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
- assertThat(iterator.next(), is(new URL(expectedSnapshotUrl)));
+ assertThat(iterator.next(), is(new URL(expectedStagingUrl)));
}
String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
assertThat(iterator.next(), is(new URL(releaseUrl)));
+
+ assertThat(iterator.hasNext(), is(false));
}
@Test
diff --git a/core/src/test/java/org/elasticsearch/test/search/MockSearchService.java b/core/src/test/java/org/elasticsearch/search/MockSearchService.java
similarity index 87%
rename from core/src/test/java/org/elasticsearch/test/search/MockSearchService.java
rename to core/src/test/java/org/elasticsearch/search/MockSearchService.java
index dd6c972af00..077f730f2c9 100644
--- a/core/src/test/java/org/elasticsearch/test/search/MockSearchService.java
+++ b/core/src/test/java/org/elasticsearch/search/MockSearchService.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.test.search;
+package org.elasticsearch.search;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
@@ -28,6 +28,7 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.cache.request.IndicesRequestCache;
import org.elasticsearch.node.settings.NodeSettingsService;
+import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.dfs.DfsPhase;
@@ -42,6 +43,20 @@ import java.util.concurrent.ConcurrentHashMap;
public class MockSearchService extends SearchService {
+ public static class Plugin extends AbstractPlugin {
+ @Override
+ public String name() {
+ return "mock-search-service";
+ }
+ @Override
+ public String description() {
+ return "a mock search service for testing";
+ }
+ public void onModule(SearchModule module) {
+ module.searchServiceImpl = MockSearchService.class;
+ }
+ }
+
private static final Map<SearchContext, Throwable> ACTIVE_SEARCH_CONTEXTS = new ConcurrentHashMap<>();
/** Throw an {@link AssertionError} if there are still in-flight contexts. */
diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
new file mode 100644
index 00000000000..efdcf0062c3
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search;
+
+import org.elasticsearch.common.inject.ModuleTestCase;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.search.highlight.CustomHighlighter;
+import org.elasticsearch.search.highlight.Highlighter;
+import org.elasticsearch.search.highlight.PlainHighlighter;
+import org.elasticsearch.search.suggest.CustomSuggester;
+import org.elasticsearch.search.suggest.Suggester;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
+/**
+ */
+public class SearchModuleTests extends ModuleTestCase {
+
+ public void testDoubleRegister() {
+ SearchModule module = new SearchModule(Settings.EMPTY);
+ try {
+ module.registerHighlighter("fvh", PlainHighlighter.class);
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [highlighter] more than once for [fvh]");
+ }
+
+ try {
+ module.registerSuggester("term", PhraseSuggester.class);
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [term]");
+ }
+ }
+
+ public void testRegisterSuggester() {
+ SearchModule module = new SearchModule(Settings.EMPTY);
+ module.registerSuggester("custom", CustomSuggester.class);
+ try {
+ module.registerSuggester("custom", CustomSuggester.class);
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [custom]");
+ }
+ assertMapMultiBinding(module, Suggester.class, CustomSuggester.class);
+ }
+
+ public void testRegisterHighlighter() {
+ SearchModule module = new SearchModule(Settings.EMPTY);
+ module.registerHighlighter("custom", CustomHighlighter.class);
+ try {
+ module.registerHighlighter("custom", CustomHighlighter.class);
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [highlighter] more than once for [custom]");
+ }
+ assertMapMultiBinding(module, Highlighter.class, CustomHighlighter.class);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java
index 93fbd80b52a..64375138efd 100644
--- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java
@@ -35,10 +35,12 @@ import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
+import org.elasticsearch.test.engine.MockEngineSupportModule;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSDirectoryService;
@@ -250,7 +252,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {
Builder settings = settingsBuilder()
.put(indexSettings())
- .put(MockEngineSupport.READER_WRAPPER_TYPE, RandomExceptionDirectoryReaderWrapper.class.getName())
+ .extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.Plugin.class.getName())
.put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate)
.put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
.put(MockEngineSupport.WRAP_READER_RATIO, 1.0d);
@@ -310,6 +312,21 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {
public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {
+
+ public static class Plugin extends AbstractPlugin {
+ @Override
+ public String name() {
+ return "random-exception-reader-wrapper";
+ }
+ @Override
+ public String description() {
+ return "a mock reader wrapper that throws random exceptions for testing";
+ }
+ public void onModule(MockEngineSupportModule module) {
+ module.wrapperImpl = RandomExceptionDirectoryReaderWrapper.class;
+ }
+ }
+
private final Settings settings;
static class ThrowingSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper implements ThrowingLeafReaderWrapper.Thrower {
diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
index 4a1eed7a457..ec9fb42a461 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
@@ -29,8 +29,7 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.cache.query.QueryCacheModule;
-import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
+import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.query.HasChildQueryBuilder;
@@ -74,8 +73,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
// aggressive filter caching so that we can assert on the filter cache size
- .put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, IndexQueryCache.class)
- .put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, true)
+ .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
+ .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java
index 3a9135cb731..e193d2ad69b 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java
@@ -32,11 +32,6 @@ import java.util.Map;
*/
public class CustomHighlighter implements Highlighter {
- @Override
- public String[] names() {
- return new String[] { "test-custom" };
- }
-
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java
index e7c69793c2e..705265ea5f6 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java
@@ -35,6 +35,6 @@ public class CustomHighlighterPlugin extends AbstractPlugin {
}
public void onModule(SearchModule highlightModule) {
- highlightModule.registerHighlighter(CustomHighlighter.class);
+ highlightModule.registerHighlighter("test-custom", CustomHighlighter.class);
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java
index d82e12ecc1b..45a487cd18e 100644
--- a/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java
@@ -21,8 +21,7 @@ package org.elasticsearch.search.scriptfilter;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.cache.query.QueryCacheModule;
-import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
+import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
@@ -50,8 +49,8 @@ public class ScriptQuerySearchIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
// aggressive filter caching so that we can assert on the number of iterations of the script filters
- .put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, IndexQueryCache.class)
- .put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, true)
+ .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
+ .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java
index 6e57390a165..e3dfe3b96d3 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java
@@ -55,11 +55,6 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestionsContext> {
- @Override
- public String[] names() {
- return new String[] {"custom"};
- }
-
diff --git a/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java b/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
--- a/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -2038,30 +2038,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
- /**
- * Asserts that there are no files in the specified path
- */
- public void assertPathHasBeenCleared(String path) throws Exception {
- assertPathHasBeenCleared(PathUtils.get(path));
- }
-
- /**
- * Asserts that there are no files in the specified path
- */
- public void assertPathHasBeenCleared(Path path) throws Exception {
- logger.info("--> checking that [{}] has been cleared", path);
- int count = 0;
- StringBuilder sb = new StringBuilder();
- sb.append("[");
- if (Files.exists(path)) {
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
- for (Path file : stream) {
- logger.info("--> found file: [{}]", file.toAbsolutePath().toString());
- if (Files.isDirectory(file)) {
- assertPathHasBeenCleared(file);
- } else if (Files.isRegularFile(file)) {
- count++;
- sb.append(file.toAbsolutePath().toString());
- sb.append("\n");
- }
- }
- }
- }
- sb.append("]");
- assertThat(count + " files exist that should have been cleaned:\n" + sb.toString(), count, equalTo(0));
- }
-
protected static class NumShards {
public final int numPrimaries;
public final int numReplicas;
diff --git a/core/src/test/java/org/elasticsearch/test/ESTestCase.java b/core/src/test/java/org/elasticsearch/test/ESTestCase.java
index 7d23e423697..2ebcdd02661 100644
--- a/core/src/test/java/org/elasticsearch/test/ESTestCase.java
+++ b/core/src/test/java/org/elasticsearch/test/ESTestCase.java
@@ -46,25 +46,26 @@ import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.EsAbortPolicy;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
-import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.test.junit.listeners.AssertionErrorThreadDumpPrinter;
import org.elasticsearch.test.junit.listeners.LoggingListener;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
-import org.elasticsearch.test.search.MockSearchService;
+import org.elasticsearch.search.MockSearchService;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.*;
import org.junit.rules.RuleChain;
import java.io.IOException;
import java.lang.reflect.Field;
+import java.nio.file.DirectoryStream;
import java.nio.file.FileSystem;
+import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.Callable;
@@ -73,6 +74,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.Lists.newArrayList;
+import static org.hamcrest.Matchers.equalTo;
/**
* Base testcase for randomized unit testing with Elasticsearch
@@ -581,4 +583,40 @@ public abstract class ESTestCase extends LuceneTestCase {
return enabled;
}
+ /**
+ * Asserts that there are no files in the specified path
+ */
+ public void assertPathHasBeenCleared(String path) throws Exception {
+ assertPathHasBeenCleared(PathUtils.get(path));
+ }
+
+ /**
+ * Asserts that there are no files in the specified path
+ */
+ public void assertPathHasBeenCleared(Path path) throws Exception {
+ logger.info("--> checking that [{}] has been cleared", path);
+ int count = 0;
+ StringBuilder sb = new StringBuilder();
+ sb.append("[");
+ if (Files.exists(path)) {
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
+ for (Path file : stream) {
+ // Skip files added by Lucene's ExtraFS
+ if (file.getFileName().toString().startsWith("extra")) {
+ continue;
+ }
+ logger.info("--> found file: [{}]", file.toAbsolutePath().toString());
+ if (Files.isDirectory(file)) {
+ assertPathHasBeenCleared(file);
+ } else if (Files.isRegularFile(file)) {
+ count++;
+ sb.append(file.toAbsolutePath().toString());
+ sb.append("\n");
+ }
+ }
+ }
+ }
+ sb.append("]");
+ assertThat(count + " files exist that should have been cleaned:\n" + sb.toString(), count, equalTo(0));
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java
index 065732ed8a4..2a17f9eb06d 100644
--- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java
@@ -42,7 +42,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.cache.recycler.PageCacheRecyclerModule;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.ClusterName;
@@ -72,20 +71,16 @@ import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.BigArraysModule;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.cache.query.QueryCacheModule;
-import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
-import org.elasticsearch.index.cache.query.index.IndexQueryCache;
-import org.elasticsearch.index.cache.query.none.NoneQueryCache;
+import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineClosedException;
import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.shard.IndexShardModule;
+import org.elasticsearch.index.shard.MockEngineFactoryPlugin;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@@ -95,16 +90,14 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.node.Node;
+import org.elasticsearch.node.NodeMocksPlugin;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchService;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
-import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.test.disruption.ServiceDisruptionScheme;
-import org.elasticsearch.test.engine.MockEngineFactory;
-import org.elasticsearch.test.search.MockSearchService;
+import org.elasticsearch.search.MockSearchService;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.elasticsearch.test.transport.MockTransportService;
@@ -390,11 +383,12 @@ public final class InternalTestCluster extends TestCluster {
Builder builder = Settings.settingsBuilder()
.put(SETTING_CLUSTER_NODE_SEED, seed);
if (ENABLE_MOCK_MODULES && usually(random)) {
- builder.extendArray("plugin.types", MockTransportService.Plugin.class.getName(), MockFSIndexStore.Plugin.class.getName());
- builder.put(IndexShardModule.ENGINE_FACTORY, MockEngineFactory.class);
- builder.put(PageCacheRecyclerModule.CACHE_IMPL, MockPageCacheRecycler.class.getName());
- builder.put(BigArraysModule.IMPL, MockBigArrays.class.getName());
- builder.put(SearchModule.SEARCH_SERVICE_IMPL, MockSearchService.class.getName());
+ builder.extendArray("plugin.types",
+ MockTransportService.Plugin.class.getName(),
+ MockFSIndexStore.Plugin.class.getName(),
+ NodeMocksPlugin.class.getName(),
+ MockEngineFactoryPlugin.class.getName(),
+ MockSearchService.Plugin.class.getName());
}
if (isLocalTransportConfigured()) {
builder.extendArray("plugin.types", AssertingLocalTransport.Plugin.class.getName());
@@ -457,11 +451,11 @@ public final class InternalTestCluster extends TestCluster {
}
if (random.nextBoolean()) {
- builder.put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexQueryCache.class : NoneQueryCache.class);
+ builder.put(IndexCacheModule.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexCacheModule.INDEX_QUERY_CACHE : IndexCacheModule.NONE_QUERY_CACHE);
}
if (random.nextBoolean()) {
- builder.put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, random.nextBoolean());
+ builder.put(IndexCacheModule.QUERY_CACHE_EVERYTHING, random.nextBoolean());
}
if (random.nextBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/test/engine/MockEngineFactory.java b/core/src/test/java/org/elasticsearch/test/engine/MockEngineFactory.java
index 602268d037b..360849542f6 100644
--- a/core/src/test/java/org/elasticsearch/test/engine/MockEngineFactory.java
+++ b/core/src/test/java/org/elasticsearch/test/engine/MockEngineFactory.java
@@ -18,25 +18,41 @@
*/
package org.elasticsearch.test.engine;
-import org.elasticsearch.common.settings.Settings;
+import org.apache.lucene.index.FilterDirectoryReader;
+import org.elasticsearch.common.inject.BindingAnnotation;
+import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineFactory;
-import org.elasticsearch.plugins.AbstractPlugin;
-import org.elasticsearch.transport.TransportModule;
-/**
- *
- */
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+import static java.lang.annotation.ElementType.FIELD;
+import static java.lang.annotation.ElementType.PARAMETER;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
public final class MockEngineFactory implements EngineFactory {
+ @BindingAnnotation
+ @Target({FIELD, PARAMETER})
+ @Retention(RUNTIME)
+ public @interface MockReaderType {
+ }
+
+ private Class<? extends FilterDirectoryReader> wrapper;
+
+ @Inject
+ public MockEngineFactory(@MockReaderType Class wrapper) {
+ this.wrapper = wrapper;
+ }
@Override
public Engine newReadWriteEngine(EngineConfig config, boolean skipTranslogRecovery) {
- return new MockInternalEngine(config, skipTranslogRecovery);
+ return new MockInternalEngine(config, skipTranslogRecovery, wrapper);
}
@Override
public Engine newReadOnlyEngine(EngineConfig config) {
- return new MockShadowEngine(config);
+ return new MockShadowEngine(config, wrapper);
}
}
diff --git a/core/src/test/java/org/elasticsearch/test/engine/MockEngineSupport.java b/core/src/test/java/org/elasticsearch/test/engine/MockEngineSupport.java
index 78b0439c593..3649a7b1080 100644
--- a/core/src/test/java/org/elasticsearch/test/engine/MockEngineSupport.java
+++ b/core/src/test/java/org/elasticsearch/test/engine/MockEngineSupport.java
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.test.engine;
-import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -29,7 +28,6 @@ import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.Classes;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@@ -80,7 +78,7 @@ public final class MockEngineSupport {
}
}
- public MockEngineSupport(EngineConfig config) {
+ public MockEngineSupport(EngineConfig config, Class<? extends FilterDirectoryReader> wrapper) {
Settings indexSettings = config.getIndexSettings();
shardId = config.getShardId();
filterCache = config.getQueryCache();
@@ -88,13 +86,6 @@ public final class MockEngineSupport {
final long seed = indexSettings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l);
Random random = new Random(seed);
final double ratio = indexSettings.getAsDouble(WRAP_READER_RATIO, 0.0d); // DISABLED by default - AssertingDR is crazy slow
- String readerWrapperType = indexSettings.get(READER_WRAPPER_TYPE);
- Class<? extends AssertingDirectoryReader> wrapper;
- if (readerWrapperType == null) {
- wrapper = AssertingDirectoryReader.class;
- } else {
- wrapper = Classes.loadClass(getClass().getClassLoader(), readerWrapperType);
- }
boolean wrapReader = random.nextDouble() < ratio;
if (logger.isTraceEnabled()) {
logger.trace("Using [{}] for shard [{}] seed: [{}] wrapReader: [{}]", this.getClass().getName(), shardId, seed, wrapReader);
diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCacheModule.java b/core/src/test/java/org/elasticsearch/test/engine/MockEngineSupportModule.java
similarity index 68%
rename from core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCacheModule.java
rename to core/src/test/java/org/elasticsearch/test/engine/MockEngineSupportModule.java
index 3ecccf1a49a..4f353ee42b7 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCacheModule.java
+++ b/core/src/test/java/org/elasticsearch/test/engine/MockEngineSupportModule.java
@@ -16,21 +16,17 @@
* specific language governing permissions and limitations
* under the License.
*/
+package org.elasticsearch.test.engine;
-package org.elasticsearch.index.cache.bitset;
-
+import org.apache.lucene.index.AssertingDirectoryReader;
+import org.apache.lucene.index.FilterDirectoryReader;
import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.settings.Settings;
-/**
- */
-public class BitsetFilterCacheModule extends AbstractModule {
-
- public BitsetFilterCacheModule(Settings settings) {
- }
+public class MockEngineSupportModule extends AbstractModule {
+public Class<? extends FilterDirectoryReader> wrapperImpl = AssertingDirectoryReader.class;
@Override
protected void configure() {
- bind(BitsetFilterCache.class).asEagerSingleton();
+ bind(Class.class).annotatedWith(MockEngineFactory.MockReaderType.class).toInstance(wrapperImpl);
}
}
diff --git a/core/src/test/java/org/elasticsearch/test/engine/MockInternalEngine.java b/core/src/test/java/org/elasticsearch/test/engine/MockInternalEngine.java
index ed4dc95795e..616d873786e 100644
--- a/core/src/test/java/org/elasticsearch/test/engine/MockInternalEngine.java
+++ b/core/src/test/java/org/elasticsearch/test/engine/MockInternalEngine.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.test.engine;
+import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -30,17 +31,19 @@ import java.io.IOException;
final class MockInternalEngine extends InternalEngine {
private MockEngineSupport support;
private final boolean randomizeFlushOnClose;
+ private Class<? extends FilterDirectoryReader> wrapperClass;
-
- MockInternalEngine(EngineConfig config, boolean skipInitialTranslogRecovery) throws EngineException {
+ MockInternalEngine(EngineConfig config, boolean skipInitialTranslogRecovery, Class<? extends FilterDirectoryReader> wrapper) throws EngineException {
super(config, skipInitialTranslogRecovery);
randomizeFlushOnClose = IndexMetaData.isOnSharedFilesystem(config.getIndexSettings()) == false;
+ wrapperClass = wrapper;
+
}
private synchronized MockEngineSupport support() {
// lazy initialized since we need it already on super() ctor execution :(
if (support == null) {
- support = new MockEngineSupport(config());
+ support = new MockEngineSupport(config(), wrapperClass);
}
return support;
}
diff --git a/core/src/test/java/org/elasticsearch/test/engine/MockShadowEngine.java b/core/src/test/java/org/elasticsearch/test/engine/MockShadowEngine.java
index 1ed920b20fc..f05f69bf275 100644
--- a/core/src/test/java/org/elasticsearch/test/engine/MockShadowEngine.java
+++ b/core/src/test/java/org/elasticsearch/test/engine/MockShadowEngine.java
@@ -19,6 +19,8 @@
package org.elasticsearch.test.engine;
+import org.apache.lucene.index.AssertingDirectoryReader;
+import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.search.AssertingIndexSearcher;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
@@ -32,9 +34,9 @@ import java.util.Map;
final class MockShadowEngine extends ShadowEngine {
private final MockEngineSupport support;
- MockShadowEngine(EngineConfig config) {
+ MockShadowEngine(EngineConfig config, Class<? extends FilterDirectoryReader> wrapper) {
super(config);
- this.support = new MockEngineSupport(config);
+ this.support = new MockEngineSupport(config, wrapper);
}
@Override
diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
index 1c5f34b8ce2..3ffc945bf2f 100644
--- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
@@ -29,8 +29,8 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
-import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.netty.NettyTransport;
import org.junit.After;
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
index 704dbe9e85e..1c4cac7078e 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
@@ -31,7 +31,7 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
+import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.test.junit.rule.RepeatOnExceptionRule;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BindTransportException;
diff --git a/dev-tools/prepare_release.py b/dev-tools/prepare_release.py
new file mode 100644
index 00000000000..9f3e72d6f1a
--- /dev/null
+++ b/dev-tools/prepare_release.py
@@ -0,0 +1,186 @@
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on
+# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+# Prepare a release
+#
+# This script prepares a new release by creating two commits
+#
+# First commit: Update the Version.java to remove the snapshot bit
+# First commit: Remove the -SNAPSHOT suffix in all pom.xml files
+# Second commit: Update Documentation flags
+#
+# USAGE:
+#
+# python3 ./dev-tools/prepare_release.py
+#
+# Note: Ensure the script is run from the root directory
+#
+
+import fnmatch
+import subprocess
+import tempfile
+import re
+import os
+import shutil
+
+VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
+POM_FILE = 'pom.xml'
+
+def run(command):
+ if os.system('%s' % (command)):
+ raise RuntimeError(' FAILED: %s' % (command))
+
+def ensure_checkout_is_clean():
+ # Make sure no local mods:
+ s = subprocess.check_output('git diff --shortstat', shell=True)
+ if len(s) > 0:
+ raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)
+
+ # Make sure no untracked files:
+ s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
+ if 'Untracked files:' in s:
+ raise RuntimeError('git status shows untracked files: got:\n%s' % s)
+
+ # Make sure we have all changes from origin:
+ if 'is behind' in s:
+ raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch: got:\n%s' % (s))
+
+ # Make sure we have no local unpushed changes (this is supposed to be a clean area):
+ if 'is ahead' in s:
+ raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout <branch>", "git reset --hard origin/<branch>" in this branch: got:\n%s' % (s))
+
+# Reads the given file and applies the
+# callback to it. If the callback changed
+# a line the given file is replaced with
+# the modified input.
+def process_file(file_path, line_callback):
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with open(abs_path,'w', encoding='utf-8') as new_file:
+ with open(file_path, encoding='utf-8') as old_file:
+ for line in old_file:
+ new_line = line_callback(line)
+ modified = modified or (new_line != line)
+ new_file.write(new_line)
+ os.close(fh)
+ if modified:
+ #Remove original file
+ os.remove(file_path)
+ #Move new file
+ shutil.move(abs_path, file_path)
+ return True
+ else:
+ # nothing to do - just remove the tmp file
+ os.remove(abs_path)
+ return False
+
+# Moves the pom.xml file from a snapshot to a release
+def remove_maven_snapshot(poms, release):
+ for pom in poms:
+ if pom:
+ #print('Replacing SNAPSHOT version in file %s' % (pom))
+ pattern = '%s-SNAPSHOT' % (release)
+ replacement = '%s' % (release)
+ def callback(line):
+ return line.replace(pattern, replacement)
+ process_file(pom, callback)
+
+# Moves the Version.java file from a snapshot to a release
+def remove_version_snapshot(version_file, release):
+ # 1.0.0.Beta1 -> 1_0_0_Beta1
+ release = release.replace('.', '_')
+ release = release.replace('-', '_')
+ pattern = 'new Version(V_%s_ID, true' % (release)
+ replacement = 'new Version(V_%s_ID, false' % (release)
+ def callback(line):
+ return line.replace(pattern, replacement)
+ processed = process_file(version_file, callback)
+ if not processed:
+ raise RuntimeError('failed to remove snapshot version for %s' % (release))
+
+# finds all the pom files that do have a -SNAPSHOT version
+def find_pom_files_with_snapshots():
+ files = subprocess.check_output('find . -name pom.xml -exec grep -l "<version>.*-SNAPSHOT</version>" {} ";"', shell=True)
+ return files.decode('utf-8').split('\n')
+
+# Checks the pom.xml for the release version.
+# This method fails if the pom file has no SNAPSHOT version set ie.
+# if the version is already on a release version we fail.
+# Returns the next version string ie. 0.90.7
+def find_release_version():
+ with open('pom.xml', encoding='utf-8') as file:
+ for line in file:
+ match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
+ if match:
+ return match.group(1)
+ raise RuntimeError('Could not find release version in branch')
+
+# Stages the given files for the next git commit
+def add_pending_files(*files):
+ for file in files:
+ if file:
+ # print("Adding file: %s" % (file))
+ run('git add %s' % (file))
+
+# Executes a git commit with 'release [version]' as the commit message
+def commit_release(release):
+ run('git commit -m "Release: Change version from %s-SNAPSHOT to %s"' % (release, release))
+
+def commit_feature_flags(release):
+ run('git commit -m "Update Documentation Feature Flags [%s]"' % release)
+
+# Walks the given directory path (defaults to 'docs')
+# and replaces all 'coming[$version]' tags with
+# 'added[$version]'. This method only accesses asciidoc files.
+def update_reference_docs(release_version, path='docs'):
+ pattern = 'coming[%s' % (release_version)
+ replacement = 'added[%s' % (release_version)
+ pending_files = []
+ def callback(line):
+ return line.replace(pattern, replacement)
+ for root, _, file_names in os.walk(path):
+ for file_name in fnmatch.filter(file_names, '*.asciidoc'):
+ full_path = os.path.join(root, file_name)
+ if process_file(full_path, callback):
+ pending_files.append(os.path.join(root, file_name))
+ return pending_files
+
+if __name__ == "__main__":
+ release_version = find_release_version()
+
+ print('*** Preparing release version: [%s]' % release_version)
+
+ ensure_checkout_is_clean()
+ pom_files = find_pom_files_with_snapshots()
+
+ remove_maven_snapshot(pom_files, release_version)
+ remove_version_snapshot(VERSION_FILE, release_version)
+
+ pending_files = pom_files
+ pending_files.append(VERSION_FILE)
+ add_pending_files(*pending_files) # expects var args use * to expand
+ commit_release(release_version)
+
+ pending_files = update_reference_docs(release_version)
+ # split commits for docs and version to enable easy cherry-picking
+ if pending_files:
+ add_pending_files(*pending_files) # expects var args use * to expand
+ commit_feature_flags(release_version)
+ else:
+ print('WARNING: no documentation references updates for release %s' % (release_version))
+
+ print('*** Done removing snapshot version. Run git push manually.')
+
diff --git a/docs/java-api/query-dsl/compound-queries.asciidoc b/docs/java-api/query-dsl/compound-queries.asciidoc
index 03e86e1fbfe..1335ef12de8 100644
--- a/docs/java-api/query-dsl/compound-queries.asciidoc
+++ b/docs/java-api/query-dsl/compound-queries.asciidoc
@@ -51,7 +51,7 @@ Combine a query clause in query context with another in filter context. deprecat
<<java-query-dsl-limit-query,limit>>::
-Limits the number of documents examined per shard. deprecated[1.6.0]
+Limits the number of documents examined per shard.
include::constant-score-query.asciidoc[]
diff --git a/docs/java-api/query-dsl/limit-query.asciidoc b/docs/java-api/query-dsl/limit-query.asciidoc
index b0796ef4e31..d2654f42e9c 100644
--- a/docs/java-api/query-dsl/limit-query.asciidoc
+++ b/docs/java-api/query-dsl/limit-query.asciidoc
@@ -1,8 +1,6 @@
[[java-query-dsl-limit-query]]
==== Limit Query
-deprecated[1.6.0, Use <> instead]
-
See {ref}/query-dsl-limit-query.html[Limit Query]
[source,java]
diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc
index dfaaa74de21..b31fda65ca3 100644
--- a/docs/reference/aggregations/pipeline.asciidoc
+++ b/docs/reference/aggregations/pipeline.asciidoc
@@ -2,7 +2,7 @@
== Pipeline Aggregations
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc
index 8cfeb00dbcd..8311f58cdf3 100644
--- a/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-avg-bucket-aggregation]]
=== Avg Bucket Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc
index 3618138d6ef..6c790403af0 100644
--- a/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-bucket-script-aggregation]]
=== Bucket Script Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc
index ac29f21bdd9..7ac4f66dba4 100644
--- a/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-bucket-selector-aggregation]]
=== Bucket Selector Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc b/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc
index e1336a1ea32..88fcd83831e 100644
--- a/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-cumulative-sum-aggregation]]
=== Cumulative Sum Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc b/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc
index 234f39c3c92..93580409140 100644
--- a/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-derivative-aggregation]]
=== Derivative Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc
index b75bedbaf0e..0d15cc02e2a 100644
--- a/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-max-bucket-aggregation]]
=== Max Bucket Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc
index 171c59c1fe0..ed02f7b2051 100644
--- a/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-min-bucket-aggregation]]
=== Min Bucket Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc
index 83517c4430e..b7c86d5826c 100644
--- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-movavg-aggregation]]
=== Moving Average Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc b/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc
index b6a9cf720fc..84283bd9f3f 100644
--- a/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-serialdiff-aggregation]]
=== Serial Differencing Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc
index e0909cfcf31..3729056d783 100644
--- a/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc
@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-sum-bucket-aggregation]]
=== Sum Bucket Aggregation
-coming[2.0.0]
+coming[2.0.0-beta1]
experimental[]
diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc
index 00f73cc333a..8093dd32d7d 100644
--- a/docs/reference/cluster/stats.asciidoc
+++ b/docs/reference/cluster/stats.asciidoc
@@ -15,6 +15,7 @@ Will return, for example:
["source","js",subs="attributes,callouts"]
--------------------------------------------------
{
+ "timestamp": 1439326129256,
"cluster_name": "elasticsearch",
"status": "green",
"indices": {
@@ -61,12 +62,35 @@ Will return, for example:
"memory_size_in_bytes": 0,
"evictions": 0
},
+ "id_cache": {
+ "memory_size": "0b",
+ "memory_size_in_bytes": 0
+ },
"completion": {
"size": "0b",
"size_in_bytes": 0
},
"segments": {
- "count": 2
+ "count": 2,
+ "memory": "6.4kb",
+ "memory_in_bytes": 6596,
+ "index_writer_memory": "0b",
+ "index_writer_memory_in_bytes": 0,
+ "index_writer_max_memory": "275.7mb",
+ "index_writer_max_memory_in_bytes": 289194639,
+ "version_map_memory": "0b",
+ "version_map_memory_in_bytes": 0,
+ "fixed_bit_set": "0b",
+ "fixed_bit_set_memory_in_bytes": 0
+ },
+ "percolate": {
+ "total": 0,
+ "get_time": "0s",
+ "time_in_millis": 0,
+ "current": 0,
+ "memory_size_in_bytes": -1,
+ "memory_size": "-1b",
+ "queries": 0
}
},
"nodes": {
diff --git a/docs/reference/docs/termvectors.asciidoc b/docs/reference/docs/termvectors.asciidoc
index d76e18b9dba..7530ff7faea 100644
--- a/docs/reference/docs/termvectors.asciidoc
+++ b/docs/reference/docs/termvectors.asciidoc
@@ -81,7 +81,7 @@ omit :
[float]
==== Distributed frequencies
-coming[2.0]
+coming[2.0.0-beta1]
Setting `dfs` to `true` (default is `false`) will return the term statistics
or the field statistics of the entire index, and not just at the shard. Use it
@@ -90,7 +90,7 @@ with caution as distributed frequencies can have a serious performance impact.
[float]
==== Terms Filtering
-coming[2.0]
+coming[2.0.0-beta1]
With the parameter `filter`, the terms returned could also be filtered based
on their tf-idf scores. This could be useful in order find out a good
diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc
index 526a6519892..64daef19ee6 100644
--- a/docs/reference/index.asciidoc
+++ b/docs/reference/index.asciidoc
@@ -1,8 +1,8 @@
[[elasticsearch-reference]]
= Elasticsearch Reference
-:version: 1.5.2
-:branch: 1.5
+:version: 2.0.0-beta1
+:branch: 2.0
:jdk: 1.8.0_25
:defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current
diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc
index 1026a113f1e..d17f409c24e 100644
--- a/docs/reference/indices/analyze.asciidoc
+++ b/docs/reference/indices/analyze.asciidoc
@@ -16,7 +16,7 @@ curl -XGET 'localhost:9200/_analyze' -d '
}'
--------------------------------------------------
-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]
If text parameter is provided as array of strings, it is analyzed as a multi-valued field.
@@ -29,7 +29,7 @@ curl -XGET 'localhost:9200/_analyze' -d '
}'
--------------------------------------------------
-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]
Or by building a custom transient analyzer out of tokenizers,
token filters and char filters. Token filters can use the shorter 'filters'
@@ -53,7 +53,7 @@ curl -XGET 'localhost:9200/_analyze' -d '
}'
--------------------------------------------------
-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]
It can also run against a specific index:
@@ -78,7 +78,7 @@ curl -XGET 'localhost:9200/test/_analyze' -d '
}'
--------------------------------------------------
-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]
Also, the analyzer can be derived based on a field mapping, for example:
@@ -91,7 +91,7 @@ curl -XGET 'localhost:9200/test/_analyze' -d '
}'
--------------------------------------------------
-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]
Will cause the analysis to happen based on the analyzer configured in the
mapping for `obj1.field1` (and if not, the default index analyzer).
diff --git a/docs/reference/indices/upgrade.asciidoc b/docs/reference/indices/upgrade.asciidoc
index bb6747daf09..f381526c3ef 100644
--- a/docs/reference/indices/upgrade.asciidoc
+++ b/docs/reference/indices/upgrade.asciidoc
@@ -51,7 +51,7 @@ Elasticsearch 2.0. Upgrading will:
* Rewrite old segments in the latest Lucene format.
* Add the `index.version.minimum_compatible` setting to the index, to mark it as
- 2.0 compatible coming[1.6.0].
+ 2.0 compatible
Instead of upgrading all segments that weren't written with the most recent
version of Lucene, you can choose to do the minimum work required before
diff --git a/docs/reference/mapping/fields/parent-field.asciidoc b/docs/reference/mapping/fields/parent-field.asciidoc
index 48fa567e217..22e46c4f765 100644
--- a/docs/reference/mapping/fields/parent-field.asciidoc
+++ b/docs/reference/mapping/fields/parent-field.asciidoc
@@ -1,7 +1,7 @@
[[mapping-parent-field]]
=== `_parent` field
-added[2.0.0,The parent-child implementation has been completely rewritten. It is advisable to reindex any 1.x indices which use parent-child to take advantage of the new optimizations]
+added[2.0.0-beta1,The parent-child implementation has been completely rewritten. It is advisable to reindex any 1.x indices which use parent-child to take advantage of the new optimizations]
A parent-child relationship can be established between documents in the same
index by making one mapping type the parent of another:
diff --git a/docs/reference/migration/migrate_2_0.asciidoc b/docs/reference/migration/migrate_2_0.asciidoc
index 4bdf11784f2..14ac164c69d 100644
--- a/docs/reference/migration/migrate_2_0.asciidoc
+++ b/docs/reference/migration/migrate_2_0.asciidoc
@@ -4,6 +4,276 @@
This section discusses the changes that you need to be aware of when migrating
your application to Elasticsearch 2.0.
+[float]
+=== Indices created before 0.90
+
+Elasticsearch 2.0 can read indices created in version 0.90 and above. If any
+of your indices were created before 0.90 you will need to upgrade to the
+latest 1.x version of Elasticsearch first, in order to upgrade your indices or
+to delete the old indices. Elasticsearch will not start in the presence of old
+indices.
+
+[float]
+=== Elasticsearch migration plugin
+
+We have provided the https://github.com/elastic/elasticsearch-migration[Elasticsearch migration plugin]
+to help you detect any issues that you may have when upgrading to
+Elasticsearch 2.0. Please install and run the plugin *before* upgrading.
+
+=== Mapping
+
+
+Remove file based default mappings #10870 (issue: #10620)
+Validate dynamic mappings updates on the master node. #10634 (issues: #8650, #8688)
+Remove the ability to have custom per-field postings and doc values formats. #9741 (issue: #8746)
+Remove support for new indexes using path setting in object/nested fields or index_name in any field #9570 (issue: #6677)
+Remove index_analyzer setting to simplify analyzer logic #9451 (issue: #9371)
+Remove type level default analyzers #9430 (issues: #8874, #9365)
+Add doc values support to boolean fields. #7961 (issues: #4678, #7851)
+
+
+A number of changes have been made to mappings to remove ambiguity and to
+ensure that conflicting mappings cannot be created.
+
+==== Conflicting field mappings
+
+Fields with the same name, in the same index, in different types, must have
+the same mapping, with the exception of the <>, <>,
+<>, <>, <>, and <>
+parameters, which may have different settings per field.
+
+[source,js]
+---------------
+PUT my_index
+{
+ "mappings": {
+ "type_one": {
+ "properties": {
+ "name": { <1>
+ "type": "string"
+ }
+ }
+ },
+ "type_two": {
+ "properties": {
+ "name": { <1>
+ "type": "string",
+ "analyzer": "english"
+ }
+ }
+ }
+ }
+}
+---------------
+<1> The two `name` fields have conflicting mappings and will prevent Elasticsearch
+ from starting.
+
+Elasticsearch will not start in the presence of conflicting field mappings.
+These indices must be deleted or reindexed using a new mapping.
+
+The `ignore_conflicts` option of the put mappings API has been removed.
+Conflicts can't be ignored anymore.
+
+==== Fields cannot be referenced by short name
+
+A field can no longer be referenced using its short name. Instead, the full
+path to the field is required. For instance:
+
+[source,js]
+---------------
+PUT my_index
+{
+ "mappings": {
+ "my_type": {
+ "properties": {
+ "title": { "type": "string" }, <1>
+ "name": {
+ "properties": {
+ "title": { "type": "string" }, <2>
+ "first": { "type": "string" },
+ "last": { "type": "string" }
+ }
+ }
+ }
+ }
+ }
+}
+---------------
+<1> This field is referred to as `title`.
+<2> This field is referred to as `name.title`.
+
+Previously, the two `title` fields in the example above could have been
+confused with each other when using the short name `title`.
+
+=== Type name prefix removed
+
+Previously, two fields with the same name in two different types could
+sometimes be disambiguated by prepending the type name. As a side effect, it
+would add a filter on the type name to the relevant query. This feature was
+ambiguous -- a type name could be confused with a field name -- and didn't
+work everywhere e.g. aggregations.
+
+Instead, fields should be specified with the full path, but without a type
+name prefix. If you wish to filter by the `_type` field, either specify the
+type in the URL or add an explicit filter.
+
+The following example query in 1.x:
+
+[source,js]
+----------------------------
+GET my_index/_search
+{
+ "query": {
+ "match": {
+ "my_type.some_field": "quick brown fox"
+ }
+ }
+}
+----------------------------
+
+would be rewritten in 2.0 as:
+
+[source,js]
+----------------------------
+GET my_index/my_type/_search <1>
+{
+ "query": {
+ "match": {
+ "some_field": "quick brown fox" <2>
+ }
+ }
+}
+----------------------------
+<1> The type name can be specified in the URL to act as a filter.
+<2> The field name should be specified without the type prefix.
+
+==== Field names may not contain dots
+
+In 1.x, it was possible to create fields with dots in their name, for
+instance:
+
+[source,js]
+----------------------------
+PUT my_index
+{
+ "mappings": {
+ "my_type": {
+ "properties": {
+ "foo.bar": { <1>
+ "type": "string"
+ },
+ "foo": {
+ "properties": {
+ "bar": { <1>
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+}
+----------------------------
+<1> These two fields cannot be distinguished as both are referred to as `foo.bar`.
+
+You can no longer create fields with dots in the name.
+
+==== Type names may not start with a dot
+
+In 1.x, Elasticsearch would issue a warning if a type name included a dot,
+e.g. `my.type`. Now that type names are no longer used to distinguish between
+fields in different types, this warning has been relaxed: type names may now
+contain dots, but they may not *begin* with a dot. The only exception to this
+is the special `.percolator` type.
+
+==== Types may no longer be deleted
+
+In 1.x it was possible to delete a type mapping, along with all of the
+documents of that type, using the delete mapping API. This is no longer
+supported, because remnants of the fields in the type could remain in the
+index, causing corruption later on.
+
+==== Type meta-fields
+
+The <> associated with each type had configuration options
+removed, to make them more reliable:
+
+* `_id` configuration can no longer be changed. If you need to sort, use the <> field instead.
+* `_type` configuration can no longer be changed.
+* `_index` configuration can no longer be changed.
+* `_routing` configuration is limited to marking routing as required.
+* `_field_names` configuration is limited to disabling the field.
+* `_size` configuration is limited to enabling the field.
+* `_timestamp` configuration is limited to enabling the field, setting format and default value.
+* `_boost` has been removed.
+* `_analyzer` has been removed.
+
+Importantly, *meta-fields can no longer be specified as part of the document
+body.* Instead, they must be specified in the query string parameters. For
+instance, in 1.x, the `routing` could be specified as follows:
+
+[source,json]
+-----------------------------
+PUT my_index
+{
+ "mappings": {
+ "my_type": {
+ "_routing": {
+ "path": "group" <1>
+ },
+ "properties": {
+ "group": { <1>
+ "type": "string"
+ }
+ }
+ }
+ }
+}
+
+PUT my_index/my_type/1 <2>
+{
+ "group": "foo"
+}
+-----------------------------
+<1> This 1.x mapping tells Elasticsearch to extract the `routing` value from the `group` field in the document body.
+<2> This indexing request uses a `routing` value of `foo`.
+
+In 2.0, the routing must be specified explicitly:
+
+[source,json]
+-----------------------------
+PUT my_index
+{
+ "mappings": {
+ "my_type": {
+ "_routing": {
+ "required": true <1>
+ },
+ "properties": {
+ "group": {
+ "type": "string"
+ }
+ }
+ }
+ }
+}
+
+PUT my_index/my_type/1?routing=bar <2>
+{
+ "group": "foo"
+}
+-----------------------------
+<1> Routing can be marked as required to ensure it is not forgotten during indexing.
+<2> This indexing request uses a `routing` value of `bar`.
+
+==== Other mapping changes
+
+* The setting `index.mapping.allow_type_wrapper` has been removed. Documents should always be sent without the type as the root element.
+* The `binary` field does not support the `compress` and `compress_threshold` options anymore.
+
+
+
+
=== Networking
Elasticsearch now binds to the loopback interface by default (usually 127.0.0.1
@@ -188,141 +458,6 @@ Delete api requires a routing value when deleting a document belonging to a type
mapping, whereas previous elasticsearch versions would trigger a broadcast delete on all shards belonging to the index.
A `RoutingMissingException` is now thrown instead.
-=== Mappings
-
-* The setting `index.mapping.allow_type_wrapper` has been removed. Documents should always be sent without the type as the root element.
-* The delete mappings API has been removed. Mapping types can no longer be deleted.
-* Mapping type names can no longer start with dots.
-* The `ignore_conflicts` option of the put mappings API has been removed. Conflicts can't be ignored anymore.
-* The `binary` field does not support the `compress` and `compress_threshold` options anymore.
-
-==== Removed type prefix on field names in queries
-Types can no longer be specified on fields within queries. Instead, specify type restrictions in the search request.
-
-The following is an example query in 1.x over types `t1` and `t2`:
-
-[source,js]
----------------
-curl -XGET 'localhost:9200/index/_search'
-{
- "query": {
- "bool": {
- "should": [
- {"match": { "t1.field_only_in_t1": "foo" }},
- {"match": { "t2.field_only_in_t2": "bar" }}
- ]
- }
- }
-}
----------------
-
-In 2.0, the query should look like the following:
-
-[source,js]
----------------
-curl -XGET 'localhost:9200/index/t1,t2/_search'
-{
- "query": {
- "bool": {
- "should": [
- {"match": { "field_only_in_t1": "foo" }},
- {"match": { "field_only_in_t2": "bar" }}
- ]
- }
- }
-}
----------------
-
-==== Removed short name field access
-Field names in queries, aggregations, etc. must now use the complete name. Use of the short name
-caused ambiguities in field lookups when the same name existed within multiple object mappings.
-
-The following example illustrates the difference between 1.x and 2.0.
-
-Given these mappings:
-
-[source,js]
----------------
-curl -XPUT 'localhost:9200/index'
-{
- "mappings": {
- "type": {
- "properties": {
- "name": {
- "type": "object",
- "properties": {
- "first": {"type": "string"},
- "last": {"type": "string"}
- }
- }
- }
- }
- }
-}
----------------
-
-The following query was possible in 1.x:
-
-[source,js]
----------------
-curl -XGET 'localhost:9200/index/type/_search'
-{
- "query": {
- "match": { "first": "foo" }
- }
-}
----------------
-
-In 2.0, the same query should now be:
-
-[source,js]
----------------
-curl -XGET 'localhost:9200/index/type/_search'
-{
- "query": {
- "match": { "name.first": "foo" }
- }
-}
----------------
-
-==== Removed support for `.` in field name mappings
-Prior to Elasticsearch 2.0, a field could be defined to have a `.` in its name.
-Mappings like the one below have been deprecated for some time and they will be
-blocked in Elasticsearch 2.0.
-
-[source,js]
----------------
-curl -XPUT 'localhost:9200/index'
-{
- "mappings": {
- "type": {
- "properties": {
- "name.first": {
- "type": "string"
- }
- }
- }
- }
-}
----------------
-
-==== Meta fields have limited configuration
-Meta fields (those beginning with underscore) are fields used by elasticsearch
-to provide special features. They now have limited configuration options.
-
-* `_id` configuration can no longer be changed. If you need to sort, use `_uid` instead.
-* `_type` configuration can no longer be changed.
-* `_index` configuration can no longer be changed.
-* `_routing` configuration is limited to requiring the field.
-* `_boost` has been removed.
-* `_field_names` configuration is limited to disabling the field.
-* `_size` configuration is limited to enabling the field.
-* `_timestamp` configuration is limited to enabling the field, setting format and default value
-
-==== Meta fields in documents
-Meta fields can no longer be specified within a document. They should be specified
-via the API. For example, instead of adding a field `_parent` within a document,
-use the `parent` url parameter when indexing that document.
==== Default date format now is `strictDateOptionalTime`
@@ -389,10 +524,6 @@ the user-friendly representation of boolean fields: `false`/`true`:
Fields of type `murmur3` can no longer change `doc_values` or `index` setting.
They are always stored with doc values, and not indexed.
-==== Source field configuration
-The `_source` field no longer supports `includes` and `excludes` parameters. When
-`_source` is enabled, the entire original source will be stored.
-
==== Config based mappings
The ability to specify mappings in configuration files has been removed. To specify
default mappings that apply to multiple indexes, use index templates.
@@ -437,10 +568,10 @@ script.indexed: on
=== Script parameters
-Deprecated script parameters `id`, `file`, `scriptField`, `script_id`, `script_file`,
+Deprecated script parameters `id`, `file`, `scriptField`, `script_id`, `script_file`,
`script`, `lang` and `params`. The <> should be used in their place.
-The deprecated script parameters have been removed from the Java API so applications using the Java API will
+The deprecated script parameters have been removed from the Java API so applications using the Java API will
need to be updated.
=== Groovy scripts sandbox
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc
index a05193a2398..86fd952a700 100644
--- a/docs/reference/modules/snapshots.asciidoc
+++ b/docs/reference/modules/snapshots.asciidoc
@@ -258,7 +258,7 @@ GET /_snapshot/my_backup/_all
-----------------------------------
// AUTOSENSE
-coming[2.0] A currently running snapshot can be retrieved using the following command:
+coming[2.0.0-beta1] A currently running snapshot can be retrieved using the following command:
[source,sh]
-----------------------------------
diff --git a/docs/reference/query-dsl/and-query.asciidoc b/docs/reference/query-dsl/and-query.asciidoc
index 864feac4cf2..5ef23af2c1a 100644
--- a/docs/reference/query-dsl/and-query.asciidoc
+++ b/docs/reference/query-dsl/and-query.asciidoc
@@ -1,7 +1,7 @@
[[query-dsl-and-query]]
=== And Query
-deprecated[2.0.0, Use the `bool` query instead]
+deprecated[2.0.0-beta1, Use the `bool` query instead]
A query that matches documents using the `AND` boolean operator on other
queries.
diff --git a/docs/reference/query-dsl/compound-queries.asciidoc b/docs/reference/query-dsl/compound-queries.asciidoc
index adce4bebd59..0228ddd90aa 100644
--- a/docs/reference/query-dsl/compound-queries.asciidoc
+++ b/docs/reference/query-dsl/compound-queries.asciidoc
@@ -47,11 +47,11 @@ Synonyms for the `bool` query.
<>::
-Combine a query clause in query context with another in filter context. deprecated[2.0.0,Use the `bool` query instead]
+Combine a query clause in query context with another in filter context. deprecated[2.0.0-beta1,Use the `bool` query instead]
<>::
-Limits the number of documents examined per shard. deprecated[1.6.0]
+Limits the number of documents examined per shard.
include::constant-score-query.asciidoc[]
diff --git a/docs/reference/query-dsl/filtered-query.asciidoc b/docs/reference/query-dsl/filtered-query.asciidoc
index 0129e6ad1a2..5d399d07df4 100644
--- a/docs/reference/query-dsl/filtered-query.asciidoc
+++ b/docs/reference/query-dsl/filtered-query.asciidoc
@@ -1,7 +1,7 @@
[[query-dsl-filtered-query]]
=== Filtered Query
-deprecated[2.0.0, Use the `bool` query instead with a `must` clause for the query and a `filter` clause for the filter]
+deprecated[2.0.0-beta1, Use the `bool` query instead with a `must` clause for the query and a `filter` clause for the filter]
The `filtered` query is used to combine a query which will be used for
scoring with another query which will only be used for filtering the result
diff --git a/docs/reference/query-dsl/limit-query.asciidoc b/docs/reference/query-dsl/limit-query.asciidoc
index 1cfb0b852a9..198ad7862ab 100644
--- a/docs/reference/query-dsl/limit-query.asciidoc
+++ b/docs/reference/query-dsl/limit-query.asciidoc
@@ -1,8 +1,6 @@
[[query-dsl-limit-query]]
=== Limit Query
-deprecated[1.6.0, Use <> instead]
-
A limit query limits the number of documents (per shard) to execute on.
For example:
diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc
index 79fc4a288b2..9c42881fde5 100644
--- a/docs/reference/query-dsl/mlt-query.asciidoc
+++ b/docs/reference/query-dsl/mlt-query.asciidoc
@@ -149,7 +149,7 @@ input, the other one for term selection and for query formation.
==== Document Input Parameters
[horizontal]
-`like`:: coming[2.0]
+`like`:: coming[2.0.0-beta1]
The only *required* parameter of the MLT query is `like` and follows a
versatile syntax, in which the user can specify free form text and/or a single
or multiple documents (see examples above). The syntax to specify documents is
@@ -162,7 +162,7 @@ follows a similar syntax to the `per_field_analyzer` parameter of the
Additionally, to provide documents not necessarily present in the index,
<> are also supported.
-`unlike`:: coming[2.0]
+`unlike`:: coming[2.0.0-beta1]
The `unlike` parameter is used in conjunction with `like` in order not to
select terms found in a chosen set of documents. In other words, we could ask
for documents `like: "Apple"`, but `unlike: "cake crumble tree"`. The syntax
@@ -172,10 +172,10 @@ is the same as `like`.
A list of fields to fetch and analyze the text from. Defaults to the `_all`
field for free text and to all possible fields for document inputs.
-`like_text`:: deprecated[2.0,Replaced by `like`]
+`like_text`:: deprecated[2.0.0-beta1,Replaced by `like`]
The text to find documents like it.
-`ids` or `docs`:: deprecated[2.0,Replaced by `like`]
+`ids` or `docs`:: deprecated[2.0.0-beta1,Replaced by `like`]
A list of documents following the same syntax as the <>.
[float]
diff --git a/docs/reference/query-dsl/or-query.asciidoc b/docs/reference/query-dsl/or-query.asciidoc
index 8a75b625d44..46005dc58c8 100644
--- a/docs/reference/query-dsl/or-query.asciidoc
+++ b/docs/reference/query-dsl/or-query.asciidoc
@@ -1,7 +1,7 @@
[[query-dsl-or-query]]
=== Or Query
-deprecated[2.0.0, Use the `bool` query instead]
+deprecated[2.0.0-beta1, Use the `bool` query instead]
A query that matches documents using the `OR` boolean operator on other
queries.
diff --git a/docs/reference/search/request/scroll.asciidoc b/docs/reference/search/request/scroll.asciidoc
index b96033719d2..338d5d0de1c 100644
--- a/docs/reference/search/request/scroll.asciidoc
+++ b/docs/reference/search/request/scroll.asciidoc
@@ -63,7 +63,7 @@ curl -XGET <1> 'localhost:9200/_search/scroll' <2> -d'
'
--------------------------------------------------
-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]
<1> `GET` or `POST` can be used.
<2> The URL should not include the `index` or `type` name -- these
@@ -188,7 +188,7 @@ curl -XDELETE localhost:9200/_search/scroll -d '
}'
---------------------------------------
-coming[2.0.0, Body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, Body based parameters were added in 2.0.0]
Multiple scroll IDs can be passed as array:
@@ -200,7 +200,7 @@ curl -XDELETE localhost:9200/_search/scroll -d '
}'
---------------------------------------
-coming[2.0.0, Body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, Body based parameters were added in 2.0.0]
All search contexts can be cleared with the `_all` parameter:
diff --git a/docs/reference/search/request/search-type.asciidoc b/docs/reference/search/request/search-type.asciidoc
index abc8f59a140..6ad75dc5245 100644
--- a/docs/reference/search/request/search-type.asciidoc
+++ b/docs/reference/search/request/search-type.asciidoc
@@ -65,7 +65,7 @@ scoring.
[[count]]
==== Count
-deprecated[2.0.0, `count` does not provide any benefits over `query_then_fetch` with a `size` of `0`]
+deprecated[2.0.0-beta1, `count` does not provide any benefits over `query_then_fetch` with a `size` of `0`]
Parameter value: *count*.
diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc
index 9a3e2be1643..646dfa54c67 100644
--- a/docs/reference/search/uri-request.asciidoc
+++ b/docs/reference/search/uri-request.asciidoc
@@ -104,7 +104,7 @@ Defaults to no terminate_after.
|`search_type` |The type of the search operation to perform. Can be
`dfs_query_then_fetch`, `query_then_fetch`, `scan` or `count`
-deprecated[2.0,Replaced by `size: 0`]. Defaults to `query_then_fetch`. See
+deprecated[2.0.0-beta1,Replaced by `size: 0`]. Defaults to `query_then_fetch`. See
<> for
more details on the different types of search that can be performed.
|=======================================================================
diff --git a/docs/reference/search/validate.asciidoc b/docs/reference/search/validate.asciidoc
index 3593f4682d1..b47f63e6942 100644
--- a/docs/reference/search/validate.asciidoc
+++ b/docs/reference/search/validate.asciidoc
@@ -104,7 +104,7 @@ curl -XGET 'http://localhost:9200/twitter/tweet/_validate/query?q=post_date:foo&
}
--------------------------------------------------
-coming[1.6] When the query is valid, the explanation defaults to the string
+When the query is valid, the explanation defaults to the string
representation of that query. With `rewrite` set to `true`, the explanation
is more detailed showing the actual Lucene query that will be executed.
diff --git a/docs/reference/setup/cluster_restart.asciidoc b/docs/reference/setup/cluster_restart.asciidoc
index 5e1a55eed1c..32e82008814 100644
--- a/docs/reference/setup/cluster_restart.asciidoc
+++ b/docs/reference/setup/cluster_restart.asciidoc
@@ -41,8 +41,6 @@ PUT /_cluster/settings
==== Step 2: Perform a synced flush
-added[1.6.0,Synced flush is only supported in Elasticsearch 1.6.0 and above]
-
Shard recovery will be much faster if you stop indexing and issue a
<> request:
diff --git a/docs/reference/setup/rolling_upgrade.asciidoc b/docs/reference/setup/rolling_upgrade.asciidoc
index 5792c34cdbc..2ac2963a239 100644
--- a/docs/reference/setup/rolling_upgrade.asciidoc
+++ b/docs/reference/setup/rolling_upgrade.asciidoc
@@ -32,8 +32,6 @@ PUT /_cluster/settings
==== Step 2: Stop non-essential indexing and perform a synced flush (Optional)
-added[1.6.0,Synced flush is only supported in Elasticsearch 1.6.0 and above]
-
You may happily continue indexing during the upgrade. However, shard recovery
will be much faster if you temporarily stop non-essential indexing and issue a
<> request:
diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java
index 3516a1342d9..8369809aee2 100644
--- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java
+++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java
@@ -33,8 +33,7 @@ public class SimpleIcuAnalysisTests extends ESTestCase {
@Test
public void testDefaultsIcuAnalysis() {
Settings settings = settingsBuilder()
- .put("path.home", createTempDir())
- .loadFromClasspath("org/elasticsearch/index/analysis/phonetic-1.yml").build();
+ .put("path.home", createTempDir()).build();
AnalysisService analysisService = createAnalysisService(settings);
TokenizerFactory tokenizerFactory = analysisService.tokenizer("icu_tokenizer");
diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java
index 3468a8e2bdc..be9f00768d8 100644
--- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java
+++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java
@@ -191,9 +191,10 @@ public class KuromojiAnalysisTests extends ESTestCase {
public AnalysisService createAnalysisService() {
+ String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json";
Settings settings = Settings.settingsBuilder()
.put("path.home", createTempDir())
- .loadFromClasspath("org/elasticsearch/index/analysis/kuromoji_analysis.json")
+ .loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java
index ed3da957571..e501d06cf3e 100644
--- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java
+++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java
@@ -45,7 +45,8 @@ public class SimplePhoneticAnalysisTests extends ESTestCase {
@Test
public void testPhoneticTokenFilterFactory() {
- Settings settings = settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/phonetic-1.yml")
+ String yaml = "/org/elasticsearch/index/analysis/phonetic-1.yml";
+ Settings settings = settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir())
.build();
diff --git a/plugins/cloud-aws/README.md b/plugins/cloud-aws/README.md
index 08957b0ebd5..1f5f53997c8 100644
--- a/plugins/cloud-aws/README.md
+++ b/plugins/cloud-aws/README.md
@@ -187,7 +187,7 @@ The following settings are supported:
* `region`: The region where bucket is located. Defaults to US Standard
* `endpoint`: The endpoint to the S3 API. Defaults to AWS's default S3 endpoint. Note that setting a region overrides the endpoint setting.
* `protocol`: The protocol to use (`http` or `https`). Defaults to value of `cloud.aws.protocol` or `cloud.aws.s3.protocol`.
-* `base_path`: Specifies the path within bucket to repository data. Defaults to root directory.
+* `base_path`: Specifies the path within bucket to repository data. Defaults to value of `repositories.s3.base_path` or to root directory if not set.
* `access_key`: The access key to use for authentication. Defaults to value of `cloud.aws.access_key`.
* `secret_key`: The secret key to use for authentication. Defaults to value of `cloud.aws.secret_key`.
* `chunk_size`: Big files can be broken down into chunks during snapshotting if needed. The chunk size can be specified in bytes or by using size value notation, i.e. `1g`, `10m`, `5k`. Defaults to `100m`.
diff --git a/plugins/cloud-aws/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/cloud-aws/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
index ecd391956e7..4be35ba1098 100644
--- a/plugins/cloud-aws/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
+++ b/plugins/cloud-aws/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
@@ -123,7 +123,7 @@ public class S3Repository extends BlobStoreRepository {
bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries);
blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, repositorySettings.settings().get("access_key"), repositorySettings.settings().get("secret_key"), maxRetries), bucket, region, serverSideEncryption, bufferSize, maxRetries);
- String basePath = repositorySettings.settings().get("base_path", null);
+ String basePath = repositorySettings.settings().get("base_path", settings.get("repositories.s3.base_path"));
if (Strings.hasLength(basePath)) {
BlobPath path = new BlobPath();
for(String elem : Strings.splitStringToArray(basePath, '/')) {
diff --git a/plugins/cloud-aws/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/cloud-aws/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
index 25dd8b96def..23441d5f509 100644
--- a/plugins/cloud-aws/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
+++ b/plugins/cloud-aws/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
@@ -64,6 +64,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTest {
.put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false)
.put("cloud.enabled", true)
.put("plugin.types", CloudAwsPlugin.class.getName())
+ .put("repositories.s3.base_path", basePath)
.build();
}
@@ -85,11 +86,17 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTest {
@Test @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-cloud-aws/issues/211")
public void testSimpleWorkflow() {
Client client = client();
+ Settings.Builder settings = Settings.settingsBuilder()
+ .put("chunk_size", randomIntBetween(1000, 10000));
+
+ // We sometimes test getting the base_path from node settings using repositories.s3.base_path
+ if (usually()) {
+ settings.put("base_path", basePath);
+ }
+
logger.info("--> creating s3 repository with bucket[{}] and path [{}]", internalCluster().getInstance(Settings.class).get("repositories.s3.bucket"), basePath);
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
- .setType("s3").setSettings(Settings.settingsBuilder()
- .put("base_path", basePath)
- .put("chunk_size", randomIntBetween(1000, 10000))
+ .setType("s3").setSettings(settings
).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
@@ -342,7 +349,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTest {
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
.setType("s3").setSettings(Settings.settingsBuilder()
.put("base_path", basePath)
- ).get();
+ ).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
logger.info("--> restore non existing snapshot");
diff --git a/pom.xml b/pom.xml
index a66f5ba5b43..f9b31932f76 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1318,7 +1318,7 @@ org.eclipse.jdt.ui.text.custom_code_templates=
aws-release
AWS Release Repository
- s3://download.elasticsearch.org/elasticsearch/release
+ s3://download.elasticsearch.org/elasticsearch/staging
aws-snapshot
diff --git a/qa/smoke-test-shaded/pom.xml b/qa/smoke-test-shaded/pom.xml
index 0b968e13d55..711259e6dba 100644
--- a/qa/smoke-test-shaded/pom.xml
+++ b/qa/smoke-test-shaded/pom.xml
@@ -23,7 +23,7 @@
org.elasticsearch.distribution.shaded
elasticsearch
- 2.0.0-SNAPSHOT
+ ${elasticsearch.version}
org.hamcrest