Merge branch 'master' into feature/query-refactoring

Conflicts:
	core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java
	core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java

This commit is contained in: f8a90edab2

core/pom.xml (10 lines changed)
@@ -309,6 +309,16 @@
    <include>org/elasticsearch/bootstrap/BootstrapForTesting.class</include>
    <include>org/elasticsearch/common/cli/CliToolTestCase.class</include>
    <include>org/elasticsearch/common/cli/CliToolTestCase$*.class</include>
    <include>org/elasticsearch/cluster/MockInternalClusterInfoService.class</include>
    <include>org/elasticsearch/cluster/MockInternalClusterInfoService$*.class</include>
    <include>org/elasticsearch/index/shard/MockEngineFactoryPlugin.class</include>
    <include>org/elasticsearch/search/MockSearchService.class</include>
    <include>org/elasticsearch/search/MockSearchService$*.class</include>
    <include>org/elasticsearch/cache/recycler/MockPageCacheRecycler.class</include>
    <include>org/elasticsearch/cache/recycler/MockPageCacheRecycler$*.class</include>
    <include>org/elasticsearch/common/util/MockBigArrays.class</include>
    <include>org/elasticsearch/common/util/MockBigArrays$*.class</include>
    <include>org/elasticsearch/node/NodeMocksPlugin.class</include>
</includes>
<excludes>
    <!-- unit tests for yaml suite parser & rest spec parser need to be excluded -->
@@ -192,7 +192,7 @@ public class Bootstrap {
    @SuppressForbidden(reason = "Exception#printStackTrace()")
    private static void setupLogging(Settings settings, Environment environment) {
        try {
            settings.getClassLoader().loadClass("org.apache.log4j.Logger");
            Class.forName("org.apache.log4j.Logger");
            LogConfigurator.configure(settings);
        } catch (ClassNotFoundException e) {
            // no log4j
@@ -1,48 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cache.recycler;

import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;

/**
 */
public class PageCacheRecyclerModule extends AbstractModule {

    public static final String CACHE_IMPL = "cache.recycler.page_cache_impl";

    private final Settings settings;

    public PageCacheRecyclerModule(Settings settings) {
        this.settings = settings;
    }

    @Override
    protected void configure() {
        String impl = settings.get(CACHE_IMPL);
        if (impl == null) {
            bind(PageCacheRecycler.class).asEagerSingleton();
        } else {
            Class<? extends PageCacheRecycler> implClass = Classes.loadClass(getClass().getClassLoader(), impl);
            bind(PageCacheRecycler.class).to(implClass).asEagerSingleton();
        }
    }
}
@@ -52,6 +52,9 @@ public class ClusterModule extends AbstractModule implements SpawnModules {

    private Set<Class<? extends IndexTemplateFilter>> indexTemplateFilters = new HashSet<>();

    // pkg private so tests can mock
    Class<? extends ClusterInfoService> clusterInfoServiceImpl = InternalClusterInfoService.class;

    public ClusterModule(Settings settings) {
        this.settings = settings;
    }
@@ -88,13 +91,7 @@ public class ClusterModule extends AbstractModule implements SpawnModules {
        bind(NodeIndexDeletedAction.class).asEagerSingleton();
        bind(NodeMappingRefreshAction.class).asEagerSingleton();
        bind(MappingUpdatedAction.class).asEagerSingleton();

        String impl = settings.get(CLUSTER_SERVICE_IMPL);
        Class<? extends ClusterInfoService> implClass = InternalClusterInfoService.class;
        if (impl != null) {
            implClass = Classes.loadClass(getClass().getClassLoader(), impl);
        }
        bind(ClusterInfoService.class).to(implClass).asEagerSingleton();
        bind(ClusterInfoService.class).to(clusterInfoServiceImpl).asEagerSingleton();

        Multibinder<IndexTemplateFilter> mbinder = Multibinder.newSetBinder(binder(), IndexTemplateFilter.class);
        for (Class<? extends IndexTemplateFilter> indexTemplateFilter : indexTemplateFilters) {
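The pkg-private clusterInfoServiceImpl field replaces the old string-keyed CLUSTER_SERVICE_IMPL setting: tests swap the implementation by assigning the field directly instead of routing a class name through Settings. A hedged sketch of the test-side usage (MockInternalClusterInfoService is the mock whitelisted in the core/pom.xml hunk above; the surrounding test scaffolding is assumed):

// sketch: a test in org.elasticsearch.cluster injecting a mock ClusterInfoService
ClusterModule module = new ClusterModule(Settings.EMPTY);
// pkg-private field, so this only compiles from the same package
module.clusterInfoServiceImpl = MockInternalClusterInfoService.class;
// when the module is installed, ClusterInfoService is bound to the mock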
@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
@@ -251,7 +252,12 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
        if (hashFunction == null) {
            routingHashFunction = MURMUR3_HASH_FUNCTION;
        } else {
            final Class<? extends HashFunction> hashFunctionClass = Classes.loadClass(getClass().getClassLoader(), hashFunction);
            final Class<? extends HashFunction> hashFunctionClass;
            try {
                hashFunctionClass = Class.forName(hashFunction).asSubclass(HashFunction.class);
            } catch (ClassNotFoundException|NoClassDefFoundError e) {
                throw new ElasticsearchException("failed to load custom hash function [" + hashFunction + "]", e);
            }
            try {
                routingHashFunction = hashFunctionClass.newInstance();
            } catch (InstantiationException | IllegalAccessException e) {
@@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.routing.DjbHashFunction;
import org.elasticsearch.cluster.routing.HashFunction;
@@ -78,7 +79,11 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
                    pre20HashFunction = DjbHashFunction.class;
                    break;
                default:
                    pre20HashFunction = Classes.loadClass(getClass().getClassLoader(), pre20HashFunctionName);
                    try {
                        pre20HashFunction = Class.forName(pre20HashFunctionName).asSubclass(HashFunction.class);
                    } catch (ClassNotFoundException|NoClassDefFoundError e) {
                        throw new ElasticsearchException("failed to load custom hash function [" + pre20HashFunctionName + "]", e);
                    }
            }
        } else {
            pre20HashFunction = DjbHashFunction.class;
@@ -21,6 +21,7 @@ package org.elasticsearch.cluster.routing.allocation;

import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
@@ -42,6 +43,7 @@ import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.gateway.GatewayAllocator;

import java.util.Arrays;
@@ -84,55 +86,43 @@ public class AllocationModule extends AbstractModule {
            DiskThresholdDecider.class,
            SnapshotInProgressAllocationDecider.class));

    private final Settings settings;
    private final Map<String, Class<? extends ShardsAllocator>> shardsAllocators = new HashMap<>();
    private final Set<Class<? extends AllocationDecider>> allocationDeciders = new HashSet<>();
    private final ExtensionPoint.TypeExtensionPoint<ShardsAllocator> shardsAllocators = new ExtensionPoint.TypeExtensionPoint<>("shards_allocator", ShardsAllocator.class);
    private final ExtensionPoint.SetExtensionPoint<AllocationDecider> allocationDeciders = new ExtensionPoint.SetExtensionPoint<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class);

    public AllocationModule(Settings settings) {
        this.settings = settings;
        this.allocationDeciders.addAll(DEFAULT_ALLOCATION_DECIDERS);
        registerShardAllocator(BALANCED_ALLOCATOR, BalancedShardsAllocator.class);
        registerShardAllocator(EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class);
        for (Class<? extends AllocationDecider> decider : DEFAULT_ALLOCATION_DECIDERS) {
            allocationDeciders.registerExtension(decider);
        }
        shardsAllocators.registerExtension(BALANCED_ALLOCATOR, BalancedShardsAllocator.class);
        shardsAllocators.registerExtension(EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class);
    }

    /** Register a custom allocation decider */
    public void registerAllocationDecider(Class<? extends AllocationDecider> allocationDecider) {
        boolean isNew = allocationDeciders.add(allocationDecider);
        if (isNew == false) {
            throw new IllegalArgumentException("Cannot register AllocationDecider " + allocationDecider.getName() + " twice");
        }
        allocationDeciders.registerExtension(allocationDecider);
    }

    /** Register a custom shard allocator with the given name */
    public void registerShardAllocator(String name, Class<? extends ShardsAllocator> clazz) {
        Class<? extends ShardsAllocator> existing = shardsAllocators.put(name, clazz);
        if (existing != null) {
            throw new IllegalArgumentException("Cannot register ShardAllocator [" + name + "] to " + clazz.getName() + ", already registered to " + existing.getName());
        }
        shardsAllocators.registerExtension(name, clazz);
    }

    @Override
    protected void configure() {

        // bind ShardsAllocator
        final String shardsAllocatorType = settings.get(AllocationModule.SHARDS_ALLOCATOR_TYPE_KEY, AllocationModule.BALANCED_ALLOCATOR);
        final Class<? extends ShardsAllocator> shardsAllocator = shardsAllocators.get(shardsAllocatorType);
        if (shardsAllocator == null) {
            throw new IllegalArgumentException("Unknown ShardsAllocator type [" + shardsAllocatorType + "]");
        } else if (shardsAllocatorType.equals(EVEN_SHARD_COUNT_ALLOCATOR)) {
        String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, AllocationModule.SHARDS_ALLOCATOR_TYPE_KEY, AllocationModule.BALANCED_ALLOCATOR);
        if (shardsAllocatorType.equals(EVEN_SHARD_COUNT_ALLOCATOR)) {
            final ESLogger logger = Loggers.getLogger(getClass(), settings);
            logger.warn("{} allocator has been removed in 2.0 using {} instead", AllocationModule.EVEN_SHARD_COUNT_ALLOCATOR, AllocationModule.BALANCED_ALLOCATOR);
        }
        bind(ShardsAllocator.class).to(shardsAllocator).asEagerSingleton();

        // bind AllocationDeciders
        Multibinder<AllocationDecider> allocationMultibinder = Multibinder.newSetBinder(binder(), AllocationDecider.class);
        for (Class<? extends AllocationDecider> allocation : allocationDeciders) {
            allocationMultibinder.addBinding().to(allocation).asEagerSingleton();
        }
        allocationDeciders.bind(binder());

        bind(GatewayAllocator.class).asEagerSingleton();
        bind(AllocationDeciders.class).asEagerSingleton();
        bind(AllocationService.class).asEagerSingleton();
    }

}
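The registration methods now delegate straight to the extension points, so a plugin contributing an allocator or decider goes through one uniform API. A hedged sketch of how a caller would hook in custom allocation logic (MyDecider and MyAllocator are hypothetical):

// sketch: registering custom allocation extensions against this module
module.registerAllocationDecider(MyDecider.class);               // registering the same class twice throws IllegalArgumentException
module.registerShardAllocator("my_allocator", MyAllocator.class); // duplicate names likewise throw
// the allocator is then selected through the setting named by
// AllocationModule.SHARDS_ALLOCATOR_TYPE_KEY, with its value set to "my_allocator"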
@@ -19,17 +19,7 @@

package org.elasticsearch.common;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.bootstrap.Elasticsearch;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.NoClassSettingsException;

import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Locale;

import static org.elasticsearch.common.Strings.toCamelCase;

/**
 *
@@ -41,34 +31,6 @@ public class Classes {
     */
    private static final char PACKAGE_SEPARATOR = '.';

    /**
     * Return the default ClassLoader to use: typically the thread context
     * ClassLoader, if available; the ClassLoader that loaded the ClassUtils
     * class will be used as fallback.
     * <p/>
     * <p>Call this method if you intend to use the thread context ClassLoader
     * in a scenario where you absolutely need a non-null ClassLoader reference:
     * for example, for class path resource loading (but not necessarily for
     * <code>Class.forName</code>, which accepts a <code>null</code> ClassLoader
     * reference as well).
     *
     * @return the default ClassLoader (never <code>null</code>)
     * @see java.lang.Thread#getContextClassLoader()
     */
    public static ClassLoader getDefaultClassLoader() {
        ClassLoader cl = null;
        try {
            cl = Thread.currentThread().getContextClassLoader();
        } catch (Throwable ex) {
            // Cannot access thread context ClassLoader - falling back to system class loader...
        }
        if (cl == null) {
            // No thread context class loader -> use class loader of this class.
            cl = Classes.class.getClassLoader();
        }
        return cl;
    }

    /**
     * Determine the name of the package of the given class:
     * e.g. "java.lang" for the <code>java.lang.String</code> class.
@@ -93,13 +55,5 @@ public class Classes {
        return !clazz.isInterface() && !Modifier.isAbstract(modifiers);
    }

    public static <T> Class<? extends T> loadClass(ClassLoader classLoader, String className) {
        try {
            return (Class<? extends T>) classLoader.loadClass(className);
        } catch (ClassNotFoundException|NoClassDefFoundError e) {
            throw new ElasticsearchException("failed to load class [" + className + "]", e);
        }
    }

    private Classes() {}
}
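With Classes.loadClass and getDefaultClassLoader gone, call sites across this commit switch to the standard-library pattern of Class.forName plus asSubclass, which keeps the unchecked cast out of application code. The shape of the replacement, as used in the IndexMetaData and MetaDataIndexUpgradeService hunks above:

// the replacement idiom used throughout this commit
final Class<? extends HashFunction> clazz;
try {
    clazz = Class.forName(className).asSubclass(HashFunction.class); // type-checked, no unchecked cast
} catch (ClassNotFoundException | NoClassDefFoundError e) {
    throw new ElasticsearchException("failed to load class [" + className + "]", e);
}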
@@ -31,7 +31,7 @@ public class ShapesAvailability {
    static {
        boolean xSPATIAL4J_AVAILABLE;
        try {
            Classes.getDefaultClassLoader().loadClass("com.spatial4j.core.shape.impl.PointImpl");
            Class.forName("com.spatial4j.core.shape.impl.PointImpl");
            xSPATIAL4J_AVAILABLE = true;
        } catch (Throwable t) {
            xSPATIAL4J_AVAILABLE = false;
@@ -40,7 +40,7 @@ public class ShapesAvailability {

        boolean xJTS_AVAILABLE;
        try {
            Classes.getDefaultClassLoader().loadClass("com.vividsolutions.jts.geom.GeometryFactory");
            Class.forName("com.vividsolutions.jts.geom.GeometryFactory");
            xJTS_AVAILABLE = true;
        } catch (Throwable t) {
            xJTS_AVAILABLE = false;
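ShapesAvailability (and ScriptModule further down) use the same probe: attempt to load one well-known class from the optional jar and record the result as a flag. A minimal standalone sketch of the idiom, with a hypothetical com.example.Foo as the probed class:

public final class FooAvailability {
    public static final boolean FOO_AVAILABLE;
    static {
        boolean available;
        try {
            Class.forName("com.example.Foo"); // fails if the optional jar is absent
            available = true;
        } catch (Throwable t) {               // Throwable also covers NoClassDefFoundError
            available = false;
        }
        FOO_AVAILABLE = available;
    }
    private FooAvailability() {}
}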
@@ -30,10 +30,6 @@ import java.lang.reflect.Constructor;
 */
public class Modules {

    public static Module createModule(String moduleClass, Settings settings) throws ClassNotFoundException {
        return createModule((Class<? extends Module>) settings.getClassLoader().loadClass(moduleClass), settings);
    }

    public static Module createModule(Class<? extends Module> moduleClass, @Nullable Settings settings) {
        Constructor<? extends Module> constructor;
        try {
@@ -79,9 +79,8 @@ public final class Settings implements ToXContent {

    private ImmutableMap<String, String> settings;
    private final ImmutableMap<String, String> forcedUnderscoreSettings;
    private transient ClassLoader classLoader;

    Settings(Map<String, String> settings, ClassLoader classLoader) {
    Settings(Map<String, String> settings) {
        // we use a sorted map for consistent serialization when using getAsMap()
        // TODO: use Collections.unmodifiableMap with a TreeMap
        this.settings = ImmutableSortedMap.copyOf(settings);
@@ -96,22 +95,6 @@ public final class Settings implements ToXContent {
            }
        }
        this.forcedUnderscoreSettings = forcedUnderscoreSettings == null ? ImmutableMap.<String, String>of() : ImmutableMap.copyOf(forcedUnderscoreSettings);
        this.classLoader = classLoader;
    }

    /**
     * The class loader associated with this settings, or {@link org.elasticsearch.common.Classes#getDefaultClassLoader()}
     * if not set.
     */
    public ClassLoader getClassLoader() {
        return this.classLoader == null ? Classes.getDefaultClassLoader() : classLoader;
    }

    /**
     * The class loader associated with this settings, but only if explicitly set, otherwise <tt>null</tt>.
     */
    public ClassLoader getClassLoaderIfSet() {
        return this.classLoader;
    }

    /**
@@ -227,7 +210,6 @@ public final class Settings implements ToXContent {
            builder.put(entry.getKey().substring(prefix.length()), entry.getValue());
        }
    }
    builder.classLoader(classLoader);
    return builder.build();
}
@@ -648,7 +630,7 @@ public final class Settings implements ToXContent {
        }
        Map<String, Settings> retVal = new LinkedHashMap<>();
        for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
            retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue()), classLoader));
            retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue())));
        }
        return Collections.unmodifiableMap(retVal);
    }
@@ -701,17 +683,13 @@ public final class Settings implements ToXContent {
        if (o == null || getClass() != o.getClass()) return false;

        Settings that = (Settings) o;

        if (classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) return false;
        if (settings != null ? !settings.equals(that.settings) : that.settings != null) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = settings != null ? settings.hashCode() : 0;
        result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0);
        return result;
    }
@@ -769,8 +747,6 @@ public final class Settings implements ToXContent {

        private final Map<String, String> map = new LinkedHashMap<>();

        private ClassLoader classLoader;

        private Builder() {

        }
@@ -998,7 +974,6 @@ public final class Settings implements ToXContent {
        public Builder put(Settings settings) {
            removeNonArraysFieldsIfNewSettingsContainsFieldAsArray(settings.getAsMap());
            map.putAll(settings.getAsMap());
            classLoader = settings.getClassLoaderIfSet();
            return this;
        }
@@ -1118,31 +1093,6 @@ public final class Settings implements ToXContent {
            return this;
        }

        /**
         * Loads settings from classpath that represents them using the
         * {@link SettingsLoaderFactory#loaderFromSource(String)}.
         */
        public Builder loadFromClasspath(String resourceName) throws SettingsException {
            ClassLoader classLoader = this.classLoader;
            if (classLoader == null) {
                classLoader = Classes.getDefaultClassLoader();
            }
            InputStream is = classLoader.getResourceAsStream(resourceName);
            if (is == null) {
                return this;
            }

            return loadFromStream(resourceName, is);
        }

        /**
         * Sets the class loader associated with the settings built.
         */
        public Builder classLoader(ClassLoader classLoader) {
            this.classLoader = classLoader;
            return this;
        }

        /**
         * Puts all the properties with keys starting with the provided <tt>prefix</tt>.
         *
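Since loadFromClasspath and the builder's class-loader plumbing are removed, a caller that used to load settings from a classpath resource now resolves the resource itself and feeds the stream to loadFromStream, which the removed method already delegated to. A hedged sketch of such a call site (MyCaller and the resource name are hypothetical):

// sketch of a post-removal call site
InputStream is = MyCaller.class.getClassLoader().getResourceAsStream("elasticsearch.yml");
Settings settings = (is == null)
        ? Settings.EMPTY
        : Settings.builder().loadFromStream("elasticsearch.yml", is).build();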
@@ -1270,7 +1220,7 @@ public final class Settings implements ToXContent {
         * set on this builder.
         */
        public Settings build() {
            return new Settings(Collections.unmodifiableMap(map), classLoader);
            return new Settings(Collections.unmodifiableMap(map));
        }
    }
@@ -1,50 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.util;

import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;

import static org.elasticsearch.common.inject.Modules.createModule;

/**
 */
public class BigArraysModule extends AbstractModule {

    public static final String IMPL = "common.util.big_arrays_impl";

    private final Settings settings;

    public BigArraysModule(Settings settings) {
        this.settings = settings;
    }

    @Override
    protected void configure() {
        String impl = settings.get(IMPL);
        if (impl == null) {
            bind(BigArrays.class).asEagerSingleton();
        } else {
            Class<? extends BigArrays> implClass = Classes.loadClass(getClass().getClassLoader(), impl);
            bind(BigArrays.class).to(implClass).asEagerSingleton();
        }
    }
}
@@ -0,0 +1,194 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.util;

import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.multibindings.MapBinder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.settings.Settings;

import java.util.*;

/**
 * This class defines an official elasticsearch extension point. It registers
 * all extensions by a single name and ensures that extensions are not registered
 * more than once.
 */
public abstract class ExtensionPoint<T> {
    protected final String name;
    protected final Class<T> extensionClass;
    protected final Class<?>[] singletons;

    /**
     * Creates a new extension point
     *
     * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
     * @param extensionClass the base class that should be extended
     * @param singletons     a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
     */
    public ExtensionPoint(String name, Class<T> extensionClass, Class<?>... singletons) {
        this.name = name;
        this.extensionClass = extensionClass;
        this.singletons = singletons;
    }

    /**
     * Binds the extension as well as the singletons to the given guice binder.
     *
     * @param binder the binder to use
     */
    public final void bind(Binder binder) {
        if (singletons == null || singletons.length == 0) {
            throw new IllegalStateException("Can't bind empty or null singletons");
        }
        for (Class<?> c : singletons) {
            binder.bind(c).asEagerSingleton();
        }
        bindExtensions(binder);
    }

    /**
     * Subclasses can bind their type, map, or set extensions here.
     */
    protected abstract void bindExtensions(Binder binder);

    /**
     * A map based extension point which allows registering keyed implementations, e.g. parsers or some kind of strategies.
     */
    public static class MapExtensionPoint<T> extends ExtensionPoint<T> {
        private final Map<String, Class<? extends T>> extensions = new HashMap<>();
        private final Set<String> reservedKeys;

        /**
         * Creates a new {@link org.elasticsearch.common.util.ExtensionPoint.MapExtensionPoint}
         *
         * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
         * @param extensionClass the base class that should be extended
         * @param singletons     a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
         * @param reservedKeys   a set of reserved keys by internal implementations
         */
        public MapExtensionPoint(String name, Class<T> extensionClass, Set<String> reservedKeys, Class<?>... singletons) {
            super(name, extensionClass, singletons);
            this.reservedKeys = reservedKeys;
        }

        /**
         * Returns the extension for the given key or <code>null</code>
         */
        public Class<? extends T> getExtension(String type) {
            return extensions.get(type);
        }

        /**
         * Registers an extension class for a given key. This method throws if the key is already taken.
         *
         * @param key       the extensions key
         * @param extension the extension
         * @throws IllegalArgumentException iff the key is already registered or if the key is a reserved key for an internal implementation
         */
        public final void registerExtension(String key, Class<? extends T> extension) {
            if (extensions.containsKey(key) || reservedKeys.contains(key)) {
                throw new IllegalArgumentException("Can't register the same [" + this.name + "] more than once for [" + key + "]");
            }
            extensions.put(key, extension);
        }

        @Override
        protected final void bindExtensions(Binder binder) {
            MapBinder<String, T> parserMapBinder = MapBinder.newMapBinder(binder, String.class, extensionClass);
            for (Map.Entry<String, Class<? extends T>> clazz : extensions.entrySet()) {
                parserMapBinder.addBinding(clazz.getKey()).to(clazz.getValue());
            }
        }
    }

    /**
     * A type extension point which allows registering keyed extensions like {@link org.elasticsearch.common.util.ExtensionPoint.MapExtensionPoint},
     * but doesn't instantiate and bind all the registered key/value pairs; instead it binds a single implementation, selected by a setting, via {@link #bindType(Binder, Settings, String, String)}.
     * Note: {@link #bind(Binder)} is not supported by this class
     */
    public static final class TypeExtensionPoint<T> extends MapExtensionPoint<T> {

        public TypeExtensionPoint(String name, Class<T> extensionClass) {
            super(name, extensionClass, Collections.EMPTY_SET);
        }

        /**
         * Binds the extension class to the class that is registered for the type configured for the settings key in
         * the settings object.
         *
         * @param binder       the binder to use
         * @param settings     the settings to look up the key to find the implementation to bind
         * @param settingsKey  the key to use with the settings
         * @param defaultValue the default value if the settings don't contain the key
         * @return the actual bound type key
         */
        public String bindType(Binder binder, Settings settings, String settingsKey, String defaultValue) {
            final String type = settings.get(settingsKey, defaultValue);
            final Class<? extends T> instance = getExtension(type);
            if (instance == null) {
                throw new IllegalArgumentException("Unknown [" + this.name + "] type [" + type + "]");
            }
            binder.bind(extensionClass).to(instance).asEagerSingleton();
            return type;
        }

    }

    /**
     * A set based extension point which allows registering extended classes that might be used to chain additional functionality etc.
     */
    public final static class SetExtensionPoint<T> extends ExtensionPoint<T> {
        private final Set<Class<? extends T>> extensions = new HashSet<>();

        /**
         * Creates a new {@link org.elasticsearch.common.util.ExtensionPoint.SetExtensionPoint}
         *
         * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
         * @param extensionClass the base class that should be extended
         * @param singletons     a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
         */
        public SetExtensionPoint(String name, Class<T> extensionClass, Class<?>... singletons) {
            super(name, extensionClass, singletons);
        }

        /**
         * Registers a new extension
         *
         * @param extension the extension to register
         * @throws IllegalArgumentException iff the class is already registered
         */
        public final void registerExtension(Class<? extends T> extension) {
            if (extensions.contains(extension)) {
                throw new IllegalArgumentException("Can't register the same [" + this.name + "] more than once for [" + extension.getName() + "]");
            }
            extensions.add(extension);
        }

        @Override
        protected final void bindExtensions(Binder binder) {
            Multibinder<T> allocationMultibinder = Multibinder.newSetBinder(binder, extensionClass);
            for (Class<? extends T> clazz : extensions) {
                allocationMultibinder.addBinding().to(clazz);
            }
        }
    }
}
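Putting the new class to work: a module declares an extension point as a field, registers defaults in its constructor, and defers to the extension point in configure(), exactly as AllocationModule does above. A condensed, hedged sketch of the consumer-side pattern (FancyThing and DefaultFancyThing are hypothetical):

public class FancyModule extends AbstractModule {
    private final ExtensionPoint.TypeExtensionPoint<FancyThing> fancyThings =
            new ExtensionPoint.TypeExtensionPoint<>("fancy_thing", FancyThing.class);
    private final Settings settings;

    public FancyModule(Settings settings) {
        this.settings = settings;
        fancyThings.registerExtension("default", DefaultFancyThing.class); // built-in default
    }

    public void registerFancyThing(String key, Class<? extends FancyThing> clazz) {
        fancyThings.registerExtension(key, clazz); // duplicate keys throw IllegalArgumentException
    }

    @Override
    protected void configure() {
        // binds FancyThing to whichever class the "fancy.thing.type" setting selects
        fancyThings.bindType(binder(), settings, "fancy.thing.type", "default");
    }
}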
@@ -319,13 +319,14 @@ public class Environment {
            }
        }
        // try and load it from the classpath directly
        URL resource = settings.getClassLoader().getResource(path);
        // TODO: remove this, callers can look up their own config on classpath
        URL resource = getClass().getClassLoader().getResource(path);
        if (resource != null) {
            return resource;
        }
        // try and load it from the classpath with config/ prefix
        if (!path.startsWith("config/")) {
            resource = settings.getClassLoader().getResource("config/" + path);
            resource = getClass().getClassLoader().getResource("config/" + path);
            if (resource != null) {
                return resource;
            }
@@ -20,15 +20,21 @@
package org.elasticsearch.index.cache;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Scopes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCacheModule;
import org.elasticsearch.index.cache.query.QueryCacheModule;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;

/**
 *
 */
public class IndexCacheModule extends AbstractModule {

    public static final String INDEX_QUERY_CACHE = "index";
    public static final String NONE_QUERY_CACHE = "none";
    public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
    // for test purposes only
    public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";

    private final Settings settings;

    public IndexCacheModule(Settings settings) {
@@ -37,9 +43,17 @@ public class IndexCacheModule extends AbstractModule {

    @Override
    protected void configure() {
        new QueryCacheModule(settings).configure(binder());
        new BitsetFilterCacheModule(settings).configure(binder());

        String queryCacheType = settings.get(QUERY_CACHE_TYPE, INDEX_QUERY_CACHE);
        Class<? extends QueryCache> queryCacheImpl;
        if (queryCacheType.equals(INDEX_QUERY_CACHE)) {
            queryCacheImpl = IndexQueryCache.class;
        } else if (queryCacheType.equals(NONE_QUERY_CACHE)) {
            queryCacheImpl = NoneQueryCache.class;
        } else {
            throw new IllegalArgumentException("Unknown QueryCache type [" + queryCacheType + "]");
        }
        bind(QueryCache.class).to(queryCacheImpl).in(Scopes.SINGLETON);
        bind(BitsetFilterCache.class).asEagerSingleton();
        bind(IndexCache.class).asEagerSingleton();
    }
}
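The folded-in module replaces the old free-form class-name setting with a closed set of names, so the cache implementation is now chosen by value rather than by classloading. A hedged sketch of how index settings would select the none cache:

// sketch: selecting the query cache implementation by name
Settings indexSettings = Settings.builder()
        .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.NONE_QUERY_CACHE) // "index.queries.cache.type" -> "none"
        .build();
new IndexCacheModule(indexSettings); // configure() will bind QueryCache to NoneQueryCache
// any other value throws IllegalArgumentException("Unknown QueryCache type [...]")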
@@ -1,56 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.cache.query;

import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Scopes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;

/**
 *
 */
public class QueryCacheModule extends AbstractModule {

    public static final class QueryCacheSettings {
        public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
        // for test purposes only
        public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
    }

    private final Settings settings;

    public QueryCacheModule(Settings settings) {
        this.settings = settings;
    }

    @Override
    protected void configure() {
        Class<? extends IndexQueryCache> queryCacheClass = IndexQueryCache.class;
        String customQueryCache = settings.get(QueryCacheSettings.QUERY_CACHE_TYPE);
        if (customQueryCache != null) {
            // TODO: make this only useable from tests
            queryCacheClass = Classes.loadClass(getClass().getClassLoader(), customQueryCache);
        }
        bind(QueryCache.class).to(queryCacheClass).in(Scopes.SINGLETON);
    }
}
@@ -20,6 +20,7 @@
package org.elasticsearch.index.percolator;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
@@ -261,7 +262,9 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
        try (Engine.Searcher searcher = shard.engine().acquireSearcher("percolator_load_queries")) {
            Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME));
            QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService);
            searcher.searcher().search(query, queryCollector);
            IndexSearcher indexSearcher = new IndexSearcher(searcher.reader());
            indexSearcher.setQueryCache(null);
            indexSearcher.search(query, queryCollector);
            Map<BytesRef, Query> queries = queryCollector.queries();
            for (Map.Entry<BytesRef, Query> entry : queries.entrySet()) {
                Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue());
@@ -34,5 +34,5 @@ public class TermsLookupQueryBuilder extends TermsQueryBuilder {
    @Override
    public String getWriteableName() {
        return TermsQueryBuilder.NAME;
    }
}
}
@@ -36,6 +36,10 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {

    private final Object values;

    private String minimumShouldMatch;

    private Boolean disableCoord;

    private String execution;

    private String lookupIndex;
@@ -43,7 +47,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
    private String lookupId;
    private String lookupRouting;
    private String lookupPath;
    private Boolean lookupCache;

    /**
     * A filter for a field based on several terms matching on any of them.
@@ -133,7 +136,31 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
    }

    /**
<<<<<<< HEAD
     * Sets the index name to lookup the terms from.
=======
     * Sets the minimum number of matches across the provided terms. Defaults to <tt>1</tt>.
     * @deprecated use [bool] query instead
     */
    @Deprecated
    public TermsQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
        this.minimumShouldMatch = minimumShouldMatch;
        return this;
    }

    /**
     * Disables <tt>Similarity#coord(int,int)</tt> in scoring. Defaults to <tt>false</tt>.
     * @deprecated use [bool] query instead
     */
    @Deprecated
    public TermsQueryBuilder disableCoord(boolean disableCoord) {
        this.disableCoord = disableCoord;
        return this;
    }

    /**
     * Sets the filter name for the filter that can be used when searching for matched_filters per hit.
>>>>>>> master
     */
    public TermsQueryBuilder lookupIndex(String lookupIndex) {
        this.lookupIndex = lookupIndex;
@@ -169,11 +196,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
        return this;
    }

    public TermsQueryBuilder lookupCache(boolean lookupCache) {
        this.lookupCache = lookupCache;
        return this;
    }

    @Override
    public void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
@@ -187,9 +209,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
            if (lookupRouting != null) {
                builder.field("routing", lookupRouting);
            }
            if (lookupCache != null) {
                builder.field("cache", lookupCache);
            }
            builder.field("path", lookupPath);
            builder.endObject();
        } else {
@@ -199,7 +218,16 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
            builder.field("execution", execution);
        }

        if (minimumShouldMatch != null) {
            builder.field("minimum_should_match", minimumShouldMatch);
        }

        if (disableCoord != null) {
            builder.field("disable_coord", disableCoord);
        }

        printBoostAndQueryName(builder);

        builder.endObject();
    }
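Both deprecated options point users at the bool query. The recommended equivalent of a terms query with minimum_should_match is a bool query whose should clauses are individual term queries; a hedged sketch using the QueryBuilders helpers:

// sketch: the [bool] replacement for terms + minimum_should_match
// deprecated form: termsQuery("tags", "a", "b", "c").minimumShouldMatch("2")
BoolQueryBuilder replacement = QueryBuilders.boolQuery()
        .should(QueryBuilders.termQuery("tags", "a"))
        .should(QueryBuilders.termQuery("tags", "b"))
        .should(QueryBuilders.termQuery("tags", "c"))
        .minimumNumberShouldMatch(2); // at least two of the should clauses must match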
@@ -50,6 +50,7 @@ import java.util.List;
public class TermsQueryParser extends BaseQueryParserTemp {

    private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match").withAllDeprecated("Use [bool] query instead");
    private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead");
    private Client client;

    @Deprecated
@@ -149,7 +150,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
                minShouldMatch = parser.textOrNull();
            } else if ("boost".equals(currentFieldName)) {
                boost = parser.floatValue();
            } else if (("disable_coord").equals(currentFieldName) || ("disableCoord").equals(currentFieldName)) {
            } else if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
                disableCoord = parser.booleanValue();
            } else if ("_name".equals(currentFieldName)) {
                queryName = parser.text();
@@ -19,7 +19,6 @@

package org.elasticsearch.index.query.functionscore;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
@@ -58,8 +58,8 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.codec.CodecService;
@@ -249,7 +249,7 @@ public class IndexShard extends AbstractIndexShardComponent {
        final QueryCachingPolicy cachingPolicy;
        // the query cache is a node-level thing, however we want the most popular filters
        // to be computed on a per-shard basis
        if (indexSettings.getAsBoolean(QueryCacheSettings.QUERY_CACHE_EVERYTHING, false)) {
        if (indexSettings.getAsBoolean(IndexCacheModule.QUERY_CACHE_EVERYTHING, false)) {
            cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE;
        } else {
            cachingPolicy = new UsageTrackingQueryCachingPolicy();
@@ -40,12 +40,13 @@ import org.elasticsearch.index.translog.TranslogService;
 */
public class IndexShardModule extends AbstractModule {

    public static final String ENGINE_FACTORY = "index.engine.factory";

    private final ShardId shardId;
    private final Settings settings;
    private final boolean primary;

    // pkg private so tests can mock
    Class<? extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class;

    public IndexShardModule(ShardId shardId, boolean primary, Settings settings) {
        this.settings = settings;
        this.shardId = shardId;
@@ -70,13 +71,7 @@ public class IndexShardModule extends AbstractModule {
            bind(TranslogService.class).asEagerSingleton();
        }

        Class<? extends InternalEngineFactory> engineFactoryClass = InternalEngineFactory.class;
        String customEngineFactory = settings.get(ENGINE_FACTORY);
        if (customEngineFactory != null) {
            // TODO: make this only useable from tests
            engineFactoryClass = Classes.loadClass(getClass().getClassLoader(), customEngineFactory);
        }
        bind(EngineFactory.class).to(engineFactoryClass);
        bind(EngineFactory.class).to(engineFactoryImpl);
        bind(StoreRecoveryService.class).asEagerSingleton();
        bind(ShardPercolateService.class).asEagerSingleton();
        bind(ShardTermVectorsService.class).asEagerSingleton();
@@ -300,7 +300,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
        Settings indexSettings = settingsBuilder()
                .put(this.settings)
                .put(settings)
                .classLoader(settings.getClassLoader())
                .build();

        ModulesBuilder modules = new ModulesBuilder();
@@ -23,7 +23,6 @@ import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecyclerModule;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClientModule;
import org.elasticsearch.cluster.ClusterModule;
@@ -44,7 +43,6 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.util.BigArraysModule;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.discovery.DiscoveryService;
@@ -71,7 +69,6 @@ import org.elasticsearch.monitor.MonitorModule;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.internal.NodeModule;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.percolator.PercolatorModule;
import org.elasticsearch.percolator.PercolatorService;
@@ -161,9 +158,7 @@ public class Node implements Releasable {
        try {
            ModulesBuilder modules = new ModulesBuilder();
            modules.add(new Version.Module(version));
            modules.add(new PageCacheRecyclerModule(settings));
            modules.add(new CircuitBreakerModule(settings));
            modules.add(new BigArraysModule(settings));
            modules.add(new PluginsModule(settings, pluginsService));
            modules.add(new SettingsModule(settings));
            modules.add(new NodeModule(this));
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.node.internal;
|
||||
package org.elasticsearch.node;
|
||||
|
||||
import org.elasticsearch.cache.recycler.PageCacheRecycler;
|
||||
import org.elasticsearch.common.inject.AbstractModule;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.node.service.NodeService;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
|
@@ -31,12 +33,27 @@ public class NodeModule extends AbstractModule {

    private final Node node;

    // pkg private so tests can mock
    Class<? extends PageCacheRecycler> pageCacheRecyclerImpl = PageCacheRecycler.class;
    Class<? extends BigArrays> bigArraysImpl = BigArrays.class;

    public NodeModule(Node node) {
        this.node = node;
    }

    @Override
    protected void configure() {
        if (pageCacheRecyclerImpl == PageCacheRecycler.class) {
            bind(PageCacheRecycler.class).asEagerSingleton();
        } else {
            bind(PageCacheRecycler.class).to(pageCacheRecyclerImpl).asEagerSingleton();
        }
        if (bigArraysImpl == BigArrays.class) {
            bind(BigArrays.class).asEagerSingleton();
        } else {
            bind(BigArrays.class).to(bigArraysImpl).asEagerSingleton();
        }

        bind(Node.class).toInstance(node);
        bind(NodeSettingsService.class).asEagerSingleton();
        bind(NodeService.class).asEagerSingleton();
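NodeModule absorbs what PageCacheRecyclerModule and BigArraysModule used to do, and the same pkg-private-field trick replaces their string settings. A hedged sketch of the test-side usage (MockPageCacheRecycler and MockBigArrays are the test classes whitelisted in the core/pom.xml hunk above; the node instance comes from the test harness):

// sketch: a test in org.elasticsearch.node swapping in mock implementations
NodeModule nodeModule = new NodeModule(node);
nodeModule.pageCacheRecyclerImpl = MockPageCacheRecycler.class;
nodeModule.bigArraysImpl = MockBigArrays.class;
// configure() now binds the mocks instead of the production singletons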
@@ -180,7 +180,7 @@ public class InternalSettingsPreparer {

    static Settings replacePromptPlaceholders(Settings settings, Terminal terminal) {
        UnmodifiableIterator<Map.Entry<String, String>> iter = settings.getAsMap().entrySet().iterator();
        Settings.Builder builder = Settings.builder().classLoader(settings.getClassLoaderIfSet());
        Settings.Builder builder = Settings.builder();

        while (iter.hasNext()) {
            Map.Entry<String, String> entry = iter.next();
|
|||
*/
|
||||
public class PluginManager {
|
||||
|
||||
public static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";
|
||||
|
||||
public enum OutputMode {
|
||||
DEFAULT, SILENT, VERBOSE
|
||||
}
|
||||
|
@@ -430,8 +432,8 @@ public class PluginManager {
            // Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0
            if (user == null) {
                // TODO Update to https
                if (Version.CURRENT.snapshot()) {
                    addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip", repo, version, repo, version));
                if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
                    addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip", repo, version, repo, version));
                }
                addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", repo, version, repo, version));
            } else {
@@ -95,7 +95,7 @@ public class PluginsService extends AbstractComponent {
        // this is a hack for what is between unit and integration tests...
        String[] defaultPluginsClasses = settings.getAsArray("plugin.types");
        for (String pluginClass : defaultPluginsClasses) {
            Plugin plugin = loadPlugin(pluginClass, settings);
            Plugin plugin = loadPlugin(pluginClass, settings, getClass().getClassLoader());
            PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), false, "NA", true, pluginClass, false);
            if (logger.isTraceEnabled()) {
                logger.trace("plugin loaded from settings [{}]", pluginInfo);
@@ -347,7 +347,7 @@ public class PluginsService extends AbstractComponent {
            // pluginmanager does it, but we do it again, in case lusers mess with jar files manually
            try {
                final List<URL> jars = new ArrayList<>();
                ClassLoader parentLoader = settings.getClassLoader();
                ClassLoader parentLoader = getClass().getClassLoader();
                if (parentLoader instanceof URLClassLoader) {
                    for (URL url : ((URLClassLoader) parentLoader).getURLs()) {
                        jars.add(url);
@@ -360,16 +360,11 @@ public class PluginsService extends AbstractComponent {
            }

            // create a child to load the plugins in this bundle
            ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), settings.getClassLoader());
            Settings settings = Settings.builder()
                    .put(this.settings)
                    .classLoader(loader)
                    .build();

            ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
            for (PluginInfo pluginInfo : bundle.plugins) {
                final Plugin plugin;
                if (pluginInfo.isJvm()) {
                    plugin = loadPlugin(pluginInfo.getClassname(), settings);
                    plugin = loadPlugin(pluginInfo.getClassname(), settings, loader);
                } else {
                    plugin = new SitePlugin(pluginInfo.getName(), pluginInfo.getDescription());
                }
@@ -380,9 +375,9 @@ public class PluginsService extends AbstractComponent {
        return plugins.build();
    }

    private Plugin loadPlugin(String className, Settings settings) {
    private Plugin loadPlugin(String className, Settings settings, ClassLoader loader) {
        try {
            Class<? extends Plugin> pluginClass = settings.getClassLoader().loadClass(className).asSubclass(Plugin.class);
            Class<? extends Plugin> pluginClass = loader.loadClass(className).asSubclass(Plugin.class);

            try {
                return pluginClass.getConstructor(Settings.class).newInstance(settings);
@@ -79,21 +79,21 @@ public class ScriptModule extends AbstractModule {
        multibinder.addBinding().to(NativeScriptEngineService.class);

        try {
            settings.getClassLoader().loadClass("groovy.lang.GroovyClassLoader");
            Class.forName("groovy.lang.GroovyClassLoader");
            multibinder.addBinding().to(GroovyScriptEngineService.class).asEagerSingleton();
        } catch (Throwable t) {
            Loggers.getLogger(ScriptService.class, settings).debug("failed to load groovy", t);
        }

        try {
            settings.getClassLoader().loadClass("com.github.mustachejava.Mustache");
            Class.forName("com.github.mustachejava.Mustache");
            multibinder.addBinding().to(MustacheScriptEngineService.class).asEagerSingleton();
        } catch (Throwable t) {
            Loggers.getLogger(ScriptService.class, settings).debug("failed to load mustache", t);
        }

        try {
            settings.getClassLoader().loadClass("org.apache.lucene.expressions.Expression");
            Class.forName("org.apache.lucene.expressions.Expression");
            multibinder.addBinding().to(ExpressionScriptEngineService.class).asEagerSingleton();
        } catch (Throwable t) {
            Loggers.getLogger(ScriptService.class, settings).debug("failed to load lucene expressions", t);
@@ -70,7 +70,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
        config.addCompilationCustomizers(imports);
        // Add BigDecimal -> Double transformer
        config.addCompilationCustomizers(new GroovyBigDecimalTransformer(CompilePhase.CONVERSION));
        this.loader = new GroovyClassLoader(settings.getClassLoader(), config);
        this.loader = new GroovyClassLoader(getClass().getClassLoader(), config);
    }

    @Override
@@ -19,8 +19,6 @@

package org.elasticsearch.search;

import com.google.common.collect.Lists;

import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
@@ -150,24 +148,25 @@ import org.elasticsearch.search.suggest.SuggestPhase;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.Suggesters;

import java.util.List;
import java.util.*;

/**
 *
 */
public class SearchModule extends AbstractModule {

    public static final String SEARCH_SERVICE_IMPL = "search.service_impl";

    private final Settings settings;
    private final List<Class<? extends Aggregator.Parser>> aggParsers = Lists.newArrayList();
    private final List<Class<? extends PipelineAggregator.Parser>> pipelineAggParsers = Lists.newArrayList();
    private final List<Class<? extends Highlighter>> highlighters = Lists.newArrayList();
    private final List<Class<? extends Suggester>> suggesters = Lists.newArrayList();
    private final List<Class<? extends ScoreFunctionParser>> functionScoreParsers = Lists.newArrayList();
    private final List<Class<? extends FetchSubPhase>> fetchSubPhases = Lists.newArrayList();
    private final List<Class<? extends SignificanceHeuristicParser>> heuristicParsers = Lists.newArrayList();
    private final List<Class<? extends MovAvgModel.AbstractModelParser>> modelParsers = Lists.newArrayList();
    private final Set<Class<? extends Aggregator.Parser>> aggParsers = new HashSet<>();
    private final Set<Class<? extends PipelineAggregator.Parser>> pipelineAggParsers = new HashSet<>();
    private final Highlighters highlighters = new Highlighters();
    private final Suggesters suggesters = new Suggesters();
    private final Set<Class<? extends ScoreFunctionParser>> functionScoreParsers = new HashSet<>();
    private final Set<Class<? extends FetchSubPhase>> fetchSubPhases = new HashSet<>();
    private final Set<Class<? extends SignificanceHeuristicParser>> heuristicParsers = new HashSet<>();
    private final Set<Class<? extends MovAvgModel.AbstractModelParser>> modelParsers = new HashSet<>();

    // pkg private so tests can mock
    Class<? extends SearchService> searchServiceImpl = SearchService.class;

    public SearchModule(Settings settings) {
        this.settings = settings;
@@ -182,12 +181,12 @@ public class SearchModule extends AbstractModule {
MovAvgModelStreams.registerStream(stream);
}

public void registerHighlighter(Class<? extends Highlighter> clazz) {
highlighters.add(clazz);
public void registerHighlighter(String key, Class<? extends Highlighter> clazz) {
highlighters.registerExtension(key, clazz);
}

public void registerSuggester(Class<? extends Suggester> suggester) {
suggesters.add(suggester);
public void registerSuggester(String key, Class<? extends Suggester> suggester) {
suggesters.registerExtension(key, suggester);
}

public void registerFunctionScoreParser(Class<? extends ScoreFunctionParser> parser) {
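
The register* methods now take an explicit key, which lets the extension point reject duplicate registrations by name. A self-contained sketch of such a keyed registry, in the spirit of ExtensionPoint.MapExtensionPoint but not the actual class:

    import java.util.HashMap;
    import java.util.Map;

    final class KeyedExtensionRegistry<T> {
        private final Map<String, Class<? extends T>> extensions = new HashMap<>();

        void registerExtension(String key, Class<? extends T> extension) {
            // putIfAbsent returns the previous mapping, so a non-null result
            // means the key was already taken.
            if (extensions.putIfAbsent(key, extension) != null) {
                throw new IllegalArgumentException(
                        "Can't register the same [" + key + "] more than once");
            }
        }
    }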
@@ -245,14 +244,7 @@ public class SearchModule extends AbstractModule {
}

protected void configureSuggesters() {
Multibinder<Suggester> suggesterMultibinder = Multibinder.newSetBinder(binder(), Suggester.class);
for (Class<? extends Suggester> clazz : suggesters) {
suggesterMultibinder.addBinding().to(clazz);
}

bind(SuggestParseElement.class).asEagerSingleton();
bind(SuggestPhase.class).asEagerSingleton();
bind(Suggesters.class).asEagerSingleton();
suggesters.bind(binder());
}

protected void configureFunctionScore() {
@@ -264,11 +256,7 @@ public class SearchModule extends AbstractModule {
}

protected void configureHighlighters() {
Multibinder<Highlighter> multibinder = Multibinder.newSetBinder(binder(), Highlighter.class);
for (Class<? extends Highlighter> highlighter : highlighters) {
multibinder.addBinding().to(highlighter);
}
bind(Highlighters.class).asEagerSingleton();
highlighters.bind(binder());
}

protected void configureAggs() {
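
Both configure* methods collapse into a single bind(binder()) call on the extension point. What that call plausibly amounts to, sketched against plain Guice (Elasticsearch ships a fork under org.elasticsearch.common.inject) and under the assumption that the map extension point uses a MapBinder internally; this is not the real ExtensionPoint implementation:

    import com.google.inject.Binder;
    import com.google.inject.multibindings.MapBinder;
    import java.util.Map;

    class BindSketch {
        // Hedged sketch: one keyed binding per registered class, plus an
        // eager singleton for the registry itself.
        static void bindKeyed(Binder binder, Map<String, Class<? extends Highlighter>> registered) {
            MapBinder<String, Highlighter> mapBinder =
                    MapBinder.newMapBinder(binder, String.class, Highlighter.class);
            for (Map.Entry<String, Class<? extends Highlighter>> e : registered.entrySet()) {
                mapBinder.addBinding(e.getKey()).to(e.getValue());
            }
            binder.bind(Highlighters.class).asEagerSingleton();
        }
    }

This also matches the assertMapMultiBinding helper added further down, which checks for a Map<String, T> provider binding.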
@@ -347,13 +335,10 @@ public class SearchModule extends AbstractModule {
bind(SearchServiceTransportAction.class).asEagerSingleton();
bind(MoreLikeThisFetchService.class).asEagerSingleton();

// search service -- testing only!
String impl = settings.get(SEARCH_SERVICE_IMPL);
if (impl == null) {
if (searchServiceImpl == SearchService.class) {
bind(SearchService.class).asEagerSingleton();
} else {
Class<? extends SearchService> implClass = Classes.loadClass(getClass().getClassLoader(), impl);
bind(SearchService.class).to(implClass).asEagerSingleton();
bind(SearchService.class).to(searchServiceImpl).asEagerSingleton();
}
}
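
The string setting gives way to the pkg-private searchServiceImpl field, so only same-package test code can swap the implementation and no reflective class loading is needed. A usage sketch; the mock class name is hypothetical:

    SearchModule module = new SearchModule(settings);
    // Visible because the test lives in org.elasticsearch.search as well.
    module.searchServiceImpl = MyMockSearchService.class; // hypothetical SearchService subclass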
@@ -414,4 +399,5 @@ public class SearchModule extends AbstractModule {
BucketSelectorPipelineAggregator.registerStreams();
SerialDiffPipelineAggregator.registerStreams();
}

}
@@ -40,7 +40,6 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
private String timeZone;
private String format;
private String offset;
private float factor = 1.0f;

/**
* Sole constructor.

@@ -99,15 +98,6 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
return this;
}

/**
* Set a factor to apply to values of the field, typically used if times
* are stored in seconds instead of milliseconds.
*/
public DateHistogramBuilder factor(float factor) {
this.factor = factor;
return this;
}

/**
* Set the format to use for dates.
*/

@@ -176,10 +166,6 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
builder.field("offset", offset);
}

if (factor != 1.0f) {
builder.field("factor", factor);
}

if (format != null) {
builder.field("format", format);
}
@@ -64,6 +64,9 @@ public class SignificanceHeuristicStreams {
* @param stream The stream to register
*/
public static synchronized void registerStream(Stream stream) {
if (STREAMS.containsKey(stream.getName())) {
throw new IllegalArgumentException("Can't register stream with name [" + stream.getName() + "] more than once");
}
HashMap<String, Stream> map = new HashMap<>();
map.putAll(STREAMS);
map.put(stream.getName(), stream);

@@ -64,6 +64,9 @@ public class MovAvgModelStreams {
* @param stream The stream to register
*/
public static synchronized void registerStream(Stream stream) {
if (STREAMS.containsKey(stream.getName())) {
throw new IllegalArgumentException("Can't register stream with name [" + stream.getName() + "] more than once");
}
HashMap<String, Stream> map = new HashMap<>();
map.putAll(STREAMS);
map.put(stream.getName(), stream);
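
Both registerStream methods follow a copy-on-write scheme: the published map is never mutated in place, a fresh copy is filled and then swapped in, so concurrent readers need no locking. Both hunks are truncated before the swap, so the publishing step below is an assumption about the remainder, not quoted code:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class StreamRegistrySketch {
        interface Stream { String getName(); } // stand-in for the registered type

        private static volatile Map<String, Stream> STREAMS = Collections.emptyMap();

        static synchronized void registerStream(Stream stream) {
            if (STREAMS.containsKey(stream.getName())) {
                throw new IllegalArgumentException(
                        "Can't register stream with name [" + stream.getName() + "] more than once");
            }
            Map<String, Stream> map = new HashMap<>(STREAMS); // copy, never mutate in place
            map.put(stream.getName(), stream);
            STREAMS = Collections.unmodifiableMap(map);       // assumed publishing step
        }
    }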
@@ -49,11 +49,6 @@ public class FastVectorHighlighter implements Highlighter {
this.termVectorMultiValue = settings.getAsBoolean("search.highlight.term_vector_multi_value", true);
}

@Override
public String[] names() {
return new String[]{"fvh", "fast-vector-highlighter"};
}

@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;

@@ -25,8 +25,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
*/
public interface Highlighter {

String[] names();

HighlightField highlight(HighlighterContext highlighterContext);

boolean canHighlight(FieldMapper fieldMapper);
@@ -18,44 +18,74 @@
*/
package org.elasticsearch.search.highlight;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.*;

/**
*
* An extension point and registry for all the highlighters a node supports.
*/
public class Highlighters {
public class Highlighters extends ExtensionPoint.MapExtensionPoint<Highlighter> {

@Deprecated // remove in 3.0
private static final String FAST_VECTOR_HIGHLIGHTER = "fast-vector-highlighter";
private static final String FVH = "fvh";
@Deprecated // remove in 3.0
private static final String HIGHLIGHTER = "highlighter";
private static final String PLAIN = "plain";
@Deprecated // remove in 3.0
private static final String POSTINGS_HIGHLIGHTER = "postings-highlighter";
private static final String POSTINGS = "postings";


private final Map<String, Highlighter> parsers;
private final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Highlighters.class.getName()));

public Highlighters(){
this(Collections.EMPTY_MAP);
}

private Highlighters(Map<String, Highlighter> parsers) {
super("highlighter", Highlighter.class, new HashSet<>(Arrays.asList(FVH, FAST_VECTOR_HIGHLIGHTER, PLAIN, HIGHLIGHTER, POSTINGS, POSTINGS_HIGHLIGHTER)),
Highlighters.class);
this.parsers = Collections.unmodifiableMap(parsers);
}

@Inject
public Highlighters(Settings settings, Set<Highlighter> parsers) {
public Highlighters(Settings settings, Map<String, Highlighter> parsers) {
this(addBuiltIns(settings, parsers));
}

private static Map<String, Highlighter> addBuiltIns(Settings settings, Map<String, Highlighter> parsers) {
// built-in highlighters
Map<String, Highlighter> map = new HashMap<>();
add(map, new FastVectorHighlighter(settings));
add(map, new PlainHighlighter());
add(map, new PostingsHighlighter());
for (Highlighter highlighter : parsers) {
add(map, highlighter);
}
this.parsers = Collections.unmodifiableMap(map);
map.put(FVH, new FastVectorHighlighter(settings));
map.put(FAST_VECTOR_HIGHLIGHTER, map.get(FVH));
map.put(PLAIN, new PlainHighlighter());
map.put(HIGHLIGHTER, map.get(PLAIN));
map.put(POSTINGS, new PostingsHighlighter());
map.put(POSTINGS_HIGHLIGHTER, map.get(POSTINGS));
map.putAll(parsers);
return map;
}

public Highlighter get(String type) {
switch (type) {
case FAST_VECTOR_HIGHLIGHTER:
deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", FAST_VECTOR_HIGHLIGHTER, FVH);
break;
case HIGHLIGHTER:
deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", HIGHLIGHTER, PLAIN);
break;
case POSTINGS_HIGHLIGHTER:
deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", POSTINGS_HIGHLIGHTER, POSTINGS);
break;
}
return parsers.get(type);
}

private void add(Map<String, Highlighter> map, Highlighter highlighter) {
for (String type : highlighter.names()) {
map.put(type, highlighter);
}
}

}
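
Usage-wise, the legacy keys keep working but warn on lookup; both names resolve to the same instance because addBuiltIns maps each alias to map.get(...) of the canonical key. A short usage sketch:

    Highlighter canonical = highlighters.get("fvh");
    Highlighter legacy = highlighters.get("fast-vector-highlighter"); // logs a deprecation warning
    assert canonical == legacy; // aliases share one instance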
@@ -47,11 +47,6 @@ public class PlainHighlighter implements Highlighter {

private static final String CACHE_KEY = "highlight-plain";

@Override
public String[] names() {
return new String[] { "plain", "highlighter" };
}

@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;

@@ -40,11 +40,6 @@ public class PostingsHighlighter implements Highlighter {

private static final String CACHE_KEY = "highlight-postings";

@Override
public String[] names() {
return new String[]{"postings", "postings-highlighter"};
}

@Override
public HighlightField highlight(HighlighterContext highlighterContext) {

@@ -29,8 +29,6 @@ public abstract class Suggester<T extends SuggestionSearchContext.SuggestionCont
protected abstract Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException;

public abstract String[] names();

public abstract SuggestContextParser getContextParser();

public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
@@ -18,45 +18,46 @@
*/
package org.elasticsearch.search.suggest;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.term.TermSuggester;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.*;

/**
*
*/
public class Suggesters {
public final class Suggesters extends ExtensionPoint.MapExtensionPoint<Suggester> {
private final Map<String, Suggester> parsers;

public Suggesters() {
this(Collections.EMPTY_MAP);
}

public Suggesters(Map<String, Suggester> suggesters) {
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class);
this.parsers = Collections.unmodifiableMap(suggesters);
}

@Inject
public Suggesters(Set<Suggester> suggesters, ScriptService scriptService) {
public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService) {
this(addBuildIns(suggesters, scriptService));
}

private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService) {
final Map<String, Suggester> map = new HashMap<>();
add(map, new PhraseSuggester(scriptService));
add(map, new TermSuggester());
add(map, new CompletionSuggester());
for (Suggester suggester : suggesters) {
add(map, suggester);
}
this.parsers = Collections.unmodifiableMap(map);
map.put("phrase", new PhraseSuggester(scriptService));
map.put("term", new TermSuggester());
map.put("completion", new CompletionSuggester());
map.putAll(suggesters);
return map;
}

public Suggester get(String type) {
return parsers.get(type);
}

private void add(Map<String, Suggester> map, Suggester suggester) {
for (String type : suggester.names()) {
map.put(type, suggester);
}
}
}
@@ -101,11 +101,6 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
return completionSuggestion;
}

@Override
public String[] names() {
return new String[] { "completion" };
}

@Override
public SuggestContextParser getContextParser() {
return new CompletionSuggestParser(this);

@@ -150,11 +150,6 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
return scriptService;
}

@Override
public String[] names() {
return new String[] {"phrase"};
}

@Override
public SuggestContextParser getContextParser() {
return new PhraseSuggestParser(this);

@@ -65,11 +65,6 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
return response;
}

@Override
public String[] names() {
return new String[] {"term"};
}

@Override
public SuggestContextParser getContextParser() {
return new TermSuggestParser(this);
@@ -167,7 +167,7 @@ public class ExceptionSerializationTests extends ESTestCase {
pkg.append(p.getFileName().toString()).append(".");
}
pkg.append(filename.substring(0, filename.length() - 6));
return Thread.currentThread().getContextClassLoader().loadClass(pkg.toString());
return getClass().getClassLoader().loadClass(pkg.toString());
}

@Override
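
The test now resolves classes through the loader that defined the test class rather than the thread context loader, which test runners and embedded containers may set arbitrarily. In isolation:

    // Deterministic under any runner: the defining loader of this class.
    Class<?> clazz = getClass().getClassLoader().loadClass(className);
    // Previously: Thread.currentThread().getContextClassLoader().loadClass(className)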
@@ -17,15 +17,16 @@
* under the License.
*/

package org.elasticsearch.test.cache.recycler;
package org.elasticsearch.cache.recycler;

import com.google.common.base.Predicate;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.recycler.Recycler.V;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.NodeModule;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.threadpool.ThreadPool;
@@ -0,0 +1,95 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
import org.elasticsearch.cluster.routing.allocation.decider.MockDiskUsagesIT;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.concurrent.CountDownLatch;

/**
* Fake ClusterInfoService class that allows updating the nodes stats disk
* usage with fake values
*/
public class MockInternalClusterInfoService extends InternalClusterInfoService {

public static class Plugin extends AbstractPlugin {
@Override
public String name() {
return "mock-cluster-info-service";
}
@Override
public String description() {
return "a mock cluster info service for testing";
}
public void onModule(ClusterModule module) {
module.clusterInfoServiceImpl = MockInternalClusterInfoService.class;
}
}

private final ClusterName clusterName;
private volatile NodeStats[] stats = new NodeStats[3];

@Inject
public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
TransportNodesStatsAction transportNodesStatsAction,
TransportIndicesStatsAction transportIndicesStatsAction,
ClusterService clusterService, ThreadPool threadPool) {
super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool);
this.clusterName = ClusterName.clusterNameFromSettings(settings);
stats[0] = MockDiskUsagesIT.makeStats("node_t1", new DiskUsage("node_t1", "n1", 100, 100));
stats[1] = MockDiskUsagesIT.makeStats("node_t2", new DiskUsage("node_t2", "n2", 100, 100));
stats[2] = MockDiskUsagesIT.makeStats("node_t3", new DiskUsage("node_t3", "n3", 100, 100));
}

public void setN1Usage(String nodeName, DiskUsage newUsage) {
stats[0] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
}

public void setN2Usage(String nodeName, DiskUsage newUsage) {
stats[1] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
}

public void setN3Usage(String nodeName, DiskUsage newUsage) {
stats[2] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
}

@Override
public CountDownLatch updateNodeStats(final ActionListener<NodesStatsResponse> listener) {
NodesStatsResponse response = new NodesStatsResponse(clusterName, stats);
listener.onResponse(response);
return new CountDownLatch(0);
}

@Override
public CountDownLatch updateIndicesStats(final ActionListener<IndicesStatsResponse> listener) {
// Not used, so noop
return new CountDownLatch(0);
}
}
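
Promoting the mock to a top-level class with a nested Plugin means tests opt in through the plugin mechanism instead of a service-impl setting; the MockDiskUsagesIT hunk further down does exactly that. The test-side wiring in one statement:

    Settings nodeSettings = Settings.builder()
            .extendArray("plugin.types", MockInternalClusterInfoService.Plugin.class.getName())
            .build();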
@@ -59,8 +59,7 @@ public class AllocationModuleTests extends ModuleTestCase {
try {
module.registerAllocationDecider(EnableAllocationDecider.class);
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Cannot register AllocationDecider"));
assertTrue(e.getMessage().contains("twice"));
assertEquals(e.getMessage(), "Can't register the same [allocation_decider] more than once for [" + EnableAllocationDecider.class.getName() + "]");
}
}

@@ -82,14 +81,14 @@ public class AllocationModuleTests extends ModuleTestCase {
try {
module.registerShardAllocator(AllocationModule.BALANCED_ALLOCATOR, FakeShardsAllocator.class);
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("already registered"));
assertEquals(e.getMessage(), "Can't register the same [shards_allocator] more than once for [balanced]");
}
}

public void testUnknownShardsAllocator() {
Settings settings = Settings.builder().put(AllocationModule.SHARDS_ALLOCATOR_TYPE_KEY, "dne").build();
AllocationModule module = new AllocationModule(settings);
assertBindingFailure(module, "Unknown ShardsAllocator");
assertBindingFailure(module, "Unknown [shards_allocator]");
}

public void testEvenShardsAllocatorBackcompat() {
@@ -20,28 +20,20 @@
package org.elasticsearch.cluster.routing.allocation.decider;

import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Test;

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;

@@ -57,8 +49,8 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
// Use the mock internal cluster info service, which has fake-able disk usages
.put(ClusterModule.CLUSTER_SERVICE_IMPL, MockInternalClusterInfoService.class.getName())
// Use the mock internal cluster info service, which has fake-able disk usages
.extendArray("plugin.types", MockInternalClusterInfoService.Plugin.class.getName())
// Update more frequently
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s")
.build();
@@ -183,50 +175,4 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
null);
}

/**
* Fake ClusterInfoService class that allows updating the nodes stats disk
* usage with fake values
*/
public static class MockInternalClusterInfoService extends InternalClusterInfoService {

private final ClusterName clusterName;
private volatile NodeStats[] stats = new NodeStats[3];

@Inject
public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
TransportNodesStatsAction transportNodesStatsAction,
TransportIndicesStatsAction transportIndicesStatsAction,
ClusterService clusterService, ThreadPool threadPool) {
super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool);
this.clusterName = ClusterName.clusterNameFromSettings(settings);
stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", 100, 100));
stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", 100, 100));
stats[2] = makeStats("node_t3", new DiskUsage("node_t3", "n3", 100, 100));
}

public void setN1Usage(String nodeName, DiskUsage newUsage) {
stats[0] = makeStats(nodeName, newUsage);
}

public void setN2Usage(String nodeName, DiskUsage newUsage) {
stats[1] = makeStats(nodeName, newUsage);
}

public void setN3Usage(String nodeName, DiskUsage newUsage) {
stats[2] = makeStats(nodeName, newUsage);
}

@Override
public CountDownLatch updateNodeStats(final ActionListener<NodesStatsResponse> listener) {
NodesStatsResponse response = new NodesStatsResponse(clusterName, stats);
listener.onResponse(response);
return new CountDownLatch(0);
}

@Override
public CountDownLatch updateIndicesStats(final ActionListener<IndicesStatsResponse> listener) {
// Not used, so noop
return new CountDownLatch(0);
}
}
}
@@ -72,6 +72,37 @@ public abstract class ModuleTestCase extends ESTestCase {
}
}

/**
* Configures the module and checks a Map<String, Class> of the "to" class
* is bound to "theClass".
*/
public void assertMapMultiBinding(Module module, Class to, Class theClass) {
List<Element> elements = Elements.getElements(module);
Set<Type> bindings = new HashSet<>();
boolean providerFound = false;
for (Element element : elements) {
if (element instanceof LinkedKeyBinding) {
LinkedKeyBinding binding = (LinkedKeyBinding)element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
bindings.add(binding.getLinkedKey().getTypeLiteral().getType());
}
} else if (element instanceof ProviderInstanceBinding) {
ProviderInstanceBinding binding = (ProviderInstanceBinding)element;
String setType = binding.getKey().getTypeLiteral().getType().toString();
if (setType.equals("java.util.Map<java.lang.String, " + to.getName() + ">")) {
providerFound = true;
}
}
}

if (bindings.contains(theClass) == false) {
fail("Expected to find " + theClass.getName() + " as binding to " + to.getName() + ", found these classes:\n" + bindings);
}
assertTrue("Did not find provider for map of " + to.getName(), providerFound);
}


/**
* Configures the module and checks a Set of the "to" class
* is bound to "classes". There may be more classes bound
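
A usage sketch for the new helper, with a hypothetical highlighter class standing in for a real registration:

    SearchModule module = new SearchModule(settings);
    module.registerHighlighter("custom", MyHighlighter.class); // MyHighlighter is hypothetical
    assertMapMultiBinding(module, Highlighter.class, MyHighlighter.class);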
@@ -34,8 +34,9 @@ public class JsonSettingsLoaderTests extends ESTestCase {

@Test
public void testSimpleJsonSettings() throws Exception {
String json = "/org/elasticsearch/common/settings/loader/test-settings.json";
Settings settings = settingsBuilder()
.loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.json")
.loadFromStream(json, getClass().getResourceAsStream(json))
.build();

assertThat(settings.get("test1.value1"), equalTo("value1"));

@@ -34,8 +34,9 @@ public class YamlSettingsLoaderTests extends ESTestCase {

@Test
public void testSimpleYamlSettings() throws Exception {
String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml";
Settings settings = settingsBuilder()
.loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.yml")
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.build();

assertThat(settings.get("test1.value1"), equalTo("value1"));

@@ -52,15 +53,17 @@ public class YamlSettingsLoaderTests extends ESTestCase {

@Test(expected = SettingsException.class)
public void testIndentation() {
String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml";
settingsBuilder()
.loadFromClasspath("org/elasticsearch/common/settings/loader/indentation-settings.yml")
.build();
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.build();
}

@Test(expected = SettingsException.class)
public void testIndentationWithExplicitDocumentStart() {
String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml";
settingsBuilder()
.loadFromClasspath("org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml")
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.build();
}
}
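
The same two-line substitution repeats across the remaining test hunks: name the classpath resource once, then hand its stream to the builder. Distilled (the path is illustrative):

    String resource = "/org/example/test-settings.json"; // illustrative path
    Settings settings = Settings.settingsBuilder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource))
            .build();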
@@ -29,7 +29,6 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.junit.Before;

import java.lang.reflect.InvocationTargetException;

@@ -17,7 +17,7 @@
* under the License.
*/

package org.elasticsearch.test.cache.recycler;
package org.elasticsearch.common.util;

import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.SeedUtils;

@@ -30,15 +30,8 @@ import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.BigArray;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ByteArray;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase;

import java.util.Collection;
@@ -27,8 +27,8 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;

@@ -29,8 +29,8 @@ import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent;
import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
@@ -79,7 +79,7 @@ public class AnalysisModuleTests extends ESTestCase {
}

private Settings loadFromClasspath(String path) {
return settingsBuilder().loadFromClasspath(path)
return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.build();

@@ -88,13 +88,13 @@ public class AnalysisModuleTests extends ESTestCase {

@Test
public void testSimpleConfigurationJson() {
Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.json");
Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.json");
testSimpleConfiguration(settings);
}

@Test
public void testSimpleConfigurationYaml() {
Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.yml");
Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.yml");
testSimpleConfiguration(settings);
}

@@ -107,8 +107,9 @@ public class AnalysisModuleTests extends ESTestCase {

@Test
public void testVersionedAnalyzers() throws Exception {
String yaml = "/org/elasticsearch/index/analysis/test1.yml";
Settings settings2 = settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
.build();

@@ -39,7 +39,7 @@ public class AnalysisTestsHelper {

public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) {
Settings settings = Settings.settingsBuilder()
.loadFromClasspath(resource)
.loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource))
.put("path.home", baseDir.toString())
.build();
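
Note the resource paths gained a leading slash at the same time: Class.getResourceAsStream resolves a leading "/" from the classpath root, while a bare path is resolved relative to the class's own package. That is why every RESOURCE constant below changes along with the loader call:

    import java.io.InputStream;

    // root-relative: works from any package
    InputStream a = getClass().getResourceAsStream("/org/example/res.json");
    // package-relative: resolved against this class's package
    InputStream b = getClass().getResourceAsStream("res.json");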
@@ -29,7 +29,7 @@ import java.io.StringReader;

public class CJKFilterFactoryTests extends ESTokenStreamTestCase {

private static final String RESOURCE = "org/elasticsearch/index/analysis/cjk_analysis.json";
private static final String RESOURCE = "/org/elasticsearch/index/analysis/cjk_analysis.json";

@Test
public void testDefault() throws IOException {

@@ -115,16 +115,18 @@ public class CompoundAnalysisTests extends ESTestCase {
}

private Settings getJsonSettings() {
String json = "/org/elasticsearch/index/analysis/test1.json";
return settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/test1.json")
.loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.build();
}

private Settings getYamlSettings() {
String yaml = "/org/elasticsearch/index/analysis/test1.yml";
return settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.build();

@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.instanceOf;

public class KeepFilterFactoryTests extends ESTokenStreamTestCase {

private static final String RESOURCE = "org/elasticsearch/index/analysis/keep_analysis.json";
private static final String RESOURCE = "/org/elasticsearch/index/analysis/keep_analysis.json";


@Test

@@ -41,10 +41,11 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {

@Test
public void testPatternCaptureTokenFilter() throws Exception {
String json = "/org/elasticsearch/index/analysis/pattern_capture.json";
Index index = new Index("test");
Settings settings = settingsBuilder()
.put("path.home", createTempDir())
.loadFromClasspath("org/elasticsearch/index/analysis/pattern_capture.json")
.loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();

@@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.instanceOf;
@ThreadLeakScope(Scope.NONE)
public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {

private static final String RESOURCE = "org/elasticsearch/index/analysis/shingle_analysis.json";
private static final String RESOURCE = "/org/elasticsearch/index/analysis/shingle_analysis.json";

@Test
public void testDefault() throws IOException {

@@ -41,9 +41,10 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {

@Test
public void testDefaultsCompoundAnalysis() throws Exception {
String json = "/org/elasticsearch/index/analysis/stop.json";
Index index = new Index("test");
Settings settings = settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/stop.json")
.loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();

@@ -134,8 +134,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {

@Test
public void testCommonGramsAnalysis() throws IOException {
String json = "/org/elasticsearch/index/analysis/commongrams/commongrams.json";
Settings settings = Settings.settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams.json")
.loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.build();
{

@@ -218,8 +219,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {

@Test
public void testQueryModeCommonGramsAnalysis() throws IOException {
String json = "/org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json";
Settings settings = Settings.settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json")
.loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.build();
{

@@ -59,8 +59,9 @@ public class SynonymsAnalysisTest extends ESTestCase {

@Test
public void testSynonymsAnalysis() throws IOException {
String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json";
Settings settings = settingsBuilder().
loadFromClasspath("org/elasticsearch/index/analysis/synonyms/synonyms.json")
loadFromStream(json, getClass().getResourceAsStream(json))
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
@@ -20,9 +20,11 @@ package org.elasticsearch.index.shard;

import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;

@@ -30,6 +32,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.IndexService;

@@ -48,6 +51,7 @@ import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutionException;

@@ -56,6 +60,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;

@@ -434,4 +439,89 @@ public class IndexShardTests extends ESSingleNodeTestCase {
response = client().prepareSearch("test").get();
assertHitCount(response, 0l);
}

public void testIndexDirIsDeletedWhenShardRemoved() throws Exception {
Environment env = getInstanceFromNode(Environment.class);
Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10));
logger.info("--> idxPath: [{}]", idxPath);
Settings idxSettings = Settings.builder()
.put(IndexMetaData.SETTING_DATA_PATH, idxPath)
.build();
createIndex("test", idxSettings);
ensureGreen("test");
client().prepareIndex("test", "bar", "1").setSource("{}").setRefresh(true).get();
SearchResponse response = client().prepareSearch("test").get();
assertHitCount(response, 1l);
client().admin().indices().prepareDelete("test").get();
assertPathHasBeenCleared(idxPath);
}

public void testIndexCanChangeCustomDataPath() throws Exception {
Environment env = getInstanceFromNode(Environment.class);
Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10));
final String INDEX = "idx";
Path startDir = idxPath.resolve("start-" + randomAsciiOfLength(10));
Path endDir = idxPath.resolve("end-" + randomAsciiOfLength(10));
logger.info("--> start dir: [{}]", startDir.toAbsolutePath().toString());
logger.info("--> end dir: [{}]", endDir.toAbsolutePath().toString());
// temp dirs are automatically created, but the end dir is what
// startDir is going to be renamed as, so it needs to be deleted
// otherwise we get all sorts of errors about the directory
// already existing
IOUtils.rm(endDir);

Settings sb = Settings.builder()
.put(IndexMetaData.SETTING_DATA_PATH, startDir.toAbsolutePath().toString())
.build();
Settings sb2 = Settings.builder()
.put(IndexMetaData.SETTING_DATA_PATH, endDir.toAbsolutePath().toString())
.build();

logger.info("--> creating an index with data_path [{}]", startDir.toAbsolutePath().toString());
createIndex(INDEX, sb);
ensureGreen(INDEX);
client().prepareIndex(INDEX, "bar", "1").setSource("{}").setRefresh(true).get();

SearchResponse resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));

logger.info("--> closing the index [{}]", INDEX);
client().admin().indices().prepareClose(INDEX).get();
logger.info("--> index closed, re-opening...");
client().admin().indices().prepareOpen(INDEX).get();
logger.info("--> index re-opened");
ensureGreen(INDEX);

resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));

// Now, try closing and changing the settings

logger.info("--> closing the index [{}]", INDEX);
client().admin().indices().prepareClose(INDEX).get();

logger.info("--> moving data on disk [{}] to [{}]", startDir.getFileName(), endDir.getFileName());
assert Files.exists(endDir) == false : "end directory should not exist!";
Files.move(startDir, endDir, StandardCopyOption.REPLACE_EXISTING);

logger.info("--> updating settings...");
client().admin().indices().prepareUpdateSettings(INDEX)
.setSettings(sb2)
.setIndicesOptions(IndicesOptions.fromOptions(true, false, true, true))
.get();

assert Files.exists(startDir) == false : "start dir shouldn't exist";

logger.info("--> settings updated and files moved, re-opening index");
client().admin().indices().prepareOpen(INDEX).get();
logger.info("--> index re-opened");
ensureGreen(INDEX);

resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));

assertAcked(client().admin().indices().prepareDelete(INDEX));
assertPathHasBeenCleared(startDir.toAbsolutePath().toString());
assertPathHasBeenCleared(endDir.toAbsolutePath().toString());
}
}
@@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.shard;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.test.engine.MockEngineSupportModule;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

// this must exist in the same package as IndexShardModule to allow access to setting the impl
public class MockEngineFactoryPlugin extends AbstractPlugin {
@Override
public String name() {
return "mock-engine-factory";
}
@Override
public String description() {
return "a mock engine factory for testing";
}
@Override
public Collection<Class<? extends Module>> indexModules() {
List<Class<? extends Module>> modules = new ArrayList<>();
modules.add(MockEngineSupportModule.class);
return modules;
}
public void onModule(IndexShardModule module) {
module.engineFactoryImpl = MockEngineFactory.class;
}
}
@ -1,164 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.indices;
|
||||
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.junit.annotations.TestLogging;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
||||
* Tests for custom data path locations and templates
|
||||
*/
|
||||
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
public class IndicesCustomDataPathIT extends ESIntegTestCase {

    private String path;

    private Settings nodeSettings(Path dataPath) {
        return nodeSettings(dataPath.toString());
    }

    private Settings nodeSettings(String dataPath) {
        return Settings.builder()
                .put("node.add_id_to_custom_path", false)
                .put("path.shared_data", dataPath)
                .put("index.store.fs.fs_lock", randomFrom("native", "simple"))
                .build();
    }

    @Before
    public void setup() {
        path = createTempDir().toAbsolutePath().toString();
    }

    @After
    public void teardown() throws Exception {
        IOUtils.deleteFilesIgnoringExceptions(PathUtils.get(path));
    }

    @Test
    @TestLogging("_root:DEBUG,index:TRACE")
    @AwaitsFix(bugUrl = "path shenanigans, Lee is looking into it")
    public void testDataPathCanBeChanged() throws Exception {
        final String INDEX = "idx";
        Path root = createTempDir();
        internalCluster().startNodesAsync(1, nodeSettings(root));
        Path startDir = root.resolve("start");
        Path endDir = root.resolve("end");
        logger.info("--> start dir: [{}]", startDir.toAbsolutePath().toString());
        logger.info("--> end dir: [{}]", endDir.toAbsolutePath().toString());
        // temp dirs are automatically created, but the end dir is what
        // startDir is going to be renamed as, so it needs to be deleted
        // otherwise we get all sorts of errors about the directory
        // already existing
        IOUtils.rm(endDir);

        Settings.Builder sb = Settings.builder().put(IndexMetaData.SETTING_DATA_PATH,
                startDir.toAbsolutePath().toString());
        Settings.Builder sb2 = Settings.builder().put(IndexMetaData.SETTING_DATA_PATH,
                endDir.toAbsolutePath().toString());

        logger.info("--> creating an index with data_path [{}]", startDir.toAbsolutePath().toString());
        client().admin().indices().prepareCreate(INDEX).setSettings(sb).get();
        ensureGreen(INDEX);

        indexRandom(true, client().prepareIndex(INDEX, "doc", "1").setSource("{\"body\": \"foo\"}"));

        SearchResponse resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
        assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));

        logger.info("--> closing the index [{}]", INDEX);
        client().admin().indices().prepareClose(INDEX).get();
        logger.info("--> index closed, re-opening...");
        client().admin().indices().prepareOpen(INDEX).get();
        logger.info("--> index re-opened");
        ensureGreen(INDEX);

        resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
        assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));

        // Now, try closing and changing the settings

        logger.info("--> closing the index [{}]", INDEX);
        client().admin().indices().prepareClose(INDEX).get();

        logger.info("--> moving data on disk [{}] to [{}]", startDir.getFileName(), endDir.getFileName());
        assert Files.exists(endDir) == false : "end directory should not exist!";
        Files.move(startDir, endDir, StandardCopyOption.REPLACE_EXISTING);

        logger.info("--> updating settings...");
        client().admin().indices().prepareUpdateSettings(INDEX)
                .setSettings(sb2)
                .setIndicesOptions(IndicesOptions.fromOptions(true, false, true, true))
                .get();

        assert Files.exists(startDir) == false : "start dir shouldn't exist";

        logger.info("--> settings updated and files moved, re-opening index");
        client().admin().indices().prepareOpen(INDEX).get();
        logger.info("--> index re-opened");
        ensureGreen(INDEX);

        resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
        assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));

        assertAcked(client().admin().indices().prepareDelete(INDEX));
        assertPathHasBeenCleared(startDir.toAbsolutePath().toString());
        assertPathHasBeenCleared(endDir.toAbsolutePath().toString());
    }

    @Test
    @AwaitsFix(bugUrl = "path shenanigans, Lee is looking into it")
    public void testIndexCreatedWithCustomPathAndTemplate() throws Exception {
        final String INDEX = "myindex2";
        internalCluster().startNodesAsync(1, nodeSettings(path));

        logger.info("--> creating an index with data_path [{}]", path);
        Settings.Builder sb = Settings.builder()
                .put(IndexMetaData.SETTING_DATA_PATH, path)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0);

        client().admin().indices().prepareCreate(INDEX).setSettings(sb).get();
        ensureGreen(INDEX);

        indexRandom(true, client().prepareIndex(INDEX, "doc", "1").setSource("{\"body\": \"foo\"}"));

        SearchResponse resp = client().prepareSearch(INDEX).setQuery(matchAllQuery()).get();
        assertThat("found the hit", resp.getHits().getTotalHits(), equalTo(1L));
        assertAcked(client().admin().indices().prepareDelete(INDEX));
        assertPathHasBeenCleared(path);
    }
}
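
For orientation, the core pattern the class above exercises is simply setting index.data_path (IndexMetaData.SETTING_DATA_PATH) at index-creation time. A minimal sketch, assuming `client` is a connected Client and `customPath` sits under the node's path.shared_data directory (both names are illustrative, not from this diff):

    // Hedged sketch: create an index whose shards live under a custom path.
    Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString())
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .build();
    client.admin().indices().prepareCreate("myindex").setSettings(indexSettings).get();
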
@@ -35,9 +35,11 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.engine.MockEngineSupportModule;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;
import org.junit.Test;

@@ -105,7 +107,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {

        Settings.Builder settings = settingsBuilder()
                .put(indexSettings())
                .put(MockEngineSupport.READER_WRAPPER_TYPE, RandomExceptionDirectoryReaderWrapper.class.getName())
                .extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.Plugin.class.getName())
                .put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate)
                .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
                .put(MockEngineSupport.WRAP_READER_RATIO, 1.0d);

@@ -199,6 +201,21 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {

    // TODO: Generalize this class and add it as a utility
    public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {

        public static class Plugin extends AbstractPlugin {
            @Override
            public String name() {
                return "random-exception-reader-wrapper";
            }
            @Override
            public String description() {
                return "a mock reader wrapper that throws random exceptions for testing";
            }
            public void onModule(MockEngineSupportModule module) {
                module.wrapperImpl = RandomExceptionDirectoryReaderWrapper.class;
            }
        }

        private final Settings settings;

        static class ThrowingSubReaderWrapper extends SubReaderWrapper implements ThrowingLeafReaderWrapper.Thrower {
@@ -19,6 +19,7 @@

package org.elasticsearch.indices.stats;

import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.shard.MergeSchedulerConfig;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.Version;

@@ -39,9 +40,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.query.QueryCacheModule;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.MergePolicyConfig;

@@ -79,8 +78,8 @@ public class IndexStatsIT extends ESIntegTestCase {
        // Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. Thread.sleep for 60s is bad
        return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
                .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL, "1ms")
                .put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, true)
                .put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, IndexQueryCache.class)
                .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
                .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
                .build();
    }
@@ -0,0 +1,41 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.node;

import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.plugins.AbstractPlugin;

public class NodeMocksPlugin extends AbstractPlugin {

    @Override
    public String name() {
        return "node-mocks";
    }

    @Override
    public String description() {
        return "a plugin to setup mocks for node level classes";
    }

    public void onModule(NodeModule module) {
        module.pageCacheRecyclerImpl = MockPageCacheRecycler.class;
        module.bigArraysImpl = MockBigArrays.class;
    }
}
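
For reference, a test node opts into these mocks by listing the plugin in its plugin.types setting, exactly as the InternalTestCluster change further down in this diff does; a minimal sketch:

    // Hedged sketch: enable NodeMocksPlugin on a node through plugin.types,
    // mirroring the InternalTestCluster wiring later in this diff.
    Settings nodeSettings = Settings.settingsBuilder()
            .extendArray("plugin.types", NodeMocksPlugin.class.getName())
            .build();
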
@@ -222,19 +222,4 @@ public class InternalSettingsPreparerTests extends ESTestCase {
        assertThat(settings.get("name"), is("prompted name 0"));
        assertThat(settings.get("node.name"), is("prompted name 0"));
    }

    @Test
    public void testPreserveSettingsClassloader() {
        final ClassLoader classLoader = URLClassLoader.newInstance(new URL[0]);
        Settings settings = settingsBuilder()
                .put("foo", "bar")
                .put("path.home", createTempDir())
                .classLoader(classLoader)
                .build();

        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settings, randomBoolean());

        Settings preparedSettings = tuple.v1();
        assertThat(preparedSettings.getClassLoaderIfSet(), is(classLoader));
    }
}
@@ -24,6 +24,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Test;

import java.io.IOException;

@@ -33,7 +34,6 @@ import java.util.Iterator;
import java.util.Locale;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;

@@ -42,6 +42,11 @@ import static org.hamcrest.Matchers.is;
 */
public class PluginManagerUnitTests extends ESTestCase {

    @After
    public void cleanSystemProperty() {
        System.clearProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS);
    }

    @Test
    public void testThatConfigDirectoryCanBeOutsideOfElasticsearchHomeDirectory() throws IOException {
        String pluginName = randomAsciiOfLength(10);

@@ -66,19 +71,24 @@ public class PluginManagerUnitTests extends ESTestCase {
        String pluginName = randomAsciiOfLength(10);
        PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(pluginName);

        assertThat(handle.urls(), hasSize(Version.CURRENT.snapshot() ? 2 : 1));
        boolean supportStagingUrls = randomBoolean();
        if (supportStagingUrls) {
            System.setProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS, "true");
        }

        Iterator<URL> iterator = handle.urls().iterator();

        if (Version.CURRENT.snapshot()) {
            String expectedSnapshotUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip",
        if (supportStagingUrls) {
            String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                    pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
            assertThat(iterator.next(), is(new URL(expectedSnapshotUrl)));
            assertThat(iterator.next(), is(new URL(expectedStagingURL)));
        }

        URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/" + pluginName + "/" + Version.CURRENT.number() + "/" +
                pluginName + "-" + Version.CURRENT.number() + ".zip");
        assertThat(iterator.next(), is(expected));

        assertThat(iterator.hasNext(), is(false));
    }

    @Test

@@ -87,18 +97,24 @@ public class PluginManagerUnitTests extends ESTestCase {
        PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName);
        assertThat(handle.name, is(randomPluginName.replaceAll("^elasticsearch-", "")));

        assertThat(handle.urls(), hasSize(Version.CURRENT.snapshot() ? 2 : 1));
        boolean supportStagingUrls = randomBoolean();
        if (supportStagingUrls) {
            System.setProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS, "true");
        }

        Iterator<URL> iterator = handle.urls().iterator();

        if (Version.CURRENT.snapshot()) {
            String expectedSnapshotUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip",
        if (supportStagingUrls) {
            String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                    randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
            assertThat(iterator.next(), is(new URL(expectedSnapshotUrl)));
            assertThat(iterator.next(), is(new URL(expectedStagingUrl)));
        }

        String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
        assertThat(iterator.next(), is(new URL(releaseUrl)));

        assertThat(iterator.hasNext(), is(false));
    }

    @Test
@@ -17,7 +17,7 @@
 * under the License.
 */

package org.elasticsearch.test.search;
package org.elasticsearch.search;

import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;

@@ -28,6 +28,7 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.cache.request.IndicesRequestCache;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.dfs.DfsPhase;

@@ -42,6 +43,20 @@ import java.util.concurrent.ConcurrentHashMap;

public class MockSearchService extends SearchService {

    public static class Plugin extends AbstractPlugin {
        @Override
        public String name() {
            return "mock-search-service";
        }
        @Override
        public String description() {
            return "a mock search service for testing";
        }
        public void onModule(SearchModule module) {
            module.searchServiceImpl = MockSearchService.class;
        }
    }

    private static final Map<SearchContext, Throwable> ACTIVE_SEARCH_CONTEXTS = new ConcurrentHashMap<>();

    /** Throw an {@link AssertionError} if there are still in-flight contexts. */
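
The ACTIVE_SEARCH_CONTEXTS map above backs the leak check the javadoc describes: a context is recorded when created and forgotten when freed, and anything left over fails the test. A hedged sketch of that tracking idiom (method names here are illustrative, not necessarily this class's actual API):

    // Illustrative only: track in-flight contexts and fail loudly on leaks.
    static void onPutContext(SearchContext context) {
        ACTIVE_SEARCH_CONTEXTS.put(context, new RuntimeException("creation stack trace"));
    }

    static void onRemoveContext(SearchContext context) {
        ACTIVE_SEARCH_CONTEXTS.remove(context);
    }

    static void assertNoInFlightContext() {
        if (ACTIVE_SEARCH_CONTEXTS.isEmpty() == false) {
            // the stored Throwable points at whoever created the leaked context
            throw new AssertionError(ACTIVE_SEARCH_CONTEXTS.size() + " in-flight contexts remain",
                    ACTIVE_SEARCH_CONTEXTS.values().iterator().next());
        }
    }
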
@@ -0,0 +1,69 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search;

import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.highlight.CustomHighlighter;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.highlight.PlainHighlighter;
import org.elasticsearch.search.suggest.CustomSuggester;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
/**
 */
public class SearchModuleTests extends ModuleTestCase {

    public void testDoubleRegister() {
        SearchModule module = new SearchModule(Settings.EMPTY);
        try {
            module.registerHighlighter("fvh", PlainHighlighter.class);
        } catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "Can't register the same [highlighter] more than once for [fvh]");
        }

        try {
            module.registerSuggester("term", PhraseSuggester.class);
        } catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [term]");
        }
    }

    public void testRegisterSuggester() {
        SearchModule module = new SearchModule(Settings.EMPTY);
        module.registerSuggester("custom", CustomSuggester.class);
        try {
            module.registerSuggester("custom", CustomSuggester.class);
        } catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [custom]");
        }
        assertMapMultiBinding(module, Suggester.class, CustomSuggester.class);
    }

    public void testRegisterHighlighter() {
        SearchModule module = new SearchModule(Settings.EMPTY);
        module.registerHighlighter("custom", CustomHighlighter.class);
        try {
            module.registerHighlighter("custom", CustomHighlighter.class);
        } catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "Can't register the same [highlighter] more than once for [custom]");
        }
        assertMapMultiBinding(module, Highlighter.class, CustomHighlighter.class);
    }
}
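
The registration API these tests cover is what plugins call from onModule; a minimal hedged sketch that mirrors the CustomHighlighterPlugin change further down in this diff (MyHighlighter is a hypothetical Highlighter implementation):

    // Hedged sketch: register a highlighter under an explicit name; a second
    // registration of the same name throws IllegalArgumentException.
    public class MyHighlighterPlugin extends AbstractPlugin {
        @Override
        public String name() {
            return "my-highlighter";
        }

        @Override
        public String description() {
            return "registers a custom highlighter";
        }

        public void onModule(SearchModule module) {
            module.registerHighlighter("my-custom", MyHighlighter.class); // MyHighlighter is hypothetical
        }
    }
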
@@ -35,10 +35,12 @@ import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.engine.MockEngineSupportModule;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSDirectoryService;

@@ -250,7 +252,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {

        Builder settings = settingsBuilder()
                .put(indexSettings())
                .put(MockEngineSupport.READER_WRAPPER_TYPE, RandomExceptionDirectoryReaderWrapper.class.getName())
                .extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.Plugin.class.getName())
                .put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate)
                .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
                .put(MockEngineSupport.WRAP_READER_RATIO, 1.0d);

@@ -310,6 +312,21 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {

    public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {

        public static class Plugin extends AbstractPlugin {
            @Override
            public String name() {
                return "random-exception-reader-wrapper";
            }
            @Override
            public String description() {
                return "a mock reader wrapper that throws random exceptions for testing";
            }
            public void onModule(MockEngineSupportModule module) {
                module.wrapperImpl = RandomExceptionDirectoryReaderWrapper.class;
            }
        }

        private final Settings settings;

        static class ThrowingSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper implements ThrowingLeafReaderWrapper.Thrower {
@@ -29,8 +29,7 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.cache.query.QueryCacheModule;
import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.query.HasChildQueryBuilder;

@@ -74,8 +73,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
                // aggressive filter caching so that we can assert on the filter cache size
                .put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, IndexQueryCache.class)
                .put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, true)
                .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
                .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
                .build();
    }
@@ -32,11 +32,6 @@ import java.util.Map;
 */
public class CustomHighlighter implements Highlighter {

    @Override
    public String[] names() {
        return new String[] { "test-custom" };
    }

    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
        SearchContextHighlight.Field field = highlighterContext.field;
@@ -35,6 +35,6 @@ public class CustomHighlighterPlugin extends AbstractPlugin {
    }

    public void onModule(SearchModule highlightModule) {
        highlightModule.registerHighlighter(CustomHighlighter.class);
        highlightModule.registerHighlighter("test-custom", CustomHighlighter.class);
    }
}
@@ -21,8 +21,7 @@ package org.elasticsearch.search.scriptfilter;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.query.QueryCacheModule;
import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;

@@ -50,8 +49,8 @@ public class ScriptQuerySearchIT extends ESIntegTestCase {
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
                // aggressive filter caching so that we can assert on the number of iterations of the script filters
                .put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, IndexQueryCache.class)
                .put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, true)
                .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
                .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
                .build();
    }
@@ -55,11 +55,6 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
        return response;
    }

    @Override
    public String[] names() {
        return new String[] {"custom"};
    }

    @Override
    public SuggestContextParser getContextParser() {
        return new SuggestContextParser() {
@@ -37,7 +37,7 @@ public class CustomSuggesterPlugin extends AbstractPlugin {
    }

    public void onModule(SearchModule searchModule) {
        searchModule.registerSuggester(CustomSuggester.class);
        searchModule.registerSuggester("custom", CustomSuggester.class);
    }

}
@@ -1808,39 +1808,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
        return nodes;
    }

    /**
     * Asserts that there are no files in the specified path
     */
    public void assertPathHasBeenCleared(String path) throws Exception {
        assertPathHasBeenCleared(PathUtils.get(path));
    }

    /**
     * Asserts that there are no files in the specified path
     */
    public void assertPathHasBeenCleared(Path path) throws Exception {
        logger.info("--> checking that [{}] has been cleared", path);
        int count = 0;
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        if (Files.exists(path)) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
                for (Path file : stream) {
                    logger.info("--> found file: [{}]", file.toAbsolutePath().toString());
                    if (Files.isDirectory(file)) {
                        assertPathHasBeenCleared(file);
                    } else if (Files.isRegularFile(file)) {
                        count++;
                        sb.append(file.toAbsolutePath().toString());
                        sb.append("\n");
                    }
                }
            }
        }
        sb.append("]");
        assertThat(count + " files exist that should have been cleaned:\n" + sb.toString(), count, equalTo(0));
    }

    protected static class NumShards {
        public final int numPrimaries;
        public final int numReplicas;
@@ -46,25 +46,26 @@ import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsAbortPolicy;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.test.junit.listeners.AssertionErrorThreadDumpPrinter;
import org.elasticsearch.test.junit.listeners.LoggingListener;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
import org.elasticsearch.test.search.MockSearchService;
import org.elasticsearch.search.MockSearchService;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.*;
import org.junit.rules.RuleChain;

import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.file.DirectoryStream;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.Callable;

@@ -73,6 +74,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import static com.google.common.collect.Lists.newArrayList;
import static org.hamcrest.Matchers.equalTo;

/**
 * Base testcase for randomized unit testing with Elasticsearch

@@ -581,4 +583,40 @@ public abstract class ESTestCase extends LuceneTestCase {
        return enabled;
    }

    /**
     * Asserts that there are no files in the specified path
     */
    public void assertPathHasBeenCleared(String path) throws Exception {
        assertPathHasBeenCleared(PathUtils.get(path));
    }

    /**
     * Asserts that there are no files in the specified path
     */
    public void assertPathHasBeenCleared(Path path) throws Exception {
        logger.info("--> checking that [{}] has been cleared", path);
        int count = 0;
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        if (Files.exists(path)) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
                for (Path file : stream) {
                    // Skip files added by Lucene's ExtraFS
                    if (file.getFileName().toString().startsWith("extra")) {
                        continue;
                    }
                    logger.info("--> found file: [{}]", file.toAbsolutePath().toString());
                    if (Files.isDirectory(file)) {
                        assertPathHasBeenCleared(file);
                    } else if (Files.isRegularFile(file)) {
                        count++;
                        sb.append(file.toAbsolutePath().toString());
                        sb.append("\n");
                    }
                }
            }
        }
        sb.append("]");
        assertThat(count + " files exist that should have been cleaned:\n" + sb.toString(), count, equalTo(0));
    }
}
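
A typical call site for the helper moved here, as IndicesCustomDataPathIT earlier in this diff uses it (index name and path are illustrative):

    // After deleting an index with a custom data path, its directory should be empty.
    assertAcked(client().admin().indices().prepareDelete("myindex"));
    assertPathHasBeenCleared(customPath);
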
@@ -42,7 +42,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecyclerModule;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.ClusterName;

@@ -72,20 +71,16 @@ import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArraysModule;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.cache.query.QueryCacheModule;
import org.elasticsearch.index.cache.query.QueryCacheModule.QueryCacheSettings;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineClosedException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardModule;
import org.elasticsearch.index.shard.MockEngineFactoryPlugin;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;

@@ -95,16 +90,14 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeMocksPlugin;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.test.disruption.ServiceDisruptionScheme;
import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.test.search.MockSearchService;
import org.elasticsearch.search.MockSearchService;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.elasticsearch.test.transport.MockTransportService;

@@ -390,11 +383,12 @@ public final class InternalTestCluster extends TestCluster {
        Builder builder = Settings.settingsBuilder()
                .put(SETTING_CLUSTER_NODE_SEED, seed);
        if (ENABLE_MOCK_MODULES && usually(random)) {
            builder.extendArray("plugin.types", MockTransportService.Plugin.class.getName(), MockFSIndexStore.Plugin.class.getName());
            builder.put(IndexShardModule.ENGINE_FACTORY, MockEngineFactory.class);
            builder.put(PageCacheRecyclerModule.CACHE_IMPL, MockPageCacheRecycler.class.getName());
            builder.put(BigArraysModule.IMPL, MockBigArrays.class.getName());
            builder.put(SearchModule.SEARCH_SERVICE_IMPL, MockSearchService.class.getName());
            builder.extendArray("plugin.types",
                    MockTransportService.Plugin.class.getName(),
                    MockFSIndexStore.Plugin.class.getName(),
                    NodeMocksPlugin.class.getName(),
                    MockEngineFactoryPlugin.class.getName(),
                    MockSearchService.Plugin.class.getName());
        }
        if (isLocalTransportConfigured()) {
            builder.extendArray("plugin.types", AssertingLocalTransport.Plugin.class.getName());

@@ -457,11 +451,11 @@ public final class InternalTestCluster extends TestCluster {
        }

        if (random.nextBoolean()) {
            builder.put(QueryCacheModule.QueryCacheSettings.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexQueryCache.class : NoneQueryCache.class);
            builder.put(IndexCacheModule.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexCacheModule.INDEX_QUERY_CACHE : IndexCacheModule.NONE_QUERY_CACHE);
        }

        if (random.nextBoolean()) {
            builder.put(QueryCacheSettings.QUERY_CACHE_EVERYTHING, random.nextBoolean());
            builder.put(IndexCacheModule.QUERY_CACHE_EVERYTHING, random.nextBoolean());
        }

        if (random.nextBoolean()) {
@@ -18,25 +18,41 @@
 */
package org.elasticsearch.test.engine;

import org.elasticsearch.common.settings.Settings;
import org.apache.lucene.index.FilterDirectoryReader;
import org.elasticsearch.common.inject.BindingAnnotation;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.transport.TransportModule;

/**
 *
 */
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

public final class MockEngineFactory implements EngineFactory {
    @BindingAnnotation
    @Target({FIELD, PARAMETER})
    @Retention(RUNTIME)
    public @interface MockReaderType {
    }

    private Class<? extends FilterDirectoryReader> wrapper;

    @Inject
    public MockEngineFactory(@MockReaderType Class wrapper) {
        this.wrapper = wrapper;
    }

    @Override
    public Engine newReadWriteEngine(EngineConfig config, boolean skipTranslogRecovery) {
        return new MockInternalEngine(config, skipTranslogRecovery);
        return new MockInternalEngine(config, skipTranslogRecovery, wrapper);
    }

    @Override
    public Engine newReadOnlyEngine(EngineConfig config) {
        return new MockShadowEngine(config);
        return new MockShadowEngine(config, wrapper);
    }
}
@@ -18,7 +18,6 @@
 */
package org.elasticsearch.test.engine;

import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.IndexReader;

@@ -29,7 +28,6 @@ import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Classes;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

@@ -80,7 +78,7 @@ public final class MockEngineSupport {
        }
    }

    public MockEngineSupport(EngineConfig config) {
    public MockEngineSupport(EngineConfig config, Class<? extends FilterDirectoryReader> wrapper) {
        Settings indexSettings = config.getIndexSettings();
        shardId = config.getShardId();
        filterCache = config.getQueryCache();

@@ -88,13 +86,6 @@ public final class MockEngineSupport {
        final long seed = indexSettings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l);
        Random random = new Random(seed);
        final double ratio = indexSettings.getAsDouble(WRAP_READER_RATIO, 0.0d); // DISABLED by default - AssertingDR is crazy slow
        String readerWrapperType = indexSettings.get(READER_WRAPPER_TYPE);
        Class<? extends AssertingDirectoryReader> wrapper;
        if (readerWrapperType == null) {
            wrapper = AssertingDirectoryReader.class;
        } else {
            wrapper = Classes.loadClass(getClass().getClassLoader(), readerWrapperType);
        }
        boolean wrapReader = random.nextDouble() < ratio;
        if (logger.isTraceEnabled()) {
            logger.trace("Using [{}] for shard [{}] seed: [{}] wrapReader: [{}]", this.getClass().getName(), shardId, seed, wrapReader);
@@ -16,21 +16,17 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.test.engine;

package org.elasticsearch.index.cache.bitset;

import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;

/**
 */
public class BitsetFilterCacheModule extends AbstractModule {

    public BitsetFilterCacheModule(Settings settings) {
    }
public class MockEngineSupportModule extends AbstractModule {
    public Class<? extends FilterDirectoryReader> wrapperImpl = AssertingDirectoryReader.class;

    @Override
    protected void configure() {
        bind(BitsetFilterCache.class).asEagerSingleton();
        bind(Class.class).annotatedWith(MockEngineFactory.MockReaderType.class).toInstance(wrapperImpl);
    }
}
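
The bind(...).annotatedWith(...).toInstance(...) line above is the standard binding-annotation pattern: the module publishes the wrapper class under the @MockReaderType qualifier, and MockEngineFactory (earlier in this diff) receives it through its @Inject constructor. A self-contained hedged sketch of the same pattern, written against plain Guice since org.elasticsearch.common.inject shadows that API:

    import com.google.inject.AbstractModule;
    import com.google.inject.BindingAnnotation;
    import com.google.inject.Guice;
    import com.google.inject.Inject;

    import java.lang.annotation.Retention;
    import java.lang.annotation.Target;

    import static java.lang.annotation.ElementType.FIELD;
    import static java.lang.annotation.ElementType.PARAMETER;
    import static java.lang.annotation.RetentionPolicy.RUNTIME;

    public class BindingAnnotationSketch {

        @BindingAnnotation
        @Target({FIELD, PARAMETER})
        @Retention(RUNTIME)
        public @interface MockReaderType {
        }

        // Stand-in for MockEngineFactory: the qualifier selects which Class binding it gets.
        static class Factory {
            final Class wrapper;

            @Inject
            Factory(@MockReaderType Class wrapper) {
                this.wrapper = wrapper;
            }
        }

        public static void main(String[] args) {
            Factory factory = Guice.createInjector(new AbstractModule() {
                @Override
                protected void configure() {
                    // Publish a Class instance under the qualifier, as the module above does.
                    bind(Class.class).annotatedWith(MockReaderType.class).toInstance(String.class);
                }
            }).getInstance(Factory.class);
            System.out.println(factory.wrapper); // prints: class java.lang.String
        }
    }
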
@@ -18,6 +18,7 @@
 */
package org.elasticsearch.test.engine;

import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -30,17 +31,19 @@ import java.io.IOException;
final class MockInternalEngine extends InternalEngine {
    private MockEngineSupport support;
    private final boolean randomizeFlushOnClose;
    private Class<? extends FilterDirectoryReader> wrapperClass;

    MockInternalEngine(EngineConfig config, boolean skipInitialTranslogRecovery) throws EngineException {
    MockInternalEngine(EngineConfig config, boolean skipInitialTranslogRecovery, Class<? extends FilterDirectoryReader> wrapper) throws EngineException {
        super(config, skipInitialTranslogRecovery);
        randomizeFlushOnClose = IndexMetaData.isOnSharedFilesystem(config.getIndexSettings()) == false;
        wrapperClass = wrapper;
    }

    private synchronized MockEngineSupport support() {
        // lazy initialized since we need it already on super() ctor execution :(
        if (support == null) {
            support = new MockEngineSupport(config());
            support = new MockEngineSupport(config(), wrapperClass);
        }
        return support;
    }
@@ -19,6 +19,8 @@

package org.elasticsearch.test.engine;

import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.search.AssertingIndexSearcher;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;

@@ -32,9 +34,9 @@ import java.util.Map;
final class MockShadowEngine extends ShadowEngine {
    private final MockEngineSupport support;

    MockShadowEngine(EngineConfig config) {
    MockShadowEngine(EngineConfig config, Class<? extends FilterDirectoryReader> wrapper) {
        super(config);
        this.support = new MockEngineSupport(config);
        this.support = new MockEngineSupport(config, wrapper);
    }

    @Override
@@ -29,8 +29,8 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.netty.NettyTransport;
import org.junit.After;
@@ -31,7 +31,7 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.test.junit.rule.RepeatOnExceptionRule;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BindTransportException;
@@ -0,0 +1,186 @@
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# Prepare a release
#
# This script prepares a new release by creating two commits:
#
# First commit: Update Version.java to remove the snapshot bit
# First commit: Remove the -SNAPSHOT suffix in all pom.xml files
# Second commit: Update documentation feature flags
#
# USAGE:
#
# python3 ./dev-tools/prepare-release.py
#
# Note: Ensure the script is run from the root directory
#

import fnmatch
import subprocess
import tempfile
import re
import os
import shutil

VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
POM_FILE = 'pom.xml'

def run(command):
    if os.system('%s' % (command)):
        raise RuntimeError('    FAILED: %s' % (command))

def ensure_checkout_is_clean():
    # Make sure there are no local modifications:
    s = subprocess.check_output('git diff --shortstat', shell=True)
    if len(s) > 0:
        raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)

    # Make sure there are no untracked files:
    s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in s:
        raise RuntimeError('git status shows untracked files: got:\n%s' % s)

    # Make sure we have all changes from origin:
    if 'is behind' in s:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch: got:\n%s' % (s))

    # Make sure we have no local unpushed changes (this is supposed to be a clean area):
    if 'is ahead' in s:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch: got:\n%s' % (s))

# Reads the given file and applies the
# callback to every line. If the callback changed
# a line, the given file is replaced with
# the modified input.
def process_file(file_path, line_callback):
    fh, abs_path = tempfile.mkstemp()
    modified = False
    with open(abs_path, 'w', encoding='utf-8') as new_file:
        with open(file_path, encoding='utf-8') as old_file:
            for line in old_file:
                new_line = line_callback(line)
                modified = modified or (new_line != line)
                new_file.write(new_line)
    os.close(fh)
    if modified:
        # Remove the original file
        os.remove(file_path)
        # Move the new file into place
        shutil.move(abs_path, file_path)
        return True
    else:
        # nothing to do - just remove the tmp file
        os.remove(abs_path)
        return False

# Moves the pom.xml files from a snapshot to a release version
def remove_maven_snapshot(poms, release):
    for pom in poms:
        if pom:
            #print('Replacing SNAPSHOT version in file %s' % (pom))
            pattern = '<version>%s-SNAPSHOT</version>' % (release)
            replacement = '<version>%s</version>' % (release)
            def callback(line):
                return line.replace(pattern, replacement)
            process_file(pom, callback)

# Moves the Version.java file from a snapshot to a release version
def remove_version_snapshot(version_file, release):
    # 1.0.0.Beta1 -> 1_0_0_Beta1
    release = release.replace('.', '_')
    release = release.replace('-', '_')
    pattern = 'new Version(V_%s_ID, true' % (release)
    replacement = 'new Version(V_%s_ID, false' % (release)
    def callback(line):
        return line.replace(pattern, replacement)
    processed = process_file(version_file, callback)
    if not processed:
        raise RuntimeError('failed to remove snapshot version for %s' % (release))

# Finds all the pom files that have a -SNAPSHOT version
def find_pom_files_with_snapshots():
    files = subprocess.check_output('find . -name pom.xml -exec grep -l "<version>.*-SNAPSHOT</version>" {} ";"', shell=True)
    return files.decode('utf-8').split('\n')

# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set, i.e.
# if the version is already a release version we fail.
# Returns the next version string, e.g. 0.90.7
def find_release_version():
    with open('pom.xml', encoding='utf-8') as file:
        for line in file:
            match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch')

# Stages the given files for the next git commit
def add_pending_files(*files):
    for file in files:
        if file:
            # print("Adding file: %s" % (file))
            run('git add %s' % (file))

# Executes a git commit with 'release [version]' as the commit message
def commit_release(release):
    run('git commit -m "Release: Change version from %s-SNAPSHOT to %s"' % (release, release))

def commit_feature_flags(release):
    run('git commit -m "Update Documentation Feature Flags [%s]"' % release)

# Walks the given directory path (defaults to 'docs')
# and replaces all 'coming[$version]' tags with
# 'added[$version]'. This method only touches asciidoc files.
def update_reference_docs(release_version, path='docs'):
    pattern = 'coming[%s' % (release_version)
    replacement = 'added[%s' % (release_version)
    pending_files = []
    def callback(line):
        return line.replace(pattern, replacement)
    for root, _, file_names in os.walk(path):
        for file_name in fnmatch.filter(file_names, '*.asciidoc'):
            full_path = os.path.join(root, file_name)
            if process_file(full_path, callback):
                pending_files.append(os.path.join(root, file_name))
    return pending_files

if __name__ == "__main__":
    release_version = find_release_version()

    print('*** Preparing release version: [%s]' % release_version)

    ensure_checkout_is_clean()
    pom_files = find_pom_files_with_snapshots()

    remove_maven_snapshot(pom_files, release_version)
    remove_version_snapshot(VERSION_FILE, release_version)

    pending_files = pom_files
    pending_files.append(VERSION_FILE)
    add_pending_files(*pending_files)  # expects varargs; use * to expand
    commit_release(release_version)

    pending_files = update_reference_docs(release_version)
    # split commits for docs and version to enable easy cherry-picking
    if pending_files:
        add_pending_files(*pending_files)  # expects varargs; use * to expand
        commit_feature_flags(release_version)
    else:
        print('WARNING: no documentation reference updates for release %s' % (release_version))

    print('*** Done removing snapshot version. Run git push manually.')
@@ -51,7 +51,7 @@ Combine a query clause in query context with another in filter context. deprecat

<<java-query-dsl-limit-query,`limit` query>>::

Limits the number of documents examined per shard. deprecated[1.6.0]
Limits the number of documents examined per shard.


include::constant-score-query.asciidoc[]
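
For context, the replacement the deprecation notice points at is the terminate-after setting on the Java search API; a minimal hedged sketch (index name and limit are illustrative):

    // Hedged sketch: cap per-shard document collection with terminateAfter
    // instead of the deprecated limit query.
    SearchResponse resp = client.prepareSearch("myindex")
            .setQuery(QueryBuilders.matchAllQuery())
            .setTerminateAfter(1000)  // stop collecting after 1000 docs per shard
            .get();
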
@@ -1,8 +1,6 @@
[[java-query-dsl-limit-query]]
==== Limit Query

deprecated[1.6.0, Use <<java-search-terminate-after,terminateAfter()>> instead]

See {ref}/query-dsl-limit-query.html[Limit Query]

[source,java]
@@ -2,7 +2,7 @@

== Pipeline Aggregations

coming[2.0.0]
coming[2.0.0-beta1]

experimental[]

@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-avg-bucket-aggregation]]
=== Avg Bucket Aggregation

coming[2.0.0]
coming[2.0.0-beta1]

experimental[]

@@ -1,7 +1,7 @@
[[search-aggregations-pipeline-bucket-script-aggregation]]
=== Bucket Script Aggregation

coming[2.0.0]
coming[2.0.0-beta1]

experimental[]