Merge branch 'master' into resolve_your_own_config

Conflicts:
	core/src/main/java/org/elasticsearch/env/Environment.java
	core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
	core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTest.java
	plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java

commit f3d63095db
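
Note: the hunks below all apply one refactoring — Settings no longer carries a ClassLoader, so call sites move to plain Class.forName(...) or to the class loader of the calling class. A minimal sketch of the before/after pattern (class and variable names here are illustrative, not taken from any single hunk):

    // Before: the class loader traveled with the Settings object:
    //     settings.getClassLoader().loadClass("org.apache.log4j.Logger");
    //
    // After: probe with plain reflection against the caller's defining loader.
    public final class ClassProbe {
        /** Returns true if className is visible to ClassProbe's defining class loader. */
        public static boolean isAvailable(String className) {
            try {
                Class.forName(className);
                return true;
            } catch (ClassNotFoundException | NoClassDefFoundError e) {
                return false;
            }
        }
    }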
@@ -192,7 +192,7 @@ public class Bootstrap {
     @SuppressForbidden(reason = "Exception#printStackTrace()")
     private static void setupLogging(Settings settings, Environment environment) {
         try {
-            settings.getClassLoader().loadClass("org.apache.log4j.Logger");
+            Class.forName("org.apache.log4j.Logger");
             LogConfigurator.configure(settings);
         } catch (ClassNotFoundException e) {
             // no log4j
@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.Diffable;
@@ -251,7 +252,12 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
         if (hashFunction == null) {
             routingHashFunction = MURMUR3_HASH_FUNCTION;
         } else {
-            final Class<? extends HashFunction> hashFunctionClass = Classes.loadClass(getClass().getClassLoader(), hashFunction);
+            final Class<? extends HashFunction> hashFunctionClass;
+            try {
+                hashFunctionClass = Class.forName(hashFunction).asSubclass(HashFunction.class);
+            } catch (ClassNotFoundException|NoClassDefFoundError e) {
+                throw new ElasticsearchException("failed to load custom hash function [" + hashFunction + "]", e);
+            }
             try {
                 routingHashFunction = hashFunctionClass.newInstance();
             } catch (InstantiationException | IllegalAccessException e) {
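
The new code path above is the standard two-step reflective pattern: resolve the class with Class.forName(...).asSubclass(...), then instantiate it via the no-arg constructor. A self-contained sketch of that pattern (the HashFunction interface and Loader class here are placeholders, not the real Elasticsearch types):

    interface HashFunction {
        int hash(String routing);
    }

    class Loader {
        static HashFunction load(String className) {
            final Class<? extends HashFunction> clazz;
            try {
                // Resolve the class and verify it implements HashFunction.
                clazz = Class.forName(className).asSubclass(HashFunction.class);
            } catch (ClassNotFoundException | NoClassDefFoundError e) {
                throw new IllegalArgumentException("failed to load custom hash function [" + className + "]", e);
            }
            try {
                // Requires a public no-arg constructor.
                return clazz.newInstance();
            } catch (InstantiationException | IllegalAccessException e) {
                throw new IllegalArgumentException("failed to instantiate [" + className + "]", e);
            }
        }
    }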
@@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata;

 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.lucene.analysis.Analyzer;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.routing.DjbHashFunction;
 import org.elasticsearch.cluster.routing.HashFunction;
@@ -78,7 +79,11 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
                     pre20HashFunction = DjbHashFunction.class;
                     break;
                 default:
-                    pre20HashFunction = Classes.loadClass(getClass().getClassLoader(), pre20HashFunctionName);
+                    try {
+                        pre20HashFunction = Class.forName(pre20HashFunctionName).asSubclass(HashFunction.class);
+                    } catch (ClassNotFoundException|NoClassDefFoundError e) {
+                        throw new ElasticsearchException("failed to load custom hash function [" + pre20HashFunctionName + "]", e);
+                    }
             }
         } else {
             pre20HashFunction = DjbHashFunction.class;
@@ -19,17 +19,7 @@

 package org.elasticsearch.common;

-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.bootstrap.Elasticsearch;
-import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.settings.NoClassSettingsException;
-
 import java.lang.reflect.Modifier;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.Locale;
-
-import static org.elasticsearch.common.Strings.toCamelCase;

 /**
  *
@@ -41,34 +31,6 @@ public class Classes {
      */
     private static final char PACKAGE_SEPARATOR = '.';

-    /**
-     * Return the default ClassLoader to use: typically the thread context
-     * ClassLoader, if available; the ClassLoader that loaded the ClassUtils
-     * class will be used as fallback.
-     * <p/>
-     * <p>Call this method if you intend to use the thread context ClassLoader
-     * in a scenario where you absolutely need a non-null ClassLoader reference:
-     * for example, for class path resource loading (but not necessarily for
-     * <code>Class.forName</code>, which accepts a <code>null</code> ClassLoader
-     * reference as well).
-     *
-     * @return the default ClassLoader (never <code>null</code>)
-     * @see java.lang.Thread#getContextClassLoader()
-     */
-    public static ClassLoader getDefaultClassLoader() {
-        ClassLoader cl = null;
-        try {
-            cl = Thread.currentThread().getContextClassLoader();
-        } catch (Throwable ex) {
-            // Cannot access thread context ClassLoader - falling back to system class loader...
-        }
-        if (cl == null) {
-            // No thread context class loader -> use class loader of this class.
-            cl = Classes.class.getClassLoader();
-        }
-        return cl;
-    }
-
     /**
      * Determine the name of the package of the given class:
      * e.g. "java.lang" for the <code>java.lang.String</code> class.
@@ -93,13 +55,5 @@ public class Classes {
         return !clazz.isInterface() && !Modifier.isAbstract(modifiers);
     }

-    public static <T> Class<? extends T> loadClass(ClassLoader classLoader, String className) {
-        try {
-            return (Class<? extends T>) classLoader.loadClass(className);
-        } catch (ClassNotFoundException|NoClassDefFoundError e) {
-            throw new ElasticsearchException("failed to load class [" + className + "]", e);
-        }
-    }
-
     private Classes() {}
 }
@@ -31,7 +31,7 @@ public class ShapesAvailability {
     static {
         boolean xSPATIAL4J_AVAILABLE;
         try {
-            Classes.getDefaultClassLoader().loadClass("com.spatial4j.core.shape.impl.PointImpl");
+            Class.forName("com.spatial4j.core.shape.impl.PointImpl");
             xSPATIAL4J_AVAILABLE = true;
         } catch (Throwable t) {
             xSPATIAL4J_AVAILABLE = false;
@@ -40,7 +40,7 @@ public class ShapesAvailability {

         boolean xJTS_AVAILABLE;
         try {
-            Classes.getDefaultClassLoader().loadClass("com.vividsolutions.jts.geom.GeometryFactory");
+            Class.forName("com.vividsolutions.jts.geom.GeometryFactory");
             xJTS_AVAILABLE = true;
         } catch (Throwable t) {
             xJTS_AVAILABLE = false;
@@ -30,10 +30,6 @@ import java.lang.reflect.Constructor;
  */
 public class Modules {

-    public static Module createModule(String moduleClass, Settings settings) throws ClassNotFoundException {
-        return createModule((Class<? extends Module>) settings.getClassLoader().loadClass(moduleClass), settings);
-    }
-
     public static Module createModule(Class<? extends Module> moduleClass, @Nullable Settings settings) {
         Constructor<? extends Module> constructor;
         try {
@@ -79,9 +79,8 @@ public final class Settings implements ToXContent {

     private ImmutableMap<String, String> settings;
     private final ImmutableMap<String, String> forcedUnderscoreSettings;
-    private transient ClassLoader classLoader;

-    Settings(Map<String, String> settings, ClassLoader classLoader) {
+    Settings(Map<String, String> settings) {
         // we use a sorted map for consistent serialization when using getAsMap()
         // TODO: use Collections.unmodifiableMap with a TreeMap
         this.settings = ImmutableSortedMap.copyOf(settings);
@@ -96,22 +95,6 @@ public final class Settings implements ToXContent {
             }
         }
         this.forcedUnderscoreSettings = forcedUnderscoreSettings == null ? ImmutableMap.<String, String>of() : ImmutableMap.copyOf(forcedUnderscoreSettings);
-        this.classLoader = classLoader;
     }

-    /**
-     * The class loader associated with this settings, or {@link org.elasticsearch.common.Classes#getDefaultClassLoader()}
-     * if not set.
-     */
-    public ClassLoader getClassLoader() {
-        return this.classLoader == null ? Classes.getDefaultClassLoader() : classLoader;
-    }
-
-    /**
-     * The class loader associated with this settings, but only if explicitly set, otherwise <tt>null</tt>.
-     */
-    public ClassLoader getClassLoaderIfSet() {
-        return this.classLoader;
-    }
-
     /**
@@ -227,7 +210,6 @@ public final class Settings implements ToXContent {
                 builder.put(entry.getKey().substring(prefix.length()), entry.getValue());
             }
         }
-        builder.classLoader(classLoader);
         return builder.build();
     }

@@ -648,7 +630,7 @@ public final class Settings implements ToXContent {
         }
         Map<String, Settings> retVal = new LinkedHashMap<>();
         for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
-            retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue()), classLoader));
+            retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue())));
         }
         return Collections.unmodifiableMap(retVal);
     }
@@ -701,17 +683,13 @@ public final class Settings implements ToXContent {
         if (o == null || getClass() != o.getClass()) return false;

         Settings that = (Settings) o;

-        if (classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) return false;
         if (settings != null ? !settings.equals(that.settings) : that.settings != null) return false;

         return true;
     }

     @Override
     public int hashCode() {
         int result = settings != null ? settings.hashCode() : 0;
-        result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0);
         return result;
     }

@@ -769,8 +747,6 @@ public final class Settings implements ToXContent {

         private final Map<String, String> map = new LinkedHashMap<>();

-        private ClassLoader classLoader;
-
         private Builder() {

         }
@@ -998,7 +974,6 @@ public final class Settings implements ToXContent {
         public Builder put(Settings settings) {
             removeNonArraysFieldsIfNewSettingsContainsFieldAsArray(settings.getAsMap());
             map.putAll(settings.getAsMap());
-            classLoader = settings.getClassLoaderIfSet();
             return this;
         }

@@ -1106,31 +1081,6 @@ public final class Settings implements ToXContent {
             return this;
         }

-        /**
-         * Loads settings from classpath that represents them using the
-         * {@link SettingsLoaderFactory#loaderFromSource(String)}.
-         */
-        public Builder loadFromClasspath(String resourceName) throws SettingsException {
-            ClassLoader classLoader = this.classLoader;
-            if (classLoader == null) {
-                classLoader = Classes.getDefaultClassLoader();
-            }
-            InputStream is = classLoader.getResourceAsStream(resourceName);
-            if (is == null) {
-                throw new SettingsException("Failed to load settings from [" + resourceName + "]");
-            }
-
-            return loadFromStream(resourceName, is);
-        }
-
-        /**
-         * Sets the class loader associated with the settings built.
-         */
-        public Builder classLoader(ClassLoader classLoader) {
-            this.classLoader = classLoader;
-            return this;
-        }
-
         /**
          * Puts all the properties with keys starting with the provided <tt>prefix</tt>.
          *
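
With loadFromClasspath and the builder's classLoader(...) setter removed, callers now resolve classpath resources themselves and hand the stream to loadFromStream, exactly as the test hunks further down do. A minimal sketch of the new idiom (MyComponent and the resource path are illustrative):

    // Hypothetical caller migrating off Settings.Builder#loadFromClasspath.
    String resource = "/org/example/my-settings.yml";  // leading slash: absolute classpath path
    Settings settings = Settings.settingsBuilder()
            .loadFromStream(resource, MyComponent.class.getResourceAsStream(resource))
            .build();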
@@ -1258,7 +1208,7 @@ public final class Settings implements ToXContent {
          * set on this builder.
          */
         public Settings build() {
-            return new Settings(Collections.unmodifiableMap(map), classLoader);
+            return new Settings(Collections.unmodifiableMap(map));
         }
     }

@@ -34,7 +34,6 @@ public class TermsLookupQueryBuilder extends QueryBuilder {
     private String lookupId;
     private String lookupRouting;
     private String lookupPath;
-    private Boolean lookupCache;

     private String queryName;

@@ -87,11 +86,6 @@ public class TermsLookupQueryBuilder extends QueryBuilder {
         return this;
     }

-    public TermsLookupQueryBuilder lookupCache(boolean lookupCache) {
-        this.lookupCache = lookupCache;
-        return this;
-    }
-
     @Override
     public void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(TermsQueryParser.NAME);
@@ -105,9 +99,6 @@ public class TermsLookupQueryBuilder extends QueryBuilder {
         if (lookupRouting != null) {
             builder.field("routing", lookupRouting);
         }
-        if (lookupCache != null) {
-            builder.field("cache", lookupCache);
-        }
         builder.field("path", lookupPath);
         builder.endObject();

@@ -32,6 +32,10 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui

     private final Object values;

+    private String minimumShouldMatch;
+
+    private Boolean disableCoord;
+
     private String queryName;

     private String execution;
@@ -125,6 +129,26 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui
         return this;
     }

+    /**
+     * Sets the minimum number of matches across the provided terms. Defaults to <tt>1</tt>.
+     * @deprecated use [bool] query instead
+     */
+    @Deprecated
+    public TermsQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
+        this.minimumShouldMatch = minimumShouldMatch;
+        return this;
+    }
+
+    /**
+     * Disables <tt>Similarity#coord(int,int)</tt> in scoring. Defaults to <tt>false</tt>.
+     * @deprecated use [bool] query instead
+     */
+    @Deprecated
+    public TermsQueryBuilder disableCoord(boolean disableCoord) {
+        this.disableCoord = disableCoord;
+        return this;
+    }
+
     /**
      * Sets the filter name for the filter that can be used when searching for matched_filters per hit.
      */
@@ -148,6 +172,14 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui
             builder.field("execution", execution);
         }

+        if (minimumShouldMatch != null) {
+            builder.field("minimum_should_match", minimumShouldMatch);
+        }
+
+        if (disableCoord != null) {
+            builder.field("disable_coord", disableCoord);
+        }
+
         if (boost != -1) {
             builder.field("boost", boost);
         }
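
The two new setters are deprecated on arrival and point at the bool query. A sketch of the suggested replacement on the Java API (field values are illustrative; minimumNumberShouldMatch is the BoolQueryBuilder counterpart of minimum_should_match):

    import static org.elasticsearch.index.query.QueryBuilders.*;

    // Instead of terms("tags", ...).minimumShouldMatch("2").disableCoord(true):
    QueryBuilder replacement = boolQuery()
            .should(termQuery("tags", "java"))
            .should(termQuery("tags", "search"))
            .should(termQuery("tags", "lucene"))
            .minimumNumberShouldMatch(2)   // one bool should-clause per term
            .disableCoord(true);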
@@ -51,6 +51,7 @@ public class TermsQueryParser implements QueryParser {

     public static final String NAME = "terms";
     private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match").withAllDeprecated("Use [bool] query instead");
+    private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead");
     private Client client;

     @Deprecated
@@ -149,7 +150,7 @@ public class TermsQueryParser implements QueryParser {
                     minShouldMatch = parser.textOrNull();
                 } else if ("boost".equals(currentFieldName)) {
                     boost = parser.floatValue();
-                } else if (("disable_coord").equals(currentFieldName) || ("disableCoord").equals(currentFieldName)) {
+                } else if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
                     disableCoord = parser.booleanValue();
                 } else if ("_name".equals(currentFieldName)) {
                     queryName = parser.text();
@@ -300,7 +300,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
         Settings indexSettings = settingsBuilder()
                 .put(this.settings)
                 .put(settings)
-                .classLoader(settings.getClassLoader())
                 .build();

         ModulesBuilder modules = new ModulesBuilder();
@@ -190,7 +190,7 @@ public class InternalSettingsPreparer {

     static Settings replacePromptPlaceholders(Settings settings, Terminal terminal) {
         UnmodifiableIterator<Map.Entry<String, String>> iter = settings.getAsMap().entrySet().iterator();
-        Settings.Builder builder = Settings.builder().classLoader(settings.getClassLoaderIfSet());
+        Settings.Builder builder = Settings.builder();

         while (iter.hasNext()) {
             Map.Entry<String, String> entry = iter.next();
@@ -56,6 +56,8 @@ import static org.elasticsearch.common.io.FileSystemUtils.moveFilesWithoutOverwr
  */
 public class PluginManager {

+    public static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";
+
     public enum OutputMode {
         DEFAULT, SILENT, VERBOSE
     }
@@ -430,8 +432,8 @@ public class PluginManager {
                 // Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0
                 if (user == null) {
                     // TODO Update to https
-                    if (Version.CURRENT.snapshot()) {
-                        addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip", repo, version, repo, version));
+                    if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
+                        addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip", repo, version, repo, version));
                     }
                     addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", repo, version, repo, version));
                 } else {
@@ -95,7 +95,7 @@ public class PluginsService extends AbstractComponent {
         // this is a hack for what is between unit and integration tests...
         String[] defaultPluginsClasses = settings.getAsArray("plugin.types");
         for (String pluginClass : defaultPluginsClasses) {
-            Plugin plugin = loadPlugin(pluginClass, settings);
+            Plugin plugin = loadPlugin(pluginClass, settings, getClass().getClassLoader());
             PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), false, "NA", true, pluginClass, false);
             if (logger.isTraceEnabled()) {
                 logger.trace("plugin loaded from settings [{}]", pluginInfo);
@@ -347,7 +347,7 @@ public class PluginsService extends AbstractComponent {
         // pluginmanager does it, but we do it again, in case lusers mess with jar files manually
         try {
             final List<URL> jars = new ArrayList<>();
-            ClassLoader parentLoader = settings.getClassLoader();
+            ClassLoader parentLoader = getClass().getClassLoader();
             if (parentLoader instanceof URLClassLoader) {
                 for (URL url : ((URLClassLoader) parentLoader).getURLs()) {
                     jars.add(url);
@@ -360,16 +360,11 @@ public class PluginsService extends AbstractComponent {
         }

         // create a child to load the plugins in this bundle
-        ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), settings.getClassLoader());
-        Settings settings = Settings.builder()
-            .put(this.settings)
-            .classLoader(loader)
-            .build();
-
+        ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
         for (PluginInfo pluginInfo : bundle.plugins) {
             final Plugin plugin;
             if (pluginInfo.isJvm()) {
-                plugin = loadPlugin(pluginInfo.getClassname(), settings);
+                plugin = loadPlugin(pluginInfo.getClassname(), settings, loader);
             } else {
                 plugin = new SitePlugin(pluginInfo.getName(), pluginInfo.getDescription());
             }
@@ -380,9 +375,9 @@ public class PluginsService extends AbstractComponent {
         return plugins.build();
     }

-    private Plugin loadPlugin(String className, Settings settings) {
+    private Plugin loadPlugin(String className, Settings settings, ClassLoader loader) {
         try {
-            Class<? extends Plugin> pluginClass = settings.getClassLoader().loadClass(className).asSubclass(Plugin.class);
+            Class<? extends Plugin> pluginClass = loader.loadClass(className).asSubclass(Plugin.class);

             try {
                 return pluginClass.getConstructor(Settings.class).newInstance(settings);
@@ -79,21 +79,21 @@ public class ScriptModule extends AbstractModule {
         multibinder.addBinding().to(NativeScriptEngineService.class);

         try {
-            settings.getClassLoader().loadClass("groovy.lang.GroovyClassLoader");
+            Class.forName("groovy.lang.GroovyClassLoader");
             multibinder.addBinding().to(GroovyScriptEngineService.class).asEagerSingleton();
         } catch (Throwable t) {
             Loggers.getLogger(ScriptService.class, settings).debug("failed to load groovy", t);
         }

         try {
-            settings.getClassLoader().loadClass("com.github.mustachejava.Mustache");
+            Class.forName("com.github.mustachejava.Mustache");
             multibinder.addBinding().to(MustacheScriptEngineService.class).asEagerSingleton();
         } catch (Throwable t) {
             Loggers.getLogger(ScriptService.class, settings).debug("failed to load mustache", t);
         }

         try {
-            settings.getClassLoader().loadClass("org.apache.lucene.expressions.Expression");
+            Class.forName("org.apache.lucene.expressions.Expression");
             multibinder.addBinding().to(ExpressionScriptEngineService.class).asEagerSingleton();
         } catch (Throwable t) {
             Loggers.getLogger(ScriptService.class, settings).debug("failed to load lucene expressions", t);
@@ -70,7 +70,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
         config.addCompilationCustomizers(imports);
         // Add BigDecimal -> Double transformer
         config.addCompilationCustomizers(new GroovyBigDecimalTransformer(CompilePhase.CONVERSION));
-        this.loader = new GroovyClassLoader(settings.getClassLoader(), config);
+        this.loader = new GroovyClassLoader(getClass().getClassLoader(), config);
     }

     @Override
@@ -152,7 +152,7 @@ public class ExceptionSerializationTests extends ESTestCase {
                 pkg.append(p.getFileName().toString()).append(".");
             }
             pkg.append(filename.substring(0, filename.length() - 6));
-            return Thread.currentThread().getContextClassLoader().loadClass(pkg.toString());
+            return getClass().getClassLoader().loadClass(pkg.toString());
         }

         @Override
@@ -34,8 +34,9 @@ public class JsonSettingsLoaderTests extends ESTestCase {

     @Test
     public void testSimpleJsonSettings() throws Exception {
+        String json = "/org/elasticsearch/common/settings/loader/test-settings.json";
         Settings settings = settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.json")
+                .loadFromStream(json, getClass().getResourceAsStream(json))
                 .build();

         assertThat(settings.get("test1.value1"), equalTo("value1"));
@@ -34,8 +34,9 @@ public class YamlSettingsLoaderTests extends ESTestCase {

     @Test
     public void testSimpleYamlSettings() throws Exception {
+        String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml";
         Settings settings = settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.yml")
+                .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
                 .build();

         assertThat(settings.get("test1.value1"), equalTo("value1"));
@@ -52,28 +53,17 @@ public class YamlSettingsLoaderTests extends ESTestCase {

     @Test(expected = SettingsException.class)
     public void testIndentation() {
+        String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml";
         settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/common/settings/loader/indentation-settings.yml")
-                .build();
+                .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
+                .build();
     }

     @Test(expected = SettingsException.class)
     public void testIndentationWithExplicitDocumentStart() {
+        String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml";
         settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml")
+                .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
                 .build();
     }
-
-
-    @Test
-    public void testYamlSettingsNoFile() throws Exception {
-        String invalidResourceName = "org/elasticsearch/common/settings/loader/no-test-settings.yml";
-        try {
-            Settings defaultSettings = settingsBuilder().loadFromClasspath(invalidResourceName).build();
-            fail("For a not exiting file an exception should be thrown.");
-        } catch (Exception e) {
-            assertTrue(e instanceof SettingsException);
-            assertThat(e.getMessage(), equalTo("Failed to load settings from [" + invalidResourceName + "]"));
-        }
-    }
 }
@@ -79,7 +79,7 @@ public class AnalysisModuleTests extends ESTestCase {
     }

     private Settings loadFromClasspath(String path) {
-        return settingsBuilder().loadFromClasspath(path)
+        return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path))
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .put("path.home", createTempDir().toString())
                 .build();
@@ -88,13 +88,13 @@ public class AnalysisModuleTests extends ESTestCase {

     @Test
     public void testSimpleConfigurationJson() {
-        Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.json");
+        Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.json");
         testSimpleConfiguration(settings);
     }

     @Test
     public void testSimpleConfigurationYaml() {
-        Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.yml");
+        Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.yml");
         testSimpleConfiguration(settings);
     }

@@ -107,8 +107,9 @@ public class AnalysisModuleTests extends ESTestCase {

     @Test
     public void testVersionedAnalyzers() throws Exception {
+        String yaml = "/org/elasticsearch/index/analysis/test1.yml";
         Settings settings2 = settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
+                .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
                 .put("path.home", createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
                 .build();
@@ -39,7 +39,7 @@ public class AnalysisTestsHelper {

     public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) {
         Settings settings = Settings.settingsBuilder()
-                .loadFromClasspath(resource)
+                .loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource))
                 .put("path.home", baseDir.toString())
                 .build();

@@ -29,7 +29,7 @@ import java.io.StringReader;

 public class CJKFilterFactoryTests extends ESTokenStreamTestCase {

-    private static final String RESOURCE = "org/elasticsearch/index/analysis/cjk_analysis.json";
+    private static final String RESOURCE = "/org/elasticsearch/index/analysis/cjk_analysis.json";

     @Test
     public void testDefault() throws IOException {
@@ -115,16 +115,18 @@ public class CompoundAnalysisTests extends ESTestCase {
     }

     private Settings getJsonSettings() {
+        String json = "/org/elasticsearch/index/analysis/test1.json";
         return settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/index/analysis/test1.json")
+                .loadFromStream(json, getClass().getResourceAsStream(json))
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .put("path.home", createTempDir().toString())
                 .build();
     }

     private Settings getYamlSettings() {
+        String yaml = "/org/elasticsearch/index/analysis/test1.yml";
         return settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
+                .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .put("path.home", createTempDir().toString())
                 .build();
@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.instanceOf;

 public class KeepFilterFactoryTests extends ESTokenStreamTestCase {

-    private static final String RESOURCE = "org/elasticsearch/index/analysis/keep_analysis.json";
+    private static final String RESOURCE = "/org/elasticsearch/index/analysis/keep_analysis.json";


     @Test
@@ -41,10 +41,11 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {

     @Test
     public void testPatternCaptureTokenFilter() throws Exception {
+        String json = "/org/elasticsearch/index/analysis/pattern_capture.json";
         Index index = new Index("test");
         Settings settings = settingsBuilder()
                 .put("path.home", createTempDir())
-                .loadFromClasspath("org/elasticsearch/index/analysis/pattern_capture.json")
+                .loadFromStream(json, getClass().getResourceAsStream(json))
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .build();
         Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
@@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.instanceOf;
 @ThreadLeakScope(Scope.NONE)
 public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {

-    private static final String RESOURCE = "org/elasticsearch/index/analysis/shingle_analysis.json";
+    private static final String RESOURCE = "/org/elasticsearch/index/analysis/shingle_analysis.json";

     @Test
     public void testDefault() throws IOException {
@@ -41,9 +41,10 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {

     @Test
     public void testDefaultsCompoundAnalysis() throws Exception {
+        String json = "/org/elasticsearch/index/analysis/stop.json";
         Index index = new Index("test");
         Settings settings = settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/index/analysis/stop.json")
+                .loadFromStream(json, getClass().getResourceAsStream(json))
                 .put("path.home", createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .build();
@@ -138,8 +138,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {

     @Test
     public void testCommonGramsAnalysis() throws IOException {
+        String json = "/org/elasticsearch/index/analysis/commongrams/commongrams.json";
         Settings settings = Settings.settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams.json")
+                .loadFromStream(json, getClass().getResourceAsStream(json))
                 .put("path.home", createHome())
                 .build();
         {
@@ -222,9 +223,10 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {

     @Test
     public void testQueryModeCommonGramsAnalysis() throws IOException {
+        String json = "/org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json";
         Settings settings = Settings.settingsBuilder()
-                .loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json")
-                .put("path.home", createHome())
+                .loadFromStream(json, getClass().getResourceAsStream(json))
+                .put("path.home", createHome())
                 .build();
         {
             AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -70,8 +70,9 @@ public class SynonymsAnalysisTest extends ESTestCase {
         Files.copy(synonyms, config.resolve("synonyms.txt"));
         Files.copy(synonymsWordnet, config.resolve("synonyms_wordnet.txt"));

+        String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json";
         Settings settings = settingsBuilder().
-                loadFromClasspath("org/elasticsearch/index/analysis/synonyms/synonyms.json")
+                loadFromStream(json, getClass().getResourceAsStream(json))
                 .put("path.home", home)
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();

@@ -235,19 +235,4 @@ public class InternalSettingsPreparerTests extends ESTestCase {
         assertThat(settings.get("name"), is("prompted name 0"));
         assertThat(settings.get("node.name"), is("prompted name 0"));
     }
-
-    @Test
-    public void testPreserveSettingsClassloader() {
-        final ClassLoader classLoader = URLClassLoader.newInstance(new URL[0]);
-        Settings settings = settingsBuilder()
-                .put("foo", "bar")
-                .put("path.home", createTempDir())
-                .classLoader(classLoader)
-                .build();
-
-        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settings, randomBoolean());
-
-        Settings preparedSettings = tuple.v1();
-        assertThat(preparedSettings.getClassLoaderIfSet(), is(classLoader));
-    }
 }
@@ -24,6 +24,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.test.ESTestCase;
+import org.junit.After;
 import org.junit.Test;

 import java.io.IOException;
@@ -33,7 +34,6 @@ import java.util.Iterator;
 import java.util.Locale;

 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;

@@ -42,6 +42,11 @@ import static org.hamcrest.Matchers.is;
  */
 public class PluginManagerUnitTests extends ESTestCase {

+    @After
+    public void cleanSystemProperty() {
+        System.clearProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS);
+    }
+
     @Test
     public void testThatConfigDirectoryCanBeOutsideOfElasticsearchHomeDirectory() throws IOException {
         String pluginName = randomAsciiOfLength(10);
@@ -66,19 +71,24 @@ public class PluginManagerUnitTests extends ESTestCase {
         String pluginName = randomAsciiOfLength(10);
         PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(pluginName);

-        assertThat(handle.urls(), hasSize(Version.CURRENT.snapshot() ? 2 : 1));
+        boolean supportStagingUrls = randomBoolean();
+        if (supportStagingUrls) {
+            System.setProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS, "true");
+        }
+
         Iterator<URL> iterator = handle.urls().iterator();

-        if (Version.CURRENT.snapshot()) {
-            String expectedSnapshotUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip",
+        if (supportStagingUrls) {
+            String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                     pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
-            assertThat(iterator.next(), is(new URL(expectedSnapshotUrl)));
+            assertThat(iterator.next(), is(new URL(expectedStagingURL)));
         }

         URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/" + pluginName + "/" + Version.CURRENT.number() + "/" +
-                pluginName + "-" + Version.CURRENT.number() + ".zip");
+                pluginName + "-" + Version.CURRENT.number() + ".zip");
         assertThat(iterator.next(), is(expected));

         assertThat(iterator.hasNext(), is(false));
     }

     @Test
@@ -87,18 +97,24 @@ public class PluginManagerUnitTests extends ESTestCase {
         PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName);
         assertThat(handle.name, is(randomPluginName.replaceAll("^elasticsearch-", "")));

-        assertThat(handle.urls(), hasSize(Version.CURRENT.snapshot() ? 2 : 1));
+        boolean supportStagingUrls = randomBoolean();
+        if (supportStagingUrls) {
+            System.setProperty(PluginManager.PROPERTY_SUPPORT_STAGING_URLS, "true");
+        }
+
         Iterator<URL> iterator = handle.urls().iterator();

-        if (Version.CURRENT.snapshot()) {
-            String expectedSnapshotUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/snapshot/org/elasticsearch/plugin/%s/%s-SNAPSHOT/%s-%s-SNAPSHOT.zip",
+        if (supportStagingUrls) {
+            String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                     randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
-            assertThat(iterator.next(), is(new URL(expectedSnapshotUrl)));
+            assertThat(iterator.next(), is(new URL(expectedStagingUrl)));
         }

         String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                 randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
         assertThat(iterator.next(), is(new URL(releaseUrl)));

         assertThat(iterator.hasNext(), is(false));
     }

     @Test
dev-tools/prepare_release.py (new file, 186 lines)
@@ -0,0 +1,186 @@
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# Prepare a release
#
# This script prepares a new release by creating two commits:
#
# First commit:  Update Version.java to remove the snapshot bit
# First commit:  Remove the -SNAPSHOT suffix in all pom.xml files
# Second commit: Update documentation feature flags
#
# USAGE:
#
#   python3 ./dev-tools/prepare_release.py
#
# Note: Ensure the script is run from the root directory
#

import fnmatch
import subprocess
import tempfile
import re
import os
import shutil

VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
POM_FILE = 'pom.xml'

def run(command):
    if os.system('%s' % (command)):
        raise RuntimeError('    FAILED: %s' % (command))

def ensure_checkout_is_clean():
    # Make sure there are no local modifications:
    s = subprocess.check_output('git diff --shortstat', shell=True)
    if len(s) > 0:
        raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)

    # Make sure there are no untracked files:
    s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in s:
        raise RuntimeError('git status shows untracked files: got:\n%s' % s)

    # Make sure we have all changes from origin:
    if 'is behind' in s:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch: got:\n%s' % (s))

    # Make sure we have no local unpushed commits (this is supposed to be a clean area):
    if 'is ahead' in s:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch: got:\n%s' % (s))

# Reads the given file and applies the
# callback to it. If the callback changed
# a line, the given file is replaced with
# the modified input.
def process_file(file_path, line_callback):
    fh, abs_path = tempfile.mkstemp()
    modified = False
    with open(abs_path, 'w', encoding='utf-8') as new_file:
        with open(file_path, encoding='utf-8') as old_file:
            for line in old_file:
                new_line = line_callback(line)
                modified = modified or (new_line != line)
                new_file.write(new_line)
    os.close(fh)
    if modified:
        # Remove original file
        os.remove(file_path)
        # Move new file into place
        shutil.move(abs_path, file_path)
        return True
    else:
        # nothing to do - just remove the tmp file
        os.remove(abs_path)
        return False

# Moves the pom.xml files from a snapshot to a release version
def remove_maven_snapshot(poms, release):
    for pom in poms:
        if pom:
            #print('Replacing SNAPSHOT version in file %s' % (pom))
            pattern = '<version>%s-SNAPSHOT</version>' % (release)
            replacement = '<version>%s</version>' % (release)
            def callback(line):
                return line.replace(pattern, replacement)
            process_file(pom, callback)

# Moves the Version.java file from a snapshot to a release
def remove_version_snapshot(version_file, release):
    # 1.0.0.Beta1 -> 1_0_0_Beta1
    release = release.replace('.', '_')
    release = release.replace('-', '_')
    pattern = 'new Version(V_%s_ID, true' % (release)
    replacement = 'new Version(V_%s_ID, false' % (release)
    def callback(line):
        return line.replace(pattern, replacement)
    processed = process_file(version_file, callback)
    if not processed:
        raise RuntimeError('failed to remove snapshot version for %s' % (release))

# Finds all the pom files that have a -SNAPSHOT version
def find_pom_files_with_snapshots():
    files = subprocess.check_output('find . -name pom.xml -exec grep -l "<version>.*-SNAPSHOT</version>" {} ";"', shell=True)
    return files.decode('utf-8').split('\n')

# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set, i.e.
# if the version is already on a release version we fail.
# Returns the next version string, e.g. 0.90.7
def find_release_version():
    with open('pom.xml', encoding='utf-8') as file:
        for line in file:
            match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch')

# Stages the given files for the next git commit
def add_pending_files(*files):
    for file in files:
        if file:
            # print("Adding file: %s" % (file))
            run('git add %s' % (file))

# Executes a git commit with 'release [version]' as the commit message
def commit_release(release):
    run('git commit -m "Release: Change version from %s-SNAPSHOT to %s"' % (release, release))

def commit_feature_flags(release):
    run('git commit -m "Update Documentation Feature Flags [%s]"' % release)

# Walks the given directory path (defaults to 'docs')
# and replaces all 'coming[$version]' tags with
# 'added[$version]'. This method only accesses asciidoc files.
def update_reference_docs(release_version, path='docs'):
    pattern = 'coming[%s' % (release_version)
    replacement = 'added[%s' % (release_version)
    pending_files = []
    def callback(line):
        return line.replace(pattern, replacement)
    for root, _, file_names in os.walk(path):
        for file_name in fnmatch.filter(file_names, '*.asciidoc'):
            full_path = os.path.join(root, file_name)
            if process_file(full_path, callback):
                pending_files.append(os.path.join(root, file_name))
    return pending_files

if __name__ == "__main__":
    release_version = find_release_version()

    print('*** Preparing release version: [%s]' % release_version)

    ensure_checkout_is_clean()
    pom_files = find_pom_files_with_snapshots()

    remove_maven_snapshot(pom_files, release_version)
    remove_version_snapshot(VERSION_FILE, release_version)

    pending_files = pom_files
    pending_files.append(VERSION_FILE)
    add_pending_files(*pending_files)  # expects var args, use * to expand
    commit_release(release_version)

    pending_files = update_reference_docs(release_version)
    # split commits for docs and version to enable easy cherry-picking
    if pending_files:
        add_pending_files(*pending_files)  # expects var args, use * to expand
        commit_feature_flags(release_version)
    else:
        print('WARNING: no documentation references updates for release %s' % (release_version))

    print('*** Done removing snapshot version. Run git push manually.')
@@ -51,7 +51,7 @@ Combine a query clause in query context with another in filter context. deprecat

 <<java-query-dsl-limit-query,`limit` query>>::

-Limits the number of documents examined per shard. deprecated[1.6.0]
+Limits the number of documents examined per shard.


 include::constant-score-query.asciidoc[]

@@ -1,8 +1,6 @@
 [[java-query-dsl-limit-query]]
 ==== Limit Query

-deprecated[1.6.0, Use <<java-search-terminate-after,terminateAfter()>> instead]
-
 See {ref}/query-dsl-limit-query.html[Limit Query]

 [source,java]
@@ -2,7 +2,7 @@

 == Pipeline Aggregations

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-avg-bucket-aggregation]]
 === Avg Bucket Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-bucket-script-aggregation]]
 === Bucket Script Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-bucket-selector-aggregation]]
 === Bucket Selector Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-cumulative-sum-aggregation]]
 === Cumulative Sum Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-derivative-aggregation]]
 === Derivative Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-max-bucket-aggregation]]
 === Max Bucket Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-min-bucket-aggregation]]
 === Min Bucket Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-movavg-aggregation]]
 === Moving Average Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-serialdiff-aggregation]]
 === Serial Differencing Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -1,7 +1,7 @@
 [[search-aggregations-pipeline-sum-bucket-aggregation]]
 === Sum Bucket Aggregation

-coming[2.0.0]
+coming[2.0.0-beta1]

 experimental[]

@@ -81,7 +81,7 @@ omit :
 [float]
 ==== Distributed frequencies

-coming[2.0]
+coming[2.0.0-beta1]

 Setting `dfs` to `true` (default is `false`) will return the term statistics
 or the field statistics of the entire index, and not just at the shard. Use it
@@ -90,7 +90,7 @@ with caution as distributed frequencies can have a serious performance impact.
 [float]
 ==== Terms Filtering

-coming[2.0]
+coming[2.0.0-beta1]

 With the parameter `filter`, the terms returned could also be filtered based
 on their tf-idf scores. This could be useful in order to find out a good
@@ -1,8 +1,8 @@
 [[elasticsearch-reference]]
 = Elasticsearch Reference

-:version:  1.5.2
-:branch:   1.5
+:version:  2.0.0-beta1
+:branch:   2.0
 :jdk:      1.8.0_25
 :defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current

@@ -16,7 +16,7 @@ curl -XGET 'localhost:9200/_analyze' -d '
 }'
 --------------------------------------------------

-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]

 If text parameter is provided as array of strings, it is analyzed as a multi-valued field.

@@ -29,7 +29,7 @@ curl -XGET 'localhost:9200/_analyze' -d '
 }'
 --------------------------------------------------

-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]

 Or by building a custom transient analyzer out of tokenizers,
 token filters and char filters. Token filters can use the shorter 'filters'
@@ -53,7 +53,7 @@ curl -XGET 'localhost:9200/_analyze' -d '
 }'
 --------------------------------------------------

-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]

 It can also run against a specific index:

@@ -78,7 +78,7 @@ curl -XGET 'localhost:9200/test/_analyze' -d '
 }'
 --------------------------------------------------

-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]

 Also, the analyzer can be derived based on a field mapping, for example:

@@ -91,7 +91,7 @@ curl -XGET 'localhost:9200/test/_analyze' -d '
 }'
 --------------------------------------------------

-coming[2.0.0, body based parameters were added in 2.0.0]
+coming[2.0.0-beta1, body based parameters were added in 2.0.0]

 Will cause the analysis to happen based on the analyzer configured in the
 mapping for `obj1.field1` (and if not, the default index analyzer).
@@ -51,7 +51,7 @@ Elasticsearch 2.0. Upgrading will:

 * Rewrite old segments in the latest Lucene format.
 * Add the `index.version.minimum_compatible` setting to the index, to mark it as
-  2.0 compatible coming[1.6.0].
+  2.0 compatible

 Instead of upgrading all segments that weren't written with the most recent
 version of Lucene, you can choose to do the minimum work required before
@@ -1,7 +1,7 @@
 [[mapping-parent-field]]
 === `_parent` field

-added[2.0.0,The parent-child implementation has been completely rewritten. It is advisable to reindex any 1.x indices which use parent-child to take advantage of the new optimizations]
+added[2.0.0-beta1,The parent-child implementation has been completely rewritten. It is advisable to reindex any 1.x indices which use parent-child to take advantage of the new optimizations]

 A parent-child relationship can be established between documents in the same
 index by making one mapping type the parent of another:
@ -4,6 +4,276 @@
|
||||
This section discusses the changes that you need to be aware of when migrating
|
||||
your application to Elasticsearch 2.0.
|
||||
|
||||
[float]
|
||||
=== Indices created before 0.90
|
||||
|
||||
Elasticsearch 2.0 can read indices created in version 0.90 and above. If any
|
||||
of your indices were created before 0.90 you will need to upgrade to the
|
||||
latest 1.x version of Elasticsearch first, in order to upgrade your indices or
|
||||
to delete the old indices. Elasticsearch will not start in the presence of old
|
||||
indices.
|
||||
|
||||
[float]
|
||||
=== Elasticsearch migration plugin
|
||||
|
||||
We have provided the https://github.com/elastic/elasticsearch-migration[Elasticsearch migration plugin]
|
||||
to help you detect any issues that you may have when upgrading to
|
||||
Elasticsearch 2.0. Please install and run the plugin *before* upgrading.
|
||||
|
||||
=== Mapping
|
||||
|
||||
|
||||
Remove file based default mappings #10870 (issue: #10620)
|
||||
Validate dynamic mappings updates on the master node. #10634 (issues: #8650, #8688)
|
||||
Remove the ability to have custom per-field postings and doc values formats. #9741 (issue: #8746)
|
||||
Remove support for new indexes using path setting in object/nested fields or index_name in any field #9570 (issue: #6677)
|
||||
Remove index_analyzer setting to simplify analyzer logic #9451 (issue: #9371)
|
||||
Remove type level default analyzers #9430 (issues: #8874, #9365)
|
||||
Add doc values support to boolean fields. #7961 (issues: #4678, #7851)
|
||||
|
||||
|
||||
A number of changes have been made to mappings to remove ambiguity and to
|
||||
ensure that conflicting mappings cannot be created.
|
||||
|
||||
==== Conflicting field mappings
|
||||
|
||||
Fields with the same name, in the same index, in different types, must have
|
||||
the same mapping, with the exception of the <<copy-to>>, <<dynamic>>,
|
||||
<<enabled>>, <<ignore-above>>, <<include-in-all>>, and <<properties>>
|
||||
parameters, which may have different settings per field.
|
||||
|
||||
[source,js]
|
||||
---------------
|
||||
PUT my_index
|
||||
{
|
||||
"mappings": {
|
||||
"type_one": {
|
||||
"properties": {
|
||||
"name": { <1>
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"type_two": {
|
||||
"properties": {
|
||||
"name": { <1>
|
||||
"type": "string",
|
||||
"analyzer": "english"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
---------------
|
||||
<1> The two `name` fields have conflicting mappings and will prevent Elasticsearch
|
||||
from starting.
|
||||
|
||||
Elasticsearch will not start in the presence of conflicting field mappings.
|
||||
These indices must be deleted or reindexed using a new mapping.
|
||||
|
||||
The `ignore_conflicts` option of the put mappings API has been removed.
|
||||
Conflicts can't be ignored anymore.

==== Fields cannot be referenced by short name

A field can no longer be referenced using its short name. Instead, the full
path to the field is required. For instance:

[source,js]
---------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "title": { "type": "string" }, <1>
        "name": {
          "properties": {
            "title": { "type": "string" }, <2>
            "first": { "type": "string" },
            "last": { "type": "string" }
          }
        }
      }
    }
  }
}
---------------
<1> This field is referred to as `title`.
<2> This field is referred to as `name.title`.

Previously, the two `title` fields in the example above could have been
confused with each other when using the short name `title`.

==== Type name prefix removed

Previously, two fields with the same name in two different types could
sometimes be disambiguated by prepending the type name. As a side effect, it
would add a filter on the type name to the relevant query. This feature was
ambiguous -- a type name could be confused with a field name -- and didn't
work everywhere, e.g. in aggregations.

Instead, fields should be specified with the full path, but without a type
name prefix. If you wish to filter by the `_type` field, either specify the
type in the URL or add an explicit filter.

The following example query in 1.x:

[source,js]
----------------------------
GET my_index/_search
{
  "query": {
    "match": {
      "my_type.some_field": "quick brown fox"
    }
  }
}
----------------------------

would be rewritten in 2.0 as:

[source,js]
----------------------------
GET my_index/my_type/_search <1>
{
  "query": {
    "match": {
      "some_field": "quick brown fox" <2>
    }
  }
}
----------------------------
<1> The type name can be specified in the URL to act as a filter.
<2> The field name should be specified without the type prefix.

==== Field names may not contain dots

In 1.x, it was possible to create fields with dots in their name, for
instance:

[source,js]
----------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "foo.bar": { <1>
          "type": "string"
        },
        "foo": {
          "properties": {
            "bar": { <1>
              "type": "string"
            }
          }
        }
      }
    }
  }
}
----------------------------
<1> These two fields cannot be distinguished as both are referred to as `foo.bar`.

You can no longer create fields with dots in the name.

==== Type names may not start with a dot

In 1.x, Elasticsearch would issue a warning if a type name included a dot,
e.g. `my.type`. Now that type names are no longer used to distinguish between
fields in different types, this warning has been relaxed: type names may now
contain dots, but they may not *begin* with a dot. The only exception to this
is the special `.percolator` type.
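
For illustration, a sketch with hypothetical type names:

[source,js]
----------------------------
PUT my_index
{
  "mappings": {
    "my.type": {}, <1>
    ".my_type": {} <2>
  }
}
----------------------------
<1> Accepted: the dot is not the first character.
<2> Rejected: type names may not begin with a dot.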

==== Types may no longer be deleted

In 1.x it was possible to delete a type mapping, along with all of the
documents of that type, using the delete mapping API. This is no longer
supported, because remnants of the fields in the type could remain in the
index, causing corruption later on.
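
For reference, a 1.x request along these lines will now be rejected; reindexing
into a new index is the supported alternative:

[source,js]
----------------------------
DELETE my_index/_mapping/my_type <1>
----------------------------
<1> The 1.x delete mapping API. In 2.0 this request fails.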

==== Type meta-fields

The <<mapping-fields,meta-fields>> associated with each type have had some of
their configuration options removed, to make them more reliable:

* `_id` configuration can no longer be changed. If you need to sort, use the <<mapping-uid-field,`_uid`>> field instead.
* `_type` configuration can no longer be changed.
* `_index` configuration can no longer be changed.
* `_routing` configuration is limited to marking routing as required.
* `_field_names` configuration is limited to disabling the field.
* `_size` configuration is limited to enabling the field.
* `_timestamp` configuration is limited to enabling the field, setting format and default value.
* `_boost` has been removed.
* `_analyzer` has been removed.

Importantly, *meta-fields can no longer be specified as part of the document
body.* Instead, they must be specified in the query string parameters. For
instance, in 1.x, the `routing` could be specified as follows:

[source,json]
-----------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "_routing": {
        "path": "group" <1>
      },
      "properties": {
        "group": { <1>
          "type": "string"
        }
      }
    }
  }
}

PUT my_index/my_type/1 <2>
{
  "group": "foo"
}
-----------------------------
<1> This 1.x mapping tells Elasticsearch to extract the `routing` value from the `group` field in the document body.
<2> This indexing request uses a `routing` value of `foo`.

In 2.0, the routing must be specified explicitly:

[source,json]
-----------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "_routing": {
        "required": true <1>
      },
      "properties": {
        "group": {
          "type": "string"
        }
      }
    }
  }
}

PUT my_index/my_type/1?routing=bar <2>
{
  "group": "foo"
}
-----------------------------
<1> Routing can be marked as required to ensure it is not forgotten during indexing.
<2> This indexing request uses a `routing` value of `bar`.

==== Other mapping changes

* The setting `index.mapping.allow_type_wrapper` has been removed. Documents should always be sent without the type as the root element.
* The `binary` field does not support the `compress` and `compress_threshold` options anymore.

=== Networking

Elasticsearch now binds to the loopback interface by default (usually 127.0.0.1

@ -188,141 +458,6 @@ Delete api requires a routing value when deleting a document belonging to a type
mapping, whereas previous Elasticsearch versions would trigger a broadcast delete on all shards belonging to the index.
A `RoutingMissingException` is now thrown instead.

=== Mappings

* The setting `index.mapping.allow_type_wrapper` has been removed. Documents should always be sent without the type as the root element.
* The delete mappings API has been removed. Mapping types can no longer be deleted.
* Mapping type names can no longer start with dots.
* The `ignore_conflicts` option of the put mappings API has been removed. Conflicts can't be ignored anymore.
* The `binary` field does not support the `compress` and `compress_threshold` options anymore.

==== Removed type prefix on field names in queries

Types can no longer be specified on fields within queries. Instead, specify type restrictions in the search request.

The following is an example query in 1.x over types `t1` and `t2`:

[source,js]
---------------
curl -XGET 'localhost:9200/index/_search'
{
  "query": {
    "bool": {
      "should": [
        {"match": { "t1.field_only_in_t1": "foo" }},
        {"match": { "t2.field_only_in_t2": "bar" }}
      ]
    }
  }
}
---------------

In 2.0, the query should look like the following:

[source,js]
---------------
curl -XGET 'localhost:9200/index/t1,t2/_search'
{
  "query": {
    "bool": {
      "should": [
        {"match": { "field_only_in_t1": "foo" }},
        {"match": { "field_only_in_t2": "bar" }}
      ]
    }
  }
}
---------------

==== Removed short name field access

Field names in queries, aggregations, etc. must now use the complete name. Use of the short name
caused ambiguities in field lookups when the same name existed within multiple object mappings.

The following example illustrates the difference between 1.x and 2.0.

Given these mappings:

[source,js]
---------------
curl -XPUT 'localhost:9200/index'
{
  "mappings": {
    "type": {
      "properties": {
        "name": {
          "type": "object",
          "properties": {
            "first": {"type": "string"},
            "last": {"type": "string"}
          }
        }
      }
    }
  }
}
---------------

The following query was possible in 1.x:

[source,js]
---------------
curl -XGET 'localhost:9200/index/type/_search'
{
  "query": {
    "match": { "first": "foo" }
  }
}
---------------

In 2.0, the same query should now be:

[source,js]
---------------
curl -XGET 'localhost:9200/index/type/_search'
{
  "query": {
    "match": { "name.first": "foo" }
  }
}
---------------

==== Removed support for `.` in field name mappings

Prior to Elasticsearch 2.0, a field could be defined to have a `.` in its name.
Mappings like the one below have been deprecated for some time and they will be
blocked in Elasticsearch 2.0.

[source,js]
---------------
curl -XPUT 'localhost:9200/index'
{
  "mappings": {
    "type": {
      "properties": {
        "name.first": {
          "type": "string"
        }
      }
    }
  }
}
---------------

==== Meta fields have limited configuration

Meta fields (those beginning with underscore) are fields used by Elasticsearch
to provide special features. They now have limited configuration options.

* `_id` configuration can no longer be changed. If you need to sort, use `_uid` instead.
* `_type` configuration can no longer be changed.
* `_index` configuration can no longer be changed.
* `_routing` configuration is limited to requiring the field.
* `_boost` has been removed.
* `_field_names` configuration is limited to disabling the field.
* `_size` configuration is limited to enabling the field.
* `_timestamp` configuration is limited to enabling the field, setting format and default value.

==== Meta fields in documents

Meta fields can no longer be specified within a document. They should be specified
via the API. For example, instead of adding a field `_parent` within a document,
use the `parent` url parameter when indexing that document.
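
For instance, a sketch with hypothetical index, type and parent ID:

[source,js]
---------------
PUT my_index/my_child_type/1?parent=42 <1>
{
  "name": "foo"
}
---------------
<1> The parent is given as the `parent` URL parameter instead of a `_parent` field in the body.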

==== Default date format now is `strictDateOptionalTime`

@ -389,10 +524,6 @@ the user-friendly representation of boolean fields: `false`/`true`:

Fields of type `murmur3` can no longer change `doc_values` or `index` setting.
They are always stored with doc values, and not indexed.
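
A minimal sketch of a valid `murmur3` field under these rules (hypothetical
index and field names):

[source,js]
---------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "my_hash": {
          "type": "murmur3" <1>
        }
      }
    }
  }
}
---------------
<1> No `doc_values` or `index` settings: the field is always stored with doc values and never indexed.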

==== Source field configuration

The `_source` field no longer supports `includes` and `excludes` parameters. When
`_source` is enabled, the entire original source will be stored.
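
If only part of the source is needed at search time, source filtering in the
search request is an alternative, e.g.:

[source,js]
---------------
curl -XGET 'localhost:9200/index/type/_search' -d '
{
  "_source": [ "name.*" ], <1>
  "query": {
    "match_all": {}
  }
}'
---------------
<1> Filters the returned source per request instead of at mapping time.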

==== Config based mappings

The ability to specify mappings in configuration files has been removed. To specify
default mappings that apply to multiple indexes, use index templates.
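
For example, a sketch of a default mapping expressed as an index template
(hypothetical template and field names):

[source,js]
---------------
curl -XPUT 'localhost:9200/_template/my_defaults' -d '
{
  "template": "*", <1>
  "mappings": {
    "_default_": {
      "properties": {
        "created_at": { "type": "date" }
      }
    }
  }
}'
---------------
<1> The template applies to every newly created index matching this pattern.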

@ -437,10 +568,10 @@ script.indexed: on

=== Script parameters

The script parameters `id`, `file`, `scriptField`, `script_id`, `script_file`,
`script`, `lang` and `params` have been deprecated. The <<modules-scripting,new script API syntax>> should be used in their place.

The deprecated script parameters have been removed from the Java API so applications using the Java API will
need to be updated.
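
As a sketch of the new syntax, an inline script with parameters looks along
these lines (hypothetical field and parameter names):

[source,js]
---------------
GET my_index/_search
{
  "script_fields": {
    "my_doubled_value": {
      "script": {
        "inline": "doc['my_value'].value * factor", <1>
        "params": {
          "factor": 2
        }
      }
    }
  }
}
---------------
<1> The script and its parameters are grouped under a single `script` object.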

=== Groovy scripts sandbox

@ -258,7 +258,7 @@ GET /_snapshot/my_backup/_all
-----------------------------------
// AUTOSENSE

coming[2.0] A currently running snapshot can be retrieved using the following command:
coming[2.0.0-beta1] A currently running snapshot can be retrieved using the following command:

[source,sh]
-----------------------------------

@ -1,7 +1,7 @@
[[query-dsl-and-query]]
=== And Query

deprecated[2.0.0, Use the `bool` query instead]
deprecated[2.0.0-beta1, Use the `bool` query instead]

A query that matches documents using the `AND` boolean operator on other
queries.

@ -47,11 +47,11 @@ Synonyms for the `bool` query.

<<query-dsl-filtered-query,`filtered` query>>::

Combine a query clause in query context with another in filter context. deprecated[2.0.0,Use the `bool` query instead]
Combine a query clause in query context with another in filter context. deprecated[2.0.0-beta1,Use the `bool` query instead]

<<query-dsl-limit-query,`limit` query>>::

Limits the number of documents examined per shard. deprecated[1.6.0]
Limits the number of documents examined per shard.

include::constant-score-query.asciidoc[]

@ -1,7 +1,7 @@
[[query-dsl-filtered-query]]
=== Filtered Query

deprecated[2.0.0, Use the `bool` query instead with a `must` clause for the query and a `filter` clause for the filter]
deprecated[2.0.0-beta1, Use the `bool` query instead with a `must` clause for the query and a `filter` clause for the filter]

The `filtered` query is used to combine a query which will be used for
scoring with another query which will only be used for filtering the result

@ -1,8 +1,6 @@
[[query-dsl-limit-query]]
=== Limit Query

deprecated[1.6.0, Use <<search-request-body,terminate_after>> instead]

A limit query limits the number of documents (per shard) to execute on.
For example:

@ -149,7 +149,7 @@ input, the other one for term selection and for query formation.
==== Document Input Parameters

[horizontal]
`like`:: coming[2.0]
`like`:: coming[2.0.0-beta1]
The only *required* parameter of the MLT query is `like` and follows a
versatile syntax, in which the user can specify free form text and/or a single
or multiple documents (see examples above). The syntax to specify documents is
@ -162,7 +162,7 @@ follows a similar syntax to the `per_field_analyzer` parameter of the
Additionally, to provide documents not necessarily present in the index,
<<docs-termvectors-artificial-doc,artificial documents>> are also supported.

`unlike`:: coming[2.0]
`unlike`:: coming[2.0.0-beta1]
The `unlike` parameter is used in conjunction with `like` in order not to
select terms found in a chosen set of documents. In other words, we could ask
for documents `like: "Apple"`, but `unlike: "cake crumble tree"`. The syntax
@ -172,10 +172,10 @@ is the same as `like`.

A list of fields to fetch and analyze the text from. Defaults to the `_all`
field for free text and to all possible fields for document inputs.

`like_text`:: deprecated[2.0,Replaced by `like`]
`like_text`:: deprecated[2.0.0-beta1,Replaced by `like`]
The text to find documents like it.

`ids` or `docs`:: deprecated[2.0,Replaced by `like`]
`ids` or `docs`:: deprecated[2.0.0-beta1,Replaced by `like`]
A list of documents following the same syntax as the <<docs-multi-get,Multi GET API>>.

[float]

@ -1,7 +1,7 @@
[[query-dsl-or-query]]
=== Or Query

deprecated[2.0.0, Use the `bool` query instead]
deprecated[2.0.0-beta1, Use the `bool` query instead]

A query that matches documents using the `OR` boolean operator on other
queries.

@ -63,7 +63,7 @@ curl -XGET <1> 'localhost:9200/_search/scroll' <2> -d'
'
--------------------------------------------------

coming[2.0.0, body based parameters were added in 2.0.0]
coming[2.0.0-beta1, body based parameters were added in 2.0.0]

<1> `GET` or `POST` can be used.
<2> The URL should not include the `index` or `type` name -- these

@ -188,7 +188,7 @@ curl -XDELETE localhost:9200/_search/scroll -d '
}'
---------------------------------------

coming[2.0.0, Body based parameters were added in 2.0.0]
coming[2.0.0-beta1, Body based parameters were added in 2.0.0]

Multiple scroll IDs can be passed as array:

@ -200,7 +200,7 @@ curl -XDELETE localhost:9200/_search/scroll -d '
}'
---------------------------------------

coming[2.0.0, Body based parameters were added in 2.0.0]
coming[2.0.0-beta1, Body based parameters were added in 2.0.0]

All search contexts can be cleared with the `_all` parameter:
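
A minimal sketch of such a request (assuming the default local endpoint):

[source,js]
---------------------------------------
curl -XDELETE localhost:9200/_search/scroll/_all
---------------------------------------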

@ -65,7 +65,7 @@ scoring.
[[count]]
==== Count

deprecated[2.0.0, `count` does not provide any benefits over `query_then_fetch` with a `size` of `0`]
deprecated[2.0.0-beta1, `count` does not provide any benefits over `query_then_fetch` with a `size` of `0`]

Parameter value: *count*.
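
As a sketch of the replacement, the same count can be obtained with a regular
search and `size` set to `0` (hypothetical index and field):

[source,js]
---------------------------------------
curl -XGET 'localhost:9200/my_index/_search' -d '
{
  "size": 0,
  "query": {
    "match": { "title": "elasticsearch" }
  }
}'
---------------------------------------

The total number of matching documents is returned in `hits.total`.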

@ -104,7 +104,7 @@ Defaults to no terminate_after.

|`search_type` |The type of the search operation to perform. Can be
`dfs_query_then_fetch`, `query_then_fetch`, `scan` or `count`
deprecated[2.0,Replaced by `size: 0`]. Defaults to `query_then_fetch`. See
deprecated[2.0.0-beta1,Replaced by `size: 0`]. Defaults to `query_then_fetch`. See
<<search-request-search-type,_Search Type_>> for
more details on the different types of search that can be performed.
|=======================================================================

@ -104,7 +104,7 @@ curl -XGET 'http://localhost:9200/twitter/tweet/_validate/query?q=post_date:foo&
}
--------------------------------------------------

coming[1.6] When the query is valid, the explanation defaults to the string
When the query is valid, the explanation defaults to the string
representation of that query. With `rewrite` set to `true`, the explanation
is more detailed showing the actual Lucene query that will be executed.
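
For example, a request along these lines asks for the rewritten form (reusing
the `twitter` index from the surrounding examples; the field is hypothetical):

[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/_validate/query?explain=true&rewrite=true' -d '
{
  "query": {
    "match": { "user": "kimchy" }
  }
}'
--------------------------------------------------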

@ -41,8 +41,6 @@ PUT /_cluster/settings

==== Step 2: Perform a synced flush

added[1.6.0,Synced flush is only supported in Elasticsearch 1.6.0 and above]

Shard recovery will be much faster if you stop indexing and issue a
<<indices-synced-flush, synced-flush>> request:
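
A minimal sketch of such a request:

[source,js]
--------------------------------------------------
POST /_flush/synced
--------------------------------------------------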

@ -32,8 +32,6 @@ PUT /_cluster/settings

==== Step 2: Stop non-essential indexing and perform a synced flush (Optional)

added[1.6.0,Synced flush is only supported in Elasticsearch 1.6.0 and above]

You may happily continue indexing during the upgrade. However, shard recovery
will be much faster if you temporarily stop non-essential indexing and issue a
<<indices-synced-flush, synced-flush>> request:

@ -192,7 +192,6 @@ public class KuromojiAnalysisTests extends ESTestCase {
        assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
    }

    public AnalysisService createAnalysisService() throws IOException {
        InputStream empty_dict = getClass().getResourceAsStream("empty_user_dict.txt");
        InputStream dict = getClass().getResourceAsStream("user_dict.txt");
@ -202,9 +201,10 @@ public class KuromojiAnalysisTests extends ESTestCase {
        Files.copy(empty_dict, config.resolve("empty_user_dict.txt"));
        Files.copy(dict, config.resolve("user_dict.txt"));

        String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json";
        Settings settings = Settings.settingsBuilder()
                .put("path.home", home)
                .loadFromClasspath("org/elasticsearch/index/analysis/kuromoji_analysis.json")
                .loadFromStream(json, getClass().getResourceAsStream(json))
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                .build();

@ -45,7 +45,8 @@ public class SimplePhoneticAnalysisTests extends ESTestCase {

    @Test
    public void testPhoneticTokenFilterFactory() {
        Settings settings = settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/phonetic-1.yml")
        String yaml = "/org/elasticsearch/index/analysis/phonetic-1.yml";
        Settings settings = settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml))
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                .put("path.home", createTempDir())
                .build();

@ -1318,7 +1318,7 @@ org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UT
      <repository>
        <id>aws-release</id>
        <name>AWS Release Repository</name>
        <url>s3://download.elasticsearch.org/elasticsearch/release</url>
        <url>s3://download.elasticsearch.org/elasticsearch/staging</url>
      </repository>
      <snapshotRepository>
        <id>aws-snapshot</id>