Merge branch 'master' into index-lifecycle
commit 6b6882900d

build.gradle (11 changes)
@@ -226,6 +226,7 @@ subprojects {
     "org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:packages:deb',
+    "org.elasticsearch.distribution.deb:elasticsearch-oss:${version}": ':distribution:packages:oss-deb',
     "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage',
     "org.elasticsearch.xpack.test:feature-aware:${version}": ':x-pack:test:feature-aware',
     // for transport client
     "org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4',
     "org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex',

@@ -311,7 +312,15 @@ gradle.projectsEvaluated {
       // :test:framework:test cannot run before and after :server:test
       return
     }
-    configurations.all {
+    configurations.all { Configuration configuration ->
+      /*
+       * The featureAwarePlugin configuration has a dependency on x-pack:plugin:core and x-pack:plugin:core has a dependency on the
+       * featureAwarePlugin configuration. The below task ordering logic would force :x-pack:plugin:core:test and
+       * :x-pack:test:feature-aware:test to depend on each other circularly. We break that cycle here.
+       */
+      if (configuration.name == "featureAwarePlugin") {
+        return
+      }
       dependencies.all { Dependency dep ->
         Project upstreamProject = dependencyToProject(dep)
         if (upstreamProject != null) {
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[certgen]]
 == certgen

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[certutil]]
 == elasticsearch-certutil

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[migrate-tool]]
 == elasticsearch-migrate

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[saml-metadata]]
 == saml-metadata

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[setup-passwords]]
 == elasticsearch-setup-passwords

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[syskeygen]]
 == elasticsearch-syskeygen

@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold+"]
 [[users-command]]
 == Users Command
 ++++
@@ -22,7 +22,7 @@ include::{xes-repo-dir}/security/configuring-es.asciidoc[]
 
 include::{xes-repo-dir}/setup/setup-xclient.asciidoc[]
 
-include::{xes-repo-dir}/settings/configuring-xes.asciidoc[]
+include::settings/configuring-xes.asciidoc[]
 
 include::{xes-repo-dir}/setup/bootstrap-checks-xes.asciidoc[]

@@ -63,9 +63,9 @@ include::{xes-repo-dir}/monitoring/index.asciidoc[]
 
 include::{xes-repo-dir}/rollup/index.asciidoc[]
 
-include::{xes-repo-dir}/rest-api/index.asciidoc[]
+include::rest-api/index.asciidoc[]
 
-include::{xes-repo-dir}/commands/index.asciidoc[]
+include::commands/index.asciidoc[]
 
 :edit_url:
 include::how-to.asciidoc[]
@@ -78,7 +78,7 @@ GET my_index/_search
 {
   "query": {
     "match": {
-      "manager.name": "Alice White" <1>
+      "manager.name": "Alice White"
     }
   },
   "aggs": {

@@ -89,7 +89,7 @@ GET my_index/_search
     "aggs": {
       "Employee Ages": {
         "histogram": {
-          "field": "employees.age", <2>
+          "field": "employees.age",
           "interval": 5
         }
       }

@@ -2,8 +2,8 @@
 === Nested datatype
 
 The `nested` type is a specialised version of the <<object,`object`>> datatype
-that allows arrays of objects to be indexed and queried independently of each
-other.
+that allows arrays of objects to be indexed in a way that they can be queried
+independently of each other.
 
 ==== How arrays of objects are flattened
 
|
@ -0,0 +1,29 @@
|
|||
[role="xpack"]
|
||||
[[xpack-api]]
|
||||
= {xpack} APIs
|
||||
|
||||
[partintro]
|
||||
--
|
||||
{xpack} exposes REST APIs that are used by the UI components and can be called
|
||||
directly to configure and access {xpack} features.
|
||||
|
||||
* <<info-api,Info API>>
|
||||
* <<graph-explore-api,Graph Explore API>>
|
||||
* <<licensing-apis,Licensing APIs>>
|
||||
* <<ml-apis,Machine Learning APIs>>
|
||||
* <<security-api,Security APIs>>
|
||||
* <<watcher-api,Watcher APIs>>
|
||||
* <<rollup-apis,Rollup APIs>>
|
||||
* <<migration-api,Migration APIs>>
|
||||
--
|
||||
|
||||
|
||||
include::{xes-repo-dir}/rest-api/info.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/licensing.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/migration.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/ml-api.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/rollup-api.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/security.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/watcher.asciidoc[]
|
||||
include::{xes-repo-dir}/rest-api/defs.asciidoc[]
|
(binary image file: 219 KiB before, 219 KiB after)
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.analysis.common;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.LowerCaseFilter;
 import org.apache.lucene.analysis.StopFilter;

@@ -79,7 +80,9 @@ import org.apache.lucene.analysis.util.ElisionFilter;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.index.analysis.AnalyzerProvider;
 import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
 import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
 import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
 import org.elasticsearch.index.analysis.PreConfiguredTokenizer;

@@ -87,6 +90,7 @@ import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.analysis.TokenizerFactory;
 import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;
 import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.tartarus.snowball.ext.DutchStemmer;

@@ -103,6 +107,15 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 
     private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(CommonAnalysisPlugin.class));
 
+    @Override
+    public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
+        Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> analyzers = new TreeMap<>();
+        analyzers.put("fingerprint", FingerprintAnalyzerProvider::new);
+        analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
+        analyzers.put("pattern", PatternAnalyzerProvider::new);
+        return analyzers;
+    }
+
     @Override
     public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         Map<String, AnalysisProvider<TokenFilterFactory>> filters = new TreeMap<>();

@@ -197,6 +210,16 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
         return tokenizers;
     }
 
+    @Override
+    public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
+        List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
+        analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE,
+            version -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET)));
+        analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version ->
+            new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET)));
+        return analyzers;
+    }
+
     @Override
     public List<PreConfiguredCharFilter> getPreConfiguredCharFilters() {
         List<PreConfiguredCharFilter> filters = new ArrayList<>();
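A note on the two caching strategies used in getPreBuiltAnalyzerProviderFactories above: per the PreBuiltCacheFactory contract, CachingStrategy.LUCENE caches one analyzer instance per Lucene version, while CachingStrategy.ELASTICSEARCH caches one per Elasticsearch version. A hedged sketch of how another analysis plugin would follow the same registration pattern ("my_analyzer" and MyAnalyzer are illustrative names, not part of this commit):

```java
// Sketch only: MyAnalyzer stands in for any org.apache.lucene.analysis.Analyzer.
@Override
public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
    List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
    // LUCENE strategy: reuse one instance per Lucene version, since this
    // analyzer's output only changes when Lucene itself changes.
    analyzers.add(new PreBuiltAnalyzerProviderFactory("my_analyzer", CachingStrategy.LUCENE,
        version -> new MyAnalyzer()));
    return analyzers;
}
```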
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;

@@ -35,7 +35,7 @@ public final class FingerprintAnalyzer extends Analyzer {
     private final int maxOutputSize;
     private final CharArraySet stopWords;
 
-    public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) {
+    FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) {
         this.separator = separator;
         this.maxOutputSize = maxOutputSize;
         this.stopWords = stopWords;
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;

@@ -25,6 +25,8 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
+import org.elasticsearch.index.analysis.Analysis;
 
 
 /**

@@ -42,7 +44,7 @@ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<A
 
     private final FingerprintAnalyzer analyzer;
 
-    public FingerprintAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+    FingerprintAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
 
         char separator = parseSeparator(settings);
@@ -21,15 +21,13 @@ package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.miscellaneous.FingerprintFilter;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
-import org.elasticsearch.index.analysis.FingerprintAnalyzerProvider;
 
-import static org.elasticsearch.index.analysis.FingerprintAnalyzerProvider.DEFAULT_MAX_OUTPUT_SIZE;
-import static org.elasticsearch.index.analysis.FingerprintAnalyzerProvider.MAX_OUTPUT_SIZE;
+import static org.elasticsearch.analysis.common.FingerprintAnalyzerProvider.DEFAULT_MAX_OUTPUT_SIZE;
+import static org.elasticsearch.analysis.common.FingerprintAnalyzerProvider.MAX_OUTPUT_SIZE;
 
 public class FingerprintTokenFilterFactory extends AbstractTokenFilterFactory {
 
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;

@@ -35,7 +35,7 @@ public final class PatternAnalyzer extends Analyzer {
     private final boolean lowercase;
     private final CharArraySet stopWords;
 
-    public PatternAnalyzer(Pattern pattern, boolean lowercase, CharArraySet stopWords) {
+    PatternAnalyzer(Pattern pattern, boolean lowercase, CharArraySet stopWords) {
         this.pattern = pattern;
         this.lowercase = lowercase;
         this.stopWords = stopWords;
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;

@@ -25,6 +25,8 @@ import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
+import org.elasticsearch.index.analysis.Analysis;
 
 import java.util.regex.Pattern;
 

@@ -32,7 +34,7 @@ public class PatternAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analy
 
     private final PatternAnalyzer analyzer;
 
-    public PatternAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+    PatternAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
 
         final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET;
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.LowerCaseFilter;

@@ -39,7 +39,7 @@ public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase {
         super(StopAnalyzer.ENGLISH_STOP_WORDS_SET);
     }
 
-    public StandardHtmlStripAnalyzer(CharArraySet stopwords) {
+    StandardHtmlStripAnalyzer(CharArraySet stopwords) {
         super(stopwords);
     }
 
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.core.StopAnalyzer;

@@ -25,12 +25,14 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
+import org.elasticsearch.index.analysis.Analysis;
 
 public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider<StandardHtmlStripAnalyzer> {
 
     private final StandardHtmlStripAnalyzer analyzer;
 
-    public StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+    StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, name, settings);
         final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET;
         CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);
@@ -1,4 +1,4 @@
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 /*
  * Licensed to Elasticsearch under one or more contributor

@@ -1,4 +1,4 @@
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 /*
  * Licensed to Elasticsearch under one or more contributor
@@ -37,3 +37,35 @@
             analyzer: bengali
     - length: { tokens: 1 }
     - match: { tokens.0.token: বার }
+
+---
+"fingerprint":
+    - do:
+        indices.analyze:
+          body:
+            text: A1 B2 A1 D4 C3
+            analyzer: fingerprint
+    - length: { tokens: 1 }
+    - match: { tokens.0.token: a1 b2 c3 d4 }
+
+---
+"standard_html_strip":
+    - do:
+        indices.analyze:
+          body:
+            text: <bold/> <italic/>
+            analyzer: standard_html_strip
+    - length: { tokens: 2 }
+    - match: { tokens.0.token: bold }
+    - match: { tokens.1.token: italic }
+
+---
+"pattern":
+    - do:
+        indices.analyze:
+          body:
+            text: foo bar
+            analyzer: pattern
+    - length: { tokens: 2 }
+    - match: { tokens.0.token: foo }
+    - match: { tokens.1.token: bar }
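The "fingerprint" expectation above (input `A1 B2 A1 D4 C3`, single output token `a1 b2 c3 d4`) comes from lowercasing plus Lucene's FingerprintFilter, which sorts, de-duplicates, and joins the tokens. A rough standalone sketch of that pipeline (assumed wiring for illustration, not the plugin's exact code):

```java
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.miscellaneous.FingerprintFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class FingerprintSketch {
    public static void main(String[] args) throws Exception {
        Analyzer analyzer = new Analyzer() {
            @Override
            protected TokenStreamComponents createComponents(String fieldName) {
                Tokenizer tokenizer = new StandardTokenizer();
                TokenStream stream = new LowerCaseFilter(tokenizer);
                stream = new FingerprintFilter(stream); // sort + dedupe + join
                return new TokenStreamComponents(tokenizer, stream);
            }
        };
        try (TokenStream ts = analyzer.tokenStream("f", "A1 B2 A1 D4 C3")) {
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset();
            while (ts.incrementToken()) {
                System.out.println(term); // prints: a1 b2 c3 d4
            }
            ts.end();
        }
    }
}
```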
@@ -131,22 +131,19 @@ public class RatedRequestsTests extends ESTestCase {
         }
     }
 
-    @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/31104")
     public void testXContentParsingIsNotLenient() throws IOException {
         RatedRequest testItem = createTestItem(randomBoolean());
         XContentType xContentType = randomFrom(XContentType.values());
         BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
         BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
         try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
-            Exception exception = expectThrows(Exception.class, () -> RatedRequest.fromXContent(parser));
-            if (exception instanceof XContentParseException) {
-                XContentParseException xcpe = (XContentParseException) exception;
-                assertThat(xcpe.getCause().getMessage(), containsString("unknown field"));
-                assertThat(xcpe.getCause().getMessage(), containsString("parser not found"));
-            }
-            if (exception instanceof XContentParseException) {
+            Throwable exception = expectThrows(XContentParseException.class, () -> RatedRequest.fromXContent(parser));
+            if (exception.getCause() != null) {
+                assertThat(exception.getMessage(), containsString("[request] failed to parse field"));
+                exception = exception.getCause();
+            }
             assertThat(exception.getMessage(), containsString("unknown field"));
             assertThat(exception.getMessage(), containsString("parser not found"));
         }
     }
 
@@ -76,7 +76,8 @@ public final class Netty4CorsConfig {
     }
 
     /**
-     * Determines whether a wildcard origin, '*', is supported.
+     * Determines whether a wildcard origin, '*', is supported. This also means that null origins are
+     * supported.
      *
      * @return {@code boolean} true if any origin is allowed.
      */

@@ -121,21 +122,21 @@ public final class Netty4CorsConfig {
     }
 
     /**
-     * Determines if cookies are supported for CORS requests.
+     * Determines if credentials are supported for CORS requests.
      *
-     * By default cookies are not included in CORS requests but if isCredentialsAllowed returns
-     * true cookies will be added to CORS requests. Setting this value to true will set the
+     * By default credentials are not included in CORS requests but if isCredentialsAllowed returns
+     * true credentials will be added to CORS requests. Setting this value to true will set the
      * CORS 'Access-Control-Allow-Credentials' response header to true.
      *
-     * Please note that cookie support needs to be enabled on the client side as well.
-     * The client needs to opt-in to send cookies by calling:
+     * Please note that credentials support needs to be enabled on the client side as well.
+     * The client needs to opt-in to send credentials by calling:
      * <pre>
     * xhr.withCredentials = true;
      * </pre>
-     * The default value for 'withCredentials' is false in which case no cookies are sent.
-     * Setting this to true will included cookies in cross origin requests.
+     * The default value for 'withCredentials' is false in which case no credentials are sent.
+     * Setting this to true will include credentials in cross origin requests.
      *
-     * @return {@code true} if cookies are supported.
+     * @return {@code true} if credentials are supported.
      */
     public boolean isCredentialsAllowed() {
         return allowCredentials;
@@ -31,6 +31,7 @@ import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
 import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
 import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation;
 
+import java.io.FileNotFoundException;
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;

@@ -69,11 +70,13 @@ final class HdfsBlobContainer extends AbstractBlobContainer {
 
     @Override
     public void deleteBlob(String blobName) throws IOException {
-        if (!blobExists(blobName)) {
-            throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
+        try {
+            if (store.execute(fileContext -> fileContext.delete(new Path(path, blobName), true)) == false) {
+                throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
+            }
+        } catch (FileNotFoundException fnfe) {
+            throw new NoSuchFileException("[" + blobName + "] blob not found");
         }
-
-        store.execute(fileContext -> fileContext.delete(new Path(path, blobName), true));
     }
 
     @Override

@@ -86,16 +89,17 @@ final class HdfsBlobContainer extends AbstractBlobContainer {
 
     @Override
     public InputStream readBlob(String blobName) throws IOException {
-        if (!blobExists(blobName)) {
-            throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
-        }
         // FSDataInputStream does buffering internally
-        return store.execute(fileContext ->
-            new HDFSPrivilegedInputSteam(fileContext.open(new Path(path, blobName), bufferSize), securityContext)
-        );
+        // FSDataInputStream can open connections on read() or skip() so we wrap in
+        // HDFSPrivilegedInputSteam which will ensure that underlying methods will
+        // be called with the proper privileges.
+        try {
+            return store.execute(fileContext ->
+                new HDFSPrivilegedInputSteam(fileContext.open(new Path(path, blobName), bufferSize), securityContext)
+            );
+        } catch (FileNotFoundException fnfe) {
+            throw new NoSuchFileException("[" + blobName + "] blob not found");
        }
     }
 
     @Override
@@ -96,10 +96,6 @@ class S3BlobContainer extends AbstractBlobContainer {
 
     @Override
     public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
-        if (blobExists(blobName)) {
-            throw new FileAlreadyExistsException("Blob [" + blobName + "] already exists, cannot overwrite");
-        }
-
         SocketAccess.doPrivilegedIOException(() -> {
             if (blobSize <= blobStore.bufferSizeInBytes()) {
                 executeSingleUpload(blobStore, buildKey(blobName), inputStream, blobSize);
@@ -64,6 +64,11 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
         return randomMockS3BlobStore();
     }
 
+    @Override
+    public void testVerifyOverwriteFails() {
+        assumeFalse("not implemented because of S3's weak consistency model", true);
+    }
+
     public void testExecuteSingleUploadBlobSizeTooLarge() {
         final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(6, 10));
         final S3BlobStore blobStore = mock(S3BlobStore.class);
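The repository changes above share one idea: instead of a separate blobExists() round-trip (which costs an extra request and can race with concurrent writers or deletes, something S3's weak consistency makes worse), act once and translate the store's own miss signal. A minimal sketch of the pattern, with a hypothetical Store interface standing in for the HDFS FileContext:

```java
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;

// Hypothetical store abstraction, for illustration only.
interface Store {
    InputStream open(String name) throws IOException; // FileNotFoundException on a miss
}

final class BlobReads {
    // One operation instead of exists()-then-open(): no extra round-trip, and
    // no window in which another client can delete the blob between the calls.
    static InputStream read(Store store, String blobName) throws IOException {
        try {
            return store.open(blobName);
        } catch (FileNotFoundException fnfe) {
            throw new NoSuchFileException("[" + blobName + "] blob not found");
        }
    }
}
```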
@@ -36,6 +36,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.http.HttpHandlingSettings;
 import org.elasticsearch.http.HttpPipelinedRequest;
+import org.elasticsearch.http.nio.cors.NioCorsConfig;
+import org.elasticsearch.http.nio.cors.NioCorsHandler;
 import org.elasticsearch.nio.FlushOperation;
 import org.elasticsearch.nio.InboundChannelBuffer;
 import org.elasticsearch.nio.NioSocketChannel;

@@ -50,6 +52,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.function.BiConsumer;
 
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
+
 public class HttpReadWriteHandler implements ReadWriteHandler {
 
     private final NettyAdaptor adaptor;

@@ -57,14 +61,16 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
     private final NioHttpServerTransport transport;
     private final HttpHandlingSettings settings;
     private final NamedXContentRegistry xContentRegistry;
+    private final NioCorsConfig corsConfig;
     private final ThreadContext threadContext;
 
     HttpReadWriteHandler(NioSocketChannel nioChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
-                         NamedXContentRegistry xContentRegistry, ThreadContext threadContext) {
+                         NamedXContentRegistry xContentRegistry, NioCorsConfig corsConfig, ThreadContext threadContext) {
         this.nioChannel = nioChannel;
         this.transport = transport;
         this.settings = settings;
         this.xContentRegistry = xContentRegistry;
+        this.corsConfig = corsConfig;
         this.threadContext = threadContext;
 
         List<ChannelHandler> handlers = new ArrayList<>(5);

@@ -78,6 +84,9 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
         if (settings.isCompression()) {
             handlers.add(new HttpContentCompressor(settings.getCompressionLevel()));
         }
+        if (settings.isCorsEnabled()) {
+            handlers.add(new NioCorsHandler(corsConfig));
+        }
         handlers.add(new NioHttpPipeliningHandler(transport.getLogger(), settings.getPipeliningMaxEvents()));
 
         adaptor = new NettyAdaptor(handlers.toArray(new ChannelHandler[0]));

@@ -178,7 +187,7 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
             int sequence = pipelinedRequest.getSequence();
             BigArrays bigArrays = transport.getBigArrays();
             try {
-                innerChannel = new NioHttpChannel(nioChannel, bigArrays, httpRequest, sequence, settings, threadContext);
+                innerChannel = new NioHttpChannel(nioChannel, bigArrays, httpRequest, sequence, settings, corsConfig, threadContext);
             } catch (final IllegalArgumentException e) {
                 if (badRequestCause == null) {
                     badRequestCause = e;

@@ -191,7 +200,7 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
                     Collections.emptyMap(), // we are going to dispatch the request as a bad request, drop all parameters
                     copiedRequest.uri(),
                     copiedRequest);
-                innerChannel = new NioHttpChannel(nioChannel, bigArrays, innerRequest, sequence, settings, threadContext);
+                innerChannel = new NioHttpChannel(nioChannel, bigArrays, innerRequest, sequence, settings, corsConfig, threadContext);
             }
             channel = innerChannel;
         }
@@ -41,6 +41,8 @@ import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.http.HttpHandlingSettings;
+import org.elasticsearch.http.nio.cors.NioCorsConfig;
+import org.elasticsearch.http.nio.cors.NioCorsHandler;
 import org.elasticsearch.nio.NioSocketChannel;
 import org.elasticsearch.rest.AbstractRestChannel;
 import org.elasticsearch.rest.RestResponse;

@@ -58,17 +60,19 @@ public class NioHttpChannel extends AbstractRestChannel {
 
     private final BigArrays bigArrays;
     private final int sequence;
+    private final NioCorsConfig corsConfig;
     private final ThreadContext threadContext;
     private final FullHttpRequest nettyRequest;
     private final NioSocketChannel nioChannel;
     private final boolean resetCookies;
 
     NioHttpChannel(NioSocketChannel nioChannel, BigArrays bigArrays, NioHttpRequest request, int sequence,
-                   HttpHandlingSettings settings, ThreadContext threadContext) {
+                   HttpHandlingSettings settings, NioCorsConfig corsConfig, ThreadContext threadContext) {
         super(request, settings.getDetailedErrorsEnabled());
         this.nioChannel = nioChannel;
         this.bigArrays = bigArrays;
         this.sequence = sequence;
+        this.corsConfig = corsConfig;
         this.threadContext = threadContext;
         this.nettyRequest = request.getRequest();
         this.resetCookies = settings.isResetCookies();

@@ -87,6 +91,8 @@ public class NioHttpChannel extends AbstractRestChannel {
         }
         resp.setStatus(getStatus(response.status()));
 
+        NioCorsHandler.setCorsResponseHeaders(nettyRequest, resp, corsConfig);
+
         String opaque = nettyRequest.headers().get("X-Opaque-Id");
         if (opaque != null) {
             setHeaderField(resp, "X-Opaque-Id", opaque);
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.http.nio;
 
+import io.netty.handler.codec.http.HttpMethod;
 import io.netty.handler.timeout.ReadTimeoutException;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;

@@ -28,6 +29,7 @@ import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Setting;

@@ -38,11 +40,13 @@ import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.http.AbstractHttpServerTransport;
 import org.elasticsearch.http.BindHttpException;
 import org.elasticsearch.http.HttpHandlingSettings;
 import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.http.HttpStats;
-import org.elasticsearch.http.AbstractHttpServerTransport;
+import org.elasticsearch.http.nio.cors.NioCorsConfig;
+import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder;
 import org.elasticsearch.nio.AcceptingSelector;
 import org.elasticsearch.nio.AcceptorEventHandler;
 import org.elasticsearch.nio.BytesChannelContext;

@@ -56,6 +60,7 @@ import org.elasticsearch.nio.ServerChannelContext;
 import org.elasticsearch.nio.SocketChannelContext;
 import org.elasticsearch.nio.SocketEventHandler;
 import org.elasticsearch.nio.SocketSelector;
+import org.elasticsearch.rest.RestUtils;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;

@@ -64,15 +69,23 @@ import java.net.InetSocketAddress;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Consumer;
+import java.util.regex.Pattern;
 
 import static org.elasticsearch.common.settings.Setting.intSetting;
 import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_MAX_AGE;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION_LEVEL;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED;

@@ -86,6 +99,7 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_RECE
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_REUSE_ADDRESS;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_SEND_BUFFER_SIZE;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS;
+import static org.elasticsearch.http.nio.cors.NioCorsHandler.ANY_ORIGIN;
 
 public class NioHttpServerTransport extends AbstractHttpServerTransport {
 

@@ -115,6 +129,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
     private final Set<NioSocketChannel> socketChannels = Collections.newSetFromMap(new ConcurrentHashMap<>());
     private NioGroup nioGroup;
     private HttpChannelFactory channelFactory;
+    private final NioCorsConfig corsConfig;
 
     public NioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool,
                                   NamedXContentRegistry xContentRegistry, HttpServerTransport.Dispatcher dispatcher) {

@@ -136,6 +151,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
             SETTING_HTTP_COMPRESSION_LEVEL.get(settings),
             SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings),
             pipeliningMaxEvents);
+        this.corsConfig = buildCorsConfig(settings);
 
         this.tcpNoDelay = SETTING_HTTP_TCP_NO_DELAY.get(settings);
         this.tcpKeepAlive = SETTING_HTTP_TCP_KEEP_ALIVE.get(settings);

@@ -279,6 +295,38 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
         logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), ex);
     }
 
+    static NioCorsConfig buildCorsConfig(Settings settings) {
+        if (SETTING_CORS_ENABLED.get(settings) == false) {
+            return NioCorsConfigBuilder.forOrigins().disable().build();
+        }
+        String origin = SETTING_CORS_ALLOW_ORIGIN.get(settings);
+        final NioCorsConfigBuilder builder;
+        if (Strings.isNullOrEmpty(origin)) {
+            builder = NioCorsConfigBuilder.forOrigins();
+        } else if (origin.equals(ANY_ORIGIN)) {
+            builder = NioCorsConfigBuilder.forAnyOrigin();
+        } else {
+            Pattern p = RestUtils.checkCorsSettingForRegex(origin);
+            if (p == null) {
+                builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin));
+            } else {
+                builder = NioCorsConfigBuilder.forPattern(p);
+            }
+        }
+        if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) {
+            builder.allowCredentials();
+        }
+        String[] strMethods = Strings.tokenizeToStringArray(SETTING_CORS_ALLOW_METHODS.get(settings), ",");
+        HttpMethod[] methods = Arrays.stream(strMethods)
+            .map(HttpMethod::valueOf)
+            .toArray(HttpMethod[]::new);
+        return builder.allowedRequestMethods(methods)
+            .maxAge(SETTING_CORS_MAX_AGE.get(settings))
+            .allowedRequestHeaders(Strings.tokenizeToStringArray(SETTING_CORS_ALLOW_HEADERS.get(settings), ","))
+            .shortCircuit()
+            .build();
+    }
+
     private void closeChannels(List<NioChannel> channels) {
         List<ActionFuture<Void>> futures = new ArrayList<>(channels.size());
 

@@ -315,7 +363,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
         public NioSocketChannel createChannel(SocketSelector selector, SocketChannel channel) throws IOException {
             NioSocketChannel nioChannel = new NioSocketChannel(channel);
             HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(nioChannel,NioHttpServerTransport.this,
-                httpHandlingSettings, xContentRegistry, threadPool.getThreadContext());
+                httpHandlingSettings, xContentRegistry, corsConfig, threadPool.getThreadContext());
             Consumer<Exception> exceptionHandler = (e) -> exceptionCaught(nioChannel, e);
             SocketChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, httpReadWritePipeline,
                 InboundChannelBuffer.allocatingInstance());
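To trace how the new buildCorsConfig interprets the settings, a small sketch (assumed values, and it relies on being in the same package as the package-private static method above; a value wrapped in '/' is treated as a regex by RestUtils.checkCorsSettingForRegex, so the builder is created via forPattern rather than forOrigins):

```java
Settings settings = Settings.builder()
    .put(SETTING_CORS_ENABLED.getKey(), true)
    // Regex-shaped origin setting -> forPattern(...) branch.
    .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/https?:\\/\\/localhost(:[0-9]+)?/")
    .put(SETTING_CORS_ALLOW_METHODS.getKey(), "OPTIONS,GET,POST")
    .build();
NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);
// Matching is now done against the compiled pattern:
assert corsConfig.isOriginAllowed("http://localhost:9200");
```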
@@ -0,0 +1,236 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.http.nio.cors;
+
+import io.netty.handler.codec.http.DefaultHttpHeaders;
+import io.netty.handler.codec.http.EmptyHttpHeaders;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.HttpMethod;
+
+import java.util.Collections;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.regex.Pattern;
+
+/**
+ * Configuration for Cross-Origin Resource Sharing (CORS).
+ *
+ * This class was lifted from the Netty project:
+ * https://github.com/netty/netty
+ */
+public final class NioCorsConfig {
+
+    private final Optional<Set<String>> origins;
+    private final Optional<Pattern> pattern;
+    private final boolean anyOrigin;
+    private final boolean enabled;
+    private final boolean allowCredentials;
+    private final long maxAge;
+    private final Set<HttpMethod> allowedRequestMethods;
+    private final Set<String> allowedRequestHeaders;
+    private final boolean allowNullOrigin;
+    private final Map<CharSequence, Callable<?>> preflightHeaders;
+    private final boolean shortCircuit;
+
+    NioCorsConfig(final NioCorsConfigBuilder builder) {
+        origins = builder.origins.map(s -> new LinkedHashSet<>(s));
+        pattern = builder.pattern;
+        anyOrigin = builder.anyOrigin;
+        enabled = builder.enabled;
+        allowCredentials = builder.allowCredentials;
+        maxAge = builder.maxAge;
+        allowedRequestMethods = builder.requestMethods;
+        allowedRequestHeaders = builder.requestHeaders;
+        allowNullOrigin = builder.allowNullOrigin;
+        preflightHeaders = builder.preflightHeaders;
+        shortCircuit = builder.shortCircuit;
+    }
+
+    /**
+     * Determines if support for CORS is enabled.
+     *
+     * @return {@code true} if support for CORS is enabled, false otherwise.
+     */
+    public boolean isCorsSupportEnabled() {
+        return enabled;
+    }
+
+    /**
+     * Determines whether a wildcard origin, '*', is supported. This also means that null origins are
+     * supported.
+     *
+     * @return {@code boolean} true if any origin is allowed.
+     */
+    public boolean isAnyOriginSupported() {
+        return anyOrigin;
+    }
+
+    /**
+     * Returns the set of allowed origins.
+     *
+     * @return {@code Set} the allowed origins.
+     */
+    public Optional<Set<String>> origins() {
+        return origins;
+    }
+
+    /**
+     * Returns whether the input origin is allowed by this configuration.
+     *
+     * @return {@code true} if the origin is allowed, otherwise {@code false}
+     */
+    public boolean isOriginAllowed(final String origin) {
+        if (origins.isPresent()) {
+            return origins.get().contains(origin);
+        } else if (pattern.isPresent()) {
+            return pattern.get().matcher(origin).matches();
+        }
+        return false;
+    }
+
+    /**
+     * Web browsers may set the 'Origin' request header to 'null' if a resource is loaded
+     * from the local file system.
+     *
+     * If isNullOriginAllowed is true then the server will respond with the wildcard for
+     * the CORS response header 'Access-Control-Allow-Origin'.
+     *
+     * @return {@code true} if a 'null' origin should be supported.
+     */
+    public boolean isNullOriginAllowed() {
+        return allowNullOrigin;
+    }
+
+    /**
+     * Determines if credentials are supported for CORS requests.
+     *
+     * By default credentials are not included in CORS requests but if isCredentialsAllowed returns
+     * true credentials will be added to CORS requests. Setting this value to true will set the
+     * CORS 'Access-Control-Allow-Credentials' response header to true.
+     *
+     * Please note that credentials support needs to be enabled on the client side as well.
+     * The client needs to opt-in to send credentials by calling:
+     * <pre>
+     * xhr.withCredentials = true;
+     * </pre>
+     * The default value for 'withCredentials' is false in which case no credentials are sent.
+     * Setting this to true will include credentials in cross origin requests.
+     *
+     * @return {@code true} if credentials are supported.
+     */
+    public boolean isCredentialsAllowed() {
+        return allowCredentials;
+    }
+
+    /**
+     * Gets the maxAge setting.
+     *
+     * When making a preflight request the client has to perform two requests, which can be inefficient.
+     * This setting will set the CORS 'Access-Control-Max-Age' response header and enables the
+     * caching of the preflight response for the specified time. During this time no preflight
+     * request will be made.
+     *
+     * @return {@code long} the time in seconds that a preflight request may be cached.
+     */
+    public long maxAge() {
+        return maxAge;
+    }
+
+    /**
+     * Returns the allowed set of Request Methods. The Http methods that should be returned in the
+     * CORS 'Access-Control-Request-Method' response header.
+     *
+     * @return {@code Set} of {@link HttpMethod}s that represent the allowed Request Methods.
+     */
+    public Set<HttpMethod> allowedRequestMethods() {
+        return Collections.unmodifiableSet(allowedRequestMethods);
+    }
+
+    /**
+     * Returns the allowed set of Request Headers.
+     *
+     * The header names returned from this method will be used to set the CORS
+     * 'Access-Control-Allow-Headers' response header.
+     *
+     * @return {@code Set<String>} of strings that represent the allowed Request Headers.
+     */
+    public Set<String> allowedRequestHeaders() {
+        return Collections.unmodifiableSet(allowedRequestHeaders);
+    }
+
+    /**
+     * Returns HTTP response headers that should be added to a CORS preflight response.
+     *
+     * @return {@link HttpHeaders} the HTTP response headers to be added.
+     */
+    public HttpHeaders preflightResponseHeaders() {
+        if (preflightHeaders.isEmpty()) {
+            return EmptyHttpHeaders.INSTANCE;
+        }
+        final HttpHeaders preflightHeaders = new DefaultHttpHeaders();
+        for (Map.Entry<CharSequence, Callable<?>> entry : this.preflightHeaders.entrySet()) {
+            final Object value = getValue(entry.getValue());
+            if (value instanceof Iterable) {
+                preflightHeaders.add(entry.getKey().toString(), (Iterable<?>) value);
+            } else {
+                preflightHeaders.add(entry.getKey().toString(), value);
+            }
+        }
+        return preflightHeaders;
+    }
+
+    /**
+     * Determines whether a CORS request should be rejected if it is invalid, before further
+     * processing takes place.
+     *
+     * CORS headers are set after a request is processed. This may not always be desired
+     * and this setting will check that the Origin is valid and if it is not valid no
+     * further processing will take place, and an error will be returned to the calling client.
+     *
+     * @return {@code true} if a CORS request should short-circuit upon receiving an invalid Origin header.
+     */
+    public boolean isShortCircuit() {
+        return shortCircuit;
+    }
+
+    private static <T> T getValue(final Callable<T> callable) {
+        try {
+            return callable.call();
+        } catch (final Exception e) {
+            throw new IllegalStateException("Could not generate value for callable [" + callable + ']', e);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "CorsConfig[enabled=" + enabled +
+            ", origins=" + origins +
+            ", anyOrigin=" + anyOrigin +
+            ", isCredentialsAllowed=" + allowCredentials +
+            ", maxAge=" + maxAge +
+            ", allowedRequestMethods=" + allowedRequestMethods +
+            ", allowedRequestHeaders=" + allowedRequestHeaders +
+            ", preflightHeaders=" + preflightHeaders + ']';
+    }
+
+}
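A short usage sketch of the origin checks above (illustrative, not from the commit): isOriginAllowed consults the explicit origin set first, then the pattern, and otherwise denies. Explicit origins and pattern origins are mutually exclusive by construction in the builder:

```java
import java.util.regex.Pattern;

NioCorsConfig explicit = NioCorsConfigBuilder.forOrigins("https://kibana.example.com").build();
assert explicit.isOriginAllowed("https://kibana.example.com");
assert explicit.isOriginAllowed("https://other.example.com") == false;

NioCorsConfig byPattern =
    NioCorsConfigBuilder.forPattern(Pattern.compile("https://.*\\.example\\.com")).build();
assert byPattern.isOriginAllowed("https://app.example.com");
```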
@ -0,0 +1,357 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.http.nio.cors;
|
||||
|
||||
import io.netty.handler.codec.http.HttpMethod;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* Builder used to configure and build a {@link NioCorsConfig} instance.
|
||||
*
|
||||
* This class was lifted from the Netty project:
|
||||
* https://github.com/netty/netty
|
||||
*/
|
||||
public final class NioCorsConfigBuilder {
|
||||
|
||||
/**
|
||||
* Creates a Builder instance with it's origin set to '*'.
|
||||
*
|
||||
* @return Builder to support method chaining.
|
||||
*/
|
||||
public static NioCorsConfigBuilder forAnyOrigin() {
|
||||
return new NioCorsConfigBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link NioCorsConfigBuilder} instance with the specified origin.
|
||||
*
|
||||
* @return {@link NioCorsConfigBuilder} to support method chaining.
|
||||
*/
|
||||
public static NioCorsConfigBuilder forOrigin(final String origin) {
|
||||
if ("*".equals(origin)) {
|
||||
return new NioCorsConfigBuilder();
|
||||
}
|
||||
return new NioCorsConfigBuilder(origin);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Create a {@link NioCorsConfigBuilder} instance with the specified pattern origin.
|
||||
*
|
||||
* @param pattern the regular expression pattern to match incoming origins on.
|
||||
* @return {@link NioCorsConfigBuilder} with the configured origin pattern.
|
||||
*/
|
||||
public static NioCorsConfigBuilder forPattern(final Pattern pattern) {
|
||||
if (pattern == null) {
|
||||
throw new IllegalArgumentException("CORS pattern cannot be null");
|
||||
}
|
||||
return new NioCorsConfigBuilder(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link NioCorsConfigBuilder} instance with the specified origins.
|
||||
*
|
||||
* @return {@link NioCorsConfigBuilder} to support method chaining.
|
||||
*/
|
||||
public static NioCorsConfigBuilder forOrigins(final String... origins) {
|
||||
return new NioCorsConfigBuilder(origins);
|
||||
}
|
||||
|
||||
Optional<Set<String>> origins;
|
||||
Optional<Pattern> pattern;
|
||||
final boolean anyOrigin;
|
||||
boolean allowNullOrigin;
|
||||
boolean enabled = true;
|
||||
boolean allowCredentials;
|
||||
long maxAge;
|
||||
final Set<HttpMethod> requestMethods = new HashSet<>();
|
||||
final Set<String> requestHeaders = new HashSet<>();
|
||||
final Map<CharSequence, Callable<?>> preflightHeaders = new HashMap<>();
|
||||
private boolean noPreflightHeaders;
|
||||
boolean shortCircuit;
|
||||
|
||||
/**
|
||||
* Creates a new Builder instance with the origin passed in.
|
||||
*
|
||||
* @param origins the origin to be used for this builder.
|
||||
*/
|
||||
NioCorsConfigBuilder(final String... origins) {
|
||||
this.origins = Optional.of(new LinkedHashSet<>(Arrays.asList(origins)));
|
||||
pattern = Optional.empty();
|
||||
anyOrigin = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Builder instance allowing any origin, "*" which is the
|
||||
* wildcard origin.
|
||||
*
|
||||
*/
|
||||
NioCorsConfigBuilder() {
|
||||
anyOrigin = true;
|
||||
origins = Optional.empty();
|
||||
pattern = Optional.empty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Builder instance allowing any origin that matches the pattern.
|
||||
*
|
||||
* @param pattern the pattern to match against for incoming origins.
|
||||
*/
|
||||
NioCorsConfigBuilder(final Pattern pattern) {
|
||||
this.pattern = Optional.of(pattern);
|
||||
origins = Optional.empty();
|
||||
anyOrigin = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Web browsers may set the 'Origin' request header to 'null' if a resource is loaded
|
||||
* from the local file system. Calling this method will enable a successful CORS response
|
||||
* with a wildcard for the CORS response header 'Access-Control-Allow-Origin'.
|
||||
*
|
||||
* @return {@link NioCorsConfigBuilder} to support method chaining.
|
||||
*/
|
||||
NioCorsConfigBuilder allowNullOrigin() {
|
||||
allowNullOrigin = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Disables CORS support.
|
||||
*
|
||||
* @return {@link NioCorsConfigBuilder} to support method chaining.
|
||||
*/
|
||||
public NioCorsConfigBuilder disable() {
|
||||
enabled = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* By default cookies are not included in CORS requests, but this method will enable cookies to
|
||||
* be added to CORS requests. Calling this method will set the CORS 'Access-Control-Allow-Credentials'
|
||||
* response header to true.
|
||||
*
|
||||
* Please note, that cookie support needs to be enabled on the client side as well.
|
||||
* The client needs to opt-in to send cookies by calling:
|
||||
* <pre>
|
||||
* xhr.withCredentials = true;
|
||||
* </pre>
|
||||
* The default value for 'withCredentials' is false in which case no cookies are sent.
|
||||
* Setting this to true will included cookies in cross origin requests.
|
||||
*
|
||||
* @return {@link NioCorsConfigBuilder} to support method chaining.
|
||||
*/
|
||||
public NioCorsConfigBuilder allowCredentials() {
|
||||
allowCredentials = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* When making a preflight request the client has to perform two request with can be inefficient.
|
||||
* This setting will set the CORS 'Access-Control-Max-Age' response header and enables the
|
||||
* caching of the preflight response for the specified time. During this time no preflight
|
||||
* request will be made.
|
||||
*
|
||||
* @param max the maximum time, in seconds, that the preflight response may be cached.
|
||||
* @return {@link NioCorsConfigBuilder} to support method chaining.
|
||||
*/
|
||||
public NioCorsConfigBuilder maxAge(final long max) {
|
||||
maxAge = max;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the allowed set of HTTP Request Methods that should be returned in the
* CORS 'Access-Control-Request-Method' response header.
*
* @param methods the {@link HttpMethod}s that should be allowed.
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public NioCorsConfigBuilder allowedRequestMethods(final HttpMethod... methods) {
requestMethods.addAll(Arrays.asList(methods));
return this;
}

/**
* Specifies the headers that should be returned in the CORS 'Access-Control-Allow-Headers'
* response header.
*
* If a client specifies headers on the request, for example by calling:
* <pre>
* xhr.setRequestHeader('My-Custom-Header', "SomeValue");
* </pre>
* the server will receive the above header name in the 'Access-Control-Request-Headers' of the
* preflight request. The server will then decide if it allows this header to be sent for the
* real request (remember that a preflight is not the real request but a request asking the server
* if it allows the real request).
*
* @param headers the headers to be added to the preflight 'Access-Control-Allow-Headers' response header.
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public NioCorsConfigBuilder allowedRequestHeaders(final String... headers) {
requestHeaders.addAll(Arrays.asList(headers));
return this;
}

/**
* Specifies HTTP response headers that should be added to a CORS preflight response.
*
* An intermediary like a load balancer might require that a CORS preflight request
* have certain headers set. This enables such headers to be added.
*
* @param name the name of the HTTP header.
* @param values the values for the HTTP header.
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public NioCorsConfigBuilder preflightResponseHeader(final CharSequence name, final Object... values) {
if (values.length == 1) {
preflightHeaders.put(name, new ConstantValueGenerator(values[0]));
} else {
preflightResponseHeader(name, Arrays.asList(values));
}
return this;
}

/**
* Specifies HTTP response headers that should be added to a CORS preflight response.
*
* An intermediary like a load balancer might require that a CORS preflight request
* have certain headers set. This enables such headers to be added.
*
* @param name the name of the HTTP header.
* @param value the values for the HTTP header.
* @param <T> the type of values that the Iterable contains.
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public <T> NioCorsConfigBuilder preflightResponseHeader(final CharSequence name, final Iterable<T> value) {
preflightHeaders.put(name, new ConstantValueGenerator(value));
return this;
}

/**
* Specifies HTTP response headers that should be added to a CORS preflight response.
*
* An intermediary like a load balancer might require that a CORS preflight request
* have certain headers set. This enables such headers to be added.
*
* Some values must be dynamically created when the HTTP response is created, for
* example the 'Date' response header. This can be accomplished by using a Callable
* which will have its 'call' method invoked when the HTTP response is created.
*
* @param name the name of the HTTP header.
* @param valueGenerator a Callable which will be invoked at HTTP response creation.
* @param <T> the type of the value that the Callable can return.
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public <T> NioCorsConfigBuilder preflightResponseHeader(final CharSequence name, final Callable<T> valueGenerator) {
preflightHeaders.put(name, valueGenerator);
return this;
}

/**
* Specifies that no preflight response headers should be added to a preflight response.
*
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public NioCorsConfigBuilder noPreflightResponseHeaders() {
noPreflightHeaders = true;
return this;
}

/**
* Specifies that a CORS request should be rejected if it is invalid, before any
* further processing takes place.
*
* CORS headers are set after a request is processed. This may not always be desired,
* and this setting will check that the Origin is valid; if it is not valid, no
* further processing will take place and an error will be returned to the calling client.
*
* @return {@link NioCorsConfigBuilder} to support method chaining.
*/
public NioCorsConfigBuilder shortCircuit() {
shortCircuit = true;
return this;
}

/**
* Builds a {@link NioCorsConfig} with settings specified by previous method calls.
*
* @return {@link NioCorsConfig} the configured CorsConfig instance.
*/
public NioCorsConfig build() {
if (preflightHeaders.isEmpty() && !noPreflightHeaders) {
preflightHeaders.put("date", DateValueGenerator.INSTANCE);
preflightHeaders.put("content-length", new ConstantValueGenerator("0"));
}
return new NioCorsConfig(this);
}

/**
* This class is used for preflight HTTP response values that do not need to be
* generated, but instead the value is "static" in that the same value will be returned
* for each call.
*/
private static final class ConstantValueGenerator implements Callable<Object> {

private final Object value;

/**
* Sole constructor.
*
* @param value the value that will be returned when the call method is invoked.
*/
private ConstantValueGenerator(final Object value) {
if (value == null) {
throw new IllegalArgumentException("value must not be null");
}
this.value = value;
}

@Override
public Object call() {
return value;
}
}

/**
* This callable is used for the DATE preflight HTTP response HTTP header.
* Its value must be generated when the response is generated, and hence will be
* different for every call.
*/
private static final class DateValueGenerator implements Callable<Date> {

static final DateValueGenerator INSTANCE = new DateValueGenerator();

@Override
public Date call() throws Exception {
return new Date();
}
}

}
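A minimal usage sketch of the builder (illustrative, not part of this commit; the origin policy, header names, and request-id generator are assumptions, though forAnyOrigin() does appear in the test changes later in this diff; requires io.netty.handler.codec.http.HttpMethod, java.util.UUID, java.util.concurrent.Callable):

NioCorsConfig corsConfig = NioCorsConfigBuilder.forAnyOrigin()
    .allowedRequestMethods(HttpMethod.GET, HttpMethod.OPTIONS)
    .allowedRequestHeaders("X-Requested-With", "Content-Type")
    // the Callable overload defers value creation until the response is built
    .preflightResponseHeader("x-request-id", (Callable<String>) () -> UUID.randomUUID().toString())
    .shortCircuit()
    .build();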

@ -0,0 +1,235 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.http.nio.cors;

import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import org.elasticsearch.common.Strings;

import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
* Handles <a href="http://www.w3.org/TR/cors/">Cross Origin Resource Sharing</a> (CORS) requests.
* <p>
* This handler can be configured using a {@link NioCorsConfig}; please
* refer to that class for details about the available configuration options.
*
* This code was borrowed from Netty 4 and refactored to work with Elasticsearch's NIO HTTP transport.
*/
public class NioCorsHandler extends ChannelDuplexHandler {

public static final String ANY_ORIGIN = "*";
private static Pattern SCHEME_PATTERN = Pattern.compile("^https?://");

private final NioCorsConfig config;
private HttpRequest request;

/**
* Creates a new instance with the specified {@link NioCorsConfig}.
*/
public NioCorsHandler(final NioCorsConfig config) {
if (config == null) {
throw new NullPointerException();
}
this.config = config;
}

@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (config.isCorsSupportEnabled() && msg instanceof HttpRequest) {
request = (HttpRequest) msg;
if (isPreflightRequest(request)) {
handlePreflight(ctx, request);
return;
}
if (config.isShortCircuit() && !validateOrigin()) {
forbidden(ctx, request);
return;
}
}
ctx.fireChannelRead(msg);
}

public static void setCorsResponseHeaders(HttpRequest request, HttpResponse resp, NioCorsConfig config) {
if (!config.isCorsSupportEnabled()) {
return;
}
String originHeader = request.headers().get(HttpHeaderNames.ORIGIN);
if (!Strings.isNullOrEmpty(originHeader)) {
final String originHeaderVal;
if (config.isAnyOriginSupported()) {
originHeaderVal = ANY_ORIGIN;
} else if (config.isOriginAllowed(originHeader) || isSameOrigin(originHeader, request.headers().get(HttpHeaderNames.HOST))) {
originHeaderVal = originHeader;
} else {
originHeaderVal = null;
}
if (originHeaderVal != null) {
resp.headers().add(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN, originHeaderVal);
}
}
if (config.isCredentialsAllowed()) {
resp.headers().add(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
}

private void handlePreflight(final ChannelHandlerContext ctx, final HttpRequest request) {
final HttpResponse response = new DefaultFullHttpResponse(request.protocolVersion(), HttpResponseStatus.OK, true, true);
if (setOrigin(response)) {
setAllowMethods(response);
setAllowHeaders(response);
setAllowCredentials(response);
setMaxAge(response);
setPreflightHeaders(response);
ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
} else {
forbidden(ctx, request);
}
}

private static void forbidden(final ChannelHandlerContext ctx, final HttpRequest request) {
ctx.writeAndFlush(new DefaultFullHttpResponse(request.protocolVersion(), HttpResponseStatus.FORBIDDEN))
.addListener(ChannelFutureListener.CLOSE);
}

private static boolean isSameOrigin(final String origin, final String host) {
if (Strings.isNullOrEmpty(host) == false) {
// strip protocol from origin
final String originDomain = SCHEME_PATTERN.matcher(origin).replaceFirst("");
if (host.equals(originDomain)) {
return true;
}
}
return false;
}

/**
* This is a feature outside the CORS specification which enables the setting of preflight
* response headers that might be required by intermediaries.
*
* @param response the HttpResponse to which the preflight response headers should be added.
*/
private void setPreflightHeaders(final HttpResponse response) {
response.headers().add(config.preflightResponseHeaders());
}

private boolean setOrigin(final HttpResponse response) {
final String origin = request.headers().get(HttpHeaderNames.ORIGIN);
if (!Strings.isNullOrEmpty(origin)) {
if ("null".equals(origin) && config.isNullOriginAllowed()) {
setAnyOrigin(response);
return true;
}

if (config.isAnyOriginSupported()) {
if (config.isCredentialsAllowed()) {
echoRequestOrigin(response);
setVaryHeader(response);
} else {
setAnyOrigin(response);
}
return true;
}
if (config.isOriginAllowed(origin)) {
setOrigin(response, origin);
setVaryHeader(response);
return true;
}
}
return false;
}

private boolean validateOrigin() {
if (config.isAnyOriginSupported()) {
return true;
}

final String origin = request.headers().get(HttpHeaderNames.ORIGIN);
if (Strings.isNullOrEmpty(origin)) {
// not a CORS request, so there is nothing to validate
return true;
}

if ("null".equals(origin) && config.isNullOriginAllowed()) {
return true;
}

// if the origin is the same as the host of the request, then allow
if (isSameOrigin(origin, request.headers().get(HttpHeaderNames.HOST))) {
return true;
}

return config.isOriginAllowed(origin);
}

private void echoRequestOrigin(final HttpResponse response) {
setOrigin(response, request.headers().get(HttpHeaderNames.ORIGIN));
}

private static void setVaryHeader(final HttpResponse response) {
response.headers().set(HttpHeaderNames.VARY, HttpHeaderNames.ORIGIN);
}

private static void setAnyOrigin(final HttpResponse response) {
setOrigin(response, ANY_ORIGIN);
}

private static void setOrigin(final HttpResponse response, final String origin) {
response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN, origin);
}

private void setAllowCredentials(final HttpResponse response) {
if (config.isCredentialsAllowed()
&& !response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN).equals(ANY_ORIGIN)) {
response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
}

private static boolean isPreflightRequest(final HttpRequest request) {
final HttpHeaders headers = request.headers();
return request.method().equals(HttpMethod.OPTIONS) &&
headers.contains(HttpHeaderNames.ORIGIN) &&
headers.contains(HttpHeaderNames.ACCESS_CONTROL_REQUEST_METHOD);
}

private void setAllowMethods(final HttpResponse response) {
response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS, config.allowedRequestMethods().stream()
.map(m -> m.name().trim())
.collect(Collectors.toList()));
}

private void setAllowHeaders(final HttpResponse response) {
response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS, config.allowedRequestHeaders());
}

private void setMaxAge(final HttpResponse response) {
response.headers().set(HttpHeaderNames.ACCESS_CONTROL_MAX_AGE, config.maxAge());
}

}
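As channelRead above shows, a preflight is recognized purely by shape: an OPTIONS request carrying both an Origin and an Access-Control-Request-Method header is answered by the handler itself and never reaches the rest of the pipeline. A sketch of a request that would take that path (illustrative, not part of this commit; the origin and URI are placeholders):

// satisfies all three conditions in isPreflightRequest(), so the handler
// replies directly with 200 plus the configured Access-Control-Allow-* headers
// instead of calling ctx.fireChannelRead(msg)
DefaultFullHttpRequest preflight = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.OPTIONS, "/");
preflight.headers().set(HttpHeaderNames.ORIGIN, "http://example.com");
preflight.headers().set(HttpHeaderNames.ACCESS_CONTROL_REQUEST_METHOD, "GET");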

@ -39,6 +39,8 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.http.HttpHandlingSettings;
import org.elasticsearch.http.nio.cors.NioCorsConfig;
import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder;
import org.elasticsearch.nio.FlushOperation;
import org.elasticsearch.nio.InboundChannelBuffer;
import org.elasticsearch.nio.NioSocketChannel;

@ -95,7 +97,8 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
SETTING_PIPELINING_MAX_EVENTS.getDefault(settings));
ThreadContext threadContext = new ThreadContext(settings);
nioSocketChannel = mock(NioSocketChannel.class);
handler = new HttpReadWriteHandler(nioSocketChannel, transport, httpHandlingSettings, NamedXContentRegistry.EMPTY, threadContext);
handler = new HttpReadWriteHandler(nioSocketChannel, transport, httpHandlingSettings, NamedXContentRegistry.EMPTY,
NioCorsConfigBuilder.forAnyOrigin().build(), threadContext);
}

public void testSuccessfulDecodeHttpRequest() throws IOException {

@ -0,0 +1,349 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.http.nio;

import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpVersion;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.http.HttpHandlingSettings;
import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.http.nio.cors.NioCorsConfig;
import org.elasticsearch.http.nio.cors.NioCorsHandler;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.nio.NioSocketChannel;
import org.elasticsearch.nio.SocketChannelContext;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
import org.mockito.ArgumentCaptor;

import java.io.IOException;
import java.nio.channels.ClosedChannelException;
import java.nio.charset.StandardCharsets;
import java.util.function.BiConsumer;

import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class NioHttpChannelTests extends ESTestCase {

private ThreadPool threadPool;
private MockBigArrays bigArrays;
private NioSocketChannel nioChannel;
private SocketChannelContext channelContext;

@Before
public void setup() throws Exception {
nioChannel = mock(NioSocketChannel.class);
channelContext = mock(SocketChannelContext.class);
when(nioChannel.getContext()).thenReturn(channelContext);
threadPool = new TestThreadPool("test");
bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
}

@After
public void shutdown() throws Exception {
if (threadPool != null) {
threadPool.shutdownNow();
}
}

public void testResponse() {
final FullHttpResponse response = executeRequest(Settings.EMPTY, "request-host");
assertThat(response.content(), equalTo(ByteBufUtils.toByteBuf(new TestResponse().content())));
}

public void testCorsEnabledWithoutAllowOrigins() {
// set up an HTTP transport with only the CORS enabled setting
Settings settings = Settings.builder()
.put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true)
.build();
HttpResponse response = executeRequest(settings, "remote-host", "request-host");
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), nullValue());
}

public void testCorsEnabledWithAllowOrigins() {
final String originValue = "remote-host";
// create an HTTP transport with CORS enabled and allow origin configured
Settings settings = Settings.builder()
.put(SETTING_CORS_ENABLED.getKey(), true)
.put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
.build();
HttpResponse response = executeRequest(settings, originValue, "request-host");
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
}

public void testCorsAllowOriginWithSameHost() {
String originValue = "remote-host";
String host = "remote-host";
// create an HTTP transport with CORS enabled
Settings settings = Settings.builder()
.put(SETTING_CORS_ENABLED.getKey(), true)
.build();
HttpResponse response = executeRequest(settings, originValue, host);
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));

originValue = "http://" + originValue;
response = executeRequest(settings, originValue, host);
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));

originValue = originValue + ":5555";
host = host + ":5555";
response = executeRequest(settings, originValue, host);
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));

originValue = originValue.replace("http", "https");
response = executeRequest(settings, originValue, host);
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
}

public void testThatStringLiteralWorksOnMatch() {
final String originValue = "remote-host";
Settings settings = Settings.builder()
.put(SETTING_CORS_ENABLED.getKey(), true)
.put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
.put(SETTING_CORS_ALLOW_METHODS.getKey(), "get, options, post")
.put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true)
.build();
HttpResponse response = executeRequest(settings, originValue, "request-host");
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), equalTo("true"));
}

public void testThatAnyOriginWorks() {
final String originValue = NioCorsHandler.ANY_ORIGIN;
Settings settings = Settings.builder()
.put(SETTING_CORS_ENABLED.getKey(), true)
.put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
.build();
HttpResponse response = executeRequest(settings, originValue, "request-host");
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), nullValue());
}

public void testHeadersSet() {
Settings settings = Settings.builder().build();
final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
httpRequest.headers().add(HttpHeaderNames.ORIGIN, "remote");
final NioHttpRequest request = new NioHttpRequest(xContentRegistry(), httpRequest);
HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings);
NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);

// send a response
NioHttpChannel channel = new NioHttpChannel(nioChannel, bigArrays, request, 1, handlingSettings, corsConfig,
threadPool.getThreadContext());
TestResponse resp = new TestResponse();
final String customHeader = "custom-header";
final String customHeaderValue = "xyz";
resp.addHeader(customHeader, customHeaderValue);
channel.sendResponse(resp);

// inspect what was written
ArgumentCaptor<Object> responseCaptor = ArgumentCaptor.forClass(Object.class);
verify(channelContext).sendMessage(responseCaptor.capture(), any());
Object nioResponse = responseCaptor.getValue();
HttpResponse response = ((NioHttpResponse) nioResponse).getResponse();
assertThat(response.headers().get("non-existent-header"), nullValue());
assertThat(response.headers().get(customHeader), equalTo(customHeaderValue));
assertThat(response.headers().get(HttpHeaderNames.CONTENT_LENGTH), equalTo(Integer.toString(resp.content().length())));
assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE), equalTo(resp.contentType()));
}

@SuppressWarnings("unchecked")
public void testReleaseInListener() throws IOException {
final Settings settings = Settings.builder().build();
final NamedXContentRegistry registry = xContentRegistry();
final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
final NioHttpRequest request = new NioHttpRequest(registry, httpRequest);
HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings);
NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);

NioHttpChannel channel = new NioHttpChannel(nioChannel, bigArrays, request, 1, handlingSettings,
corsConfig, threadPool.getThreadContext());
final BytesRestResponse response = new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR,
JsonXContent.contentBuilder().startObject().endObject());
assertThat(response.content(), not(instanceOf(Releasable.class)));

// ensure we have reserved bytes
if (randomBoolean()) {
BytesStreamOutput out = channel.bytesOutput();
assertThat(out, instanceOf(ReleasableBytesStreamOutput.class));
} else {
try (XContentBuilder builder = channel.newBuilder()) {
// do something with the builder
builder.startObject().endObject();
}
}

channel.sendResponse(response);
Class<BiConsumer<Void, Exception>> listenerClass = (Class<BiConsumer<Void, Exception>>) (Class) BiConsumer.class;
ArgumentCaptor<BiConsumer<Void, Exception>> listenerCaptor = ArgumentCaptor.forClass(listenerClass);
verify(channelContext).sendMessage(any(), listenerCaptor.capture());
BiConsumer<Void, Exception> listener = listenerCaptor.getValue();
if (randomBoolean()) {
listener.accept(null, null);
} else {
listener.accept(null, new ClosedChannelException());
}
// ESTestCase#after will invoke ensureAllArraysAreReleased which will fail if the response content was not released
}

@SuppressWarnings("unchecked")
public void testConnectionClose() throws Exception {
final Settings settings = Settings.builder().build();
final FullHttpRequest httpRequest;
final boolean close = randomBoolean();
if (randomBoolean()) {
httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
if (close) {
httpRequest.headers().add(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
}
} else {
httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_0, HttpMethod.GET, "/");
if (!close) {
httpRequest.headers().add(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
}
}
final NioHttpRequest request = new NioHttpRequest(xContentRegistry(), httpRequest);

HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings);
NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);

NioHttpChannel channel = new NioHttpChannel(nioChannel, bigArrays, request, 1, handlingSettings,
corsConfig, threadPool.getThreadContext());
final TestResponse resp = new TestResponse();
channel.sendResponse(resp);
Class<BiConsumer<Void, Exception>> listenerClass = (Class<BiConsumer<Void, Exception>>) (Class) BiConsumer.class;
ArgumentCaptor<BiConsumer<Void, Exception>> listenerCaptor = ArgumentCaptor.forClass(listenerClass);
verify(channelContext).sendMessage(any(), listenerCaptor.capture());
BiConsumer<Void, Exception> listener = listenerCaptor.getValue();
listener.accept(null, null);
if (close) {
verify(nioChannel, times(1)).close();
} else {
verify(nioChannel, times(0)).close();
}
}
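
// Illustrative aside (not part of this commit): the version-dependent branches
// above encode the standard HTTP keep-alive rule, which Netty also exposes as
// HttpUtil.isKeepAlive(request). An equivalent check for the expectation:
//
//     boolean expectClose = HttpVersion.HTTP_1_1.equals(httpRequest.protocolVersion())
//         ? httpRequest.headers().contains(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE, true)
//         : !httpRequest.headers().contains(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE, true);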

private FullHttpResponse executeRequest(final Settings settings, final String host) {
return executeRequest(settings, null, host);
}

private FullHttpResponse executeRequest(final Settings settings, final String originValue, final String host) {
// construct request and send it over the transport layer
final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
if (originValue != null) {
httpRequest.headers().add(HttpHeaderNames.ORIGIN, originValue);
}
httpRequest.headers().add(HttpHeaderNames.HOST, host);
final NioHttpRequest request = new NioHttpRequest(xContentRegistry(), httpRequest);

HttpHandlingSettings httpHandlingSettings = HttpHandlingSettings.fromSettings(settings);
NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);
NioHttpChannel channel = new NioHttpChannel(nioChannel, bigArrays, request, 1, httpHandlingSettings, corsConfig,
threadPool.getThreadContext());
channel.sendResponse(new TestResponse());

// get the response
ArgumentCaptor<Object> responseCaptor = ArgumentCaptor.forClass(Object.class);
verify(channelContext, atLeastOnce()).sendMessage(responseCaptor.capture(), any());
return ((NioHttpResponse) responseCaptor.getValue()).getResponse();
}

private static class TestResponse extends RestResponse {

private final BytesReference reference;

TestResponse() {
reference = ByteBufUtils.toBytesReference(Unpooled.copiedBuffer("content", StandardCharsets.UTF_8));
}

@Override
public String contentType() {
return "text";
}

@Override
public BytesReference content() {
return reference;
}

@Override
public RestStatus status() {
return RestStatus.OK;
}

}
}
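testCorsAllowOriginWithSameHost above pins down the same-origin fallback in NioCorsHandler: the Origin value matches the Host header once an optional http:// or https:// scheme is stripped, and the port, being part of the Host value, must match exactly. A standalone sketch of that comparison (illustrative, mirroring SCHEME_PATTERN in the handler):

Pattern scheme = Pattern.compile("^https?://");
// "https://remote-host:5555" vs Host "remote-host:5555" -> true
// "http://remote-host:5555"  vs Host "remote-host"      -> false (port differs)
boolean sameOrigin = host.equals(scheme.matcher(origin).replaceFirst(""));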

@ -32,6 +32,7 @@ import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;

@ -45,6 +46,7 @@ import org.elasticsearch.http.BindHttpException;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.http.NullDispatcher;
import org.elasticsearch.http.nio.cors.NioCorsConfig;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;

@ -58,9 +60,19 @@ import org.junit.Before;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_MAX_AGE;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
import static org.elasticsearch.rest.RestStatus.OK;
import static org.hamcrest.Matchers.containsString;

@ -94,36 +106,36 @@ public class NioHttpServerTransportTests extends ESTestCase {
bigArrays = null;
}

// public void testCorsConfig() {
// final Set<String> methods = new HashSet<>(Arrays.asList("get", "options", "post"));
// final Set<String> headers = new HashSet<>(Arrays.asList("Content-Type", "Content-Length"));
// final String prefix = randomBoolean() ? " " : ""; // sometimes have a leading whitespace between comma delimited elements
// final Settings settings = Settings.builder()
// .put(SETTING_CORS_ENABLED.getKey(), true)
// .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "*")
// .put(SETTING_CORS_ALLOW_METHODS.getKey(), collectionToDelimitedString(methods, ",", prefix, ""))
// .put(SETTING_CORS_ALLOW_HEADERS.getKey(), collectionToDelimitedString(headers, ",", prefix, ""))
// .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true)
// .build();
// final Netty4CorsConfig corsConfig = Netty4HttpServerTransport.buildCorsConfig(settings);
// assertTrue(corsConfig.isAnyOriginSupported());
// assertEquals(headers, corsConfig.allowedRequestHeaders());
// assertEquals(methods, corsConfig.allowedRequestMethods().stream().map(HttpMethod::name).collect(Collectors.toSet()));
// }
public void testCorsConfig() {
final Set<String> methods = new HashSet<>(Arrays.asList("get", "options", "post"));
final Set<String> headers = new HashSet<>(Arrays.asList("Content-Type", "Content-Length"));
final String prefix = randomBoolean() ? " " : ""; // sometimes have a leading whitespace between comma delimited elements
final Settings settings = Settings.builder()
.put(SETTING_CORS_ENABLED.getKey(), true)
.put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "*")
.put(SETTING_CORS_ALLOW_METHODS.getKey(), Strings.collectionToDelimitedString(methods, ",", prefix, ""))
.put(SETTING_CORS_ALLOW_HEADERS.getKey(), Strings.collectionToDelimitedString(headers, ",", prefix, ""))
.put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true)
.build();
final NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);
assertTrue(corsConfig.isAnyOriginSupported());
assertEquals(headers, corsConfig.allowedRequestHeaders());
assertEquals(methods, corsConfig.allowedRequestMethods().stream().map(HttpMethod::name).collect(Collectors.toSet()));
}
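
// Illustrative aside (not part of this commit): the randomized prefix works
// because Strings.collectionToDelimitedString(coll, ",", prefix, suffix)
// prepends the prefix to every element, so ("get", "post") with a " " prefix
// renders as " get, post", and the parsing side trims the whitespace away:
//
//     Strings.collectionToDelimitedString(Arrays.asList("get", "post"), ",", " ", ""); // -> " get, post"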

// public void testCorsConfigWithDefaults() {
// final Set<String> methods = Strings.commaDelimitedListToSet(SETTING_CORS_ALLOW_METHODS.getDefault(Settings.EMPTY));
// final Set<String> headers = Strings.commaDelimitedListToSet(SETTING_CORS_ALLOW_HEADERS.getDefault(Settings.EMPTY));
// final long maxAge = SETTING_CORS_MAX_AGE.getDefault(Settings.EMPTY);
// final Settings settings = Settings.builder().put(SETTING_CORS_ENABLED.getKey(), true).build();
// final Netty4CorsConfig corsConfig = Netty4HttpServerTransport.buildCorsConfig(settings);
// assertFalse(corsConfig.isAnyOriginSupported());
// assertEquals(Collections.emptySet(), corsConfig.origins().get());
// assertEquals(headers, corsConfig.allowedRequestHeaders());
// assertEquals(methods, corsConfig.allowedRequestMethods().stream().map(HttpMethod::name).collect(Collectors.toSet()));
// assertEquals(maxAge, corsConfig.maxAge());
// assertFalse(corsConfig.isCredentialsAllowed());
// }
public void testCorsConfigWithDefaults() {
final Set<String> methods = Strings.commaDelimitedListToSet(SETTING_CORS_ALLOW_METHODS.getDefault(Settings.EMPTY));
final Set<String> headers = Strings.commaDelimitedListToSet(SETTING_CORS_ALLOW_HEADERS.getDefault(Settings.EMPTY));
final long maxAge = SETTING_CORS_MAX_AGE.getDefault(Settings.EMPTY);
final Settings settings = Settings.builder().put(SETTING_CORS_ENABLED.getKey(), true).build();
final NioCorsConfig corsConfig = NioHttpServerTransport.buildCorsConfig(settings);
assertFalse(corsConfig.isAnyOriginSupported());
assertEquals(Collections.emptySet(), corsConfig.origins().get());
assertEquals(headers, corsConfig.allowedRequestHeaders());
assertEquals(methods, corsConfig.allowedRequestMethods().stream().map(HttpMethod::name).collect(Collectors.toSet()));
assertEquals(maxAge, corsConfig.maxAge());
assertFalse(corsConfig.isCredentialsAllowed());
}

/**
* Test that {@link NioHttpServerTransport} supports the "Expect: 100-continue" HTTP header

@ -30,6 +30,10 @@ import java.nio.charset.StandardCharsets;
* Basic test that indexed documents survive the rolling restart. See
* {@link RecoveryIT} for much more in-depth testing of the mechanism
* by which they survive.
* <p>
* This test is an almost exact copy of <code>IndexingIT</code> in the
* xpack rolling restart tests. We should work on a way to remove this
* duplication but for now we have no real way to share code.
*/
public class IndexingIT extends AbstractRollingTestCase {
public void testIndexing() throws IOException {

@ -206,6 +206,10 @@ import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.persistent.CompletionPersistentTaskAction;
import org.elasticsearch.persistent.RemovePersistentTaskAction;
import org.elasticsearch.persistent.StartPersistentTaskAction;
import org.elasticsearch.persistent.UpdatePersistentTaskStatusAction;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.ActionPlugin.ActionHandler;
import org.elasticsearch.rest.RestController;

@ -241,7 +245,6 @@ import org.elasticsearch.rest.action.admin.cluster.RestRemoteClusterInfoAction;
import org.elasticsearch.rest.action.admin.cluster.RestRestoreSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.RestSnapshotsStatusAction;
import org.elasticsearch.rest.action.admin.cluster.RestVerifyRepositoryAction;
import org.elasticsearch.rest.action.admin.indices.RestResizeHandler;
import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction;
import org.elasticsearch.rest.action.admin.indices.RestClearIndicesCacheAction;
import org.elasticsearch.rest.action.admin.indices.RestCloseIndexAction;

@ -252,7 +255,6 @@ import org.elasticsearch.rest.action.admin.indices.RestFlushAction;
import org.elasticsearch.rest.action.admin.indices.RestForceMergeAction;
import org.elasticsearch.rest.action.admin.indices.RestGetAliasesAction;
import org.elasticsearch.rest.action.admin.indices.RestGetAllAliasesAction;
import org.elasticsearch.rest.action.admin.indices.RestGetAllMappingsAction;
import org.elasticsearch.rest.action.admin.indices.RestGetFieldMappingAction;
import org.elasticsearch.rest.action.admin.indices.RestGetIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.RestGetIndicesAction;

@ -269,6 +271,7 @@ import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.RestPutMappingAction;
import org.elasticsearch.rest.action.admin.indices.RestRecoveryAction;
import org.elasticsearch.rest.action.admin.indices.RestRefreshAction;
import org.elasticsearch.rest.action.admin.indices.RestResizeHandler;
import org.elasticsearch.rest.action.admin.indices.RestRolloverIndexAction;
import org.elasticsearch.rest.action.admin.indices.RestSyncedFlushAction;
import org.elasticsearch.rest.action.admin.indices.RestUpdateSettingsAction;

@ -313,10 +316,6 @@ import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.rest.action.search.RestSearchScrollAction;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.usage.UsageService;
import org.elasticsearch.persistent.CompletionPersistentTaskAction;
import org.elasticsearch.persistent.RemovePersistentTaskAction;
import org.elasticsearch.persistent.StartPersistentTaskAction;
import org.elasticsearch.persistent.UpdatePersistentTaskStatusAction;

import java.util.ArrayList;
import java.util.Collections;

@ -556,7 +555,6 @@ public class ActionModule extends AbstractModule {
registerHandler.accept(new RestSnapshotsStatusAction(settings, restController));

registerHandler.accept(new RestGetAllAliasesAction(settings, restController));
registerHandler.accept(new RestGetAllMappingsAction(settings, restController));
registerHandler.accept(new RestGetIndicesAction(settings, restController, indexScopedSettings, settingsFilter));
registerHandler.accept(new RestIndicesStatsAction(settings, restController));
registerHandler.accept(new RestIndicesSegmentsAction(settings, restController));

@ -19,7 +19,6 @@

package org.elasticsearch.cluster.metadata;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;

@ -32,8 +31,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;

@ -57,6 +54,7 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.UnaryOperator;

@ -74,7 +72,7 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster

public final Client client;

private final AtomicInteger updatesInProgress = new AtomicInteger();
final AtomicInteger upgradesInProgress = new AtomicInteger();

private ImmutableOpenMap<String, IndexTemplateMetaData> lastTemplateMetaData;

@ -103,8 +101,8 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
return;
}

if (updatesInProgress.get() > 0) {
// we are already running some updates - skip this cluster state update
if (upgradesInProgress.get() > 0) {
// we are already running some upgrades - skip this cluster state update
return;
}

@ -124,7 +122,7 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
lastTemplateMetaData = templates;
Optional<Tuple<Map<String, BytesReference>, Set<String>>> changes = calculateTemplateChanges(templates);
if (changes.isPresent()) {
if (updatesInProgress.compareAndSet(0, changes.get().v1().size() + changes.get().v2().size())) {
if (upgradesInProgress.compareAndSet(0, changes.get().v1().size() + changes.get().v2().size() + 1)) {
logger.info("Starting template upgrade to version {}, {} templates will be updated and {} will be removed",
Version.CURRENT,
changes.get().v1().size(),

@ -133,13 +131,14 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
final ThreadContext threadContext = threadPool.getThreadContext();
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
threadContext.markAsSystemContext();
threadPool.generic().execute(() -> updateTemplates(changes.get().v1(), changes.get().v2()));
threadPool.generic().execute(() -> upgradeTemplates(changes.get().v1(), changes.get().v2()));
}
}
}
}

void updateTemplates(Map<String, BytesReference> changes, Set<String> deletions) {
void upgradeTemplates(Map<String, BytesReference> changes, Set<String> deletions) {
final AtomicBoolean anyUpgradeFailed = new AtomicBoolean(false);
if (threadPool.getThreadContext().isSystemContext() == false) {
throw new IllegalStateException("template updates from the template upgrade service should always happen in a system context");
}

@ -151,20 +150,18 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
client.admin().indices().putTemplate(request, new ActionListener<PutIndexTemplateResponse>() {
@Override
public void onResponse(PutIndexTemplateResponse response) {
if (updatesInProgress.decrementAndGet() == 0) {
logger.info("Finished upgrading templates to version {}", Version.CURRENT);
}
if (response.isAcknowledged() == false) {
anyUpgradeFailed.set(true);
logger.warn("Error updating template [{}], request was not acknowledged", change.getKey());
}
tryFinishUpgrade(anyUpgradeFailed);
}

@Override
public void onFailure(Exception e) {
if (updatesInProgress.decrementAndGet() == 0) {
logger.info("Templates were upgraded to version {}", Version.CURRENT);
}
anyUpgradeFailed.set(true);
logger.warn(new ParameterizedMessage("Error updating template [{}]", change.getKey()), e);
tryFinishUpgrade(anyUpgradeFailed);
}
});
}

@ -175,27 +172,51 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
client.admin().indices().deleteTemplate(request, new ActionListener<DeleteIndexTemplateResponse>() {
@Override
public void onResponse(DeleteIndexTemplateResponse response) {
updatesInProgress.decrementAndGet();
if (response.isAcknowledged() == false) {
anyUpgradeFailed.set(true);
logger.warn("Error deleting template [{}], request was not acknowledged", template);
}
tryFinishUpgrade(anyUpgradeFailed);
}

@Override
public void onFailure(Exception e) {
updatesInProgress.decrementAndGet();
anyUpgradeFailed.set(true);
if (e instanceof IndexTemplateMissingException == false) {
// we might attempt to delete the same template from different nodes, so it's ok if the template doesn't exist;
// otherwise we need to warn
logger.warn(new ParameterizedMessage("Error deleting template [{}]", template), e);
}
tryFinishUpgrade(anyUpgradeFailed);
}
});
}
}

int getUpdatesInProgress() {
return updatesInProgress.get();
void tryFinishUpgrade(AtomicBoolean anyUpgradeFailed) {
assert upgradesInProgress.get() > 0;
if (upgradesInProgress.decrementAndGet() == 1) {
try {
// this is the last upgrade, the templates should now be in the desired state
if (anyUpgradeFailed.get()) {
logger.info("Templates were partially upgraded to version {}", Version.CURRENT);
} else {
logger.info("Templates were upgraded successfully to version {}", Version.CURRENT);
}
// Check the upgraders are satisfied after the update completed. If they still
// report that changes are required, this might indicate a bug or that something
// else is tinkering with the templates during the upgrade.
final ImmutableOpenMap<String, IndexTemplateMetaData> upgradedTemplates =
clusterService.state().getMetaData().getTemplates();
final boolean changesRequired = calculateTemplateChanges(upgradedTemplates).isPresent();
if (changesRequired) {
logger.warn("Templates are still reported as out of date after the upgrade. The template upgrade will be retried.");
}
} finally {
final int noMoreUpgrades = upgradesInProgress.decrementAndGet();
assert noMoreUpgrades == 0;
}
}
}
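
// Illustrative aside (not part of this commit): seeding upgradesInProgress with
// size() + 1 and testing decrementAndGet() == 1 above is a countdown pattern
// with one extra "completion" slot: exactly one callback observes the count at
// 1, runs the completion logic, and the finally block releases the last slot so
// the counter returns to 0 and a later cluster-state update can start a retry.
// A stripped-down sketch of the same idea:
//
//     AtomicInteger pending = new AtomicInteger(tasks + 1);
//     Runnable onTaskDone = () -> {
//         if (pending.decrementAndGet() == 1) {
//             try { /* all tasks done: log, verify */ }
//             finally { pending.decrementAndGet(); /* back to 0 */ }
//         }
//     };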

Optional<Tuple<Map<String, BytesReference>, Set<String>>> calculateTemplateChanges(

@ -19,6 +19,19 @@

package org.elasticsearch.http;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;

import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION_LEVEL;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_RESET_COOKIES;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS;

public class HttpHandlingSettings {

private final int maxContentLength;

@ -30,6 +43,7 @@ public class HttpHandlingSettings {
private final int compressionLevel;
private final boolean detailedErrorsEnabled;
private final int pipeliningMaxEvents;
private boolean corsEnabled;

public HttpHandlingSettings(int maxContentLength, int maxChunkSize, int maxHeaderSize, int maxInitialLineLength,
boolean resetCookies, boolean compression, int compressionLevel, boolean detailedErrorsEnabled,

@ -45,6 +59,18 @@ public class HttpHandlingSettings {
this.pipeliningMaxEvents = pipeliningMaxEvents;
}

public static HttpHandlingSettings fromSettings(Settings settings) {
return new HttpHandlingSettings(Math.toIntExact(SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings).getBytes()),
Math.toIntExact(SETTING_HTTP_MAX_CHUNK_SIZE.get(settings).getBytes()),
Math.toIntExact(SETTING_HTTP_MAX_HEADER_SIZE.get(settings).getBytes()),
Math.toIntExact(SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.get(settings).getBytes()),
SETTING_HTTP_RESET_COOKIES.get(settings),
SETTING_HTTP_COMPRESSION.get(settings),
SETTING_HTTP_COMPRESSION_LEVEL.get(settings),
SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings),
SETTING_PIPELINING_MAX_EVENTS.get(settings));
}

public int getMaxContentLength() {
return maxContentLength;
}

@ -80,4 +106,8 @@ public class HttpHandlingSettings {
public int getPipeliningMaxEvents() {
return pipeliningMaxEvents;
}

public boolean isCorsEnabled() {
return corsEnabled;
}
}
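fromSettings above centralizes the ByteSizeValue-to-int conversions; Math.toIntExact throws instead of silently truncating if a byte-size setting overflows an int. A minimal usage sketch (the 2mb value is illustrative; http.max_content_length is the key behind SETTING_HTTP_MAX_CONTENT_LENGTH):

Settings settings = Settings.builder()
    .put("http.max_content_length", "2mb") // parsed as a ByteSizeValue
    .build();
HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings);
assert handlingSettings.getMaxContentLength() == 2 * 1024 * 1024;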

@ -22,7 +22,6 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
@@ -70,14 +69,16 @@ public final class AnalysisRegistry implements Closeable {
                             Map<String, AnalysisProvider<AnalyzerProvider<?>>> normalizers,
                             Map<String, PreConfiguredCharFilter> preConfiguredCharFilters,
                             Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters,
-                            Map<String, PreConfiguredTokenizer> preConfiguredTokenizers) {
+                            Map<String, PreConfiguredTokenizer> preConfiguredTokenizers,
+                            Map<String, PreBuiltAnalyzerProviderFactory> preConfiguredAnalyzers) {
         this.environment = environment;
         this.charFilters = unmodifiableMap(charFilters);
         this.tokenFilters = unmodifiableMap(tokenFilters);
         this.tokenizers = unmodifiableMap(tokenizers);
         this.analyzers = unmodifiableMap(analyzers);
         this.normalizers = unmodifiableMap(normalizers);
-        prebuiltAnalysis = new PrebuiltAnalysis(preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers);
+        prebuiltAnalysis =
+            new PrebuiltAnalysis(preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers, preConfiguredAnalyzers);
     }

     /**
@@ -398,13 +399,15 @@ public final class AnalysisRegistry implements Closeable {
         private PrebuiltAnalysis(
                 Map<String, PreConfiguredCharFilter> preConfiguredCharFilters,
                 Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters,
-                Map<String, PreConfiguredTokenizer> preConfiguredTokenizers) {
-            Map<String, PreBuiltAnalyzerProviderFactory> analyzerProviderFactories = new HashMap<>();
+                Map<String, PreConfiguredTokenizer> preConfiguredTokenizers,
+                Map<String, PreBuiltAnalyzerProviderFactory> preConfiguredAnalyzers) {
+
+            // Analyzers
+            Map<String, PreBuiltAnalyzerProviderFactory> analyzerProviderFactories = new HashMap<>();
+            analyzerProviderFactories.putAll(preConfiguredAnalyzers);
             // Pre-build analyzers
             for (PreBuiltAnalyzers preBuiltAnalyzerEnum : PreBuiltAnalyzers.values()) {
                 String name = preBuiltAnalyzerEnum.name().toLowerCase(Locale.ROOT);
-                analyzerProviderFactories.put(name, new PreBuiltAnalyzerProviderFactory(name, AnalyzerScope.INDICES, preBuiltAnalyzerEnum.getAnalyzer(Version.CURRENT)));
+                analyzerProviderFactories.put(name, new PreBuiltAnalyzerProviderFactory(name, preBuiltAnalyzerEnum));
             }

             this.analyzerProviderFactories = Collections.unmodifiableMap(analyzerProviderFactories);
@@ -429,17 +432,10 @@ public final class AnalysisRegistry implements Closeable {
             return analyzerProviderFactories.get(name);
         }

-        Analyzer analyzer(String name) {
-            PreBuiltAnalyzerProviderFactory analyzerProviderFactory = (PreBuiltAnalyzerProviderFactory) analyzerProviderFactories.get(name);
-            if (analyzerProviderFactory == null) {
-                return null;
-            }
-            return analyzerProviderFactory.analyzer();
-        }
-
         @Override
         public void close() throws IOException {
-            IOUtils.close(analyzerProviderFactories.values().stream().map((a) -> ((PreBuiltAnalyzerProviderFactory)a).analyzer()).collect(Collectors.toList()));
+            IOUtils.close(analyzerProviderFactories.values().stream()
+                .map((a) -> ((PreBuiltAnalyzerProviderFactory)a)).collect(Collectors.toList()));
         }
     }
 }
PreBuiltAnalyzerProviderFactory.java
@@ -22,41 +22,101 @@ package org.elasticsearch.index.analysis;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.indices.analysis.AnalysisModule;
 import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
+import org.elasticsearch.indices.analysis.PreBuiltCacheFactory;

+import java.io.Closeable;
 import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.function.Function;
+import java.util.stream.Collectors;

-public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> {
+public class PreBuiltAnalyzerProviderFactory extends PreConfiguredAnalysisComponent<AnalyzerProvider<?>> implements Closeable {

-    private final PreBuiltAnalyzerProvider analyzerProvider;
+    private final Function<Version, Analyzer> create;
+    private final PreBuiltAnalyzerProvider current;

-    public PreBuiltAnalyzerProviderFactory(String name, AnalyzerScope scope, Analyzer analyzer) {
-        analyzerProvider = new PreBuiltAnalyzerProvider(name, scope, analyzer);
+    /**
+     * This constructor only exists to expose analyzers defined in {@link PreBuiltAnalyzers} as {@link PreBuiltAnalyzerProviderFactory}.
+     */
+    PreBuiltAnalyzerProviderFactory(String name, PreBuiltAnalyzers preBuiltAnalyzer) {
+        super(name, new PreBuiltAnalyzersDelegateCache(name, preBuiltAnalyzer));
+        this.create = preBuiltAnalyzer::getAnalyzer;
+        current = new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDICES, preBuiltAnalyzer.getAnalyzer(Version.CURRENT));
     }

-    public AnalyzerProvider<?> create(String name, Settings settings) {
-        Version indexVersion = Version.indexCreated(settings);
-        if (!Version.CURRENT.equals(indexVersion)) {
-            PreBuiltAnalyzers preBuiltAnalyzers = PreBuiltAnalyzers.getOrDefault(name, null);
-            if (preBuiltAnalyzers != null) {
-                Analyzer analyzer = preBuiltAnalyzers.getAnalyzer(indexVersion);
-                return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDICES, analyzer);
-            }
-        }
-
-        return analyzerProvider;
+    public PreBuiltAnalyzerProviderFactory(String name, PreBuiltCacheFactory.CachingStrategy cache, Function<Version, Analyzer> create) {
+        super(name, cache);
+        this.create = create;
+        this.current = new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDICES, create.apply(Version.CURRENT));
     }

     @Override
-    public AnalyzerProvider<?> get(IndexSettings indexSettings, Environment environment, String name, Settings settings)
-            throws IOException {
-        return create(name, settings);
+    public AnalyzerProvider<?> get(IndexSettings indexSettings,
+                                   Environment environment,
+                                   String name,
+                                   Settings settings) throws IOException {
+        Version versionCreated = Version.indexCreated(settings);
+        if (Version.CURRENT.equals(versionCreated) == false) {
+            return super.get(indexSettings, environment, name, settings);
+        } else {
+            return current;
+        }
     }

-    public Analyzer analyzer() {
-        return analyzerProvider.get();
+    @Override
+    protected AnalyzerProvider<?> create(Version version) {
+        assert Version.CURRENT.equals(version) == false;
+        return new PreBuiltAnalyzerProvider(getName(), AnalyzerScope.INDICES, create.apply(version));
+    }
+
+    @Override
+    public void close() throws IOException {
+        List<Closeable> closeables = cache.values().stream()
+            .map(AnalyzerProvider::get)
+            .collect(Collectors.toList());
+        closeables.add(current.get());
+        IOUtils.close(closeables);
     }
+
+    /**
+     * A special cache that closes the gap between PreBuiltAnalyzers and PreBuiltAnalyzerProviderFactory.
+     *
+     * This can be removed when all analyzers have been moved away from PreBuiltAnalyzers to
+     * PreBuiltAnalyzerProviderFactory either in server or analysis-common.
+     */
+    static class PreBuiltAnalyzersDelegateCache implements PreBuiltCacheFactory.PreBuiltCache<AnalyzerProvider<?>> {
+
+        private final String name;
+        private final PreBuiltAnalyzers preBuiltAnalyzer;
+
+        private PreBuiltAnalyzersDelegateCache(String name, PreBuiltAnalyzers preBuiltAnalyzer) {
+            this.name = name;
+            this.preBuiltAnalyzer = preBuiltAnalyzer;
+        }
+
+        @Override
+        public AnalyzerProvider<?> get(Version version) {
+            return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDICES, preBuiltAnalyzer.getAnalyzer(version));
+        }
+
+        @Override
+        public void put(Version version, AnalyzerProvider<?> analyzerProvider) {
+            // No need to put, because we delegate in get() directly to PreBuiltAnalyzers which already caches.
+        }
+
+        @Override
+        public Collection<AnalyzerProvider<?>> values() {
+            return preBuiltAnalyzer.getCache().values().stream()
+                // Wrap the analyzer instance in a PreBuiltAnalyzerProvider, this is what PreBuiltAnalyzerProviderFactory#close expects
+                // (other caches are not directly caching analyzers, but analyzer provider instead)
+                .map(analyzer -> new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDICES, analyzer))
+                .collect(Collectors.toList());
+        }
+
+    }
 }
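Note: the refactored factory above has two constructors. The package-private one exists only to bridge the legacy PreBuiltAnalyzers enum; the public one is the path new code is expected to take. A minimal sketch (not part of the commit) of constructing the factory directly, with Lucene's KeywordAnalyzer standing in for any real analyzer and "demo" as a made-up name:

    import java.util.function.Function;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.elasticsearch.Version;
    import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
    import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;

    class PreBuiltFactoryDemo {
        static PreBuiltAnalyzerProviderFactory demoFactory() {
            // The function is invoked per distinct index-created version; the LUCENE
            // strategy then caches one analyzer instance per Lucene version.
            Function<Version, Analyzer> create = version -> new KeywordAnalyzer();
            return new PreBuiltAnalyzerProviderFactory("demo", CachingStrategy.LUCENE, create);
        }
    }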
PreConfiguredAnalysisComponent.java
@@ -33,13 +33,18 @@ import java.io.IOException;
  */
 public abstract class PreConfiguredAnalysisComponent<T> implements AnalysisModule.AnalysisProvider<T> {
     private final String name;
-    private final PreBuiltCacheFactory.PreBuiltCache<T> cache;
+    protected final PreBuiltCacheFactory.PreBuiltCache<T> cache;

     protected PreConfiguredAnalysisComponent(String name, PreBuiltCacheFactory.CachingStrategy cache) {
         this.name = name;
         this.cache = PreBuiltCacheFactory.getCache(cache);
     }

+    protected PreConfiguredAnalysisComponent(String name, PreBuiltCacheFactory.PreBuiltCache<T> cache) {
+        this.name = name;
+        this.cache = cache;
+    }
+
     @Override
     public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
         Version versionCreated = Version.indexCreated(settings);
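Note: the second protected constructor added here is what lets a subclass supply its own cache implementation (the delegate cache in PreBuiltAnalyzerProviderFactory above is the in-tree user). A hypothetical subclass, assuming create(Version) is the hook the component calls on a cache miss, as the factory's override earlier in this diff suggests:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.elasticsearch.Version;
    import org.elasticsearch.index.analysis.PreConfiguredAnalysisComponent;
    import org.elasticsearch.indices.analysis.PreBuiltCacheFactory;

    class FixedAnalyzerComponent extends PreConfiguredAnalysisComponent<Analyzer> {
        private final Analyzer analyzer = new KeywordAnalyzer();

        FixedAnalyzerComponent(String name, PreBuiltCacheFactory.PreBuiltCache<Analyzer> cache) {
            super(name, cache); // previously only a CachingStrategy could be passed up
        }

        @Override
        protected Analyzer create(Version version) {
            return analyzer; // version-independent: the same instance for every index version
        }
    }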
EngineConfig.java
@@ -29,8 +29,8 @@ import org.apache.lucene.search.similarities.Similarity;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
-import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.MemorySizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.codec.CodecService;
@@ -139,10 +139,20 @@ public final class EngineConfig {
         this.codecService = codecService;
         this.eventListener = eventListener;
         codecName = indexSettings.getValue(INDEX_CODEC_SETTING);
-        // We give IndexWriter a "huge" (256 MB) buffer, so it won't flush on its own unless the ES indexing buffer is also huge and/or
-        // there are not too many shards allocated to this node.  Instead, IndexingMemoryController periodically checks
-        // and refreshes the most heap-consuming shards when total indexing heap usage across all shards is too high:
-        indexingBufferSize = new ByteSizeValue(256, ByteSizeUnit.MB);
+        // We need to make the indexing buffer for this shard at least as large
+        // as the amount of memory that is available for all engines on the
+        // local node so that decisions to flush segments to disk are made by
+        // IndexingMemoryController rather than Lucene.
+        // Add an escape hatch in case this change proves problematic - it used
+        // to be a fixed amound of RAM: 256 MB.
+        // TODO: Remove this escape hatch in 8.x
+        final String escapeHatchProperty = "es.index.memory.max_index_buffer_size";
+        String maxBufferSize = System.getProperty(escapeHatchProperty);
+        if (maxBufferSize != null) {
+            indexingBufferSize = MemorySizeValue.parseBytesSizeValueOrHeapRatio(maxBufferSize, escapeHatchProperty);
+        } else {
+            indexingBufferSize = IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING.get(indexSettings.getNodeSettings());
+        }
         this.queryCache = queryCache;
         this.queryCachingPolicy = queryCachingPolicy;
         this.translogConfig = translogConfig;
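Note: the escape hatch property accepts either an absolute size or a percentage of the heap; both forms go through the helper used in the hunk above. A small sketch (not part of the commit) showing how the two forms parse:

    import org.elasticsearch.common.unit.ByteSizeValue;
    import org.elasticsearch.common.unit.MemorySizeValue;

    class EscapeHatchDemo {
        public static void main(String[] args) {
            // An absolute value, equivalent to the old fixed buffer:
            ByteSizeValue absolute = MemorySizeValue.parseBytesSizeValueOrHeapRatio(
                "256mb", "es.index.memory.max_index_buffer_size");
            // A ratio, resolved against the maximum heap:
            ByteSizeValue ratio = MemorySizeValue.parseBytesSizeValueOrHeapRatio(
                "10%", "es.index.memory.max_index_buffer_size");
            System.out.println(absolute + " / " + ratio);
        }
    }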
AnalysisModule.java
@@ -43,7 +43,6 @@ import org.elasticsearch.index.analysis.CzechAnalyzerProvider;
 import org.elasticsearch.index.analysis.DanishAnalyzerProvider;
 import org.elasticsearch.index.analysis.DutchAnalyzerProvider;
 import org.elasticsearch.index.analysis.EnglishAnalyzerProvider;
-import org.elasticsearch.index.analysis.FingerprintAnalyzerProvider;
 import org.elasticsearch.index.analysis.FinnishAnalyzerProvider;
 import org.elasticsearch.index.analysis.FrenchAnalyzerProvider;
 import org.elasticsearch.index.analysis.GalicianAnalyzerProvider;
@@ -59,9 +58,9 @@ import org.elasticsearch.index.analysis.KeywordAnalyzerProvider;
 import org.elasticsearch.index.analysis.LatvianAnalyzerProvider;
 import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider;
 import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider;
-import org.elasticsearch.index.analysis.PatternAnalyzerProvider;
 import org.elasticsearch.index.analysis.PersianAnalyzerProvider;
 import org.elasticsearch.index.analysis.PortugueseAnalyzerProvider;
+import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
 import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
 import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
 import org.elasticsearch.index.analysis.PreConfiguredTokenizer;
@@ -73,7 +72,6 @@ import org.elasticsearch.index.analysis.SnowballAnalyzerProvider;
 import org.elasticsearch.index.analysis.SoraniAnalyzerProvider;
 import org.elasticsearch.index.analysis.SpanishAnalyzerProvider;
 import org.elasticsearch.index.analysis.StandardAnalyzerProvider;
-import org.elasticsearch.index.analysis.StandardHtmlStripAnalyzerProvider;
 import org.elasticsearch.index.analysis.StandardTokenFilterFactory;
 import org.elasticsearch.index.analysis.StandardTokenizerFactory;
 import org.elasticsearch.index.analysis.StopAnalyzerProvider;
@@ -122,11 +120,12 @@ public final class AnalysisModule {
         Map<String, PreConfiguredCharFilter> preConfiguredCharFilters = setupPreConfiguredCharFilters(plugins);
         Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters = setupPreConfiguredTokenFilters(plugins);
         Map<String, PreConfiguredTokenizer> preConfiguredTokenizers = setupPreConfiguredTokenizers(plugins);
+        Map<String, PreBuiltAnalyzerProviderFactory> preConfiguredAnalyzers = setupPreBuiltAnalyzerProviderFactories(plugins);

         analysisRegistry = new AnalysisRegistry(environment,
                 charFilters.getRegistry(), tokenFilters.getRegistry(), tokenizers.getRegistry(),
                 analyzers.getRegistry(), normalizers.getRegistry(),
-                preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers);
+                preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers, preConfiguredAnalyzers);
     }

     HunspellService getHunspellService() {
@@ -162,6 +161,16 @@ public final class AnalysisModule {
         return tokenFilters;
     }

+    static Map<String, PreBuiltAnalyzerProviderFactory> setupPreBuiltAnalyzerProviderFactories(List<AnalysisPlugin> plugins) {
+        NamedRegistry<PreBuiltAnalyzerProviderFactory> preConfiguredCharFilters = new NamedRegistry<>("pre-built analyzer");
+        for (AnalysisPlugin plugin : plugins) {
+            for (PreBuiltAnalyzerProviderFactory factory : plugin.getPreBuiltAnalyzerProviderFactories()) {
+                preConfiguredCharFilters.register(factory.getName(), factory);
+            }
+        }
+        return unmodifiableMap(preConfiguredCharFilters.getRegistry());
+    }
+
     static Map<String, PreConfiguredCharFilter> setupPreConfiguredCharFilters(List<AnalysisPlugin> plugins) {
         NamedRegistry<PreConfiguredCharFilter> preConfiguredCharFilters = new NamedRegistry<>("pre-configured char_filter");
@@ -232,12 +241,10 @@ public final class AnalysisModule {
         NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> analyzers = new NamedRegistry<>("analyzer");
         analyzers.register("default", StandardAnalyzerProvider::new);
         analyzers.register("standard", StandardAnalyzerProvider::new);
-        analyzers.register("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
         analyzers.register("simple", SimpleAnalyzerProvider::new);
         analyzers.register("stop", StopAnalyzerProvider::new);
         analyzers.register("whitespace", WhitespaceAnalyzerProvider::new);
         analyzers.register("keyword", KeywordAnalyzerProvider::new);
-        analyzers.register("pattern", PatternAnalyzerProvider::new);
         analyzers.register("snowball", SnowballAnalyzerProvider::new);
         analyzers.register("arabic", ArabicAnalyzerProvider::new);
         analyzers.register("armenian", ArmenianAnalyzerProvider::new);
@@ -274,7 +281,6 @@ public final class AnalysisModule {
         analyzers.register("swedish", SwedishAnalyzerProvider::new);
         analyzers.register("turkish", TurkishAnalyzerProvider::new);
         analyzers.register("thai", ThaiAnalyzerProvider::new);
-        analyzers.register("fingerprint", FingerprintAnalyzerProvider::new);
         analyzers.extractAndRegister(plugins, AnalysisPlugin::getAnalyzers);
         return analyzers;
     }
PreBuiltAnalyzers.java
@@ -61,10 +61,7 @@ import org.apache.lucene.analysis.sv.SwedishAnalyzer;
 import org.apache.lucene.analysis.th.ThaiAnalyzer;
 import org.apache.lucene.analysis.tr.TurkishAnalyzer;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.regex.Regex;
-import org.elasticsearch.index.analysis.PatternAnalyzer;
 import org.elasticsearch.index.analysis.SnowballAnalyzer;
-import org.elasticsearch.index.analysis.StandardHtmlStripAnalyzer;
 import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;

 import java.util.Locale;
@@ -141,22 +138,6 @@ public enum PreBuiltAnalyzers {
         }
     },

-    PATTERN(CachingStrategy.ELASTICSEARCH) {
-        @Override
-        protected Analyzer create(Version version) {
-            return new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET);
-        }
-    },
-
-    STANDARD_HTML_STRIP(CachingStrategy.ELASTICSEARCH) {
-        @Override
-        protected Analyzer create(Version version) {
-            final Analyzer analyzer = new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET);
-            analyzer.setVersion(version.luceneVersion);
-            return analyzer;
-        }
-    },
-
     ARABIC {
         @Override
         protected Analyzer create(Version version) {
@@ -484,7 +465,7 @@ public enum PreBuiltAnalyzers {
         cache = PreBuiltCacheFactory.getCache(cachingStrategy);
     }

-    PreBuiltCacheFactory.PreBuiltCache<Analyzer> getCache() {
+    public PreBuiltCacheFactory.PreBuiltCache<Analyzer> getCache() {
         return cache;
     }
PreBuiltCacheFactory.java
@@ -21,6 +21,8 @@ package org.elasticsearch.indices.analysis;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;

+import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -36,8 +38,12 @@ public class PreBuiltCacheFactory {
     public enum CachingStrategy { ONE, LUCENE, ELASTICSEARCH };

     public interface PreBuiltCache<T> {
+
         T get(Version version);
+
         void put(Version version, T t);
+
+        Collection<T> values();
     }

     private PreBuiltCacheFactory() {}
@@ -71,6 +77,11 @@ public class PreBuiltCacheFactory {
         public void put(Version version, T model) {
             this.model = model;
         }
+
+        @Override
+        public Collection<T> values() {
+            return Collections.singleton(model);
+        }
     }

     /**
@@ -89,6 +100,11 @@ public class PreBuiltCacheFactory {
         public void put(Version version, T model) {
             mapModel.put(version, model);
         }
+
+        @Override
+        public Collection<T> values() {
+            return mapModel.values();
+        }
     }

     /**
@@ -107,5 +123,10 @@ public class PreBuiltCacheFactory {
         public void put(org.elasticsearch.Version version, T model) {
             mapModel.put(version.luceneVersion, model);
         }
+
+        @Override
+        public Collection<T> values() {
+            return mapModel.values();
+        }
     }
 }
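Note: every PreBuiltCache implementation must now also expose its contents via values(), which is what lets PreBuiltAnalyzerProviderFactory#close (earlier in this diff) release every cached instance. A minimal sketch of a conforming implementation, mirroring the single-instance ONE strategy shown above:

    import java.util.Collection;
    import java.util.Collections;
    import org.elasticsearch.Version;
    import org.elasticsearch.indices.analysis.PreBuiltCacheFactory;

    class SingleInstanceCache<T> implements PreBuiltCacheFactory.PreBuiltCache<T> {
        private T instance;

        @Override
        public T get(Version version) {
            return instance; // same instance regardless of version
        }

        @Override
        public void put(Version version, T t) {
            instance = t;
        }

        @Override
        public Collection<T> values() {
            // Empty until something is cached; a singleton afterwards.
            return instance == null ? Collections.emptyList() : Collections.singleton(instance);
        }
    }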
AnalysisPlugin.java
@@ -28,6 +28,7 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalyzerProvider;
 import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
 import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
 import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
 import org.elasticsearch.index.analysis.PreConfiguredTokenizer;
@@ -92,6 +93,13 @@ public interface AnalysisPlugin {
         return emptyMap();
     }

+    /**
+     * Override to add additional pre-configured {@link Analyzer}s.
+     */
+    default List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
+        return emptyList();
+    }
+
     /**
      * Override to add additional pre-configured {@link CharFilter}s.
      */
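Note: getPreBuiltAnalyzerProviderFactories() is the new plugin hook that AnalysisModule#setupPreBuiltAnalyzerProviderFactories (earlier in this diff) collects into the registry. A sketch of a plugin using it; "fixed_keyword" is a made-up name and KeywordAnalyzer stands in for whatever analyzer a real plugin would ship:

    import java.util.Collections;
    import java.util.List;
    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
    import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;
    import org.elasticsearch.plugins.AnalysisPlugin;
    import org.elasticsearch.plugins.Plugin;

    public class DemoAnalysisPlugin extends Plugin implements AnalysisPlugin {
        @Override
        public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
            // ONE: a single shared analyzer instance, whatever version created the index.
            return Collections.singletonList(new PreBuiltAnalyzerProviderFactory(
                "fixed_keyword", CachingStrategy.ONE, version -> new KeywordAnalyzer()));
        }
    }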
RestGetAllMappingsAction.java (deleted)
@@ -1,109 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.rest.action.admin.indices;
-
-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
-import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature;
-import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.cluster.metadata.AliasMetaData;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.settings.IndexScopedSettings;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.rest.BaseRestHandler;
-import org.elasticsearch.rest.BytesRestResponse;
-import org.elasticsearch.rest.RestController;
-import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.RestResponse;
-import org.elasticsearch.rest.action.RestBuilderListener;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
-
-import static org.elasticsearch.rest.RestRequest.Method.GET;
-import static org.elasticsearch.rest.RestRequest.Method.HEAD;
-import static org.elasticsearch.rest.RestStatus.OK;
-
-/**
- * The REST handler for retrieving all mappings
- */
-public class RestGetAllMappingsAction extends BaseRestHandler {
-
-    public RestGetAllMappingsAction(final Settings settings, final RestController controller) {
-        super(settings);
-        controller.registerHandler(GET, "/_mapping", this);
-        controller.registerHandler(GET, "/_mappings", this);
-    }
-
-    @Override
-    public String getName() {
-        return "get_all_mappings_action";
-    }
-
-    @Override
-    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
-        final GetIndexRequest getIndexRequest = new GetIndexRequest();
-        getIndexRequest.indices(Strings.EMPTY_ARRAY);
-        getIndexRequest.features(Feature.MAPPINGS);
-        getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions()));
-        getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local()));
-        getIndexRequest.humanReadable(request.paramAsBoolean("human", false));
-        return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener<GetIndexResponse>(channel) {
-
-            @Override
-            public RestResponse buildResponse(final GetIndexResponse response, final XContentBuilder builder) throws Exception {
-                builder.startObject();
-                {
-                    for (final String index : response.indices()) {
-                        builder.startObject(index);
-                        {
-                            writeMappings(response.mappings().get(index), builder);
-                        }
-                        builder.endObject();
-                    }
-                }
-                builder.endObject();
-
-                return new BytesRestResponse(OK, builder);
-            }
-
-            private void writeMappings(final ImmutableOpenMap<String, MappingMetaData> mappings,
-                                       final XContentBuilder builder) throws IOException {
-                builder.startObject("mappings");
-                {
-                    for (final ObjectObjectCursor<String, MappingMetaData> typeEntry : mappings) {
-                        builder.field(typeEntry.key);
-                        builder.map(typeEntry.value.sourceAsMap());
-                    }
-                }
-                builder.endObject();
-            }
-        });
-    }
-
-}
RestGetMappingAction.java
@@ -20,8 +20,6 @@
 package org.elasticsearch.rest.action.admin.indices;

 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.support.IndicesOptions;
@@ -56,12 +54,13 @@ import java.util.stream.Collectors;

 import static org.elasticsearch.rest.RestRequest.Method.GET;
 import static org.elasticsearch.rest.RestRequest.Method.HEAD;
-import static org.elasticsearch.rest.RestStatus.OK;

 public class RestGetMappingAction extends BaseRestHandler {

     public RestGetMappingAction(final Settings settings, final RestController controller) {
         super(settings);
+        controller.registerHandler(GET, "/_mapping", this);
+        controller.registerHandler(GET, "/_mappings", this);
         controller.registerHandler(GET, "/{index}/{type}/_mapping", this);
         controller.registerHandler(GET, "/{index}/_mappings", this);
         controller.registerHandler(GET, "/{index}/_mapping", this);
RestGetSettingsAction.java
@@ -20,23 +20,18 @@
 package org.elasticsearch.rest.action.admin.indices;

 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
-import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.BaseRestHandler;
-import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.RestResponse;
-import org.elasticsearch.rest.action.RestBuilderListener;
+import org.elasticsearch.rest.action.RestToXContentListener;

 import java.io.IOException;

 import static org.elasticsearch.rest.RestRequest.Method.GET;
-import static org.elasticsearch.rest.RestStatus.OK;

 public class RestGetSettingsAction extends BaseRestHandler {
@@ -68,15 +63,6 @@ public class RestGetSettingsAction extends BaseRestHandler {
                 .names(names);
         getSettingsRequest.local(request.paramAsBoolean("local", getSettingsRequest.local()));
         getSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getSettingsRequest.masterNodeTimeout()));
-
-        return channel -> client.admin().indices().getSettings(getSettingsRequest, new RestBuilderListener<GetSettingsResponse>(channel) {
-
-            @Override
-            public RestResponse buildResponse(GetSettingsResponse getSettingsResponse, XContentBuilder builder) throws Exception {
-                getSettingsResponse.toXContent(builder, request);
-                return new BytesRestResponse(OK, builder);
-            }
-        });
+        return channel -> client.admin().indices().getSettings(getSettingsRequest, new RestToXContentListener<>(channel));
     }

 }
TemplateUpgradeServiceTests.java
@@ -35,12 +35,16 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.junit.After;
+import org.junit.Before;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -52,13 +56,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;

 import static java.util.Collections.emptyMap;
+import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
+import static org.elasticsearch.test.ClusterServiceUtils.setState;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.CoreMatchers.startsWith;
@@ -75,8 +82,20 @@ import static org.mockito.Mockito.when;

 public class TemplateUpgradeServiceTests extends ESTestCase {

-    private final ClusterService clusterService = new ClusterService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
-        ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null, Collections.emptyMap());
+    private ThreadPool threadPool;
+    private ClusterService clusterService;
+
+    @Before
+    public void setUpTest() throws Exception {
+        threadPool = new TestThreadPool("TemplateUpgradeServiceTests");
+        clusterService = createClusterService(threadPool);
+    }
+
+    @After
+    public void tearDownTest() throws Exception {
+        threadPool.shutdownNow();
+        clusterService.close();
+    }

     public void testCalculateChangesAddChangeAndDelete() {
@@ -90,7 +109,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
             IndexTemplateMetaData.builder("changed_test_template").patterns(randomIndexPatterns()).build()
         );

-        TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, null, clusterService, null,
+        final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, null, clusterService, threadPool,
             Arrays.asList(
                 templates -> {
                     if (shouldAdd) {
@@ -190,18 +209,18 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
             additions.put("add_template_" + i, new BytesArray("{\"index_patterns\" : \"*\", \"order\" : " + i + "}"));
         }

-        ThreadPool threadPool = mock(ThreadPool.class);
-        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
-        when(threadPool.getThreadContext()).thenReturn(threadContext);
-        TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
+        final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
             Collections.emptyList());

-        IllegalStateException ise = expectThrows(IllegalStateException.class, () -> service.updateTemplates(additions, deletions));
+        IllegalStateException ise = expectThrows(IllegalStateException.class, () -> service.upgradeTemplates(additions, deletions));
         assertThat(ise.getMessage(), containsString("template upgrade service should always happen in a system context"));

-        threadContext.markAsSystemContext();
-        service.updateTemplates(additions, deletions);
-        int updatesInProgress = service.getUpdatesInProgress();
+        service.upgradesInProgress.set(additionsCount + deletionsCount + 2); // +2 to skip tryFinishUpgrade
+        final ThreadContext threadContext = threadPool.getThreadContext();
+        try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
+            threadContext.markAsSystemContext();
+            service.upgradeTemplates(additions, deletions);
+        }

         assertThat(putTemplateListeners, hasSize(additionsCount));
         assertThat(deleteTemplateListeners, hasSize(deletionsCount));
@@ -218,30 +237,34 @@ public class TemplateUpgradeServiceTests extends ESTestCase {

         for (int i = 0; i < deletionsCount; i++) {
             if (randomBoolean()) {
-                int prevUpdatesInProgress = service.getUpdatesInProgress();
+                int prevUpdatesInProgress = service.upgradesInProgress.get();
                 deleteTemplateListeners.get(i).onFailure(new RuntimeException("test - ignore"));
-                assertThat(prevUpdatesInProgress - service.getUpdatesInProgress(), equalTo(1));
+                assertThat(prevUpdatesInProgress - service.upgradesInProgress.get(), equalTo(1));
             } else {
-                int prevUpdatesInProgress = service.getUpdatesInProgress();
+                int prevUpdatesInProgress = service.upgradesInProgress.get();
                 deleteTemplateListeners.get(i).onResponse(new DeleteIndexTemplateResponse(randomBoolean()) {

                 });
-                assertThat(prevUpdatesInProgress - service.getUpdatesInProgress(), equalTo(1));
+                assertThat(prevUpdatesInProgress - service.upgradesInProgress.get(), equalTo(1));
             }
         }
-        assertThat(updatesInProgress - service.getUpdatesInProgress(), equalTo(additionsCount + deletionsCount));
+        // tryFinishUpgrade was skipped
+        assertThat(service.upgradesInProgress.get(), equalTo(2));
     }

     private static final Set<DiscoveryNode.Role> MASTER_DATA_ROLES =
         Collections.unmodifiableSet(EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA));

     @SuppressWarnings("unchecked")
-    public void testClusterStateUpdate() {
-
-        AtomicReference<ActionListener<PutIndexTemplateResponse>> addedListener = new AtomicReference<>();
-        AtomicReference<ActionListener<PutIndexTemplateResponse>> changedListener = new AtomicReference<>();
-        AtomicReference<ActionListener<DeleteIndexTemplateResponse>> removedListener = new AtomicReference<>();
-        AtomicInteger updateInvocation = new AtomicInteger();
+    public void testClusterStateUpdate() throws InterruptedException {
+        final AtomicReference<ActionListener<PutIndexTemplateResponse>> addedListener = new AtomicReference<>();
+        final AtomicReference<ActionListener<PutIndexTemplateResponse>> changedListener = new AtomicReference<>();
+        final AtomicReference<ActionListener<DeleteIndexTemplateResponse>> removedListener = new AtomicReference<>();
+        final Semaphore updateInvocation = new Semaphore(0);
+        final Semaphore calculateInvocation = new Semaphore(0);
+        final Semaphore changedInvocation = new Semaphore(0);
+        final Semaphore finishInvocation = new Semaphore(0);

         MetaData metaData = randomMetaData(
             IndexTemplateMetaData.builder("user_template").patterns(randomIndexPatterns()).build(),
@@ -249,21 +272,6 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
             IndexTemplateMetaData.builder("changed_test_template").patterns(randomIndexPatterns()).build()
         );

-        ThreadPool threadPool = mock(ThreadPool.class);
-        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
-        when(threadPool.getThreadContext()).thenReturn(threadContext);
-        ExecutorService executorService = mock(ExecutorService.class);
-        when(threadPool.generic()).thenReturn(executorService);
-        doAnswer(invocation -> {
-            Object[] args = invocation.getArguments();
-            assert args.length == 1;
-            assertTrue(threadContext.isSystemContext());
-            Runnable runnable = (Runnable) args[0];
-            runnable.run();
-            updateInvocation.incrementAndGet();
-            return null;
-        }).when(executorService).execute(any(Runnable.class));
-
         Client mockClient = mock(Client.class);
         AdminClient mockAdminClient = mock(AdminClient.class);
         IndicesAdminClient mockIndicesAdminClient = mock(IndicesAdminClient.class);
@@ -293,7 +301,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
             return null;
         }).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class));

-        TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
+        final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
             Arrays.asList(
                 templates -> {
                     assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template")
@@ -309,26 +317,63 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
                         .patterns(Collections.singletonList("*")).order(10).build()));
                     return templates;
                 }
-            ));
+            )) {
+
+                @Override
+                void tryFinishUpgrade(AtomicBoolean anyUpgradeFailed) {
+                    super.tryFinishUpgrade(anyUpgradeFailed);
+                    finishInvocation.release();
+                }
+
+                @Override
+                void upgradeTemplates(Map<String, BytesReference> changes, Set<String> deletions) {
+                    super.upgradeTemplates(changes, deletions);
+                    updateInvocation.release();
+                }
+
+                @Override
+                Optional<Tuple<Map<String, BytesReference>, Set<String>>>
+                        calculateTemplateChanges(ImmutableOpenMap<String, IndexTemplateMetaData> templates) {
+                    final Optional<Tuple<Map<String, BytesReference>, Set<String>>> ans = super.calculateTemplateChanges(templates);
+                    calculateInvocation.release();
+                    return ans;
+                }
+
+                @Override
+                public void clusterChanged(ClusterChangedEvent event) {
+                    super.clusterChanged(event);
+                    changedInvocation.release();
+                }
+            };

         ClusterState prevState = ClusterState.EMPTY_STATE;
         ClusterState state = ClusterState.builder(prevState).nodes(DiscoveryNodes.builder()
             .add(new DiscoveryNode("node1", "node1", buildNewFakeTransportAddress(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT)
             ).localNodeId("node1").masterNodeId("node1").build()
         ).metaData(metaData).build();
-        service.clusterChanged(new ClusterChangedEvent("test", state, prevState));
+        setState(clusterService, state);

-        assertThat(updateInvocation.get(), equalTo(1));
+        changedInvocation.acquire();
+        assertThat(changedInvocation.availablePermits(), equalTo(0));
+        calculateInvocation.acquire();
+        assertThat(calculateInvocation.availablePermits(), equalTo(0));
+        updateInvocation.acquire();
+        assertThat(updateInvocation.availablePermits(), equalTo(0));
+        assertThat(finishInvocation.availablePermits(), equalTo(0));
         assertThat(addedListener.get(), notNullValue());
         assertThat(changedListener.get(), notNullValue());
         assertThat(removedListener.get(), notNullValue());

         prevState = state;
         state = ClusterState.builder(prevState).metaData(MetaData.builder(state.metaData()).removeTemplate("user_template")).build();
-        service.clusterChanged(new ClusterChangedEvent("test 2", state, prevState));
+        setState(clusterService, state);

         // Make sure that update wasn't invoked since we are still running
-        assertThat(updateInvocation.get(), equalTo(1));
+        changedInvocation.acquire();
+        assertThat(changedInvocation.availablePermits(), equalTo(0));
+        assertThat(calculateInvocation.availablePermits(), equalTo(0));
+        assertThat(updateInvocation.availablePermits(), equalTo(0));
+        assertThat(finishInvocation.availablePermits(), equalTo(0));

         addedListener.getAndSet(null).onResponse(new PutIndexTemplateResponse(true) {
         });
@@ -337,19 +382,40 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
         removedListener.getAndSet(null).onResponse(new DeleteIndexTemplateResponse(true) {
         });

-        service.clusterChanged(new ClusterChangedEvent("test 3", state, prevState));
+        // 3 upgrades should be completed, in addition to the final calculate
+        finishInvocation.acquire(3);
+        assertThat(finishInvocation.availablePermits(), equalTo(0));
+        calculateInvocation.acquire();
+        assertThat(calculateInvocation.availablePermits(), equalTo(0));
+
+        setState(clusterService, state);

         // Make sure that update was called this time since we are no longer running
-        assertThat(updateInvocation.get(), equalTo(2));
+        changedInvocation.acquire();
+        assertThat(changedInvocation.availablePermits(), equalTo(0));
+        calculateInvocation.acquire();
+        assertThat(calculateInvocation.availablePermits(), equalTo(0));
+        updateInvocation.acquire();
+        assertThat(updateInvocation.availablePermits(), equalTo(0));
+        assertThat(finishInvocation.availablePermits(), equalTo(0));

         addedListener.getAndSet(null).onFailure(new RuntimeException("test - ignore"));
         changedListener.getAndSet(null).onFailure(new RuntimeException("test - ignore"));
         removedListener.getAndSet(null).onFailure(new RuntimeException("test - ignore"));

-        service.clusterChanged(new ClusterChangedEvent("test 3", state, prevState));
+        finishInvocation.acquire(3);
+        assertThat(finishInvocation.availablePermits(), equalTo(0));
+        calculateInvocation.acquire();
+        assertThat(calculateInvocation.availablePermits(), equalTo(0));
+
+        setState(clusterService, state);

         // Make sure that update wasn't called this time since the index template metadata didn't change
-        assertThat(updateInvocation.get(), equalTo(2));
+        changedInvocation.acquire();
+        assertThat(changedInvocation.availablePermits(), equalTo(0));
+        assertThat(calculateInvocation.availablePermits(), equalTo(0));
+        assertThat(updateInvocation.availablePermits(), equalTo(0));
+        assertThat(finishInvocation.availablePermits(), equalTo(0));
     }

     private static final int NODE_TEST_ITERS = 100;
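Note: the rewritten test replaces the mocked executor and AtomicInteger counters with Semaphores released from overridden hooks, so assertions block until the asynchronous work has actually happened instead of assuming it ran inline. The pattern in isolation (a sketch, not from the commit):

    import java.util.concurrent.Semaphore;

    class SemaphoreAwaitDemo {
        public static void main(String[] args) throws InterruptedException {
            Semaphore invoked = new Semaphore(0);
            Thread worker = new Thread(() -> {
                // ... asynchronous work under test ...
                invoked.release(); // signal one completed invocation
            });
            worker.start();
            invoked.acquire(); // blocks until the work has actually run
            System.out.println("permits left: " + invoked.availablePermits()); // 0 => ran exactly once
        }
    }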
UnicastZenPingTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.discovery.zen;

 import org.apache.logging.log4j.Logger;
+import org.apache.lucene.util.Constants;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterName;
@@ -94,6 +95,7 @@ import static java.util.Collections.emptySet;
 import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasSize;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
@@ -137,7 +139,6 @@ public class UnicastZenPingTests extends ESTestCase {

     private static final UnicastHostsProvider EMPTY_HOSTS_PROVIDER = Collections::emptyList;

-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/28685")
     public void testSimplePings() throws IOException, InterruptedException, ExecutionException {
         // use ephemeral ports
         final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build();
@@ -233,9 +234,9 @@ public class UnicastZenPingTests extends ESTestCase {
         ZenPing.PingResponse ping = pingResponses.iterator().next();
         assertThat(ping.node().getId(), equalTo("UZP_B"));
         assertThat(ping.getClusterStateVersion(), equalTo(state.version()));
-        assertPingCount(handleA, handleB, 3);
-        assertPingCount(handleA, handleC, 0); // mismatch, shouldn't ping
-        assertPingCount(handleA, handleD, 0); // mismatch, shouldn't ping
+        assertPings(handleA, handleB);
+        assertNoPings(handleA, handleC); // mismatch, shouldn't ping
+        assertNoPings(handleA, handleD); // mismatch, shouldn't ping

         // ping again, this time from B,
         logger.info("ping from UZP_B");
@@ -244,23 +245,23 @@ public class UnicastZenPingTests extends ESTestCase {
         ping = pingResponses.iterator().next();
         assertThat(ping.node().getId(), equalTo("UZP_A"));
         assertThat(ping.getClusterStateVersion(), equalTo(ElectMasterService.MasterCandidate.UNRECOVERED_CLUSTER_VERSION));
-        assertPingCount(handleB, handleA, 3);
-        assertPingCount(handleB, handleC, 0); // mismatch, shouldn't ping
-        assertPingCount(handleB, handleD, 0); // mismatch, shouldn't ping
+        assertPings(handleB, handleA);
+        assertNoPings(handleB, handleC); // mismatch, shouldn't ping
+        assertNoPings(handleB, handleD); // mismatch, shouldn't ping

         logger.info("ping from UZP_C");
         pingResponses = zenPingC.pingAndWait().toList();
         assertThat(pingResponses.size(), equalTo(1));
-        assertPingCount(handleC, handleA, 0);
-        assertPingCount(handleC, handleB, 0);
-        assertPingCount(handleC, handleD, 3);
+        assertNoPings(handleC, handleA);
+        assertNoPings(handleC, handleB);
+        assertPings(handleC, handleD);

         logger.info("ping from UZP_D");
         pingResponses = zenPingD.pingAndWait().toList();
         assertThat(pingResponses.size(), equalTo(1));
-        assertPingCount(handleD, handleA, 0);
-        assertPingCount(handleD, handleB, 0);
-        assertPingCount(handleD, handleC, 3);
+        assertNoPings(handleD, handleA);
+        assertNoPings(handleD, handleB);
+        assertPings(handleD, handleC);

         zenPingC.close();
         handleD.counters.clear();
@@ -268,9 +269,9 @@ public class UnicastZenPingTests extends ESTestCase {
         pingResponses = zenPingD.pingAndWait().toList();
         // check that node does not respond to pings anymore after the ping service has been closed
         assertThat(pingResponses.size(), equalTo(0));
-        assertPingCount(handleD, handleA, 0);
-        assertPingCount(handleD, handleB, 0);
-        assertPingCount(handleD, handleC, 3);
+        assertNoPings(handleD, handleA);
+        assertNoPings(handleD, handleB);
+        assertPings(handleD, handleC);
     }

     public void testUnknownHostNotCached() throws ExecutionException, InterruptedException {
@@ -353,8 +354,8 @@ public class UnicastZenPingTests extends ESTestCase {
         ZenPing.PingResponse ping = pingResponses.iterator().next();
         assertThat(ping.node().getId(), equalTo("UZP_C"));
         assertThat(ping.getClusterStateVersion(), equalTo(state.version()));
-        assertPingCount(handleA, handleB, 0);
-        assertPingCount(handleA, handleC, 3);
+        assertNoPings(handleA, handleB);
+        assertPings(handleA, handleC);
         assertNull(handleA.counters.get(handleB.address));
     }
@@ -377,8 +378,8 @@ public class UnicastZenPingTests extends ESTestCase {
         assertThat(secondPingResponses.size(), equalTo(2));
         final Set<String> ids = new HashSet<>(secondPingResponses.stream().map(p -> p.node().getId()).collect(Collectors.toList()));
         assertThat(ids, equalTo(new HashSet<>(Arrays.asList("UZP_B", "UZP_C"))));
-        assertPingCount(handleA, handleB, 3);
-        assertPingCount(handleA, handleC, 3);
+        assertPings(handleA, handleB);
+        assertPings(handleA, handleC);
         }
     }
@@ -745,13 +746,30 @@ public class UnicastZenPingTests extends ESTestCase {
         verify(logger).warn(eq("failed to resolve host [127.0.0.1:9300:9300]"), Matchers.any(ExecutionException.class));
     }

-    private void assertPingCount(final NetworkHandle fromNode, final NetworkHandle toNode, int expectedCount) {
+    private void assertNoPings(final NetworkHandle fromNode, final NetworkHandle toNode) {
         final AtomicInteger counter = fromNode.counters.getOrDefault(toNode.address, new AtomicInteger());
         final String onNodeName = fromNode.node.getName();
         assertNotNull("handle for [" + onNodeName + "] has no 'expected' counter", counter);
         final String forNodeName = toNode.node.getName();
         assertThat("node [" + onNodeName + "] ping count to [" + forNodeName + "] is unexpected",
-            counter.get(), equalTo(expectedCount));
+            counter.get(), equalTo(0));
+    }
+
+    private void assertPings(final NetworkHandle fromNode, final NetworkHandle toNode) {
+        final AtomicInteger counter = fromNode.counters.getOrDefault(toNode.address, new AtomicInteger());
+        final String onNodeName = fromNode.node.getName();
+        assertNotNull("handle for [" + onNodeName + "] has no 'expected' counter", counter);
+        final String forNodeName = toNode.node.getName();
+        if (Constants.WINDOWS) {
+            // Some of the ping attempts seem to sporadically fail on Windows (see https://github.com/elastic/elasticsearch/issues/28685)
+            // Anyhow, the point of the test is not to assert the exact number of pings, but to check if pinging has taken place or not
+            assertThat("node [" + onNodeName + "] ping count to [" + forNodeName + "] is unexpected",
+                counter.get(), greaterThan(0));
+        } else {
+            assertThat("node [" + onNodeName + "] ping count to [" + forNodeName + "] is unexpected",
+                counter.get(), equalTo(3));
+        }
+
+    }

     private NetworkHandle startServices(
IndexModuleTests.java
@@ -121,7 +121,7 @@ public class IndexModuleTests extends ESTestCase {
         index = indexSettings.getIndex();
         environment = TestEnvironment.newEnvironment(settings);
         emptyAnalysisRegistry = new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(),
-                emptyMap(), emptyMap(), emptyMap());
+                emptyMap(), emptyMap(), emptyMap(), emptyMap());
         threadPool = new TestThreadPool("test");
         circuitBreakerService = new NoneCircuitBreakerService();
         PageCacheRecycler pageCacheRecycler = new PageCacheRecycler(settings);
AnalysisRegistryTests.java
@@ -41,6 +41,7 @@ import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;
+import java.util.Collections;
 import java.util.Map;

 import static java.util.Collections.emptyMap;
@@ -48,6 +49,8 @@ import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;

 public class AnalysisRegistryTests extends ESTestCase {
     private AnalysisRegistry emptyRegistry;
@@ -58,7 +61,7 @@ public class AnalysisRegistryTests extends ESTestCase {

     private static AnalysisRegistry emptyAnalysisRegistry(Settings settings) {
         return new AnalysisRegistry(TestEnvironment.newEnvironment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(),
-                emptyMap(), emptyMap(), emptyMap());
+                emptyMap(), emptyMap(), emptyMap(), emptyMap());
     }

     private static IndexSettings indexSettingsOfCurrentVersion(Settings.Builder settings) {
@@ -224,4 +227,16 @@ public class AnalysisRegistryTests extends ESTestCase {
         indexAnalyzers.close();
         indexAnalyzers.close();
     }
+
+    public void testEnsureCloseInvocationProperlyDelegated() throws IOException {
+        Settings settings = Settings.builder()
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+            .build();
+        PreBuiltAnalyzerProviderFactory mock = mock(PreBuiltAnalyzerProviderFactory.class);
+        AnalysisRegistry registry = new AnalysisRegistry(TestEnvironment.newEnvironment(settings), emptyMap(), emptyMap(),
+            emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), Collections.singletonMap("key", mock));
+
+        registry.close();
+        verify(mock).close();
+    }
 }
HighlighterSearchIT.java
@@ -19,7 +19,9 @@
 package org.elasticsearch.search.fetch.subphase.highlight;

 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequestBuilder;
@@ -32,6 +34,8 @@ import org.elasticsearch.common.settings.Settings.Builder;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
+import org.elasticsearch.index.analysis.AnalyzerProvider;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.IdsQueryBuilder;
 import org.elasticsearch.index.query.MatchQueryBuilder;
@@ -41,6 +45,8 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
 import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
+import org.elasticsearch.indices.analysis.AnalysisModule;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
@@ -63,6 +69,7 @@ import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;

+import static java.util.Collections.singletonMap;
 import static org.elasticsearch.client.Requests.searchRequest;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
@@ -106,7 +113,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class);
+        return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockWhitespacePlugin.class);
     }

     public void testHighlightingWithStoredKeyword() throws IOException {
@@ -1599,8 +1606,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         assertAcked(prepareCreate("test")
             .setSettings(Settings.builder()
                 .put(indexSettings())
-                .put("analysis.analyzer.my_analyzer.type", "pattern")
-                .put("analysis.analyzer.my_analyzer.pattern", "\\s+")
+                .put("analysis.analyzer.my_analyzer.type", "mock_whitespace")
                 .build())
             .addMapping("type", "text", "type=text,analyzer=my_analyzer"));
         ensureGreen();
@@ -1611,7 +1617,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         SearchResponse response = client().prepareSearch("test")
             .setQuery(QueryBuilders.matchQuery("text", "test"))
             .highlighter(new HighlightBuilder().field("text")).execute().actionGet();
-        // PatternAnalyzer will throw an exception if it is resetted twice
+        // Mock tokenizer will throw an exception if it is resetted twice
        assertHitCount(response, 1L);
     }
@@ -2976,4 +2982,22 @@ public class HighlighterSearchIT extends ESIntegTestCase {
             assertThat(field.getFragments()[0].string(), equalTo("<em>Hello World</em>"));
         }
     }
+
+    public static class MockWhitespacePlugin extends Plugin implements AnalysisPlugin {
+
+        @Override
+        public Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
+            return singletonMap("mock_whitespace", (indexSettings, environment, name, settings) -> {
+                return new AbstractIndexAnalyzerProvider<Analyzer>(indexSettings, name, settings) {
+
+                    MockAnalyzer instance = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
+
+                    @Override
+                    public Analyzer get() {
+                        return instance;
+                    }
+                };
+            });
+        }
+    }
 }
@ -47,6 +47,13 @@ import static org.hamcrest.CoreMatchers.notNullValue;
|
|||
*/
|
||||
public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
|
||||
|
||||
public void testReadNonExistingPath() throws IOException {
|
||||
try(BlobStore store = newBlobStore()) {
|
||||
final BlobContainer container = store.blobContainer(new BlobPath());
|
||||
expectThrows(NoSuchFileException.class, () -> container.readBlob("non-existing"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testWriteRead() throws IOException {
|
||||
try(BlobStore store = newBlobStore()) {
|
||||
final BlobContainer container = store.blobContainer(new BlobPath());
|
||||
|
|
|
@@ -16,7 +16,6 @@ buildRestTests.expectedUnconvertedCandidates = [
        'en/ml/functions/rare.asciidoc',
        'en/ml/functions/sum.asciidoc',
        'en/ml/functions/time.asciidoc',
        'en/ml/aggregations.asciidoc',
        'en/ml/customurl.asciidoc',
        'en/monitoring/indices.asciidoc',
        'en/rest-api/security/ssl.asciidoc',

@@ -81,7 +80,6 @@ buildRestTests.expectedUnconvertedCandidates = [
        'en/rest-api/ml/validate-job.asciidoc',
        'en/rest-api/security/authenticate.asciidoc',
        'en/rest-api/watcher/stats.asciidoc',
        'en/security/authorization/managing-roles.asciidoc',
        'en/watcher/example-watches/watching-time-series-data.asciidoc',
]

@@ -282,6 +280,58 @@ setups['library'] = '''
    {"name": "The Moon is a Harsh Mistress", "author": "Robert A. Heinlein", "release_date": "1966-04-01", "page_count": 288}

'''
setups['farequote_index'] = '''
  - do:
      indices.create:
        index: farequote
        body:
          settings:
            number_of_shards: 1
            number_of_replicas: 0
          mappings:
            metric:
              properties:
                time:
                  type: date
                responsetime:
                  type: float
                airline:
                  type: keyword
'''
setups['farequote_data'] = setups['farequote_index'] + '''
  - do:
      bulk:
        index: farequote
        type: metric
        refresh: true
        body: |
          {"index": {"_id":"1"}}
          {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000"}
          {"index": {"_id":"2"}}
          {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000"}
          {"index": {"_id":"3"}}
          {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000"}
'''
setups['farequote_job'] = setups['farequote_data'] + '''
  - do:
      xpack.ml.put_job:
        job_id: "farequote"
        body: >
          {
            "analysis_config": {
              "bucket_span": "60m",
              "detectors": [{
                "function": "mean",
                "field_name": "responsetime",
                "by_field_name": "airline"
              }],
              "summary_count_field_name": "doc_count"
            },
            "data_description": {
              "time_field": "time"
            }
          }
'''
setups['server_metrics_index'] = '''
  - do:
      indices.create:

@@ -11,11 +11,12 @@ aggregated data into {xpackml} instead of raw results, which
reduces the volume of data that must be considered while detecting anomalies.

There are some limitations to using aggregations in {dfeeds}, however.
Your aggregation must include a buckets aggregation, which in turn must contain
a date histogram aggregation. This requirement ensures that the aggregated
data is a time series. If you use a terms aggregation and the cardinality of a
term is high, then the aggregation might not be effective and you might want
to just use the default search and scroll behavior.
Your aggregation must include a `date_histogram` aggregation, which in turn must
contain a `max` aggregation on the time field. This requirement ensures that the
aggregated data is a time series and the timestamp of each bucket is the time
of the last record in the bucket. If you use a terms aggregation and the
cardinality of a term is high, then the aggregation might not be effective and
you might want to just use the default search and scroll behavior.

When you create or update a job, you can include the names of aggregations, for
example:

@@ -27,9 +28,9 @@ PUT _xpack/ml/anomaly_detectors/farequote
  "analysis_config": {
    "bucket_span": "60m",
    "detectors": [{
      "function":"mean",
      "field_name":"responsetime",
      "by_field_name":"airline"
      "function": "mean",
      "field_name": "responsetime",
      "by_field_name": "airline"
    }],
    "summary_count_field_name": "doc_count"
  },

@@ -38,6 +39,8 @@ PUT _xpack/ml/anomaly_detectors/farequote
  }
}
----------------------------------
// CONSOLE
// TEST[setup:farequote_data]

In this example, the `airline`, `responsetime`, and `time` fields are
aggregations.

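Editor's note: the requirement described above (a `date_histogram` bucketing aggregation that carries a `max` on the time field) maps directly onto the Java API. A hedged sketch of the same shape using `AggregationBuilders` from the 6.x client; the field names mirror the farequote example and are otherwise arbitrary:

[source,java]
----------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

class DatafeedAggSketch {
    static DateHistogramAggregationBuilder buckets() {
        // date_histogram bucketing, with the mandatory max on the time field so that
        // each bucket carries the timestamp of its last record
        return AggregationBuilders.dateHistogram("buckets")
                .field("time")
                .dateHistogramInterval(DateHistogramInterval.minutes(5))
                .subAggregation(AggregationBuilders.max("time").field("time"))
                .subAggregation(AggregationBuilders.terms("airline").field("airline")
                        .subAggregation(AggregationBuilders.avg("responsetime").field("responsetime")));
    }
}
----------------------------------
// NOTCONSOLE
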
@@ -85,7 +88,8 @@ PUT _xpack/ml/datafeeds/datafeed-farequote
  }
}
----------------------------------

// CONSOLE
// TEST[setup:farequote_job]

In this example, the aggregations have names that match the fields that they
operate on. That is to say, the `max` aggregation is named `time` and its

@@ -100,35 +104,86 @@ For all other aggregations, if the aggregation name doesn't match the field name
there are limitations in the drill-down functionality within the {ml} page in
{kib}.

{dfeeds} support complex nested aggregations. This example uses the `derivative`
pipeline aggregation to find the first-order derivative of the counter
`system.network.out.bytes` for each value of the field `beat.name`.

[source,js]
----------------------------------
"aggregations": {
  "beat.name": {
    "terms": {
      "field": "beat.name"
    },
    "aggregations": {
      "buckets": {
        "date_histogram": {
          "field": "@timestamp",
          "interval": "5m"
        },
        "aggregations": {
          "@timestamp": {
            "max": {
              "field": "@timestamp"
            }
          },
          "bytes_out_average": {
            "avg": {
              "field": "system.network.out.bytes"
            }
          },
          "bytes_out_derivative": {
            "derivative": {
              "buckets_path": "bytes_out_average"
            }
          }
        }
      }
    }
  }
}
----------------------------------
// NOTCONSOLE

When you define an aggregation in a {dfeed}, it must have the following form:

[source,js]
----------------------------------
"aggregations" : {
  "buckets" : {
    "date_histogram" : {
      "time_zone": "UTC", ...
    },
    "aggregations": {
      "<time_field>": {
        "max": {
          "field":"<time_field>"
        }
      }
      [,"<first_term>": {
        "terms":{...
        }
        [,"aggregations" : {
          [<sub_aggregation>]+
        } ]
      }]
    }
  }
}
"aggregations": {
  ["bucketing_aggregation": {
    "bucket_agg": {
      ...
    },
    "aggregations": {]
      "data_histogram_aggregation": {
        "date_histogram": {
          "field": "time",
        },
        "aggregations": {
          "timestamp": {
            "max": {
              "field": "time"
            }
          },
          [,"<first_term>": {
            "terms":{...
            }
            [,"aggregations" : {
              [<sub_aggregation>]+
            } ]
          }]
        }
      }
    }
  }
}
----------------------------------
// NOTCONSOLE

You must specify `buckets` as the aggregation name and `date_histogram` as the
aggregation type. For more information, see
The top level aggregation must be either a {ref}/search-aggregations-bucket.html[Bucket Aggregation]
containing a single sub-aggregation that is a `date_histogram` or the top level aggregation
is the required `date_histogram`. There must be exactly one `date_histogram` aggregation.
For more information, see
{ref}/search-aggregations-bucket-datehistogram-aggregation.html[Date Histogram Aggregation].

NOTE: The `time_zone` parameter in the date histogram aggregation must be set to `UTC`,

@@ -163,6 +218,7 @@ GET .../_search {
  }
}
--------------------------------------------------
// NOTCONSOLE

By default, {es} limits the maximum number of terms returned to 10000. For high
cardinality fields, the query might not run. It might return errors related to

@@ -142,4 +142,4 @@ stored, that is to say the monitoring cluster. To grant all of the necessary per
<<config-monitoring-indices,Configure the indices that store the monitoring data>>.

include::indices.asciidoc[]
include::{xes-repo-dir}/settings/monitoring-settings.asciidoc[]
include::{es-repo-dir}/settings/monitoring-settings.asciidoc[]

@@ -1,29 +0,0 @@
[role="xpack"]
[[xpack-api]]
= {xpack} APIs

[partintro]
--
{xpack} exposes REST APIs that are used by the UI components and can be called
directly to configure and access {xpack} features.

* <<info-api,Info API>>
* <<graph-explore-api,Graph Explore API>>
* <<licensing-apis,Licensing APIs>>
* <<ml-apis,Machine Learning APIs>>
* <<security-api,Security APIs>>
* <<watcher-api,Watcher APIs>>
* <<rollup-apis,Rollup APIs>>
* <<migration-api,Migration APIs>>
--

include::info.asciidoc[]
include::graph/explore.asciidoc[]
include::licensing.asciidoc[]
include::migration.asciidoc[]
include::ml-api.asciidoc[]
include::rollup-api.asciidoc[]
include::security.asciidoc[]
include::watcher.asciidoc[]
include::defs.asciidoc[]

@@ -12,6 +12,8 @@ A role is defined by the following JSON structure:
  "indices": [ ... ] <3>
}
-----
// NOTCONSOLE

<1> A list of usernames the owners of this role can <<run-as-privilege, impersonate>>.
<2> A list of cluster privileges. These privileges define the
cluster level actions users with this role are able to execute. This field

@@ -37,6 +39,8 @@ The following describes the structure of an indices permissions entry:
  "query": "..." <4>
}
-------
// NOTCONSOLE

<1> A list of indices (or index name patterns) to which the permissions in this
entry apply.
<2> The index level privileges the owners of the role have on the associated

@@ -77,8 +81,9 @@ The following snippet shows an example definition of a `clicks_admin` role:

[source,js]
-----------
POST /_xpack/security/role/clicks_admin
{
  "run_as": [ "clicks_watcher_1" ]
  "run_as": [ "clicks_watcher_1" ],
  "cluster": [ "monitor" ],
  "indices": [
    {

@@ -92,6 +97,7 @@ The following snippet shows an example definition of a `clicks_admin` role:
  ]
}
-----------
// CONSOLE

Based on the above definition, users owning the `clicks_admin` role can:

@@ -142,5 +142,5 @@ include::authentication/configuring-ldap-realm.asciidoc[]
include::authentication/configuring-native-realm.asciidoc[]
include::authentication/configuring-pki-realm.asciidoc[]
include::authentication/configuring-saml-realm.asciidoc[]
include::{xes-repo-dir}/settings/security-settings.asciidoc[]
include::{xes-repo-dir}/settings/audit-settings.asciidoc[]
include::{es-repo-dir}/settings/security-settings.asciidoc[]
include::{es-repo-dir}/settings/audit-settings.asciidoc[]

@@ -1,12 +1,8 @@
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.elasticsearch.gradle.test.NodeInfo

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import org.elasticsearch.gradle.test.RunTask;

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

@@ -17,6 +13,50 @@ dependencies {
    testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
}

subprojects {
    afterEvaluate {
        if (project.plugins.hasPlugin(PluginBuildPlugin)) {
            // see the root Gradle file for additional logic regarding this configuration
            project.configurations.create('featureAwarePlugin')
            project.dependencies.add('featureAwarePlugin', project.configurations.compileClasspath)
            project.dependencies.add(
                    'featureAwarePlugin',
                    "org.elasticsearch.xpack.test:feature-aware:${org.elasticsearch.gradle.VersionProperties.elasticsearch}")
            project.dependencies.add('featureAwarePlugin', project.sourceSets.main.output.getClassesDirs())

            final Task featureAwareTask = project.tasks.create("featureAwareCheck", LoggedExec) {
                description = "Runs FeatureAwareCheck on main classes."
                dependsOn project.configurations.featureAwarePlugin

                final File successMarker = new File(project.buildDir, 'markers/featureAware')
                outputs.file(successMarker)

                executable = new File(project.runtimeJavaHome, 'bin/java')

                // default to main class files if such a source set exists
                final List files = []
                if (project.sourceSets.findByName("main")) {
                    files.add(project.sourceSets.main.output.classesDir)
                    dependsOn project.tasks.classes
                }
                // filter out non-existent classes directories from empty source sets
                final FileCollection classDirectories = project.files(files).filter { it.exists() }

                doFirst {
                    args('-cp', project.configurations.featureAwarePlugin.asPath, 'org.elasticsearch.xpack.test.feature_aware.FeatureAwareCheck')
                    classDirectories.each { args it.getAbsolutePath() }
                }
                doLast {
                    successMarker.parentFile.mkdirs()
                    successMarker.setText("", 'UTF-8')
                }
            }

            project.precommit.dependsOn featureAwareTask
        }
    }
}

// https://github.com/elastic/x-plugins/issues/724
configurations {
    testArtifacts.extendsFrom testRuntime

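Editor's note on the task above: FeatureAwareCheck is invoked as a plain `java` main with the plugin's class directories as trailing arguments, and the marker file only records success so Gradle can skip up-to-date runs; failure is signaled through the process exit code. The checker's internals are not part of this diff, so the following is a purely hypothetical skeleton of such a class-directory CLI, with all names and the no-op check invented for illustration:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

// Hypothetical sketch of a FeatureAwareCheck-style CLI; the real rules are not shown here.
public final class ClassDirectoryCheck {
    public static void main(final String[] args) throws IOException {
        for (final String arg : args) {
            final Path root = Paths.get(arg);
            try (Stream<Path> stream = Files.walk(root)) {
                // visit every compiled class; a real checker would parse each class file
                // and terminate with a non-zero exit code on a violation
                stream.filter(p -> p.toString().endsWith(".class"))
                        .forEach(p -> System.out.println("checking " + root.relativize(p)));
            }
        }
    }
}
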
@@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;

@@ -173,7 +174,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
        builder.endObject();
        return builder;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(id);

@@ -336,6 +337,17 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
        if (indexPattern == null || indexPattern.isEmpty()) {
            throw new IllegalArgumentException("An index pattern is mandatory.");
        }
        if (Regex.isMatchAllPattern(indexPattern)) {
            throw new IllegalArgumentException("Index pattern must not match all indices (as it would match its own rollup index)");
        }
        if (Regex.isSimpleMatchPattern(indexPattern)) {
            if (Regex.simpleMatch(indexPattern, rollupIndex)) {
                throw new IllegalArgumentException("Index pattern would match rollup index name which is not allowed.");
            }
        }
        if (indexPattern.equals(rollupIndex)) {
            throw new IllegalArgumentException("Rollup index may not be the same as the index pattern.");
        }
        if (rollupIndex == null || rollupIndex.isEmpty()) {
            throw new IllegalArgumentException("A rollup index name is mandatory.");
        }

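Editor's note: the three new guards compose into one rule: a rollup job's index pattern must never be able to select its own rollup index, or the job would re-consume its own output. A minimal sketch (not part of the commit) of the guards in isolation, assuming only `org.elasticsearch.common.regex.Regex` from the ES core; the class, method, and messages here are illustrative:

import org.elasticsearch.common.regex.Regex;

public final class RollupPatternGuardDemo {
    static void validate(final String indexPattern, final String rollupIndex) {
        if (Regex.isMatchAllPattern(indexPattern)) {
            throw new IllegalArgumentException("index pattern matches all indices, including its own rollup index");
        }
        if (Regex.isSimpleMatchPattern(indexPattern) && Regex.simpleMatch(indexPattern, rollupIndex)) {
            throw new IllegalArgumentException("index pattern would match the rollup index");
        }
        if (indexPattern.equals(rollupIndex)) {
            throw new IllegalArgumentException("rollup index equals the index pattern");
        }
    }

    public static void main(final String[] args) {
        validate("foo-*", "foo_rollup"); // passes: the rollup index escapes the pattern
        validate("foo-*", "foo-rollup"); // throws: the job would re-consume its own output
    }
}
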
@@ -92,4 +92,8 @@ public class SecurityFeatureSetUsage extends XPackFeatureSet.Usage {
            builder.field(ANONYMOUS_XFIELD, anonymousUsage);
        }
    }

    public Map<String, Object> getRealmsUsage() {
        return Collections.unmodifiableMap(realmsUsage);
    }
}

@@ -119,11 +119,11 @@ public abstract class Realm implements Comparable<Realm> {
     */
    public abstract void lookupUser(String username, ActionListener<User> listener);

    public Map<String, Object> usageStats() {
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        Map<String, Object> stats = new HashMap<>();
        stats.put("name", name());
        stats.put("order", order());
        return stats;
        listener.onResponse(stats);
    }

    @Override

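Editor's note: this signature change, from a synchronous `usageStats()` returning a map to a listener-based method, is what ripples through every realm class and test in the rest of this commit. Where a caller still needs a blocking answer, the updated tests below use `PlainActionFuture` as the adapter; a minimal sketch of that pattern, assuming the new signature:

import java.util.Map;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.xpack.core.security.authc.Realm;

final class RealmUsageStatsAdapter {
    // Blocking adapter over the new async signature, mirroring what the updated
    // tests later in this commit do with PlainActionFuture.
    static Map<String, Object> usageStatsBlocking(final Realm realm) {
        final PlainActionFuture<Map<String, Object>> future = new PlainActionFuture<>();
        realm.usageStats(future);  // PlainActionFuture implements ActionListener
        return future.actionGet(); // completes on onResponse, rethrows on onFailure
    }
}
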
@@ -27,8 +27,9 @@ public class ConfigTestHelpers {
        builder.setId(jobId);
        builder.setCron(getCronString());
        builder.setTimeout(new TimeValue(ESTestCase.randomIntBetween(1,100)));
        builder.setIndexPattern(ESTestCase.randomAlphaOfLengthBetween(1,10));
        builder.setRollupIndex(ESTestCase.randomAlphaOfLengthBetween(1,10));
        String indexPattern = ESTestCase.randomAlphaOfLengthBetween(1,10);
        builder.setIndexPattern(indexPattern);
        builder.setRollupIndex("rollup_" + indexPattern); // to ensure the index pattern != rollup index
        builder.setGroupConfig(ConfigTestHelpers.getGroupConfig().build());
        builder.setPageSize(ESTestCase.randomIntBetween(1,10));
        if (ESTestCase.randomBoolean()) {

@@ -95,8 +95,8 @@ public class TransportPutRollupJobAction extends TransportMasterNodeAction<PutRo
        XPackPlugin.checkReadyForXPackCustomMetadata(clusterState);

        FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest()
            .indices(request.getConfig().getIndexPattern())
            .fields(request.getConfig().getAllFields().toArray(new String[0]));
                .indices(request.getConfig().getIndexPattern())
                .fields(request.getConfig().getAllFields().toArray(new String[0]));

        client.fieldCaps(fieldCapsRequest, new ActionListener<FieldCapabilitiesResponse>() {
            @Override

@@ -122,6 +122,37 @@ public class ConfigTests extends ESTestCase {
        assertThat(e.getMessage(), equalTo("An index pattern is mandatory."));
    }

    public void testMatchAllIndexPattern() {
        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
        job.setIndexPattern("*");
        Exception e = expectThrows(IllegalArgumentException.class, job::build);
        assertThat(e.getMessage(), equalTo("Index pattern must not match all indices (as it would match its own rollup index)"));
    }

    public void testMatchOwnRollupPatternPrefix() {
        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
        job.setIndexPattern("foo-*");
        job.setRollupIndex("foo-rollup");
        Exception e = expectThrows(IllegalArgumentException.class, job::build);
        assertThat(e.getMessage(), equalTo("Index pattern would match rollup index name which is not allowed."));
    }

    public void testMatchOwnRollupPatternSuffix() {
        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
        job.setIndexPattern("*-rollup");
        job.setRollupIndex("foo-rollup");
        Exception e = expectThrows(IllegalArgumentException.class, job::build);
        assertThat(e.getMessage(), equalTo("Index pattern would match rollup index name which is not allowed."));
    }

    public void testIndexPatternIdenticalToRollup() {
        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
        job.setIndexPattern("foo");
        job.setRollupIndex("foo");
        Exception e = expectThrows(IllegalArgumentException.class, job::build);
        assertThat(e.getMessage(), equalTo("Rollup index may not be the same as the index pattern."));
    }

    public void testEmptyRollupIndex() {
        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
        job.setRollupIndex("");

@@ -86,7 +86,6 @@ public class SecurityFeatureSet implements XPackFeatureSet {

    @Override
    public void usage(ActionListener<XPackFeatureSet.Usage> listener) {
        Map<String, Object> realmsUsage = buildRealmsUsage(realms);
        Map<String, Object> sslUsage = sslUsage(settings);
        Map<String, Object> auditUsage = auditUsage(settings);
        Map<String, Object> ipFilterUsage = ipFilterUsage(ipFilter);

@@ -94,10 +93,11 @@ public class SecurityFeatureSet implements XPackFeatureSet {

        final AtomicReference<Map<String, Object>> rolesUsageRef = new AtomicReference<>();
        final AtomicReference<Map<String, Object>> roleMappingUsageRef = new AtomicReference<>();
        final CountDown countDown = new CountDown(2);
        final AtomicReference<Map<String, Object>> realmsUsageRef = new AtomicReference<>();
        final CountDown countDown = new CountDown(3);
        final Runnable doCountDown = () -> {
            if (countDown.countDown()) {
                listener.onResponse(new SecurityFeatureSetUsage(available(), enabled(), realmsUsage,
                listener.onResponse(new SecurityFeatureSetUsage(available(), enabled(), realmsUsageRef.get(),
                        rolesUsageRef.get(), roleMappingUsageRef.get(),
                        sslUsage, auditUsage, ipFilterUsage, anonymousUsage));
            }

@@ -116,6 +116,12 @@ public class SecurityFeatureSet implements XPackFeatureSet {
            doCountDown.run();
        }, listener::onFailure);

        final ActionListener<Map<String, Object>> realmsUsageListener =
            ActionListener.wrap(realmsUsage -> {
                realmsUsageRef.set(realmsUsage);
                doCountDown.run();
            }, listener::onFailure);

        if (rolesStore == null) {
            rolesStoreUsageListener.onResponse(Collections.emptyMap());
        } else {

@@ -126,13 +132,11 @@ public class SecurityFeatureSet implements XPackFeatureSet {
        } else {
            roleMappingStore.usageStats(roleMappingStoreUsageListener);
        }
    }

    static Map<String, Object> buildRealmsUsage(Realms realms) {
        if (realms == null) {
            return Collections.emptyMap();
            realmsUsageListener.onResponse(Collections.emptyMap());
        } else {
            realms.usageStats(realmsUsageListener);
        }
        return realms.usageStats();
    }

    static Map<String, Object> sslUsage(Settings settings) {

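Editor's note: the bump from `CountDown(2)` to `CountDown(3)` above is the fan-in idiom this commit leans on throughout: each asynchronous source deposits its result in an `AtomicReference`, and whichever callback arrives last fires the combined response. A reduced sketch of the shape using the same ES utility classes; the two `fetch*` sources and the string result are hypothetical placeholders:

import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.util.concurrent.CountDown;

final class UsageFanIn {
    // Reduced sketch of the fan-in used by SecurityFeatureSet.usage().
    static void usage(ActionListener<String> listener,
                      Consumer<ActionListener<Map<String, Object>>> fetchRealms,
                      Consumer<ActionListener<Map<String, Object>>> fetchRoles) {
        final AtomicReference<Map<String, Object>> realmsRef = new AtomicReference<>();
        final AtomicReference<Map<String, Object>> rolesRef = new AtomicReference<>();
        final CountDown countDown = new CountDown(2);
        final Runnable doCountDown = () -> {
            if (countDown.countDown()) { // true only for the final arrival
                listener.onResponse("realms=" + realmsRef.get() + ", roles=" + rolesRef.get());
            }
        };
        fetchRealms.accept(ActionListener.wrap(map -> { realmsRef.set(map); doCountDown.run(); }, listener::onFailure));
        fetchRoles.accept(ActionListener.wrap(map -> { rolesRef.set(map); doCountDown.run(); }, listener::onFailure));
    }
}
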
@@ -992,24 +992,22 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
    }

    public static Settings customAuditIndexSettings(Settings nodeSettings, Logger logger) {
        Settings newSettings = Settings.builder()
        final Settings newSettings = Settings.builder()
                .put(INDEX_SETTINGS.get(nodeSettings), false)
                .normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX)
                .build();
        if (newSettings.names().isEmpty()) {
            return Settings.EMPTY;
        }

        // Filter out forbidden settings:
        Settings.Builder builder = Settings.builder();
        builder.put(newSettings.filter(k -> {
            String name = "index." + k;
        // Filter out forbidden setting
        return Settings.builder().put(newSettings.filter(name -> {
            if (FORBIDDEN_INDEX_SETTING.equals(name)) {
                logger.warn("overriding the default [{}] setting is forbidden. ignoring...", name);
                return false;
            }
            return true;
        }));
        return builder.build();
        })).build();
    }

    private void putTemplate(Settings customSettings, Consumer<Exception> consumer) {

@@ -15,12 +15,16 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.license.XPackLicenseState;

@@ -188,46 +192,67 @@ public class Realms extends AbstractComponent implements Iterable<Realm> {
        return realms;
    }

    public Map<String, Object> usageStats() {
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        Map<String, Object> realmMap = new HashMap<>();
        for (Realm realm : this) {
            if (ReservedRealm.TYPE.equals(realm.type())) {
                continue;
        final AtomicBoolean failed = new AtomicBoolean(false);
        final List<Realm> realmList = asList().stream()
                .filter(r -> ReservedRealm.TYPE.equals(r.type()) == false)
                .collect(Collectors.toList());
        final CountDown countDown = new CountDown(realmList.size());
        final Runnable doCountDown = () -> {
            if ((realmList.isEmpty() || countDown.countDown()) && failed.get() == false) {
                final AllowedRealmType allowedRealmType = licenseState.allowedRealmType();
                // iterate over the factories so we can add enabled & available info
                for (String type : factories.keySet()) {
                    assert ReservedRealm.TYPE.equals(type) == false;
                    realmMap.compute(type, (key, value) -> {
                        if (value == null) {
                            return MapBuilder.<String, Object>newMapBuilder()
                                    .put("enabled", false)
                                    .put("available", isRealmTypeAvailable(allowedRealmType, type))
                                    .map();
                        }

                        assert value instanceof Map;
                        Map<String, Object> realmTypeUsage = (Map<String, Object>) value;
                        realmTypeUsage.put("enabled", true);
                        // the realms iterator returned this type so it must be enabled
                        assert isRealmTypeAvailable(allowedRealmType, type);
                        realmTypeUsage.put("available", true);
                        return value;
                    });
                }
                listener.onResponse(realmMap);
            }
        };

        if (realmList.isEmpty()) {
            doCountDown.run();
        } else {
            for (Realm realm : realmList) {
                realm.usageStats(ActionListener.wrap(stats -> {
                    if (failed.get() == false) {
                        synchronized (realmMap) {
                            realmMap.compute(realm.type(), (key, value) -> {
                                if (value == null) {
                                    Object realmTypeUsage = convertToMapOfLists(stats);
                                    return realmTypeUsage;
                                }
                                assert value instanceof Map;
                                combineMaps((Map<String, Object>) value, stats);
                                return value;
                            });
                        }
                        doCountDown.run();
                    }
                },
                e -> {
                    if (failed.compareAndSet(false, true)) {
                        listener.onFailure(e);
                    }
                }));
            }
            realmMap.compute(realm.type(), (key, value) -> {
                if (value == null) {
                    Object realmTypeUsage = convertToMapOfLists(realm.usageStats());
                    return realmTypeUsage;
                }
                assert value instanceof Map;
                combineMaps((Map<String, Object>) value, realm.usageStats());
                return value;
            });
        }

        final AllowedRealmType allowedRealmType = licenseState.allowedRealmType();
        // iterate over the factories so we can add enabled & available info
        for (String type : factories.keySet()) {
            assert ReservedRealm.TYPE.equals(type) == false;
            realmMap.compute(type, (key, value) -> {
                if (value == null) {
                    return MapBuilder.<String, Object>newMapBuilder()
                            .put("enabled", false)
                            .put("available", isRealmTypeAvailable(allowedRealmType, type))
                            .map();
                }

                assert value instanceof Map;
                Map<String, Object> realmTypeUsage = (Map<String, Object>) value;
                realmTypeUsage.put("enabled", true);
                // the realms iterator returned this type so it must be enabled
                assert isRealmTypeAvailable(allowedRealmType, type);
                realmTypeUsage.put("available", true);
                return value;
            });
        }

        return realmMap;
    }

    private void addNativeRealms(List<Realm> realms) throws Exception {

@@ -15,6 +15,8 @@ import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;

import java.util.Map;

import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed;

@@ -46,6 +48,16 @@ public class NativeRealm extends CachingUsernamePasswordRealm {
        }
    }

    @Override
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        super.usageStats(ActionListener.wrap(stats ->
            userStore.getUserCount(ActionListener.wrap(size -> {
                stats.put("size", size);
                listener.onResponse(stats);
            }, listener::onFailure))
        , listener::onFailure));
    }

    // method is used for testing to verify cache expiration since expireAll is final
    void clearCache() {
        expireAll();

@@ -150,6 +150,30 @@ public class NativeUsersStore extends AbstractComponent {
        }
    }

    void getUserCount(final ActionListener<Long> listener) {
        if (securityIndex.indexExists() == false) {
            listener.onResponse(0L);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareSearch(SECURITY_INDEX_NAME)
                        .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE))
                        .setSize(0)
                        .request(),
                    new ActionListener<SearchResponse>() {
                        @Override
                        public void onResponse(SearchResponse response) {
                            listener.onResponse(response.getHits().getTotalHits());
                        }

                        @Override
                        public void onFailure(Exception e) {
                            listener.onFailure(e);
                        }
                    }, client::search));
        }
    }

    /**
     * Async method to retrieve a user and their password
     */

@@ -55,11 +55,11 @@ public class FileRealm extends CachingUsernamePasswordRealm {
    }

    @Override
    public Map<String, Object> usageStats() {
        Map<String, Object> stats = super.usageStats();
        // here we can determine the size based on the in mem user store
        stats.put("size", userPasswdStore.usersCount());
        return stats;
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        super.usageStats(ActionListener.wrap(stats -> {
            stats.put("size", userPasswdStore.usersCount());
            listener.onResponse(stats);
        }, listener::onFailure));
    }

}

@@ -160,12 +160,14 @@ public final class LdapRealm extends CachingUsernamePasswordRealm {
    }

    @Override
    public Map<String, Object> usageStats() {
        Map<String, Object> usage = super.usageStats();
        usage.put("load_balance_type", LdapLoadBalancing.resolve(config.settings()).toString());
        usage.put("ssl", sessionFactory.isSslUsed());
        usage.put("user_search", LdapUserSearchSessionFactory.hasUserSearchSettings(config));
        return usage;
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        super.usageStats(ActionListener.wrap(usage -> {
            usage.put("size", getCacheSize());
            usage.put("load_balance_type", LdapLoadBalancing.resolve(config.settings()).toString());
            usage.put("ssl", sessionFactory.isSslUsed());
            usage.put("user_search", LdapUserSearchSessionFactory.hasUserSearchSettings(config));
            listener.onResponse(usage);
        }, listener::onFailure));
    }

    private static void buildUser(LdapSession session, String username, ActionListener<AuthenticationResult> listener,

@@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.core.security.user.User;

import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;

@@ -177,10 +178,15 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
    }

    @Override
    public Map<String, Object> usageStats() {
        Map<String, Object> stats = super.usageStats();
        stats.put("size", cache.count());
        return stats;
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        super.usageStats(ActionListener.wrap(stats -> {
            stats.put("cache", Collections.singletonMap("size", getCacheSize()));
            listener.onResponse(stats);
        }, listener::onFailure));
    }

    protected int getCacheSize() {
        return cache.count();
    }

    protected abstract void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener);

@@ -87,7 +87,7 @@ public class FileRolesStore extends AbstractComponent {
    }

    public Map<String, Object> usageStats() {
        Map<String, Object> usageStats = new HashMap<>();
        Map<String, Object> usageStats = new HashMap<>(3);
        usageStats.put("size", permissions.size());

        boolean dls = false;

@@ -195,7 +195,7 @@ public class NativeRolesStore extends AbstractComponent {
    }

    public void usageStats(ActionListener<Map<String, Object>> listener) {
        Map<String, Object> usageStats = new HashMap<>();
        Map<String, Object> usageStats = new HashMap<>(3);
        if (securityIndex.indexExists() == false) {
            usageStats.put("size", 0L);
            usageStats.put("fls", false);

@@ -204,56 +204,56 @@ public class NativeRolesStore extends AbstractComponent {
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
            client.prepareMultiSearch()
                .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                    .setQuery(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE))
                    .setSize(0))
                .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                    .setQuery(QueryBuilders.boolQuery()
                        .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE))
                        .must(QueryBuilders.boolQuery()
                            .should(existsQuery("indices.field_security.grant"))
                            .should(existsQuery("indices.field_security.except"))
                            // for backwardscompat with 2.x
                            .should(existsQuery("indices.fields"))))
                    .setSize(0)
                    .setTerminateAfter(1))
                .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                    .setQuery(QueryBuilders.boolQuery()
                        .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE))
                        .filter(existsQuery("indices.query")))
                    .setSize(0)
                    .setTerminateAfter(1))
                .request(),
            new ActionListener<MultiSearchResponse>() {
                @Override
                public void onResponse(MultiSearchResponse items) {
                    Item[] responses = items.getResponses();
                    if (responses[0].isFailure()) {
                        usageStats.put("size", 0);
                    } else {
                        usageStats.put("size", responses[0].getResponse().getHits().getTotalHits());
                    }

                    if (responses[1].isFailure()) {
                        usageStats.put("fls", false);
                    } else {
                        usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits() > 0L);
                    }

                    if (responses[2].isFailure()) {
                        usageStats.put("dls", false);
                    } else {
                        usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits() > 0L);
                    }
                    listener.onResponse(usageStats);
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            }, client::multiSearch));
                    client.prepareMultiSearch()
                        .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                            .setQuery(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE))
                            .setSize(0))
                        .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                            .setQuery(QueryBuilders.boolQuery()
                                .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE))
                                .must(QueryBuilders.boolQuery()
                                    .should(existsQuery("indices.field_security.grant"))
                                    .should(existsQuery("indices.field_security.except"))
                                    // for backwardscompat with 2.x
                                    .should(existsQuery("indices.fields"))))
                            .setSize(0)
                            .setTerminateAfter(1))
                        .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                            .setQuery(QueryBuilders.boolQuery()
                                .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE))
                                .filter(existsQuery("indices.query")))
                            .setSize(0)
                            .setTerminateAfter(1))
                        .request(),
                    new ActionListener<MultiSearchResponse>() {
                        @Override
                        public void onResponse(MultiSearchResponse items) {
                            Item[] responses = items.getResponses();
                            if (responses[0].isFailure()) {
                                usageStats.put("size", 0);
                            } else {
                                usageStats.put("size", responses[0].getResponse().getHits().getTotalHits());
                            }

                            if (responses[1].isFailure()) {
                                usageStats.put("fls", false);
                            } else {
                                usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits() > 0L);
                            }

                            if (responses[2].isFailure()) {
                                usageStats.put("dls", false);
                            } else {
                                usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits() > 0L);
                            }
                            listener.onResponse(usageStats);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            listener.onFailure(e);
                        }
                    }, client::multiSearch));
        }
    }

@@ -146,7 +146,11 @@ public class SecurityFeatureSetTests extends ESTestCase {
            realmUsage.put("key2", Arrays.asList(i));
            realmUsage.put("key3", Arrays.asList(i % 2 == 0));
        }
        when(realms.usageStats()).thenReturn(realmsUsageStats);
        doAnswer(invocationOnMock -> {
            ActionListener<Map<String, Object>> listener = (ActionListener) invocationOnMock.getArguments()[0];
            listener.onResponse(realmsUsageStats);
            return Void.TYPE;
        }).when(realms).usageStats(any(ActionListener.class));

        final boolean anonymousEnabled = randomBoolean();
        if (anonymousEnabled) {

@@ -6,10 +6,14 @@
package org.elasticsearch.xpack.security.audit.index;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;

@@ -17,6 +21,8 @@ import org.elasticsearch.client.Requests;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;

@@ -29,6 +35,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.plugins.MetaDataUpgrader;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.SearchHit;

@@ -70,7 +77,9 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import static java.util.Collections.emptyMap;

import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.InternalTestCluster.clusterName;

@@ -85,6 +94,7 @@ import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.hasSize;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@@ -360,6 +370,21 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
        auditor.start();
    }

    public void testIndexTemplateUpgrader() throws Exception {
        final MetaDataUpgrader metaDataUpgrader = internalCluster().getInstance(MetaDataUpgrader.class);
        final Map<String, IndexTemplateMetaData> updatedTemplates = metaDataUpgrader.indexTemplateMetaDataUpgraders.apply(emptyMap());
        final IndexTemplateMetaData indexAuditTrailTemplate = updatedTemplates.get(IndexAuditTrail.INDEX_TEMPLATE_NAME);
        assertThat(indexAuditTrailTemplate, notNullValue());
        // test custom index settings override template
        assertThat(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.get(indexAuditTrailTemplate.settings()), is(numReplicas));
        assertThat(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.get(indexAuditTrailTemplate.settings()), is(numShards));
        // test upgrade template and installed template are equal
        final GetIndexTemplatesRequest request = new GetIndexTemplatesRequest(IndexAuditTrail.INDEX_TEMPLATE_NAME);
        final GetIndexTemplatesResponse response = client().admin().indices().getTemplates(request).get();
        assertThat(response.getIndexTemplates(), hasSize(1));
        assertThat(indexAuditTrailTemplate, is(response.getIndexTemplates().get(0)));
    }

    public void testProcessorsSetting() {
        final boolean explicitProcessors = randomBoolean();
        final int processors;

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.security.authc;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;

@@ -454,9 +455,11 @@ public class RealmsTests extends ESTestCase {
                .put("xpack.security.authc.realms.bar.order", "1");
        Settings settings = builder.build();
        Environment env = TestEnvironment.newEnvironment(settings);
        Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm );
        Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm);

        Map<String, Object> usageStats = realms.usageStats();
        PlainActionFuture<Map<String, Object>> future = new PlainActionFuture<>();
        realms.usageStats(future);
        Map<String, Object> usageStats = future.get();
        assertThat(usageStats.size(), is(factories.size()));

        // first check type_0

@@ -482,7 +485,9 @@ public class RealmsTests extends ESTestCase {
        // disable ALL using license
        when(licenseState.isAuthAllowed()).thenReturn(false);
        when(licenseState.allowedRealmType()).thenReturn(AllowedRealmType.NONE);
        usageStats = realms.usageStats();
        future = new PlainActionFuture<>();
        realms.usageStats(future);
        usageStats = future.get();
        assertThat(usageStats.size(), is(factories.size()));
        for (Entry<String, Object> entry : usageStats.entrySet()) {
            Map<String, Object> typeMap = (Map<String, Object>) entry.getValue();

@@ -494,7 +499,9 @@ public class RealmsTests extends ESTestCase {
        // check native or internal realms enabled only
        when(licenseState.isAuthAllowed()).thenReturn(true);
        when(licenseState.allowedRealmType()).thenReturn(randomFrom(AllowedRealmType.NATIVE, AllowedRealmType.DEFAULT));
        usageStats = realms.usageStats();
        future = new PlainActionFuture<>();
        realms.usageStats(future);
        usageStats = future.get();
        assertThat(usageStats.size(), is(factories.size()));
        for (Entry<String, Object> entry : usageStats.entrySet()) {
            final String type = entry.getKey();

@@ -22,6 +22,10 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.NativeRealmIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.xpack.core.XPackFeatureSet;
import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder;
import org.elasticsearch.xpack.core.action.XPackUsageResponse;
import org.elasticsearch.xpack.core.security.SecurityFeatureSetUsage;
import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse;
import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse;

@@ -49,6 +53,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;

import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;

@@ -662,6 +667,28 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
        assertThat(usage.get("dls"), is(dls));
    }

    public void testRealmUsageStats() {
        final int numNativeUsers = scaledRandomIntBetween(1, 32);
        SecurityClient securityClient = new SecurityClient(client());
        for (int i = 0; i < numNativeUsers; i++) {
            securityClient.preparePutUser("joe" + i, "s3krit".toCharArray(), "superuser").get();
        }

        XPackUsageResponse response = new XPackUsageRequestBuilder(client()).get();
        Optional<XPackFeatureSet.Usage> securityUsage = response.getUsages().stream()
                .filter(usage -> usage instanceof SecurityFeatureSetUsage)
                .findFirst();
        assertTrue(securityUsage.isPresent());
        SecurityFeatureSetUsage securityFeatureSetUsage = (SecurityFeatureSetUsage) securityUsage.get();
        Map<String, Object> realmsUsage = securityFeatureSetUsage.getRealmsUsage();
        assertNotNull(realmsUsage);
        assertNotNull(realmsUsage.get("native"));
        assertNotNull(((Map<String, Object>) realmsUsage.get("native")).get("size"));
        List<Long> sizeList = (List<Long>) ((Map<String, Object>) realmsUsage.get("native")).get("size");
        assertEquals(1, sizeList.size());
        assertEquals(numNativeUsers, Math.toIntExact(sizeList.get(0)));
    }

    public void testSetEnabled() throws Exception {
        securityClient().preparePutUser("joe", "s3krit".toCharArray(), SecuritySettingsSource.TEST_ROLE).get();
        final String token = basicAuthHeaderValue("joe", new SecureString("s3krit".toCharArray()));

@@ -248,7 +248,9 @@ public class FileRealmTests extends ESTestCase {
                threadContext);
        FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);

        Map<String, Object> usage = realm.usageStats();
        PlainActionFuture<Map<String, Object>> future = new PlainActionFuture<>();
        realm.usageStats(future);
        Map<String, Object> usage = future.get();
        assertThat(usage, is(notNullValue()));
        assertThat(usage, hasEntry("name", "file-realm"));
        assertThat(usage, hasEntry("order", order));

@@ -320,7 +320,9 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
        DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService);
        LdapRealm realm = new LdapRealm(LdapRealmSettings.AD_TYPE, config, sessionFactory, roleMapper, threadPool);

        Map<String, Object> stats = realm.usageStats();
        PlainActionFuture<Map<String, Object>> future = new PlainActionFuture<>();
        realm.usageStats(future);
        Map<String, Object> stats = future.get();
        assertThat(stats, is(notNullValue()));
        assertThat(stats, hasEntry("name", realm.name()));
        assertThat(stats, hasEntry("order", realm.order()));

@@ -360,7 +360,9 @@ public class LdapRealmTests extends LdapTestCase {
        LdapRealm realm = new LdapRealm(LdapRealmSettings.LDAP_TYPE, config, ldapFactory,
                new DnRoleMapper(config, resourceWatcherService), threadPool);

        Map<String, Object> stats = realm.usageStats();
        PlainActionFuture<Map<String, Object>> future = new PlainActionFuture<>();
        realm.usageStats(future);
        Map<String, Object> stats = future.get();
        assertThat(stats, is(notNullValue()));
        assertThat(stats, hasEntry("name", "ldap-realm"));
        assertThat(stats, hasEntry("order", realm.order()));

@@ -188,4 +188,3 @@ setup:
        ]
      }