mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-03-24 17:09:48 +00:00
Splitting DeprecationLogger into two: HeaderWarningLogger - responsible for adding response warning headers - and ThrottlingLogger - responsible for limiting duplicated log entries for the same key (previously deprecatedAndMaybeLog). Introducing a ThrottlingAndHeaderWarningLogger, which is a base for other common logging usages where both a response warning header and log throttling are needed. relates #55699 relates #52369 backports #55941
This commit is contained in:
parent
bff3c7470e
commit
f3f7d25316
@ -519,7 +519,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
||||
}
|
||||
|
||||
/**
|
||||
* Emulates Elasticsearch's DeprecationLogger.formatWarning in simple
|
||||
* Emulates Elasticsearch's HeaderWarningLogger.formatWarning in simple
|
||||
* cases. We don't have that available because we're testing against 1.7.
|
||||
*/
|
||||
private static String formatWarningWithoutDate(String warningBody) {
|
||||
|
@ -104,7 +104,7 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
|
||||
"] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
}
|
||||
}
|
||||
|
@ -250,7 +250,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
|
||||
filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
|
||||
filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||
deprecationLogger.deprecatedAndMaybeLog("edgeNGram_deprecation",
|
||||
deprecationLogger.deprecate("edgeNGram_deprecation",
|
||||
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the filter name to [edge_ngram] instead.");
|
||||
return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings);
|
||||
@ -275,7 +275,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
|
||||
filters.put("ngram", NGramTokenFilterFactory::new);
|
||||
filters.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||
deprecationLogger.deprecatedAndMaybeLog("nGram_deprecation",
|
||||
deprecationLogger.deprecate("nGram_deprecation",
|
||||
"The [nGram] token filter name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the filter name to [ngram] instead.");
|
||||
return new NGramTokenFilterFactory(indexSettings, environment, name, settings);
|
||||
@ -324,7 +324,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
tokenizers.put("thai", ThaiTokenizerFactory::new);
|
||||
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("nGram_tokenizer_deprecation",
|
||||
deprecationLogger.deprecate("nGram_tokenizer_deprecation",
|
||||
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the tokenizer name to [ngram] instead.");
|
||||
}
|
||||
@ -333,7 +333,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
tokenizers.put("ngram", NGramTokenizerFactory::new);
|
||||
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("edgeNGram_tokenizer_deprecation",
|
||||
deprecationLogger.deprecate("edgeNGram_tokenizer_deprecation",
|
||||
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the tokenizer name to [edge_ngram] instead.");
|
||||
}
|
||||
@ -414,7 +414,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
filters.add(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new));
|
||||
filters.add(PreConfiguredCharFilter.elasticsearchVersion("htmlStrip", false, (reader, version) -> {
|
||||
if (version.onOrAfter(org.elasticsearch.Version.V_6_3_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("htmlStrip_deprecation",
|
||||
deprecationLogger.deprecate("htmlStrip_deprecation",
|
||||
"The [htmpStrip] char filter name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the filter name to [html_strip] instead.");
|
||||
}
|
||||
@ -445,7 +445,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
|
||||
}
|
||||
if (version.onOrAfter(Version.V_6_2_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("analysis_delimited_payload_filter",
|
||||
deprecationLogger.deprecate("analysis_delimited_payload_filter",
|
||||
"Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
|
||||
}
|
||||
return new DelimitedPayloadTokenFilter(input,
|
||||
@ -465,7 +465,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
|
||||
+ "Please change the filter name to [edge_ngram] instead.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("edgeNGram_deprecation",
|
||||
deprecationLogger.deprecate("edgeNGram_deprecation",
|
||||
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the filter name to [edge_ngram] instead.");
|
||||
}
|
||||
@ -492,7 +492,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
throw new IllegalArgumentException("The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
|
||||
+ "Please change the filter name to [ngram] instead.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("nGram_deprecation",
|
||||
deprecationLogger.deprecate("nGram_deprecation",
|
||||
"The [nGram] token filter name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the filter name to [ngram] instead.");
|
||||
}
|
||||
@ -570,7 +570,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
// Temporary shim for aliases. TODO deprecate after they are moved
|
||||
tokenizers.add(PreConfiguredTokenizer.elasticsearchVersion("nGram", (version) -> {
|
||||
if (version.onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("nGram_tokenizer_deprecation",
|
||||
deprecationLogger.deprecate("nGram_tokenizer_deprecation",
|
||||
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the tokenizer name to [ngram] instead.");
|
||||
}
|
||||
@ -578,7 +578,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||
}));
|
||||
tokenizers.add(PreConfiguredTokenizer.elasticsearchVersion("edgeNGram", (version) -> {
|
||||
if (version.onOrAfter(org.elasticsearch.Version.V_7_6_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("edgeNGram_tokenizer_deprecation",
|
||||
deprecationLogger.deprecate("edgeNGram_tokenizer_deprecation",
|
||||
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||
+ "Please change the tokenizer name to [edge_ngram] instead.");
|
||||
}
|
||||
|
@ -71,7 +71,7 @@ public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
} else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
}
|
||||
|
||||
|
@ -93,7 +93,7 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -60,7 +60,7 @@ public class FingerprintTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -38,7 +38,7 @@ public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTo
|
||||
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
|
||||
}
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("analysis_legacy_delimited_payload_filter",
|
||||
deprecationLogger.deprecate("analysis_legacy_delimited_payload_filter",
|
||||
"Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
|
||||
}
|
||||
}
|
||||
|
@ -68,7 +68,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
}
|
||||
else {
|
||||
if (preserveOriginal) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return IDENTITY_FILTER;
|
||||
}
|
||||
@ -131,7 +131,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
}
|
||||
else {
|
||||
if (preserveOriginal) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return IDENTITY_FILTER;
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
|
||||
+ IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("ngram_big_difference",
|
||||
deprecationLogger.deprecate("ngram_big_difference",
|
||||
"Deprecated big difference between max_gram and min_gram in NGram Tokenizer,"
|
||||
+ "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]");
|
||||
}
|
||||
@ -73,7 +73,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -118,7 +118,7 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
|
||||
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
|
||||
+ IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("ngram_big_difference",
|
||||
deprecationLogger.deprecate("ngram_big_difference",
|
||||
"Deprecated big difference between max_gram and min_gram in NGram Tokenizer,"
|
||||
+ "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]");
|
||||
}
|
||||
|
@ -50,7 +50,7 @@ public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProv
|
||||
throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " +
|
||||
"use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
|
||||
} else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("standard_html_strip_deprecation",
|
||||
DEPRECATION_LOGGER.deprecate("standard_html_strip_deprecation",
|
||||
"Deprecated analyzer [standard_html_strip] used, " +
|
||||
"replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
|
||||
}
|
||||
|
@ -59,10 +59,9 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
this.settings = settings;
|
||||
|
||||
if (settings.get("ignore_case") != null) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog(
|
||||
"synonym_ignore_case_option",
|
||||
DEPRECATION_LOGGER.deprecate("synonym_ignore_case_option",
|
||||
"The ignore_case option on the synonym_graph filter is deprecated. " +
|
||||
"Instead, insert a lowercase filter in the filter chain before the synonym_graph filter.");
|
||||
"Instead, insert a lowercase filter in the filter chain before the synonym_graph filter.");
|
||||
}
|
||||
|
||||
this.expand = settings.getAsBoolean("expand", true);
|
||||
|
@ -113,7 +113,7 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -116,7 +116,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ public final class ScriptProcessor extends AbstractProcessor {
|
||||
new DeprecationLogger(LogManager.getLogger(DynamicMap.class));
|
||||
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = org.elasticsearch.common.collect.Map.of(
|
||||
"_type", value -> {
|
||||
deprecationLogger.deprecatedAndMaybeLog("script_processor",
|
||||
deprecationLogger.deprecate("script_processor",
|
||||
"[types removal] Looking up doc types [_type] in scripts is deprecated.");
|
||||
return value;
|
||||
});
|
||||
|
@ -315,7 +315,7 @@ public class UserAgentProcessor extends AbstractProcessor {
|
||||
}
|
||||
|
||||
if (useECS == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("ecs_false_non_common_schema",
|
||||
deprecationLogger.deprecate("ecs_false_non_common_schema",
|
||||
"setting [ecs] to false for non-common schema " +
|
||||
"format is deprecated and will be removed in 8.0, set to true or remove to use the non-deprecated format");
|
||||
}
|
||||
@ -358,7 +358,7 @@ public class UserAgentProcessor extends AbstractProcessor {
|
||||
Property value = valueOf(propertyName.toUpperCase(Locale.ROOT));
|
||||
if (DEPRECATED_PROPERTIES.contains(value)) {
|
||||
final String key = "user_agent_processor_property_" + propertyName.replaceAll("[^\\w_]+", "_");
|
||||
deprecationLogger.deprecatedAndMaybeLog(key,
|
||||
deprecationLogger.deprecate(key,
|
||||
"the [{}] property is deprecated for the user-agent processor", propertyName);
|
||||
}
|
||||
return value;
|
||||
|
@ -86,7 +86,7 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler {
|
||||
// Emit a single deprecation message if any search template contains types.
|
||||
for (SearchTemplateRequest searchTemplateRequest : multiRequest.requests()) {
|
||||
if (searchTemplateRequest.getRequest().types().length > 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -144,7 +144,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'min_children' field");
|
||||
}
|
||||
if (minChildren == 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("min_children", MIN_CHILDREN_0_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("min_children", MIN_CHILDREN_0_DEPRECATION_MESSAGE);
|
||||
}
|
||||
if (maxChildren < 0) {
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'max_children' field");
|
||||
|
@ -472,7 +472,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
||||
}
|
||||
GetRequest getRequest;
|
||||
if (indexedDocumentType != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("percolate_with_type", TYPE_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("percolate_with_type", TYPE_DEPRECATION_MESSAGE);
|
||||
getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentType, indexedDocumentId);
|
||||
} else {
|
||||
getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId);
|
||||
@ -543,7 +543,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
||||
final MapperService mapperService = context.getMapperService();
|
||||
String type = mapperService.documentMapper().type();
|
||||
if (documentType != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("percolate_with_document_type", DOCUMENT_TYPE_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("percolate_with_document_type", DOCUMENT_TYPE_DEPRECATION_MESSAGE);
|
||||
if (documentType.equals(type) == false) {
|
||||
throw new IllegalArgumentException("specified document_type [" + documentType +
|
||||
"] is not equal to the actual type [" + type + "]");
|
||||
|
@ -69,7 +69,7 @@ class ReindexValidator {
|
||||
state);
|
||||
SearchSourceBuilder searchSource = request.getSearchRequest().source();
|
||||
if (searchSource != null && searchSource.sorts() != null && searchSource.sorts().isEmpty() == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("reindex_sort", SORT_DEPRECATED_MESSAGE);
|
||||
deprecationLogger.deprecate("reindex_sort", SORT_DEPRECATED_MESSAGE);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -186,7 +186,7 @@ final class RemoteRequestBuilders {
|
||||
private static String encodeIndex(String s) {
|
||||
if (s.contains("%")) { // already encoded, pass-through to allow this in mixed version clusters
|
||||
checkIndexOrType("Index", s);
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("reindex_url_encoded_index", DEPRECATED_URL_ENCODED_INDEX_WARNING);
|
||||
DEPRECATION_LOGGER.deprecate("reindex_url_encoded_index", DEPRECATED_URL_ENCODED_INDEX_WARNING);
|
||||
return s;
|
||||
}
|
||||
try {
|
||||
|
@ -62,7 +62,7 @@ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory
|
||||
String unicodeSetFilter = settings.get("unicodeSetFilter");
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
if (unicodeSetFilter != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("icu_normalizer_unicode_set_filter",
|
||||
deprecationLogger.deprecate("icu_normalizer_unicode_set_filter",
|
||||
"[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]");
|
||||
} else {
|
||||
unicodeSetFilter = settings.get("unicode_set_filter");
|
||||
|
@ -152,7 +152,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ public class AzureDiscoveryPlugin extends Plugin implements DiscoveryPlugin {
|
||||
|
||||
public AzureDiscoveryPlugin(Settings settings) {
|
||||
this.settings = settings;
|
||||
deprecationLogger.deprecatedAndMaybeLog("azure_discovery_plugin", "azure classic discovery plugin is deprecated.");
|
||||
deprecationLogger.deprecate("azure_discovery_plugin", "azure classic discovery plugin is deprecated.");
|
||||
logger.trace("starting azure classic discovery plugin...");
|
||||
}
|
||||
|
||||
|
@ -135,13 +135,13 @@ final class Ec2ClientSettings {
|
||||
return null;
|
||||
} else {
|
||||
if (key.length() == 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("ec2_invalid_settings",
|
||||
deprecationLogger.deprecate("ec2_invalid_settings",
|
||||
"Setting [{}] is set but [{}] is not, which will be unsupported in future",
|
||||
SECRET_KEY_SETTING.getKey(), ACCESS_KEY_SETTING.getKey());
|
||||
}
|
||||
if (secret.length() == 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("ec2_invalid_settings",
|
||||
"Setting [{}] is set but [{}] is not, which will be unsupported in future",
|
||||
deprecationLogger.deprecate("ec2_invalid_settings",
|
||||
"Setting [{}] is set but [{}] is not, which will be unsupported in future",
|
||||
ACCESS_KEY_SETTING.getKey(), SECRET_KEY_SETTING.getKey());
|
||||
}
|
||||
|
||||
|
@ -248,7 +248,7 @@ class S3Repository extends BlobStoreRepository {
|
||||
|
||||
if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) {
|
||||
// provided repository settings
|
||||
deprecationLogger.deprecatedAndMaybeLog("s3_repository_secret_settings",
|
||||
deprecationLogger.deprecate("s3_repository_secret_settings",
|
||||
"Using s3 access/secret key from repository settings. Instead "
|
||||
+ "store these in named clients and the elasticsearch keystore for secure settings.");
|
||||
}
|
||||
|
@ -67,6 +67,7 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
assert "false".equals(System.getProperty("tests.security.manager")) : "-Dtests.security.manager=false has to be set";
|
||||
super.setUp();
|
||||
LogConfigurator.registerErrorListener();
|
||||
}
|
||||
@ -126,7 +127,7 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
}
|
||||
for (int j = 0; j < iterations; j++) {
|
||||
for (final Integer id : ids) {
|
||||
deprecationLogger.deprecatedAndMaybeLog(Integer.toString(id), "This is a maybe logged deprecation message" + id);
|
||||
deprecationLogger.deprecate(Integer.toString(id), "This is a maybe logged deprecation message" + id);
|
||||
}
|
||||
}
|
||||
|
||||
@ -137,12 +138,12 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
*/
|
||||
final List<String> warnings = threadContext.getResponseHeaders().get("Warning");
|
||||
final Set<String> actualWarningValues =
|
||||
warnings.stream().map(s -> DeprecationLogger.extractWarningValueFromWarningHeader(s, true))
|
||||
warnings.stream().map(s -> HeaderWarning.extractWarningValueFromWarningHeader(s, true))
|
||||
.collect(Collectors.toSet());
|
||||
for (int j = 0; j < 128; j++) {
|
||||
assertThat(
|
||||
actualWarningValues,
|
||||
hasItem(DeprecationLogger.escapeAndEncode("This is a maybe logged deprecation message" + j)));
|
||||
hasItem(HeaderWarning.escapeAndEncode("This is a maybe logged deprecation message" + j)));
|
||||
}
|
||||
|
||||
try {
|
||||
@ -174,7 +175,7 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
assertLogLine(
|
||||
deprecationEvents.get(i),
|
||||
Level.WARN,
|
||||
"org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run",
|
||||
"org.elasticsearch.common.logging.ThrottlingLogger\\$2\\.run",
|
||||
"This is a maybe logged deprecation message" + i);
|
||||
}
|
||||
|
||||
@ -192,17 +193,17 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
final int iterations = randomIntBetween(1, 16);
|
||||
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("key", "This is a maybe logged deprecation message");
|
||||
deprecationLogger.deprecate("key", "This is a maybe logged deprecation message");
|
||||
assertWarnings("This is a maybe logged deprecation message");
|
||||
}
|
||||
for (int k = 0; k < 128; k++) {
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("key" + k, "This is a maybe logged deprecation message" + k);
|
||||
deprecationLogger.deprecate("key" + k, "This is a maybe logged deprecation message" + k);
|
||||
assertWarnings("This is a maybe logged deprecation message" + k);
|
||||
}
|
||||
}
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("key", "This is a maybe logged deprecation message");
|
||||
deprecationLogger.deprecate("key", "This is a maybe logged deprecation message");
|
||||
assertWarnings("This is a maybe logged deprecation message");
|
||||
}
|
||||
|
||||
@ -216,13 +217,13 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
assertLogLine(
|
||||
deprecationEvents.get(0),
|
||||
Level.WARN,
|
||||
"org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run",
|
||||
"org.elasticsearch.common.logging.ThrottlingLogger\\$2\\.run",
|
||||
"This is a maybe logged deprecation message");
|
||||
for (int k = 0; k < 128; k++) {
|
||||
assertLogLine(
|
||||
deprecationEvents.get(1 + k),
|
||||
Level.WARN,
|
||||
"org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run",
|
||||
"org.elasticsearch.common.logging.ThrottlingLogger\\$2\\.run",
|
||||
"This is a maybe logged deprecation message" + k);
|
||||
}
|
||||
}
|
||||
@ -250,7 +251,7 @@ public class EvilLoggerTests extends ESTestCase {
|
||||
assertLogLine(
|
||||
deprecationEvents.get(0),
|
||||
Level.WARN,
|
||||
"org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run",
|
||||
"org.elasticsearch.common.logging.ThrottlingLogger\\$2\\.run",
|
||||
"\\[deprecated.foo\\] setting was deprecated in Elasticsearch and will be removed in a future release! " +
|
||||
"See the breaking changes documentation for the next major version.");
|
||||
}
|
||||
|
@ -79,7 +79,7 @@ public class JsonLoggerTests extends ESTestCase {
|
||||
}
|
||||
public void testDeprecatedMessage() throws IOException {
|
||||
final Logger testLogger = LogManager.getLogger("test");
|
||||
testLogger.info(new DeprecatedMessage("deprecated message1", "someId"));
|
||||
testLogger.info(new DeprecatedMessage("someId", "deprecated message1"));
|
||||
|
||||
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"),
|
||||
System.getProperty("es.logs.cluster_name") + "_deprecated.json");
|
||||
@ -104,9 +104,9 @@ public class JsonLoggerTests extends ESTestCase {
|
||||
|
||||
public void testDeprecatedMessageWithoutXOpaqueId() throws IOException {
|
||||
final Logger testLogger = LogManager.getLogger("test");
|
||||
testLogger.info(new DeprecatedMessage("deprecated message1", "someId"));
|
||||
testLogger.info(new DeprecatedMessage("deprecated message2", ""));
|
||||
testLogger.info(new DeprecatedMessage("deprecated message3", null));
|
||||
testLogger.info(new DeprecatedMessage("someId", "deprecated message1"));
|
||||
testLogger.info(new DeprecatedMessage("", "deprecated message2"));
|
||||
testLogger.info(new DeprecatedMessage(null, "deprecated message3"));
|
||||
testLogger.info("deprecated message4");
|
||||
|
||||
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"),
|
||||
@ -277,8 +277,8 @@ public class JsonLoggerTests extends ESTestCase {
|
||||
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
|
||||
threadContext.putHeader(Task.X_OPAQUE_ID, "ID1");
|
||||
DeprecationLogger.setThreadContext(threadContext);
|
||||
deprecationLogger.deprecatedAndMaybeLog("key", "message1");
|
||||
deprecationLogger.deprecatedAndMaybeLog("key", "message2");
|
||||
deprecationLogger.deprecate("key", "message1");
|
||||
deprecationLogger.deprecate("key", "message2");
|
||||
assertWarnings("message1", "message2");
|
||||
|
||||
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"),
|
||||
@ -309,8 +309,8 @@ public class JsonLoggerTests extends ESTestCase {
|
||||
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
|
||||
threadContext.putHeader(Task.X_OPAQUE_ID, "ID2");
|
||||
DeprecationLogger.setThreadContext(threadContext);
|
||||
deprecationLogger.deprecatedAndMaybeLog("key", "message1");
|
||||
deprecationLogger.deprecatedAndMaybeLog("key", "message2");
|
||||
deprecationLogger.deprecate("key", "message1");
|
||||
deprecationLogger.deprecate("key", "message2");
|
||||
assertWarnings("message1", "message2");
|
||||
|
||||
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"),
|
||||
|
@ -84,7 +84,7 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest<AddVotin
|
||||
}
|
||||
|
||||
if (nodeDescriptions.length > 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("voting_config_exclusion", DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("voting_config_exclusion", DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
this.nodeDescriptions = nodeDescriptions;
|
||||
@ -106,7 +106,7 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest<AddVotin
|
||||
timeout = in.readTimeValue();
|
||||
|
||||
if (nodeDescriptions.length > 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("voting_config_exclusion",
|
||||
deprecationLogger.deprecate("voting_config_exclusion",
|
||||
"nodeDescription is deprecated and will be removed, use nodeIds or nodeNames instead");
|
||||
}
|
||||
|
||||
|
@ -498,7 +498,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
|
||||
if (!(entry.getValue() instanceof Map)) {
|
||||
throw new IllegalArgumentException("malformed settings section");
|
||||
}
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("RestoreSnapshotRequest#settings",
|
||||
DEPRECATION_LOGGER.deprecate("RestoreSnapshotRequest#settings",
|
||||
"specifying [settings] when restoring a snapshot has no effect and will not be supported in a future version");
|
||||
} else if (name.equals("include_global_state")) {
|
||||
includeGlobalState = nodeBooleanValue(entry.getValue(), "include_global_state");
|
||||
|
@ -343,7 +343,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
|
||||
if (name.equals("template")) {
|
||||
// This is needed to allow for bwc (beats, logstash) with pre-5.0 templates (#21009)
|
||||
if(entry.getValue() instanceof String) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("put_index_template_field",
|
||||
deprecationLogger.deprecate("put_index_template_field",
|
||||
"Deprecated field [template] used, replaced by [index_patterns]");
|
||||
patterns(Collections.singletonList((String) entry.getValue()));
|
||||
}
|
||||
|
@ -208,7 +208,7 @@ public final class BulkRequestParser {
|
||||
index = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity());
|
||||
} else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
if (warnOnTypeUsage && typesDeprecationLogged == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE);
|
||||
typesDeprecationLogged = true;
|
||||
}
|
||||
type = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity());
|
||||
|
@ -187,7 +187,7 @@ public class SimulatePipelineRequest extends ActionRequest implements ToXContent
|
||||
String index = ConfigurationUtils.readStringOrIntProperty(null, null,
|
||||
dataMap, Metadata.INDEX.getFieldName(), "_index");
|
||||
if (dataMap.containsKey(Metadata.TYPE.getFieldName())) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("simulate_pipeline_with_types",
|
||||
deprecationLogger.deprecate("simulate_pipeline_with_types",
|
||||
"[types removal] specifying _type in pipeline simulation requests is deprecated");
|
||||
}
|
||||
String type = ConfigurationUtils.readStringOrIntProperty(null, null,
|
||||
|
@ -195,7 +195,7 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
|
||||
// support first line with \n
|
||||
if (nextMarker == 0) {
|
||||
from = nextMarker + 1;
|
||||
deprecationLogger.deprecatedAndMaybeLog("multi_search_empty_first_line",
|
||||
deprecationLogger.deprecate("multi_search_empty_first_line",
|
||||
"support for empty first line before any action metadata in msearch API is deprecated and " +
|
||||
"will be removed in the next major version");
|
||||
continue;
|
||||
|
@ -615,7 +615,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
|
||||
termVectorsRequest.index = parser.text();
|
||||
} else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
termVectorsRequest.type = parser.text();
|
||||
deprecationLogger.deprecatedAndMaybeLog("termvectors_with_types",
|
||||
deprecationLogger.deprecate("termvectors_with_types",
|
||||
RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
|
||||
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
if (termVectorsRequest.doc != null) {
|
||||
|
@ -361,7 +361,7 @@ final class Bootstrap {
|
||||
"future versions of Elasticsearch will require Java 11; " +
|
||||
"your Java version from [%s] does not meet this requirement",
|
||||
System.getProperty("java.home"));
|
||||
new DeprecationLogger(LogManager.getLogger(Bootstrap.class)).deprecatedAndMaybeLog("java_version_11_required", message);
|
||||
new DeprecationLogger(LogManager.getLogger(Bootstrap.class)).deprecate("java_version_11_required", message);
|
||||
}
|
||||
if (environment.pidFile() != null) {
|
||||
try {
|
||||
|
@ -103,7 +103,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
|
||||
this.settings = settings;
|
||||
this.mappings = mappings;
|
||||
if (this.mappings.size() > 1) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("index-templates",
|
||||
deprecationLogger.deprecate("index-templates",
|
||||
"Index template {} contains multiple typed mappings; templates in 8x will only support a single mapping",
|
||||
name);
|
||||
}
|
||||
|
@ -196,7 +196,7 @@ public class MetadataCreateIndexService {
|
||||
} else if (isHidden) {
|
||||
logger.trace("index [{}] is a hidden index", index);
|
||||
} else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("index_name_starts_with_dot",
|
||||
DEPRECATION_LOGGER.deprecate("index_name_starts_with_dot",
|
||||
"index name [{}] starts with a dot '.', in the next major version, index names " +
|
||||
"starting with a dot are reserved for hidden indices and system indices", index);
|
||||
}
|
||||
@ -346,7 +346,7 @@ public class MetadataCreateIndexService {
|
||||
request.index(), isHiddenFromRequest);
|
||||
|
||||
if (v1Templates.size() > 1) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("index_template_multiple_match",
|
||||
DEPRECATION_LOGGER.deprecate("index_template_multiple_match",
|
||||
"index [{}] matches multiple legacy templates [{}], composable templates will only match a single template",
|
||||
request.index(), v1Templates.stream().map(IndexTemplateMetadata::name).sorted().collect(Collectors.joining(", ")));
|
||||
}
|
||||
@ -688,7 +688,7 @@ public class MetadataCreateIndexService {
|
||||
*/
|
||||
shardLimitValidator.validateShardLimit(indexSettings, currentState);
|
||||
if (indexSettings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("soft_deletes_disabled",
|
||||
DEPRECATION_LOGGER.deprecate("soft_deletes_disabled",
|
||||
"Creating indices with soft-deletes disabled is deprecated and will be removed in future Elasticsearch versions. " +
|
||||
"Please do not specify value for setting [index.soft_deletes.enabled] of index [" + request.index() + "].");
|
||||
}
|
||||
@ -1156,7 +1156,7 @@ public class MetadataCreateIndexService {
|
||||
if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexSettings) &&
|
||||
(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.exists(indexSettings)
|
||||
|| IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.exists(indexSettings))) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("translog_retention", "Translog retention settings [index.translog.retention.age] "
|
||||
DEPRECATION_LOGGER.deprecate("translog_retention", "Translog retention settings [index.translog.retention.age] "
|
||||
+ "and [index.translog.retention.size] are deprecated and effectively ignored. They will be removed in a future version.");
|
||||
}
|
||||
}
|
||||
|
@ -41,7 +41,7 @@ import org.elasticsearch.common.ValidationException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.logging.HeaderWarning;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.IndexScopedSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
@ -99,8 +99,6 @@ public class MetadataIndexTemplateService {
|
||||
" }\n" +
|
||||
" }";
|
||||
private static final Logger logger = LogManager.getLogger(MetadataIndexTemplateService.class);
|
||||
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
|
||||
|
||||
private final ClusterService clusterService;
|
||||
private final AliasValidator aliasValidator;
|
||||
private final IndicesService indicesService;
|
||||
@ -466,7 +464,7 @@ public class MetadataIndexTemplateService {
|
||||
.collect(Collectors.joining(",")),
|
||||
name);
|
||||
logger.warn(warning);
|
||||
deprecationLogger.deprecatedAndMaybeLog("index_template_pattern_overlap", warning);
|
||||
HeaderWarning.addWarning(warning);
|
||||
}
|
||||
|
||||
ComposableIndexTemplate finalIndexTemplate = template;
|
||||
@ -784,7 +782,7 @@ public class MetadataIndexTemplateService {
|
||||
.collect(Collectors.joining(",")),
|
||||
request.name);
|
||||
logger.warn(warning);
|
||||
deprecationLogger.deprecatedAndMaybeLog("index_template_pattern_overlap", warning);
|
||||
HeaderWarning.addWarning(warning);
|
||||
}
|
||||
|
||||
templateBuilder.order(request.order);
|
||||
|
@ -67,7 +67,7 @@ public class OperationRouting {
|
||||
if (ignoreAwarenessAttr == false) {
|
||||
awarenessAttributes = AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings);
|
||||
if (awarenessAttributes.isEmpty() == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("searches_not_routed_on_awareness_attributes",
|
||||
deprecationLogger.deprecate("searches_not_routed_on_awareness_attributes",
|
||||
IGNORE_AWARENESS_ATTRIBUTES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
clusterSettings.addSettingsUpdateConsumer(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING,
|
||||
@ -92,7 +92,7 @@ public class OperationRouting {
|
||||
boolean ignoreAwarenessAttr = parseBoolean(System.getProperty(IGNORE_AWARENESS_ATTRIBUTES_PROPERTY), false);
|
||||
if (ignoreAwarenessAttr == false) {
|
||||
if (this.awarenessAttributes.isEmpty() && awarenessAttributes.isEmpty() == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("searches_not_routed_on_awareness_attributes",
|
||||
deprecationLogger.deprecate("searches_not_routed_on_awareness_attributes",
|
||||
IGNORE_AWARENESS_ATTRIBUTES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
this.awarenessAttributes = awarenessAttributes;
|
||||
|
@ -101,7 +101,7 @@ public class DiskThresholdMonitor {
|
||||
this.diskThresholdSettings = new DiskThresholdSettings(settings, clusterSettings);
|
||||
this.client = client;
|
||||
if (diskThresholdSettings.isAutoReleaseIndexEnabled() == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog(DiskThresholdSettings.AUTO_RELEASE_INDEX_ENABLED_KEY.replace(".", "_"),
|
||||
deprecationLogger.deprecate(DiskThresholdSettings.AUTO_RELEASE_INDEX_ENABLED_KEY.replace(".", "_"),
|
||||
"[{}] will be removed in version {}",
|
||||
DiskThresholdSettings.AUTO_RELEASE_INDEX_ENABLED_KEY, Version.V_7_4_0.major + 1);
|
||||
}
|
||||
@ -305,7 +305,7 @@ public class DiskThresholdMonitor {
|
||||
logger.info("releasing read-only-allow-delete block on indices: [{}]", indicesToAutoRelease);
|
||||
updateIndicesReadOnly(indicesToAutoRelease, listener, false);
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog(
|
||||
deprecationLogger.deprecate(
|
||||
DiskThresholdSettings.AUTO_RELEASE_INDEX_ENABLED_KEY.replace(".", "_"),
|
||||
"[{}] will be removed in version {}",
|
||||
DiskThresholdSettings.AUTO_RELEASE_INDEX_ENABLED_KEY, Version.V_7_4_0.major + 1);
|
||||
|
@ -77,7 +77,7 @@ public class Joda {
|
||||
String msg = "Camel case format name {} is deprecated and will be removed in a future version. " +
|
||||
"Use snake case name {} instead.";
|
||||
getDeprecationLogger()
|
||||
.deprecatedAndMaybeLog("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName());
|
||||
.deprecate("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName());
|
||||
}
|
||||
|
||||
DateTimeFormatter formatter;
|
||||
@ -286,7 +286,7 @@ public class Joda {
|
||||
private static void maybeLogJodaDeprecation(String format) {
|
||||
if (JodaDeprecationPatterns.isDeprecatedPattern(format)) {
|
||||
String suggestion = JodaDeprecationPatterns.formatSuggestion(format);
|
||||
getDeprecationLogger().deprecatedAndMaybeLog("joda-pattern-deprecation",
|
||||
getDeprecationLogger().deprecate("joda-pattern-deprecation",
|
||||
suggestion + " " + JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS);
|
||||
}
|
||||
}
|
||||
@ -396,11 +396,11 @@ public class Joda {
|
||||
long millis = new BigDecimal(text).longValue() * factor;
|
||||
// check for deprecations, but after it has parsed correctly so invalid values aren't counted as deprecated
|
||||
if (millis < 0) {
|
||||
getDeprecationLogger().deprecatedAndMaybeLog("epoch-negative", "Use of negative values" +
|
||||
getDeprecationLogger().deprecate("epoch-negative", "Use of negative values" +
|
||||
" in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
|
||||
}
|
||||
if (scientificNotation.matcher(text).find()) {
|
||||
getDeprecationLogger().deprecatedAndMaybeLog("epoch-scientific-notation", "Use of scientific notation" +
|
||||
getDeprecationLogger().deprecate("epoch-scientific-notation", "Use of scientific notation" +
|
||||
" in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
|
||||
}
|
||||
DateTime dt = new DateTime(millis, DateTimeZone.UTC);
|
||||
|
@ -31,7 +31,7 @@ import java.util.Map;
|
||||
*/
|
||||
public class DeprecatedMessage extends ESLogMessage {
|
||||
|
||||
public DeprecatedMessage(String messagePattern, String xOpaqueId, Object... args) {
|
||||
public DeprecatedMessage(String xOpaqueId, String messagePattern, Object... args) {
|
||||
super(fieldMap(xOpaqueId), messagePattern, args);
|
||||
}
|
||||
|
||||
|
@ -21,79 +21,24 @@ package org.elasticsearch.common.logging;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.SuppressLoggerChecks;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.util.BitSet;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CopyOnWriteArraySet;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* A logger that logs deprecation notices.
|
||||
* A logger that logs deprecation notices. Logger should be initialized with a parent logger which name will be used
|
||||
* for deprecation logger. For instance <code>new DeprecationLogger("org.elasticsearch.test.SomeClass")</code> will
|
||||
* result in a deprecation logger with name <code>org.elasticsearch.deprecation.test.SomeClass</code>. This allows to use
|
||||
* <code>deprecation</code> logger defined in log4j2.properties.
|
||||
*
|
||||
* Deprecation logs are written to deprecation log file - defined in log4j2.properties, as well as warnings added to a response header.
|
||||
* All deprecation usages are throttled basing on a key. Key is a string provided in an argument and can be prefixed with
|
||||
* <code>X-Opaque-Id</code>. This allows to throttle deprecations per client usage.
|
||||
* <code>deprecationLogger.deprecate("key","message {}", "param");</code>
|
||||
*
|
||||
* @see ThrottlingAndHeaderWarningLogger for throttling and header warnings implementation details
|
||||
*/
|
||||
public class DeprecationLogger {
|
||||
|
||||
private final Logger logger;
|
||||
|
||||
/**
|
||||
* This is set once by the {@code Node} constructor, but it uses {@link CopyOnWriteArraySet} to ensure that tests can run in parallel.
|
||||
* <p>
|
||||
* Integration tests will create separate nodes within the same classloader, thus leading to a shared, {@code static} state.
|
||||
* In order for all tests to appropriately be handled, this must be able to remember <em>all</em> {@link ThreadContext}s that it is
|
||||
* given in a thread safe manner.
|
||||
* <p>
|
||||
* For actual usage, multiple nodes do not share the same JVM and therefore this will only be set once in practice.
|
||||
*/
|
||||
private static final CopyOnWriteArraySet<ThreadContext> THREAD_CONTEXT = new CopyOnWriteArraySet<>();
|
||||
|
||||
/**
|
||||
* Set the {@link ThreadContext} used to add deprecation headers to network responses.
|
||||
* <p>
|
||||
* This is expected to <em>only</em> be invoked by the {@code Node}'s constructor (therefore once outside of tests).
|
||||
*
|
||||
* @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node})
|
||||
* @throws IllegalStateException if this {@code threadContext} has already been set
|
||||
*/
|
||||
public static void setThreadContext(ThreadContext threadContext) {
|
||||
Objects.requireNonNull(threadContext, "Cannot register a null ThreadContext");
|
||||
|
||||
// add returning false means it _did_ have it already
|
||||
if (THREAD_CONTEXT.add(threadContext) == false) {
|
||||
throw new IllegalStateException("Double-setting ThreadContext not allowed!");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the {@link ThreadContext} used to add deprecation headers to network responses.
|
||||
* <p>
|
||||
* This is expected to <em>only</em> be invoked by the {@code Node}'s {@code close} method (therefore once outside of tests).
|
||||
*
|
||||
* @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node})
|
||||
* @throws IllegalStateException if this {@code threadContext} is unknown (and presumably already unset before)
|
||||
*/
|
||||
public static void removeThreadContext(ThreadContext threadContext) {
|
||||
assert threadContext != null;
|
||||
|
||||
// remove returning false means it did not have it already
|
||||
if (THREAD_CONTEXT.remove(threadContext) == false) {
|
||||
throw new IllegalStateException("Removing unknown ThreadContext not allowed!");
|
||||
}
|
||||
}
|
||||
private final ThrottlingAndHeaderWarningLogger deprecationLogger;
|
||||
|
||||
/**
|
||||
* Creates a new deprecation logger based on the parent logger. Automatically
|
||||
@ -102,316 +47,36 @@ public class DeprecationLogger {
|
||||
* the "org.elasticsearch" namespace.
|
||||
*/
|
||||
public DeprecationLogger(Logger parentLogger) {
|
||||
deprecationLogger = new ThrottlingAndHeaderWarningLogger(deprecatedLoggerName(parentLogger));
|
||||
}
|
||||
|
||||
private static Logger deprecatedLoggerName(Logger parentLogger) {
|
||||
String name = parentLogger.getName();
|
||||
if (name.startsWith("org.elasticsearch")) {
|
||||
name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation.");
|
||||
} else {
|
||||
name = "deprecation." + name;
|
||||
}
|
||||
this.logger = LogManager.getLogger(name);
|
||||
return LogManager.getLogger(name);
|
||||
}
|
||||
|
||||
// LRU set of keys used to determine if a deprecation message should be emitted to the deprecation logs
|
||||
private final Set<String> keys = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<String, Boolean>() {
|
||||
@Override
|
||||
protected boolean removeEldestEntry(final Map.Entry<String, Boolean> eldest) {
|
||||
return size() > 128;
|
||||
}
|
||||
}));
|
||||
|
||||
/**
|
||||
* Adds a formatted warning message as a response header on the thread context, and logs a deprecation message if the associated key has
|
||||
* not recently been seen.
|
||||
*
|
||||
* @param key the key used to determine if this deprecation should be logged
|
||||
* @param msg the message to log
|
||||
* @param params parameters to the message
|
||||
*/
|
||||
public void deprecatedAndMaybeLog(final String key, final String msg, final Object... params) {
|
||||
String xOpaqueId = getXOpaqueId(THREAD_CONTEXT);
|
||||
boolean shouldLog = keys.add(xOpaqueId + key);
|
||||
deprecated(THREAD_CONTEXT, msg, shouldLog, params);
|
||||
public static void setThreadContext(ThreadContext threadContext) {
|
||||
HeaderWarning.setThreadContext(threadContext);
|
||||
}
|
||||
|
||||
/*
|
||||
* RFC7234 specifies the warning format as warn-code <space> warn-agent <space> "warn-text" [<space> "warn-date"]. Here, warn-code is a
|
||||
* three-digit number with various standard warn codes specified. The warn code 299 is apt for our purposes as it represents a
|
||||
* miscellaneous persistent warning (can be presented to a human, or logged, and must not be removed by a cache). The warn-agent is an
|
||||
* arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional
|
||||
* quoted field that can be in a variety of specified date formats; here we use RFC 1123 format.
|
||||
*/
|
||||
private static final String WARNING_PREFIX =
|
||||
String.format(
|
||||
Locale.ROOT,
|
||||
"299 Elasticsearch-%s%s-%s",
|
||||
Version.CURRENT.toString(),
|
||||
Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "",
|
||||
Build.CURRENT.hash());
|
||||
|
||||
/**
|
||||
* Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code
|
||||
* is always 299. Further, this pattern assumes that the warn agent represents a version of Elasticsearch including the build hash.
|
||||
*/
|
||||
public static final Pattern WARNING_HEADER_PATTERN = Pattern.compile(
|
||||
"299 " + // warn code
|
||||
"Elasticsearch-" + // warn agent
|
||||
"\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-" + // warn agent
|
||||
"(?:[a-f0-9]{7}(?:[a-f0-9]{33})?|unknown) " + // warn agent
|
||||
"\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\"( " + // quoted warning value, captured
|
||||
// quoted RFC 1123 date format
|
||||
"\"" + // opening quote
|
||||
"(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday
|
||||
"\\d{2} " + // 2-digit day
|
||||
"(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month
|
||||
"\\d{4} " + // 4-digit year
|
||||
"\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second)
|
||||
"GMT" + // GMT
|
||||
"\")?"); // closing quote (optional, since an older version can still send a warn-date)
|
||||
|
||||
public static final Pattern WARNING_XCONTENT_LOCATION_PATTERN = Pattern.compile("^\\[.*?]\\[-?\\d+:-?\\d+] ");
|
||||
|
||||
/**
|
||||
* Extracts the warning value from the value of a warning header that is formatted according to RFC 7234. That is, given a string
|
||||
* {@code 299 Elasticsearch-6.0.0 "warning value"}, the return value of this method would be {@code warning value}.
|
||||
*
|
||||
* @param s the value of a warning header formatted according to RFC 7234.
|
||||
* @return the extracted warning value
|
||||
*/
|
||||
public static String extractWarningValueFromWarningHeader(final String s, boolean stripXContentPosition) {
|
||||
/*
|
||||
* We know the exact format of the warning header, so to extract the warning value we can skip forward from the front to the first
|
||||
* quote and we know the last quote is at the end of the string
|
||||
*
|
||||
* 299 Elasticsearch-6.0.0 "warning value"
|
||||
* ^ ^
|
||||
* firstQuote lastQuote
|
||||
*
|
||||
* We parse this manually rather than using the capturing regular expression because the regular expression involves a lot of
|
||||
* backtracking and carries a performance penalty. However, when assertions are enabled, we still use the regular expression to
|
||||
* verify that we are maintaining the warning header format.
|
||||
*/
|
||||
final int firstQuote = s.indexOf('\"');
|
||||
final int lastQuote = s.length() - 1;
|
||||
String warningValue = s.substring(firstQuote + 1, lastQuote);
|
||||
assert assertWarningValue(s, warningValue);
|
||||
if (stripXContentPosition) {
|
||||
Matcher matcher = WARNING_XCONTENT_LOCATION_PATTERN.matcher(warningValue);
|
||||
if (matcher.find()) {
|
||||
warningValue = warningValue.substring(matcher.end());
|
||||
}
|
||||
}
|
||||
return warningValue;
|
||||
public static void removeThreadContext(ThreadContext threadContext) {
|
||||
HeaderWarning.removeThreadContext(threadContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that the specified string has the warning value equal to the provided warning value.
|
||||
*
|
||||
* @param s the string representing a full warning header
|
||||
* @param warningValue the expected warning header
|
||||
* @return {@code true} if the specified string has the expected warning value
|
||||
* Logs a deprecation message, adding a formatted warning message as a response header on the thread context.
|
||||
* The deprecation message will be throttled to deprecation log.
|
||||
*/
|
||||
private static boolean assertWarningValue(final String s, final String warningValue) {
|
||||
final Matcher matcher = WARNING_HEADER_PATTERN.matcher(s);
|
||||
final boolean matches = matcher.matches();
|
||||
assert matches;
|
||||
return matcher.group(1).equals(warningValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs a deprecated message to the deprecation log, as well as to the local {@link ThreadContext}.
|
||||
*
|
||||
* @param threadContexts The node's {@link ThreadContext} (outside of concurrent tests, this should only ever have one context).
|
||||
* @param message The deprecation message.
|
||||
* @param params The parameters used to fill in the message, if any exist.
|
||||
*/
|
||||
void deprecated(final Set<ThreadContext> threadContexts, final String message, final Object... params) {
|
||||
deprecated(threadContexts, message, true, params);
|
||||
}
|
||||
|
||||
void deprecated(final Set<ThreadContext> threadContexts, final String message, final boolean shouldLog, final Object... params) {
|
||||
final Iterator<ThreadContext> iterator = threadContexts.iterator();
|
||||
if (iterator.hasNext()) {
|
||||
final String formattedMessage = LoggerMessageFormat.format(message, params);
|
||||
final String warningHeaderValue = formatWarning(formattedMessage);
|
||||
assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches();
|
||||
assert extractWarningValueFromWarningHeader(warningHeaderValue, false).equals(escapeAndEncode(formattedMessage));
|
||||
while (iterator.hasNext()) {
|
||||
try {
|
||||
final ThreadContext next = iterator.next();
|
||||
next.addResponseHeader("Warning", warningHeaderValue);
|
||||
} catch (final IllegalStateException e) {
|
||||
// ignored; it should be removed shortly
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldLog) {
|
||||
AccessController.doPrivileged(new PrivilegedAction<Void>() {
|
||||
@SuppressLoggerChecks(reason = "safely delegates to logger")
|
||||
@Override
|
||||
public Void run() {
|
||||
/*
|
||||
* There should be only one threadContext (in prod env), @see DeprecationLogger#setThreadContext
|
||||
*/
|
||||
String opaqueId = getXOpaqueId(threadContexts);
|
||||
|
||||
logger.warn(new DeprecatedMessage(message, opaqueId, params));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public String getXOpaqueId(Set<ThreadContext> threadContexts) {
|
||||
return threadContexts.stream()
|
||||
.filter(t -> t.getHeader(Task.X_OPAQUE_ID) != null)
|
||||
.findFirst()
|
||||
.map(t -> t.getHeader(Task.X_OPAQUE_ID))
|
||||
.orElse("");
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a warning string in the proper warning format by prepending a warn code, warn agent, wrapping the warning string in quotes,
|
||||
* and appending the RFC 7231 date.
|
||||
*
|
||||
* @param s the warning string to format
|
||||
* @return a warning value formatted according to RFC 7234
|
||||
*/
|
||||
public static String formatWarning(final String s) {
|
||||
// Assume that the common scenario won't have a string to escape and encode.
|
||||
int length = WARNING_PREFIX.length() + s.length() + 3;
|
||||
final StringBuilder sb = new StringBuilder(length);
|
||||
sb.append(WARNING_PREFIX).append(" \"").append(escapeAndEncode(s)).append("\"");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape and encode a string as a valid RFC 7230 quoted-string.
|
||||
*
|
||||
* @param s the string to escape and encode
|
||||
* @return the escaped and encoded string
|
||||
*/
|
||||
public static String escapeAndEncode(final String s) {
|
||||
return encode(escapeBackslashesAndQuotes(s));
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape backslashes and quotes in the specified string.
|
||||
*
|
||||
* @param s the string to escape
|
||||
* @return the escaped string
|
||||
*/
|
||||
static String escapeBackslashesAndQuotes(final String s) {
|
||||
/*
|
||||
* We want a fast path check to avoid creating the string builder and copying characters if needed. So we walk the string looking
|
||||
* for either of the characters that we need to escape. If we find a character that needs escaping, we start over and
|
||||
*/
|
||||
boolean escapingNeeded = false;
|
||||
for (int i = 0; i < s.length(); i++) {
|
||||
final char c = s.charAt(i);
|
||||
if (c == '\\' || c == '"') {
|
||||
escapingNeeded = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (escapingNeeded) {
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
for (final char c : s.toCharArray()) {
|
||||
if (c == '\\' || c == '"') {
|
||||
sb.append("\\");
|
||||
}
|
||||
sb.append(c);
|
||||
}
|
||||
return sb.toString();
|
||||
} else {
|
||||
return s;
|
||||
}
|
||||
}
|
||||
|
||||
private static BitSet doesNotNeedEncoding;
|
||||
|
||||
static {
|
||||
doesNotNeedEncoding = new BitSet(1 + 0xFF);
|
||||
doesNotNeedEncoding.set('\t');
|
||||
doesNotNeedEncoding.set(' ');
|
||||
doesNotNeedEncoding.set('!');
|
||||
doesNotNeedEncoding.set('\\');
|
||||
doesNotNeedEncoding.set('"');
|
||||
// we have to skip '%' which is 0x25 so that it is percent-encoded too
|
||||
for (int i = 0x23; i <= 0x24; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
for (int i = 0x26; i <= 0x5B; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
for (int i = 0x5D; i <= 0x7E; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
for (int i = 0x80; i <= 0xFF; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
assert doesNotNeedEncoding.get('%') == false : doesNotNeedEncoding;
|
||||
}
|
||||
|
||||
private static final Charset UTF_8 = StandardCharsets.UTF_8;
|
||||
|
||||
/**
|
||||
* Encode a string containing characters outside of the legal characters for an RFC 7230 quoted-string.
|
||||
*
|
||||
* @param s the string to encode
|
||||
* @return the encoded string
|
||||
*/
|
||||
static String encode(final String s) {
|
||||
// first check if the string needs any encoding; this is the fast path and we want to avoid creating a string builder and copying
|
||||
boolean encodingNeeded = false;
|
||||
for (int i = 0; i < s.length(); i++) {
|
||||
int current = s.charAt(i);
|
||||
if (doesNotNeedEncoding.get(current) == false) {
|
||||
encodingNeeded = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (encodingNeeded == false) {
|
||||
return s;
|
||||
}
|
||||
|
||||
final StringBuilder sb = new StringBuilder(s.length());
|
||||
for (int i = 0; i < s.length();) {
|
||||
int current = s.charAt(i);
|
||||
/*
|
||||
* Either the character does not need encoding or it does; when the character does not need encoding we append the character to
|
||||
* a buffer and move to the next character and when the character does need encoding, we peel off as many characters as possible
|
||||
* which we encode using UTF-8 until we encounter another character that does not need encoding.
|
||||
*/
|
||||
if (doesNotNeedEncoding.get(current)) {
|
||||
// append directly and move to the next character
|
||||
sb.append((char) current);
|
||||
i++;
|
||||
} else {
|
||||
int startIndex = i;
|
||||
do {
|
||||
i++;
|
||||
} while (i < s.length() && doesNotNeedEncoding.get(s.charAt(i)) == false);
|
||||
|
||||
final byte[] bytes = s.substring(startIndex, i).getBytes(UTF_8);
|
||||
// noinspection ForLoopReplaceableByForEach
|
||||
for (int j = 0; j < bytes.length; j++) {
|
||||
sb.append('%').append(hex(bytes[j] >> 4)).append(hex(bytes[j]));
|
||||
}
|
||||
}
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private static char hex(int b) {
|
||||
final char ch = Character.forDigit(b & 0xF, 16);
|
||||
if (Character.isLetter(ch)) {
|
||||
return Character.toUpperCase(ch);
|
||||
} else {
|
||||
return ch;
|
||||
}
|
||||
@SuppressLoggerChecks(reason = "Safely delegates to a deprecated message")
|
||||
public void deprecate(final String key, final String msg, final Object... params) {
|
||||
String opaqueId = HeaderWarning.getXOpaqueId();
|
||||
ESLogMessage deprecationMessage = new DeprecatedMessage(opaqueId, msg, params);
|
||||
deprecationLogger.throttleLogAndAddWarning(key, deprecationMessage);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ import java.util.stream.Stream;
|
||||
/**
|
||||
* A base class for custom log4j logger messages. Carries additional fields which will populate JSON fields in logs.
|
||||
*/
|
||||
@SuppressLoggerChecks(reason = "Safe as this is abstract class")
|
||||
public abstract class ESLogMessage extends ParameterizedMessage {
|
||||
private final Map<String, Object> fields;
|
||||
|
||||
@ -36,7 +37,6 @@ public abstract class ESLogMessage extends ParameterizedMessage {
|
||||
* This is an abstract class, so this is safe. The check is done on DeprecationMessage.
|
||||
* Other subclasses are not allowing varargs
|
||||
*/
|
||||
@SuppressLoggerChecks(reason = "Safe as this is abstract class")
|
||||
public ESLogMessage(Map<String, Object> fields, String messagePattern, Object... args) {
|
||||
super(messagePattern, args);
|
||||
this.fields = fields;
|
||||
@ -64,4 +64,12 @@ public abstract class ESLogMessage extends ParameterizedMessage {
|
||||
.map(ESLogMessage::inQuotes)
|
||||
.collect(Collectors.joining(", ")) + "]";
|
||||
}
|
||||
|
||||
/**
 * Exposes the underlying log4j message parameters under the ESLogMessage accessor name.
 *
 * @return the arguments supplied at construction time
 */
public Object[] getArguments() {
    return super.getParameters();
}
|
||||
|
||||
/**
 * Exposes the raw (unformatted) message pattern of this log message.
 *
 * @return the message pattern supplied at construction time
 */
public String getMessagePattern() {
    return super.getFormat();
}
|
||||
}
|
||||
|
@ -0,0 +1,347 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.logging;
|
||||
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.BitSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CopyOnWriteArraySet;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* This is a simplistic logger that adds warning messages to HTTP headers.
|
||||
* Use <code>HeaderWarning.addWarning(message,params)</code>. Message will be formatted according to RFC7234.
|
||||
* The result will be returned as HTTP response headers.
|
||||
*/
|
||||
public class HeaderWarning {
|
||||
/**
|
||||
* Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code
|
||||
* is always 299. Further, this pattern assumes that the warn agent represents a version of Elasticsearch including the build hash.
|
||||
*/
|
||||
public static final Pattern WARNING_HEADER_PATTERN = Pattern.compile(
|
||||
"299 " + // warn code
|
||||
"Elasticsearch-" + // warn agent
|
||||
"\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-" + // warn agent
|
||||
"(?:[a-f0-9]{7}(?:[a-f0-9]{33})?|unknown) " + // warn agent
|
||||
"\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\"( " + // quoted warning value, captured
|
||||
// quoted RFC 1123 date format
|
||||
"\"" + // opening quote
|
||||
"(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday
|
||||
"\\d{2} " + // 2-digit day
|
||||
"(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month
|
||||
"\\d{4} " + // 4-digit year
|
||||
"\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second)
|
||||
"GMT" + // GMT
|
||||
"\")?"); // closing quote (optional, since an older version can still send a warn-date)
|
||||
public static final Pattern WARNING_XCONTENT_LOCATION_PATTERN = Pattern.compile("^\\[.*?]\\[-?\\d+:-?\\d+] ");
|
||||
|
||||
/*
|
||||
* RFC7234 specifies the warning format as warn-code <space> warn-agent <space> "warn-text" [<space> "warn-date"]. Here, warn-code is a
|
||||
* three-digit number with various standard warn codes specified. The warn code 299 is apt for our purposes as it represents a
|
||||
* miscellaneous persistent warning (can be presented to a human, or logged, and must not be removed by a cache). The warn-agent is an
|
||||
* arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional
|
||||
* quoted field that can be in a variety of specified date formats; here we use RFC 1123 format.
|
||||
*/
|
||||
private static final String WARNING_PREFIX =
|
||||
String.format(
|
||||
Locale.ROOT,
|
||||
"299 Elasticsearch-%s%s-%s",
|
||||
Version.CURRENT.toString(),
|
||||
Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "",
|
||||
Build.CURRENT.hash());
|
||||
|
||||
private static BitSet doesNotNeedEncoding;
|
||||
|
||||
static {
|
||||
doesNotNeedEncoding = new BitSet(1 + 0xFF);
|
||||
doesNotNeedEncoding.set('\t');
|
||||
doesNotNeedEncoding.set(' ');
|
||||
doesNotNeedEncoding.set('!');
|
||||
doesNotNeedEncoding.set('\\');
|
||||
doesNotNeedEncoding.set('"');
|
||||
// we have to skip '%' which is 0x25 so that it is percent-encoded too
|
||||
for (int i = 0x23; i <= 0x24; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
for (int i = 0x26; i <= 0x5B; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
for (int i = 0x5D; i <= 0x7E; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
for (int i = 0x80; i <= 0xFF; i++) {
|
||||
doesNotNeedEncoding.set(i);
|
||||
}
|
||||
assert doesNotNeedEncoding.get('%') == false : doesNotNeedEncoding;
|
||||
}
|
||||
|
||||
private static final Charset UTF_8 = StandardCharsets.UTF_8;
|
||||
|
||||
/**
|
||||
* This is set once by the {@code Node} constructor, but it uses {@link CopyOnWriteArraySet} to ensure that tests can run in parallel.
|
||||
* <p>
|
||||
* Integration tests will create separate nodes within the same classloader, thus leading to a shared, {@code static} state.
|
||||
* In order for all tests to appropriately be handled, this must be able to remember <em>all</em> {@link ThreadContext}s that it is
|
||||
* given in a thread safe manner.
|
||||
* <p>
|
||||
* For actual usage, multiple nodes do not share the same JVM and therefore this will only be set once in practice.
|
||||
*/
|
||||
static final CopyOnWriteArraySet<ThreadContext> THREAD_CONTEXT = new CopyOnWriteArraySet<>();
|
||||
|
||||
/**
|
||||
* Set the {@link ThreadContext} used to add warning headers to network responses.
|
||||
* <p>
|
||||
* This is expected to <em>only</em> be invoked by the {@code Node}'s constructor (therefore once outside of tests).
|
||||
*
|
||||
* @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node})
|
||||
* @throws IllegalStateException if this {@code threadContext} has already been set
|
||||
*/
|
||||
public static void setThreadContext(ThreadContext threadContext) {
|
||||
Objects.requireNonNull(threadContext, "Cannot register a null ThreadContext");
|
||||
|
||||
// add returning false means it _did_ have it already
|
||||
if (THREAD_CONTEXT.add(threadContext) == false) {
|
||||
throw new IllegalStateException("Double-setting ThreadContext not allowed!");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the {@link ThreadContext} used to add warning headers to network responses.
|
||||
* <p>
|
||||
* This is expected to <em>only</em> be invoked by the {@code Node}'s {@code close} method (therefore once outside of tests).
|
||||
*
|
||||
* @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node})
|
||||
* @throws IllegalStateException if this {@code threadContext} is unknown (and presumably already unset before)
|
||||
*/
|
||||
public static void removeThreadContext(ThreadContext threadContext) {
|
||||
assert threadContext != null;
|
||||
|
||||
// remove returning false means it did not have it already
|
||||
if (THREAD_CONTEXT.remove(threadContext) == false) {
|
||||
throw new IllegalStateException("Removing unknown ThreadContext not allowed!");
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Extracts the warning value from the value of a warning header that is formatted according to RFC 7234. That is, given a string
 * {@code 299 Elasticsearch-6.0.0 "warning value"}, the return value of this method would be {@code warning value}.
 *
 * @param s the value of a warning header formatted according to RFC 7234.
 * @param stripXContentPosition whether a leading XContent location prefix matching
 *                              {@code WARNING_XCONTENT_LOCATION_PATTERN} should be stripped from the extracted value
 * @return the extracted warning value
 */
public static String extractWarningValueFromWarningHeader(final String s, boolean stripXContentPosition) {
    /*
     * We know the exact format of the warning header, so to extract the warning value we can skip forward from the front to the first
     * quote and we know the last quote is at the end of the string
     *
     *   299 Elasticsearch-6.0.0 "warning value"
     *                           ^             ^
     *                           firstQuote    lastQuote
     *
     * We parse this manually rather than using the capturing regular expression because the regular expression involves a lot of
     * backtracking and carries a performance penalty. However, when assertions are enabled, we still use the regular expression to
     * verify that we are maintaining the warning header format.
     */
    final int firstQuote = s.indexOf('\"');
    final int lastQuote = s.length() - 1;
    String warningValue = s.substring(firstQuote + 1, lastQuote);
    // the regex-based cross-check only runs when assertions are enabled
    assert assertWarningValue(s, warningValue);
    if (stripXContentPosition) {
        Matcher matcher = WARNING_XCONTENT_LOCATION_PATTERN.matcher(warningValue);
        if (matcher.find()) {
            warningValue = warningValue.substring(matcher.end());
        }
    }
    return warningValue;
}
|
||||
|
||||
/**
|
||||
* Assert that the specified string has the warning value equal to the provided warning value.
|
||||
*
|
||||
* @param s the string representing a full warning header
|
||||
* @param warningValue the expected warning header
|
||||
* @return {@code true} if the specified string has the expected warning value
|
||||
*/
|
||||
private static boolean assertWarningValue(final String s, final String warningValue) {
|
||||
final Matcher matcher = WARNING_HEADER_PATTERN.matcher(s);
|
||||
final boolean matches = matcher.matches();
|
||||
assert matches;
|
||||
return matcher.group(1).equals(warningValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a warning string in the proper warning format by prepending a warn code, warn agent, wrapping the warning string in quotes,
|
||||
* and appending the RFC 7231 date.
|
||||
*
|
||||
* @param s the warning string to format
|
||||
* @return a warning value formatted according to RFC 7234
|
||||
*/
|
||||
public static String formatWarning(final String s) {
|
||||
// Assume that the common scenario won't have a string to escape and encode.
|
||||
int length = WARNING_PREFIX.length() + s.length() + 3;
|
||||
final StringBuilder sb = new StringBuilder(length);
|
||||
sb.append(WARNING_PREFIX).append(" \"").append(escapeAndEncode(s)).append("\"");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
 * Escape and encode a string as a valid RFC 7230 quoted-string: first backslash-escapes
 * backslashes and quotes, then percent-encodes any remaining illegal characters.
 *
 * @param s the string to escape and encode
 * @return the escaped and encoded string
 */
public static String escapeAndEncode(final String s) {
    return encode(escapeBackslashesAndQuotes(s));
}
|
||||
|
||||
/**
|
||||
* Escape backslashes and quotes in the specified string.
|
||||
*
|
||||
* @param s the string to escape
|
||||
* @return the escaped string
|
||||
*/
|
||||
static String escapeBackslashesAndQuotes(final String s) {
|
||||
/*
|
||||
* We want a fast path check to avoid creating the string builder and copying characters if needed. So we walk the string looking
|
||||
* for either of the characters that we need to escape. If we find a character that needs escaping, we start over and
|
||||
*/
|
||||
boolean escapingNeeded = false;
|
||||
for (int i = 0; i < s.length(); i++) {
|
||||
final char c = s.charAt(i);
|
||||
if (c == '\\' || c == '"') {
|
||||
escapingNeeded = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (escapingNeeded) {
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
for (final char c : s.toCharArray()) {
|
||||
if (c == '\\' || c == '"') {
|
||||
sb.append("\\");
|
||||
}
|
||||
sb.append(c);
|
||||
}
|
||||
return sb.toString();
|
||||
} else {
|
||||
return s;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Encode a string containing characters outside of the legal characters for an RFC 7230
 * quoted-string by percent-encoding them as UTF-8; legality is defined by the
 * {@code doesNotNeedEncoding} bit set.
 *
 * @param s the string to encode
 * @return {@code s} itself when no character requires encoding, otherwise the encoded string
 */
static String encode(final String s) {
    // first check if the string needs any encoding; this is the fast path and we want to avoid creating a string builder and copying
    boolean encodingNeeded = false;
    for (int i = 0; i < s.length(); i++) {
        int current = s.charAt(i);
        if (doesNotNeedEncoding.get(current) == false) {
            encodingNeeded = true;
            break;
        }
    }

    if (encodingNeeded == false) {
        return s;
    }

    final StringBuilder sb = new StringBuilder(s.length());
    for (int i = 0; i < s.length(); ) {
        int current = s.charAt(i);
        /*
         * Either the character does not need encoding or it does; when the character does not need encoding we append the character to
         * a buffer and move to the next character and when the character does need encoding, we peel off as many characters as possible
         * which we encode using UTF-8 until we encounter another character that does not need encoding.
         */
        if (doesNotNeedEncoding.get(current)) {
            // append directly and move to the next character
            sb.append((char) current);
            i++;
        } else {
            int startIndex = i;
            do {
                i++;
            } while (i < s.length() && doesNotNeedEncoding.get(s.charAt(i)) == false);

            // encode the whole illegal run at once so multi-byte UTF-8 sequences are kept intact
            final byte[] bytes = s.substring(startIndex, i).getBytes(UTF_8);
            // noinspection ForLoopReplaceableByForEach
            for (int j = 0; j < bytes.length; j++) {
                // high nibble then low nibble; hex() masks with 0xF so negative byte values are safe
                sb.append('%').append(hex(bytes[j] >> 4)).append(hex(bytes[j]));
            }
        }
    }
    return sb.toString();
}
|
||||
|
||||
private static char hex(int b) {
|
||||
final char ch = Character.forDigit(b & 0xF, 16);
|
||||
if (Character.isLetter(ch)) {
|
||||
return Character.toUpperCase(ch);
|
||||
} else {
|
||||
return ch;
|
||||
}
|
||||
}
|
||||
|
||||
public static String getXOpaqueId() {
|
||||
return THREAD_CONTEXT.stream()
|
||||
.filter(t -> t.getHeader(Task.X_OPAQUE_ID) != null)
|
||||
.findFirst()
|
||||
.map(t -> t.getHeader(Task.X_OPAQUE_ID))
|
||||
.orElse("");
|
||||
}
|
||||
|
||||
/**
 * Formats {@code message} with {@code params} and adds it as a "Warning" response header
 * on every registered {@link ThreadContext}.
 */
public static void addWarning(String message, Object... params) {
    addWarning(THREAD_CONTEXT, message, params);
}
|
||||
|
||||
// package scope for testing
// Formats the message, wraps it as an RFC 7234 warning value, and adds it as a
// "Warning" response header on each of the given thread contexts.
static void addWarning(Set<ThreadContext> threadContexts, String message, Object... params) {
    final Iterator<ThreadContext> iterator = threadContexts.iterator();
    // only pay the formatting cost when at least one thread context is registered
    if (iterator.hasNext()) {
        final String formattedMessage = LoggerMessageFormat.format(message, params);
        final String warningHeaderValue = formatWarning(formattedMessage);
        // with assertions enabled, verify the produced header round-trips through the RFC 7234 pattern
        assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches();
        assert extractWarningValueFromWarningHeader(warningHeaderValue, false)
            .equals(escapeAndEncode(formattedMessage));
        while (iterator.hasNext()) {
            try {
                final ThreadContext next = iterator.next();
                next.addResponseHeader("Warning", warningHeaderValue);
            } catch (final IllegalStateException e) {
                // ignored; it should be removed shortly
            }
        }
    }
}
|
||||
}
|
@ -0,0 +1,49 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.logging;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
/**
 * This class wraps both <code>HeaderWarningLogger</code> and <code>ThrottlingLogger</code>
 * which is a common use case across Elasticsearch
 */
class ThrottlingAndHeaderWarningLogger {
    // Delegate that suppresses duplicate log entries for the same (X-Opaque-Id, key) pair.
    private final ThrottlingLogger throttlingLogger;

    ThrottlingAndHeaderWarningLogger(Logger logger) {
        this.throttlingLogger = new ThrottlingLogger(logger);
    }

    /**
     * Adds a formatted warning message as a response header on the thread context, and logs a message if the associated key has
     * not recently been seen.
     * <p>
     * The warning response header is always added; only the log line is throttled.
     *
     * @param key the key used to determine if this message should be logged
     * @param message the message to log
     */
    void throttleLogAndAddWarning(final String key, ESLogMessage message) {
        String messagePattern = message.getMessagePattern();
        Object[] arguments = message.getArguments();
        HeaderWarning.addWarning(messagePattern, arguments);
        throttlingLogger.throttleLog(key, message);
    }

}
|
@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.logging;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.message.Message;
|
||||
import org.elasticsearch.common.SuppressLoggerChecks;
|
||||
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * TODO wrapping logging this way limits the usage of %location. It will think this is used from that class.
 * <p>
 * This is a wrapper around a logger that allows to throttle log messages.
 * In order to throttle a key has to be used and throttling happens per each key combined with X-Opaque-Id.
 * X-Opaque-Id allows throttling per user. This value is set in ThreadContext from X-Opaque-Id HTTP header.
 * <p>
 * The throttling algorithm is relying on LRU set of keys which evicts entries when its size is > 128.
 * When a log with a key is emitted, it won't be logged again until the set reaches size 128 and the key is removed from the set.
 *
 * @see HeaderWarning
 */
class ThrottlingLogger {

    // LRU set of keys used to determine if a message should be emitted to the logs.
    // NOTE(review): the backing LinkedHashMap uses insertion order (accessOrder is not set),
    // so eviction removes the oldest-inserted key once the set grows past 128 entries —
    // confirm this matches the intended "LRU" semantics described above.
    private final Set<String> keys = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<String, Boolean>() {
        @Override
        protected boolean removeEldestEntry(final Map.Entry<String, Boolean> eldest) {
            return size() > 128;
        }
    }));

    private final Logger logger;

    ThrottlingLogger(Logger logger) {
        this.logger = logger;
    }

    // Logs the message only when the (X-Opaque-Id + key) combination is not currently in the set.
    void throttleLog(String key, Message message) {
        String xOpaqueId = HeaderWarning.getXOpaqueId();
        // Set#add returns true only for keys that were not already present.
        boolean shouldLog = keys.add(xOpaqueId + key);
        if (shouldLog) {
            log(message);
        }
    }

    // Executes the actual logging inside a privileged block.
    private void log(Message message) {
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @SuppressLoggerChecks(reason = "safely delegates to logger")
            @Override
            public Void run() {
                logger.warn(message);
                return null;
            }
        });
    }
}
|
@ -522,11 +522,9 @@ public class Setting<T> implements ToXContentObject {
|
||||
if (this.isDeprecated() && this.exists(settings)) {
|
||||
// It would be convenient to show its replacement key, but replacement is often not so simple
|
||||
final String key = getKey();
|
||||
Settings.DeprecationLoggerHolder.deprecationLogger.deprecatedAndMaybeLog(
|
||||
key,
|
||||
"[{}] setting was deprecated in Elasticsearch and will be removed in a future release! "
|
||||
+ "See the breaking changes documentation for the next major version.",
|
||||
key);
|
||||
Settings.DeprecationLoggerHolder.deprecationLogger
|
||||
.deprecate(key, "[{}] setting was deprecated in Elasticsearch and will be removed in a future release! "
|
||||
+ "See the breaking changes documentation for the next major version.", key);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1649,7 +1649,7 @@ public class DateFormatters {
|
||||
String msg = "Camel case format name {} is deprecated and will be removed in a future version. " +
|
||||
"Use snake case name {} instead.";
|
||||
deprecationLogger.getOrCompute()
|
||||
.deprecatedAndMaybeLog("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName());
|
||||
.deprecate("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName());
|
||||
}
|
||||
|
||||
if (FormatNames.ISO8601.matches(input)) {
|
||||
|
@ -201,7 +201,7 @@ public class DateUtils {
|
||||
public static ZoneId of(String zoneId) {
|
||||
String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId);
|
||||
if (deprecatedId != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("timezone",
|
||||
deprecationLogger.deprecate("timezone",
|
||||
"Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead");
|
||||
return ZoneId.of(deprecatedId);
|
||||
}
|
||||
|
@ -245,10 +245,10 @@ public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXC
|
||||
} catch (final NumberFormatException e) {
|
||||
try {
|
||||
final double doubleValue = Double.parseDouble(s);
|
||||
DeprecationLoggerHolder.deprecationLogger.deprecatedAndMaybeLog(
|
||||
"fractional_byte_values",
|
||||
"Fractional bytes values are deprecated. Use non-fractional bytes values instead: [{}] found for setting [{}]",
|
||||
initialInput, settingName);
|
||||
DeprecationLoggerHolder.deprecationLogger
|
||||
.deprecate("fractional_byte_values",
|
||||
"Fractional bytes values are deprecated. Use non-fractional bytes values instead: [{}] found for setting [{}]",
|
||||
initialInput, settingName);
|
||||
return new ByteSizeValue((long) (doubleValue * unit.toBytes(1)));
|
||||
} catch (final NumberFormatException ignored) {
|
||||
throw new ElasticsearchParseException("failed to parse [{}]", e, initialInput);
|
||||
|
@ -77,9 +77,10 @@ public class EsExecutors {
|
||||
final int value = Setting.parseInt(s, 1, name);
|
||||
final int availableProcessors = Runtime.getRuntime().availableProcessors();
|
||||
if (value > availableProcessors) {
|
||||
deprecationLogger.deprecatedAndMaybeLog(
|
||||
deprecationLogger.deprecate(
|
||||
"processors",
|
||||
"setting [" + name + "] to value [{}] which is more than available processors [{}] is deprecated",
|
||||
"setting [{}] to value [{}] which is more than available processors [{}] is deprecated",
|
||||
name,
|
||||
value,
|
||||
availableProcessors);
|
||||
}
|
||||
|
@ -53,21 +53,21 @@ public class LoggingDeprecationHandler implements DeprecationHandler {
|
||||
@Override
|
||||
public void usedDeprecatedName(String parserName, Supplier<XContentLocation> location, String usedName, String modernName) {
|
||||
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
|
||||
deprecationLogger.deprecatedAndMaybeLog("deprecated_field", "{}Deprecated field [{}] used, expected [{}] instead",
|
||||
prefix, usedName, modernName);
|
||||
deprecationLogger.deprecate("deprecated_field",
|
||||
"{}Deprecated field [{}] used, expected [{}] instead", prefix, usedName, modernName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName, String replacedWith) {
|
||||
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
|
||||
deprecationLogger.deprecatedAndMaybeLog("deprecated_field", "{}Deprecated field [{}] used, replaced by [{}]",
|
||||
prefix, usedName, replacedWith);
|
||||
deprecationLogger.deprecate("deprecated_field",
|
||||
"{}Deprecated field [{}] used, replaced by [{}]", prefix, usedName, replacedWith);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName) {
|
||||
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
|
||||
deprecationLogger.deprecatedAndMaybeLog("deprecated_field",
|
||||
deprecationLogger.deprecate("deprecated_field",
|
||||
"{}Deprecated field [{}] used, this field is unused and will be removed entirely", prefix, usedName);
|
||||
}
|
||||
}
|
||||
|
@ -73,7 +73,7 @@ public class HttpInfo implements ReportingService.Info {
|
||||
String publishAddressString = publishAddress.toString();
|
||||
String hostString = publishAddress.address().getHostString();
|
||||
if (CNAME_IN_PUBLISH_HOST) {
|
||||
deprecationLogger.deprecatedAndMaybeLog(
|
||||
deprecationLogger.deprecate(
|
||||
"cname_in_publish_address",
|
||||
"es.http.cname_in_publish_address system property is deprecated and no longer affects http.publish_address " +
|
||||
"formatting. Remove this property to get rid of this deprecation warning."
|
||||
|
@ -132,7 +132,7 @@ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisCompone
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog(name(), "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate(name(), "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
@ -159,7 +159,7 @@ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisCompone
|
||||
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog(name(), "Token filter [" + name()
|
||||
DEPRECATION_LOGGER.deprecate(name(), "Token filter [" + name()
|
||||
+ "] will not be usable to parse synonyms after v7.0");
|
||||
return this;
|
||||
}
|
||||
|
@ -51,9 +51,9 @@ public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
+ " must be less than or equal to: [" + maxAllowedShingleDiff + "] but was [" + shingleDiff + "]. This limit"
|
||||
+ " can be set by changing the [" + IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey() + "] index level setting.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("excessive_shingle_diff",
|
||||
deprecationLogger.deprecate("excessive_shingle_diff",
|
||||
"Deprecated big difference between maxShingleSize and minShingleSize" +
|
||||
" in Shingle TokenFilter, expected difference must be less than or equal to: [" + maxAllowedShingleDiff + "]");
|
||||
" in Shingle TokenFilter, expected difference must be less than or equal to: [" + maxAllowedShingleDiff + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@ -77,8 +77,8 @@ public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
"] cannot be used to parse synonyms");
|
||||
}
|
||||
else {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter " + name()
|
||||
+ "] will not be usable to parse synonym after v7.0");
|
||||
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter " + name()
|
||||
+ "] will not be usable to parse synonym after v7.0");
|
||||
}
|
||||
return this;
|
||||
|
||||
|
@ -209,7 +209,7 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
|
||||
|
||||
private void checkCompletionContextsLimit(BuilderContext context) {
|
||||
if (this.contexts.getValue() != null && this.contexts.getValue().size() > COMPLETION_CONTEXTS_LIMIT) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("excessive_completion_contexts",
|
||||
deprecationLogger.deprecate("excessive_completion_contexts",
|
||||
"You have defined more than [" + COMPLETION_CONTEXTS_LIMIT + "] completion contexts" +
|
||||
" in the mapping for index [" + context.indexSettings().get(IndexMetadata.SETTING_INDEX_PROVIDED_NAME) + "]. " +
|
||||
"The maximum allowed number of completion contexts in a mapping will be limited to " +
|
||||
|
@ -208,7 +208,7 @@ public class DynamicTemplate implements ToXContentObject {
|
||||
if (indexVersionCreated.onOrAfter(Version.V_6_0_0_alpha1)) {
|
||||
throw e;
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("invalid_mapping_type",
|
||||
deprecationLogger.deprecate("invalid_mapping_type",
|
||||
"match_mapping_type [" + matchMappingType + "] is invalid and will be ignored: "
|
||||
+ e.getMessage());
|
||||
// this template is on an unknown type so it will never match anything
|
||||
|
@ -99,7 +99,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
||||
@Override
|
||||
public FieldNamesFieldMapper build(BuilderContext context) {
|
||||
if (enabled.getValue().explicit()) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("field_names_enabled_parameter", ENABLED_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("field_names_enabled_parameter", ENABLED_DEPRECATION_MESSAGE);
|
||||
}
|
||||
FieldNamesFieldType fieldNamesFieldType = new FieldNamesFieldType(enabled.getValue().value());
|
||||
return new FieldNamesFieldMapper(enabled.getValue(), indexVersionCreated, fieldNamesFieldType);
|
||||
@ -139,9 +139,8 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
||||
if (isEnabled() == false) {
|
||||
throw new IllegalStateException("Cannot run [exists] queries if the [_field_names] field is disabled");
|
||||
}
|
||||
deprecationLogger.deprecatedAndMaybeLog(
|
||||
"terms_query_on_field_names",
|
||||
"terms query on the _field_names field is deprecated and will be removed, use exists query instead");
|
||||
deprecationLogger.deprecate("terms_query_on_field_names",
|
||||
"terms query on the _field_names field is deprecated and will be removed, use exists query instead");
|
||||
return super.termQuery(value, context);
|
||||
}
|
||||
}
|
||||
|
@ -160,7 +160,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
||||
+ "you can re-enable it by updating the dynamic cluster setting: "
|
||||
+ IndicesService.INDICES_ID_FIELD_DATA_ENABLED_SETTING.getKey());
|
||||
}
|
||||
deprecationLogger.deprecatedAndMaybeLog("id_field_data", ID_FIELD_DATA_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("id_field_data", ID_FIELD_DATA_DEPRECATION_MESSAGE);
|
||||
final IndexFieldData<?> fieldData = fieldDataBuilder.build(cache, breakerService, mapperService);
|
||||
return new IndexFieldData<LeafFieldData>() {
|
||||
@Override
|
||||
|
@ -176,7 +176,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
|
||||
throw new ElasticsearchParseException("Field parameter [{}] is not supported for [{}] field type",
|
||||
fieldName, CONTENT_TYPE);
|
||||
}
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_mapper_field_parameter",
|
||||
DEPRECATION_LOGGER.deprecate("geo_mapper_field_parameter",
|
||||
"Field parameter [{}] is deprecated and will be removed in a future version.", fieldName);
|
||||
}
|
||||
}
|
||||
|
@ -452,7 +452,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(DEFAULT_MAPPING_ERROR_MESSAGE);
|
||||
} else if (reason == MergeReason.MAPPING_UPDATE) { // only log in case of explicit mapping updates
|
||||
deprecationLogger.deprecatedAndMaybeLog("default_mapping_not_allowed", DEFAULT_MAPPING_ERROR_MESSAGE);
|
||||
deprecationLogger.deprecate("default_mapping_not_allowed", DEFAULT_MAPPING_ERROR_MESSAGE);
|
||||
}
|
||||
assert defaultMapper.type().equals(DEFAULT_MAPPING);
|
||||
results.put(DEFAULT_MAPPING, defaultMapper);
|
||||
@ -631,7 +631,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
||||
*/
|
||||
public MappedFieldType unmappedFieldType(String type) {
|
||||
if (type.equals("string")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("unmapped_type_string",
|
||||
deprecationLogger.deprecate("unmapped_type_string",
|
||||
"[unmapped_type:string] should be replaced with [unmapped_type:keyword]");
|
||||
type = "keyword";
|
||||
}
|
||||
|
@ -238,7 +238,7 @@ public class ObjectMapper extends Mapper implements Cloneable {
|
||||
}
|
||||
return true;
|
||||
} else if (fieldName.equals("include_in_all")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("include_in_all",
|
||||
deprecationLogger.deprecate("include_in_all",
|
||||
"[include_in_all] is deprecated, the _all field have been removed in this version");
|
||||
return true;
|
||||
}
|
||||
|
@ -547,14 +547,14 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
|
||||
}
|
||||
Parameter<?> parameter = deprecatedParamsMap.get(propName);
|
||||
if (parameter != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog(propName, "Parameter [{}] on mapper [{}] is deprecated, use [{}]",
|
||||
deprecationLogger.deprecate(propName, "Parameter [{}] on mapper [{}] is deprecated, use [{}]",
|
||||
propName, name, parameter.name);
|
||||
} else {
|
||||
parameter = paramsMap.get(propName);
|
||||
}
|
||||
if (parameter == null) {
|
||||
if (isDeprecatedParameter(propName, parserContext.indexVersionCreated())) {
|
||||
deprecationLogger.deprecatedAndMaybeLog(propName,
|
||||
deprecationLogger.deprecate(propName,
|
||||
"Parameter [{}] has no effect on type [{}] and will be removed in future", propName, type);
|
||||
iterator.remove();
|
||||
continue;
|
||||
|
@ -418,7 +418,7 @@ public class RootObjectMapper extends ObjectMapper {
|
||||
} else {
|
||||
deprecationMessage = message;
|
||||
}
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("invalid_dynamic_template", deprecationMessage);
|
||||
DEPRECATION_LOGGER.deprecate("invalid_dynamic_template", deprecationMessage);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -251,7 +251,7 @@ public class TypeParsers {
|
||||
builder.indexOptions(nodeIndexOptionValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("similarity")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("similarity",
|
||||
deprecationLogger.deprecate("similarity",
|
||||
"The [similarity] parameter has no effect on field [" + name + "] and will be removed in 8.0");
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder::addMultiField, name, parserContext, propName, propNode)) {
|
||||
@ -276,7 +276,7 @@ public class TypeParsers {
|
||||
Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
|
||||
if (propName.equals("fields")) {
|
||||
if (parserContext.isWithinMultiField()) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("multifield_within_multifield", "At least one multi-field, [" + name + "], was " +
|
||||
deprecationLogger.deprecate("multifield_within_multifield", "At least one multi-field, [" + name + "], was " +
|
||||
"encountered that itself contains a multi-field. Defining multi-fields within a multi-field is deprecated and will " +
|
||||
"no longer be supported in 8.0. To resolve the issue, all instances of [fields] that occur within a [fields] block " +
|
||||
"should be removed from the mappings, either by flattening the chained [fields] blocks into a single level, or " +
|
||||
|
@ -272,7 +272,7 @@ public class GeoShapeQueryBuilder extends AbstractGeometryQueryBuilder<GeoShapeQ
|
||||
|
||||
GeoShapeQueryBuilder builder;
|
||||
if (pgsqp.type != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
if (pgsqp.shape != null) {
|
||||
|
@ -154,7 +154,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
|
||||
try {
|
||||
IdsQueryBuilder builder = PARSER.apply(parser, null);
|
||||
if (builder.types().length > 0) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("ids_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("ids_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
return builder;
|
||||
} catch (IllegalArgumentException e) {
|
||||
|
@ -969,7 +969,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
|
||||
}
|
||||
|
||||
if (moreLikeThisQueryBuilder.isTypeless() == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("more_like_this_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("more_like_this_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
return moreLikeThisQueryBuilder;
|
||||
}
|
||||
|
@ -249,7 +249,7 @@ public class QueryShardContext extends QueryRewriteContext {
|
||||
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
if (name.equals(TypeFieldMapper.NAME)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
return failIfFieldMappingNotFound(name, mapperService.fieldType(name));
|
||||
}
|
||||
|
@ -130,7 +130,7 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
|
||||
|
||||
@Override
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
deprecationLogger.deprecatedAndMaybeLog("type_query", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("type_query", TYPES_DEPRECATION_MESSAGE);
|
||||
//LUCENE 4 UPGRADE document mapper should use bytesref as well?
|
||||
DocumentMapper documentMapper = context.getMapperService().documentMapper(type);
|
||||
if (documentMapper == null) {
|
||||
|
@ -168,8 +168,8 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
|
||||
if (field != null) {
|
||||
fieldType = context.getMapperService().fieldType(field);
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("seed_requires_field",
|
||||
"As of version 7.0 Elasticsearch will require that a [field] parameter is provided when a [seed] is set");
|
||||
deprecationLogger.deprecate("seed_requires_field",
|
||||
"As of version 7.0 Elasticsearch will require that a [field] parameter is provided when a [seed] is set");
|
||||
fieldType = context.getMapperService().fieldType(IdFieldMapper.NAME);
|
||||
}
|
||||
if (fieldType == null) {
|
||||
|
@ -374,7 +374,7 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
|
||||
}
|
||||
String[] types = extractStringArray(source, "type");
|
||||
if (types != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
request.getSearchRequest().types(types);
|
||||
}
|
||||
request.setRemoteInfo(buildRemoteInfo(source));
|
||||
@ -390,7 +390,7 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
|
||||
ObjectParser<IndexRequest, Void> destParser = new ObjectParser<>("dest");
|
||||
destParser.declareString(IndexRequest::index, new ParseField("index"));
|
||||
destParser.declareString((request, type) -> {
|
||||
deprecationLogger.deprecatedAndMaybeLog("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
request.type(type);
|
||||
}, new ParseField("type"));
|
||||
destParser.declareString(IndexRequest::routing, new ParseField("routing"));
|
||||
|
@ -143,8 +143,8 @@ final class SimilarityProviders {
|
||||
throw new IllegalArgumentException("Basic model [" + basicModel + "] isn't supported anymore, " +
|
||||
"please use another model.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog(basicModel + "_similarity_model_replaced", "Basic model [" + basicModel +
|
||||
"] isn't supported anymore and has arbitrarily been replaced with [" + replacement + "].");
|
||||
deprecationLogger.deprecate(basicModel + "_similarity_model_replaced", "Basic model [" + basicModel +
|
||||
"] isn't supported anymore and has arbitrarily been replaced with [" + replacement + "].");
|
||||
model = BASIC_MODELS.get(replacement);
|
||||
assert model != null;
|
||||
}
|
||||
@ -174,8 +174,8 @@ final class SimilarityProviders {
|
||||
throw new IllegalArgumentException("After effect [" + afterEffect +
|
||||
"] isn't supported anymore, please use another effect.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog(afterEffect + "_after_effect_replaced", "After effect [" + afterEffect +
|
||||
"] isn't supported anymore and has arbitrarily been replaced with [" + replacement + "].");
|
||||
deprecationLogger.deprecate(afterEffect + "_after_effect_replaced", "After effect [" + afterEffect +
|
||||
"] isn't supported anymore and has arbitrarily been replaced with [" + replacement + "].");
|
||||
effect = AFTER_EFFECTS.get(replacement);
|
||||
assert effect != null;
|
||||
}
|
||||
@ -264,7 +264,7 @@ final class SimilarityProviders {
|
||||
if (version.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings);
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("unknown_similarity_setting",
|
||||
deprecationLogger.deprecate("unknown_similarity_setting",
|
||||
"Unknown settings for similarity of type [" + type + "]: " + unknownSettings);
|
||||
}
|
||||
}
|
||||
|
@ -67,7 +67,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
||||
} else {
|
||||
final ClassicSimilarity similarity = SimilarityProviders.createClassicSimilarity(Settings.EMPTY, version);
|
||||
return () -> {
|
||||
deprecationLogger.deprecatedAndMaybeLog("classic_similarity",
|
||||
deprecationLogger.deprecate("classic_similarity",
|
||||
"The [classic] similarity is now deprecated in favour of BM25, which is generally "
|
||||
+ "accepted as a better alternative. Use the [BM25] similarity or build a custom [scripted] similarity "
|
||||
+ "instead.");
|
||||
@ -91,7 +91,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
||||
throw new IllegalArgumentException("The [classic] similarity may not be used anymore. Please use the [BM25] "
|
||||
+ "similarity or build a custom [scripted] similarity instead.");
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("classic_similarity",
|
||||
deprecationLogger.deprecate("classic_similarity",
|
||||
"The [classic] similarity is now deprecated in favour of BM25, which is generally "
|
||||
+ "accepted as a better alternative. Use the [BM25] similarity or build a custom [scripted] similarity "
|
||||
+ "instead.");
|
||||
@ -156,7 +156,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
||||
defaultSimilarity = (providers.get("default") != null) ? providers.get("default").get()
|
||||
: providers.get(SimilarityService.DEFAULT_SIMILARITY).get();
|
||||
if (providers.get("base") != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("base_similarity_ignored",
|
||||
deprecationLogger.deprecate("base_similarity_ignored",
|
||||
"The [base] similarity is ignored since query normalization and coords have been removed");
|
||||
}
|
||||
}
|
||||
@ -274,7 +274,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(message);
|
||||
} else if (indexCreatedVersion.onOrAfter(Version.V_6_5_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("similarity_failure", message);
|
||||
deprecationLogger.deprecate("similarity_failure", message);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -126,7 +126,7 @@ public final class AnalysisModule {
|
||||
@Override
|
||||
public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
if (indexSettings.getIndexVersionCreated().before(Version.V_7_0_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("standard_deprecation",
|
||||
deprecationLogger.deprecate("standard_deprecation",
|
||||
"The [standard] token filter name is deprecated and will be removed in a future version.");
|
||||
} else {
|
||||
throw new IllegalArgumentException("The [standard] token filter has been removed.");
|
||||
@ -184,7 +184,7 @@ public final class AnalysisModule {
|
||||
preConfiguredTokenFilters.register( "standard",
|
||||
PreConfiguredTokenFilter.elasticsearchVersion("standard", true, (reader, version) -> {
|
||||
if (version.before(Version.V_7_0_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("standard_deprecation",
|
||||
deprecationLogger.deprecate("standard_deprecation",
|
||||
"The [standard] token filter is deprecated and will be removed in a future version.");
|
||||
} else {
|
||||
throw new IllegalArgumentException("The [standard] token filter has been removed.");
|
||||
|
@ -160,7 +160,7 @@ public class SyncedFlushService implements IndexEventListener {
|
||||
final ActionListener<SyncedFlushResponse> listener) {
|
||||
final ClusterState state = clusterService.state();
|
||||
if (state.nodes().getMinNodeVersion().onOrAfter(Version.V_7_6_0)) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("synced_flush", SYNCED_FLUSH_DEPRECATION_MESSAGE);
|
||||
DEPRECATION_LOGGER.deprecate("synced_flush", SYNCED_FLUSH_DEPRECATION_MESSAGE);
|
||||
}
|
||||
final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, indicesOptions, aliasesOrIndices);
|
||||
final Map<String, List<ShardsSyncedFlushResult>> results = ConcurrentCollections.newConcurrentMap();
|
||||
|
@ -51,7 +51,7 @@ public class ConditionalProcessor extends AbstractProcessor implements WrappingP
|
||||
new DeprecationLogger(LogManager.getLogger(DynamicMap.class));
|
||||
private static final Map<String, Function<Object, Object>> FUNCTIONS = org.elasticsearch.common.collect.Map.of(
|
||||
"_type", value -> {
|
||||
deprecationLogger.deprecatedAndMaybeLog("conditional-processor__type",
|
||||
deprecationLogger.deprecate("conditional-processor__type",
|
||||
"[types removal] Looking up doc types [_type] in scripts is deprecated.");
|
||||
return value;
|
||||
});
|
||||
|
@ -73,7 +73,7 @@ import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.inject.ModulesBuilder;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.logging.HeaderWarning;
|
||||
import org.elasticsearch.common.logging.NodeAndClusterIdStateListener;
|
||||
import org.elasticsearch.common.network.NetworkAddress;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
@ -361,8 +361,8 @@ public class Node implements Closeable {
|
||||
final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool);
|
||||
resourcesToClose.add(resourceWatcherService);
|
||||
// adds the context to the DeprecationLogger so that it does not need to be injected everywhere
|
||||
DeprecationLogger.setThreadContext(threadPool.getThreadContext());
|
||||
resourcesToClose.add(() -> DeprecationLogger.removeThreadContext(threadPool.getThreadContext()));
|
||||
HeaderWarning.setThreadContext(threadPool.getThreadContext());
|
||||
resourcesToClose.add(() -> HeaderWarning.removeThreadContext(threadPool.getThreadContext()));
|
||||
|
||||
final List<Setting<?>> additionalSettings = new ArrayList<>();
|
||||
// register the node.data, node.ingest, node.master, node.remote_cluster_client settings here so we can mark them private
|
||||
|
@ -57,7 +57,7 @@ public class DeprecationRestHandler implements RestHandler {
|
||||
*/
|
||||
@Override
|
||||
public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception {
|
||||
deprecationLogger.deprecatedAndMaybeLog("deprecated_route", deprecationMessage);
|
||||
deprecationLogger.deprecate("deprecated_route", deprecationMessage);
|
||||
|
||||
handler.handleRequest(request, channel, client);
|
||||
}
|
||||
|
@ -62,7 +62,7 @@ public class RestCreateIndexAction extends BaseRestHandler {
|
||||
DEFAULT_INCLUDE_TYPE_NAME_POLICY);
|
||||
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("create_index_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("create_index_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index"));
|
||||
|
@ -60,7 +60,7 @@ public class RestForceMergeAction extends BaseRestHandler {
|
||||
mergeRequest.onlyExpungeDeletes(request.paramAsBoolean("only_expunge_deletes", mergeRequest.onlyExpungeDeletes()));
|
||||
mergeRequest.flush(request.paramAsBoolean("flush", mergeRequest.flush()));
|
||||
if (mergeRequest.onlyExpungeDeletes() && mergeRequest.maxNumSegments() != ForceMergeRequest.Defaults.MAX_NUM_SEGMENTS) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("force_merge_expunge_deletes_and_max_num_segments_deprecation",
|
||||
deprecationLogger.deprecate("force_merge_expunge_deletes_and_max_num_segments_deprecation",
|
||||
"setting only_expunge_deletes and max_num_segments at the same time is deprecated and will be rejected in a future version");
|
||||
}
|
||||
return channel -> client.admin().indices().forceMerge(mergeRequest, new RestToXContentListener<>(channel));
|
||||
|
@ -80,7 +80,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler {
|
||||
" is set to true.");
|
||||
}
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_field_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("get_field_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest();
|
||||
@ -88,7 +88,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler {
|
||||
getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions()));
|
||||
|
||||
if (request.hasParam("local")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_field_mapping_local",
|
||||
deprecationLogger.deprecate("get_field_mapping_local",
|
||||
"Use [local] in get field mapping requests is deprecated. "
|
||||
+ "The parameter will be removed in the next major version");
|
||||
}
|
||||
|
@ -75,7 +75,7 @@ public class RestGetIndexTemplateAction extends BaseRestHandler {
|
||||
|
||||
final GetIndexTemplatesRequest getIndexTemplatesRequest = new GetIndexTemplatesRequest(names);
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_index_template_include_type_name", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("get_index_template_include_type_name", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
getIndexTemplatesRequest.local(request.paramAsBoolean("local", getIndexTemplatesRequest.local()));
|
||||
getIndexTemplatesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexTemplatesRequest.masterNodeTimeout()));
|
||||
|
@ -73,7 +73,7 @@ public class RestGetIndicesAction extends BaseRestHandler {
|
||||
String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
|
||||
// starting with 7.0 we don't include types by default in the response to GET requests
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER) && request.method().equals(GET)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_indices_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("get_indices_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
final GetIndexRequest getIndexRequest = new GetIndexRequest();
|
||||
getIndexRequest.indices(indices);
|
||||
|
@ -100,14 +100,14 @@ public class RestGetMappingAction extends BaseRestHandler {
|
||||
boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY);
|
||||
|
||||
if (request.method().equals(HEAD)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_mapping_types_removal",
|
||||
deprecationLogger.deprecate("get_mapping_types_removal",
|
||||
"Type exists requests are deprecated, as types have been deprecated.");
|
||||
} else if (includeTypeName == false && types.length > 0) {
|
||||
throw new IllegalArgumentException("Types cannot be provided in get mapping requests, unless" +
|
||||
" include_type_name is set to true.");
|
||||
}
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("get_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
final GetMappingsRequest getMappingsRequest = new GetMappingsRequest();
|
||||
|
@ -66,10 +66,10 @@ public class RestPutIndexTemplateAction extends BaseRestHandler {
|
||||
|
||||
PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name"));
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
if (request.hasParam("template")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("put_index_template_deprecated_parameter",
|
||||
deprecationLogger.deprecate("put_index_template_deprecated_parameter",
|
||||
"Deprecated parameter [template] used, replaced by [index_patterns]");
|
||||
putRequest.patterns(Collections.singletonList(request.param("template")));
|
||||
} else {
|
||||
|
@ -79,7 +79,7 @@ public class RestPutMappingAction extends BaseRestHandler {
|
||||
final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER,
|
||||
DEFAULT_INCLUDE_TYPE_NAME_POLICY);
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index")));
|
||||
|
@ -71,7 +71,7 @@ public abstract class RestResizeHandler extends BaseRestHandler {
|
||||
throw new IllegalArgumentException("parameter [copy_settings] can not be explicitly set to [false]");
|
||||
}
|
||||
}
|
||||
deprecationLogger.deprecatedAndMaybeLog("resize_deprecated_parameter",
|
||||
deprecationLogger.deprecate("resize_deprecated_parameter",
|
||||
"parameter [copy_settings] is deprecated and will be removed in 8.0.0");
|
||||
}
|
||||
resizeRequest.setCopySettings(copySettings);
|
||||
|
@ -58,7 +58,7 @@ public class RestRolloverIndexAction extends BaseRestHandler {
|
||||
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
|
||||
final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY);
|
||||
if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("index_rollover_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("index_rollover_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index"));
|
||||
request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(includeTypeName, parser));
|
||||
|
@ -74,7 +74,7 @@ public class RestValidateQueryAction extends BaseRestHandler {
|
||||
validateQueryRequest.explain(request.paramAsBoolean("explain", false));
|
||||
|
||||
if (request.hasParam("type")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("validate_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("validate_query_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
validateQueryRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
|
||||
}
|
||||
|
||||
|
@ -96,7 +96,7 @@ public class RestNodesAction extends AbstractCatAction {
|
||||
final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
|
||||
clusterStateRequest.clear().nodes(true);
|
||||
if (request.hasParam("local")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("cat_nodes_local_parameter", LOCAL_DEPRECATED_MESSAGE);
|
||||
deprecationLogger.deprecate("cat_nodes_local_parameter", LOCAL_DEPRECATED_MESSAGE);
|
||||
}
|
||||
clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
|
||||
clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));
|
||||
|
@ -88,7 +88,7 @@ public class RestBulkAction extends BaseRestHandler {
|
||||
if (defaultType == null) {
|
||||
defaultType = MapperService.SINGLE_MAPPING_NAME;
|
||||
} else {
|
||||
deprecationLogger.deprecatedAndMaybeLog("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
String defaultRouting = request.param("routing");
|
||||
FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
|
||||
|
@ -60,7 +60,7 @@ public class RestDeleteAction extends BaseRestHandler {
|
||||
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
|
||||
DeleteRequest deleteRequest;
|
||||
if (request.hasParam("type")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("delete_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("delete_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deleteRequest = new DeleteRequest(request.param("index"), request.param("type"), request.param("id"));
|
||||
} else {
|
||||
deleteRequest = new DeleteRequest(request.param("index"), request.param("id"));
|
||||
|
@ -68,7 +68,7 @@ public class RestGetAction extends BaseRestHandler {
|
||||
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
|
||||
GetRequest getRequest;
|
||||
if (request.hasParam("type")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("get_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
|
||||
} else {
|
||||
getRequest = new GetRequest(request.param("index"), request.param("id"));
|
||||
|
@ -74,7 +74,7 @@ public class RestGetSourceAction extends BaseRestHandler {
|
||||
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
|
||||
final GetRequest getRequest;
|
||||
if (request.hasParam("type")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_source_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("get_source_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
|
||||
} else {
|
||||
getRequest = new GetRequest(request.param("index"), request.param("id"));
|
||||
|
@ -131,7 +131,7 @@ public class RestIndexAction extends BaseRestHandler {
|
||||
IndexRequest indexRequest;
|
||||
final String type = request.param("type");
|
||||
if (type != null && type.equals(MapperService.SINGLE_MAPPING_NAME) == false) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("index_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("index_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
indexRequest = new IndexRequest(request.param("index"), type, request.param("id"));
|
||||
} else {
|
||||
indexRequest = new IndexRequest(request.param("index"));
|
||||
|
@ -71,7 +71,7 @@ public class RestMultiGetAction extends BaseRestHandler {
|
||||
@Override
|
||||
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
|
||||
if (request.param("type") != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("mget_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("mget_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
|
||||
MultiGetRequest multiGetRequest = new MultiGetRequest();
|
||||
@ -96,7 +96,7 @@ public class RestMultiGetAction extends BaseRestHandler {
|
||||
|
||||
for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
|
||||
if (item.type() != null) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("multi_get_types_removal", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("multi_get_types_removal", TYPES_DEPRECATION_MESSAGE);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -68,7 +68,7 @@ public class RestMultiTermVectorsAction extends BaseRestHandler {
|
||||
.index(request.param("index"));
|
||||
|
||||
if (request.hasParam("type")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("mtermvectors_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
deprecationLogger.deprecate("mtermvectors_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
template.type(request.param("type"));
|
||||
} else {
|
||||
template.type(MapperService.SINGLE_MAPPING_NAME);
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user