Avoid logging duplicate deprecation warnings multiple times (#1660)

* Avoid logging duplicate deprecation warnings multiple times

Signed-off-by: Vacha <vachshah@amazon.com>

* Fixes test failures

Signed-off-by: Vacha <vachshah@amazon.com>

* Adding deprecation logger tests

Signed-off-by: Vacha <vachshah@amazon.com>

* Using ConcurrentHashMap keySet

Signed-off-by: Vacha Shah <vachshah@amazon.com>
This commit is contained in:
Vacha 2021-12-15 15:26:44 -08:00 committed by GitHub
parent 10e51bb4ec
commit e66ea2c4f3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
84 changed files with 395 additions and 279 deletions

View File

@ -84,6 +84,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
@Override
protected boolean enableWarningsCheck() {
// Disable the test framework's automatic per-request warnings check for this suite.
// With deduplicated deprecation warnings (the subject of this change), the server no
// longer re-emits the same warning on every request, so a strict per-request check
// would fail; individual requests assert warnings explicitly via expectWarningsOnce.
return false;
}
private static BulkProcessor.Builder initBulkProcessorBuilder(BulkProcessor.Listener listener) {
return BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
@ -95,7 +100,7 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
return BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(
request,
expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
bulkListener
),
listener
@ -506,11 +511,6 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
} else {
BytesArray data = bytesBulkRequest(localIndex, localType, i);
processor.add(data, globalIndex, globalType, globalPipeline, XContentType.JSON);
if (localType != null) {
// If the payload contains types, parsing it into a bulk request results in a warning.
assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE);
}
}
multiGetRequest.add(localIndex, Integer.toString(i));
}

View File

@ -210,7 +210,7 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
request,
highLevelClient()::bulk,
highLevelClient()::bulkAsync,
expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE)
);
assertFalse(bulkResponse.hasFailures());
return bulkResponse;

View File

@ -214,7 +214,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
);
DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
@ -222,7 +222,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
deleteRequest,
highLevelClient()::delete,
highLevelClient()::deleteAsync,
expectWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestDeleteAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals("index", deleteResponse.getIndex());
@ -425,7 +425,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
);
GetRequest getRequest = new GetRequest("index", "type", "id");
@ -433,7 +433,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
getRequest,
highLevelClient()::get,
highLevelClient()::getAsync,
expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals("index", getResponse.getIndex());
@ -512,7 +512,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
bulk.add(new IndexRequest("index", "type", "id1").source("{\"field\":\"value1\"}", XContentType.JSON));
bulk.add(new IndexRequest("index", "type", "id2").source("{\"field\":\"value2\"}", XContentType.JSON));
highLevelClient().bulk(bulk, expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
highLevelClient().bulk(bulk, expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
MultiGetRequest multiGetRequest = new MultiGetRequest();
multiGetRequest.add("index", "id1");
multiGetRequest.add("index", "type", "id2");
@ -521,7 +521,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
multiGetRequest,
highLevelClient()::mget,
highLevelClient()::mgetAsync,
expectWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals(2, response.getResponses().length);
@ -747,7 +747,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals(RestStatus.CREATED, indexResponse.status());
assertEquals("index", indexResponse.getIndex());
@ -962,7 +962,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
);
UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
@ -971,7 +971,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
updateRequest,
highLevelClient()::update,
highLevelClient()::updateAsync,
expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals(RestStatus.OK, updateResponse.status());

View File

@ -293,7 +293,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
createIndexRequest,
highLevelClient().indices()::create,
highLevelClient().indices()::createAsync,
expectWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
);
assertTrue(createIndexResponse.isAcknowledged());
@ -326,7 +326,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
createIndexRequest,
highLevelClient().indices()::create,
highLevelClient().indices()::createAsync,
expectWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
);
assertTrue(createIndexResponse.isAcknowledged());
@ -505,7 +505,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
getIndexRequest,
highLevelClient().indices()::get,
highLevelClient().indices()::getAsync,
expectWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE)
);
// default settings should be null
@ -601,7 +601,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
putMappingRequest,
highLevelClient().indices()::putMapping,
highLevelClient().indices()::putMappingAsync,
expectWarnings(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)
);
assertTrue(putMappingResponse.isAcknowledged());
@ -676,7 +676,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
request,
highLevelClient().indices()::getMapping,
highLevelClient().indices()::getMappingAsync,
expectWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE)
);
Map<String, Object> mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap();
@ -750,7 +750,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
getFieldMappingsRequest,
highLevelClient().indices()::getFieldMapping,
highLevelClient().indices()::getFieldMappingAsync,
expectWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE)
);
final Map<String, org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata> fieldMappingMap =
@ -1090,7 +1090,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
syncedFlushRequest,
highLevelClient().indices()::flushSynced,
highLevelClient().indices()::flushSyncedAsync,
expectWarnings(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
expectWarningsOnce(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
);
assertThat(flushResponse.totalShards(), equalTo(1));
assertThat(flushResponse.successfulShards(), equalTo(1));
@ -1106,7 +1106,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
syncedFlushRequest,
highLevelClient().indices()::flushSynced,
highLevelClient().indices()::flushSyncedAsync,
expectWarnings(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
expectWarningsOnce(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
)
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
@ -1368,7 +1368,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
rolloverRequest,
highLevelClient().indices()::rollover,
highLevelClient().indices()::rolloverAsync,
expectWarnings(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE)
);
assertTrue(rolloverResponse.isRolledOver());
assertFalse(rolloverResponse.isDryRun());
@ -1782,7 +1782,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
putTemplateRequest,
highLevelClient().indices()::putTemplate,
highLevelClient().indices()::putTemplateAsync,
expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
);
assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
@ -1846,7 +1846,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
putTemplateRequest,
highLevelClient().indices()::putTemplate,
highLevelClient().indices()::putTemplateAsync,
expectWarnings("Deprecated field [template] used, replaced by [index_patterns]")
expectWarningsOnce("Deprecated field [template] used, replaced by [index_patterns]")
);
assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
@ -1916,7 +1916,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
putTemplateRequest,
highLevelClient().indices()::putTemplate,
highLevelClient().indices()::putTemplateAsync,
expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
);
assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
@ -2026,7 +2026,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
putTemplate1,
client.indices()::putTemplate,
client.indices()::putTemplateAsync,
expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
).isAcknowledged(),
equalTo(true)
);
@ -2040,7 +2040,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
putTemplate2,
client.indices()::putTemplate,
client.indices()::putTemplateAsync,
expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
).isAcknowledged(),
equalTo(true)
);
@ -2049,7 +2049,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
new GetIndexTemplatesRequest("template-1"),
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
);
assertThat(getTemplate1.getIndexTemplates(), hasSize(1));
org.opensearch.cluster.metadata.IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0);
@ -2062,7 +2062,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
new GetIndexTemplatesRequest("template-2"),
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
);
assertThat(getTemplate2.getIndexTemplates(), hasSize(1));
org.opensearch.cluster.metadata.IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0);
@ -2080,7 +2080,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
getBothRequest,
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
);
assertThat(getBoth.getIndexTemplates(), hasSize(2));
assertThat(
@ -2093,7 +2093,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
getAllRequest,
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
);
assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2));
assertThat(
@ -2132,7 +2132,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
new GetIndexTemplatesRequest("template-*"),
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
).getIndexTemplates(),
hasSize(1)
);
@ -2141,7 +2141,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
new GetIndexTemplatesRequest("template-*"),
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
).getIndexTemplates().get(0).name(),
equalTo("template-2")
);
@ -2157,7 +2157,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
new GetIndexTemplatesRequest("template-*"),
client.indices()::getTemplate,
client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
)
).status(),
equalTo(RestStatus.NOT_FOUND)

View File

@ -126,23 +126,23 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
public void indexDocuments() throws IOException {
{
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
doc1.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc1.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc1.setJsonEntity("{\"type\":\"type1\", \"id\":1, \"num\":10, \"num2\":50}");
client().performRequest(doc1);
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
doc2.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc2.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc2.setJsonEntity("{\"type\":\"type1\", \"id\":2, \"num\":20, \"num2\":40}");
client().performRequest(doc2);
Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
doc3.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc3.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc3.setJsonEntity("{\"type\":\"type1\", \"id\":3, \"num\":50, \"num2\":35}");
client().performRequest(doc3);
Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
doc4.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc4.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc4.setJsonEntity("{\"type\":\"type2\", \"id\":4, \"num\":100, \"num2\":10}");
client().performRequest(doc4);
Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
doc5.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc5.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
doc5.setJsonEntity("{\"type\":\"type2\", \"id\":5, \"num\":100, \"num2\":10}");
client().performRequest(doc5);
}
@ -1445,7 +1445,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
}
public void testCountAllIndicesMatchQuery() throws IOException {
CountRequest countRequest = new CountRequest();
countRequest.source(new SearchSourceBuilder().query(new MatchQueryBuilder("field", "value1")));
CountResponse countResponse = execute(countRequest, highLevelClient()::count, highLevelClient()::countAsync);

View File

@ -114,7 +114,7 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
}

View File

@ -84,7 +84,7 @@ public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
}

View File

@ -106,7 +106,7 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;

View File

@ -71,7 +71,7 @@ public class FingerprintTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;

View File

@ -79,7 +79,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
} else {
if (preserveOriginal) {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return IDENTITY_FILTER;
@ -147,7 +147,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
} else {
if (preserveOriginal) {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return IDENTITY_FILTER;

View File

@ -92,7 +92,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;

View File

@ -123,7 +123,7 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;

View File

@ -123,7 +123,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;

View File

@ -212,8 +212,8 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2),
false
);
doTestCustomTokenizerDeprecation("nGram", "ngram", LegacyESVersion.V_7_6_0, true);
doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram", LegacyESVersion.V_7_6_0, true);
doTestCustomTokenizerDeprecation("nGram", "ngram", LegacyESVersion.V_7_6_0, false);
doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram", LegacyESVersion.V_7_6_0, false);
}
public void doTestPrebuiltTokenizerDeprecation(String deprecatedName, String replacement, Version version, boolean expectWarning)

View File

@ -19,10 +19,10 @@
- skip:
version: " - 6.2.99"
reason: deprecated in 6.3
features: "warnings"
features: "allowed_warnings"
- do:
warnings:
allowed_warnings:
- 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.'
indices.create:
index: test_deprecated_htmlstrip
@ -41,7 +41,7 @@
analyzer: my_htmlStripWithCharfilter
- do:
warnings:
allowed_warnings:
- 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.'
indices.analyze:
index: test_deprecated_htmlstrip

View File

@ -1,7 +1,7 @@
---
"Test common terms query with stacked tokens":
- skip:
features: "warnings"
features: "allowed_warnings"
- do:
indices.create:
@ -50,7 +50,7 @@
refresh: true
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -67,7 +67,7 @@
- match: { hits.hits.2._id: "3" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -83,7 +83,7 @@
- match: { hits.hits.1._id: "2" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -99,7 +99,7 @@
- match: { hits.hits.2._id: "3" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -114,7 +114,7 @@
- match: { hits.hits.0._id: "2" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -131,7 +131,7 @@
- match: { hits.hits.1._id: "1" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -147,7 +147,7 @@
- match: { hits.hits.0._id: "2" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -161,7 +161,7 @@
- match: { hits.hits.0._id: "2" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -177,7 +177,7 @@
- match: { hits.hits.2._id: "3" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -193,7 +193,7 @@
- match: { hits.hits.1._id: "2" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -210,7 +210,7 @@
- match: { hits.hits.2._id: "3" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true
@ -226,7 +226,7 @@
- match: { hits.hits.1._id: "2" }
- do:
warnings:
allowed_warnings:
- 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [multi_match] query can skip block of documents efficiently if the total number of hits is not tracked]'
search:
rest_total_hits_as_int: true

View File

@ -83,10 +83,10 @@
- skip:
version : "all"
reason : "tracked at https://github.com/elastic/elasticsearch/issues/52266"
features: warnings
features: allowed_warnings
- do:
warnings:
allowed_warnings:
- "setting [ecs] to false for non-common schema format is deprecated and will be removed in 8.0, set to true or remove to use the non-deprecated format"
- "the [os_major] property is deprecated for the user-agent processor"
ingest.put_pipeline:

View File

@ -1,7 +1,7 @@
---
"Stored script":
- skip:
features: warnings
features: allowed_warnings
- do:
put_script:

View File

@ -86,7 +86,7 @@ setup:
---
"date":
- skip:
features: "warnings"
features: "allowed_warnings"
- do:
search:
@ -179,7 +179,7 @@ setup:
---
"long":
- skip:
features: "warnings"
features: "allowed_warnings"
- do:
search:

View File

@ -288,7 +288,7 @@
- skip:
version: " - 7.2.99"
reason: "deprecation warnings only emitted on 7.3+"
features: warnings
features: allowed_warnings
- do:
index:
@ -304,7 +304,7 @@
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- Deprecated field [size] used, expected [max_docs] instead
delete_by_query:
index: twitter

View File

@ -75,7 +75,7 @@
---
"Rethrottle to -1 which turns off throttling":
- skip:
features: warnings
features: allowed_warnings
# Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard
# and a small batch size on the request
- do:
@ -124,7 +124,7 @@
task_id: $task
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}

View File

@ -63,7 +63,7 @@
---
"Multiple slices with wait_for_completion=false":
- skip:
features: warnings
features: allowed_warnings
- do:
index:
index: test
@ -153,11 +153,11 @@
# Only the "parent" reindex task wrote its status to the tasks index though
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
search:
rest_total_hits_as_int: true
@ -172,7 +172,7 @@
---
"Multiple slices with rethrottle":
- skip:
features: warnings
features: allowed_warnings
- do:
index:
index: test
@ -268,11 +268,11 @@
# Only the "parent" reindex task wrote its status to the tasks index though
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
search:
rest_total_hits_as_int: true

View File

@ -36,7 +36,7 @@
- skip:
version: " - 7.2.99"
reason: "size deprecation warnings only emitted on 7.3+, but sort deprecated in 7.6"
features: warnings
features: allowed_warnings
- do:
index:
@ -52,7 +52,7 @@
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- Deprecated field [size] used, expected [max_docs] instead
- The sort option in reindex is deprecated. Instead consider using query
filtering to find the desired subset of data.
@ -127,7 +127,7 @@
- skip:
version: " - 7.5.99"
reason: "max_docs introduced in 7.3.0, but sort deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
index:
@ -143,7 +143,7 @@
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- The sort option in reindex is deprecated. Instead consider using query
filtering to find the desired subset of data.
reindex:
@ -174,7 +174,7 @@
- skip:
version: " - 7.5.99"
reason: "sort deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
index:
@ -185,7 +185,7 @@
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- The sort option in reindex is deprecated. Instead consider using query
filtering to find the desired subset of data.
reindex:

View File

@ -59,7 +59,7 @@
---
"Multiple slices with wait_for_completion=false":
- skip:
features: warnings
features: allowed_warnings
- do:
index:
index: source
@ -162,11 +162,11 @@
# Only the "parent" reindex task wrote its status to the tasks index though
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
search:
rest_total_hits_as_int: true
@ -177,7 +177,7 @@
---
"Multiple slices with rethrottle":
- skip:
features: warnings
features: allowed_warnings
- do:
index:
index: source
@ -280,11 +280,11 @@
# Only the "parent" reindex task wrote its status to the tasks index though
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
search:
rest_total_hits_as_int: true

View File

@ -225,7 +225,7 @@
- skip:
version: " - 7.2.99"
reason: "deprecation warnings only emitted on 7.3+"
features: warnings
features: allowed_warnings
- do:
index:
@ -241,7 +241,7 @@
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- Deprecated field [size] used, expected [max_docs] instead
update_by_query:
index: twitter

View File

@ -55,7 +55,7 @@
---
"Multiple slices with wait_for_completion=false":
- skip:
features: warnings
features: allowed_warnings
- do:
index:
index: test
@ -145,11 +145,11 @@
# Only the "parent" reindex task wrote its status to the tasks index though
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
search:
rest_total_hits_as_int: true
@ -159,7 +159,7 @@
---
"Multiple slices with rethrottle":
- skip:
features: warnings
features: allowed_warnings
- do:
index:
index: test
@ -254,11 +254,11 @@
# Only the "parent" reindex task wrote its status to the tasks index though
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
indices.refresh: {}
- do:
warnings:
allowed_warnings:
- "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
search:
rest_total_hits_as_int: true

View File

@ -109,10 +109,10 @@
- skip:
version: " - 6.99.99"
reason: unicodeSetFilter deprecated in 7.0.0, replaced by unicode_set_filter
features: "warnings"
features: "allowed_warnings"
- do:
warnings:
allowed_warnings:
- "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"
indices.create:
index: test
@ -132,7 +132,7 @@
type: icu_folding
unicodeSetFilter: "[^â]"
- do:
warnings:
allowed_warnings:
- "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"
indices.analyze:
index: test

View File

@ -162,7 +162,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;

View File

@ -174,7 +174,7 @@ final class Ec2ClientSettings {
} else {
if (key.length() == 0) {
deprecationLogger.deprecate(
"ec2_invalid_settings",
"ec2_invalid_key_settings",
"Setting [{}] is set but [{}] is not, which will be unsupported in future",
SECRET_KEY_SETTING.getKey(),
ACCESS_KEY_SETTING.getKey()

View File

@ -145,21 +145,6 @@ public class EvilLoggerTests extends OpenSearchTestCase {
}
}
/*
* We have to manually check that each thread has the right warning headers in the thread context because the act of doing
* this through the test framework on one thread would otherwise clear the thread context and we would be unable to assert
* on the other threads.
*/
final List<String> warnings = threadContext.getResponseHeaders().get("Warning");
final Set<String> actualWarningValues =
warnings.stream().map(s -> HeaderWarning.extractWarningValueFromWarningHeader(s, true))
.collect(Collectors.toSet());
for (int j = 0; j < 128; j++) {
assertThat(
actualWarningValues,
hasItem(HeaderWarning.escapeAndEncode("This is a maybe logged deprecation message" + j)));
}
try {
barrier.await();
} catch (final BrokenBarrierException | InterruptedException e) {
@ -209,8 +194,10 @@ public class EvilLoggerTests extends OpenSearchTestCase {
final int iterations = randomIntBetween(0, 128);
for (int i = 0; i < iterations; i++) {
setting.get(settings);
if (i == 0) {
assertSettingDeprecationsAndWarnings(new Setting<?>[]{setting});
}
}
final String deprecationPath =
System.getProperty("opensearch.logs.base_path") +

View File

@ -289,12 +289,10 @@ public class JsonLoggerTests extends OpenSearchTestCase {
public void testDuplicateLogMessages() throws Exception {
final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger("test");
// For the same key and X-Opaque-ID deprecation should be once
withThreadContext(threadContext -> {
threadContext.putHeader(Task.X_OPAQUE_ID, "ID1");
deprecationLogger.deprecate("key", "message1");
deprecationLogger.deprecate("key", "message2");
assertWarnings("message1", "message2");
assertWarnings("message1");
final Path path = PathUtils.get(System.getProperty("opensearch.logs.base_path"),
System.getProperty("opensearch.logs.cluster_name") + "_deprecated.json");
@ -317,12 +315,11 @@ public class JsonLoggerTests extends OpenSearchTestCase {
});
// For the same key and different X-Opaque-ID should be multiple times per key/x-opaque-id
//continuing with message1-ID1 in logs already, adding a new deprecation log line with message2-ID2
//continuing with message1-ID1 in logs already
withThreadContext(threadContext -> {
threadContext.putHeader(Task.X_OPAQUE_ID, "ID2");
deprecationLogger.deprecate("key", "message1");
deprecationLogger.deprecate("key", "message2");
assertWarnings("message1", "message2");
assertWarnings("message1");
final Path path = PathUtils.get(
System.getProperty("opensearch.logs.base_path"),

View File

@ -1,11 +1,11 @@
---
"Create index with joda style index that is incompatible with java.time. (6.0)":
- skip:
features: "warnings"
features: "allowed_warnings"
version: "6.8.1 -"
reason: change of warning message
- do:
warnings:
allowed_warnings:
- "Use of 'Y' (year-of-era) will change to 'y' in the next major version of OpenSearch. Prefix your date format with '8' to use the new specifier."
indices.create:
index: joda_for_range
@ -41,11 +41,11 @@
---
"Create index with joda style index that is incompatible with java.time (>6.1)":
- skip:
features: "warnings"
features: "allowed_warnings"
version: " - 6.8.0, 7.0.0 -"
reason: change of warning message, we skip 7 becase this format will be considered java
- do:
warnings:
allowed_warnings:
- "'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.; 'Z' time zone offset/id fails when parsing 'Z' for Zulu timezone. Consider using 'X'. Prefix your date format with '8' to use the new specifier."
indices.create:
index: joda_for_range

View File

@ -57,8 +57,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import static org.opensearch.test.rest.OpenSearchRestTestCase.entityAsMap;
@ -68,6 +70,8 @@ import static org.hamcrest.Matchers.is;
public class SystemIndexRestIT extends HttpSmokeTestCase {
private Set<String> assertedWarnings = new HashSet<>();
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
@ -126,15 +130,20 @@ public class SystemIndexRestIT extends HttpSmokeTestCase {
searchRequest.setJsonEntity("{\"query\": {\"match\": {\"some_field\": \"some_value\"}}}");
// Disallow no indices to cause an exception if this resolves to zero indices, so that we're sure it resolved the index
searchRequest.addParameter("allow_no_indices", "false");
if (!assertedWarnings.contains(expectedWarning)) {
searchRequest.setOptions(expectWarnings(expectedWarning));
assertedWarnings.add(expectedWarning);
}
Response response = getRestClient().performRequest(searchRequest);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
}
private RequestOptions expectWarnings(String expectedWarning) {
final RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
if (!assertedWarnings.contains(expectedWarning)) {
builder.setWarningsHandler(w -> w.contains(expectedWarning) == false || w.size() != 1);
assertedWarnings.add(expectedWarning);
}
return builder.build();
}

View File

@ -160,10 +160,10 @@
- skip:
version: " - 7.5.99"
reason: "indices without soft deletes are deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
warnings:
allowed_warnings:
- Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
Please do not specify value for setting [index.soft_deletes.enabled] of index [test_index].
indices.create:

View File

@ -3,7 +3,7 @@
- skip:
version: " - 7.5.99"
reason: "synced flush is deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
indices.create:
index: testing
@ -16,7 +16,7 @@
cluster.health:
wait_for_status: green
- do:
warnings:
allowed_warnings:
- Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead.
indices.flush_synced:
index: testing

View File

@ -14,14 +14,14 @@
- skip:
version: " - 7.3.99"
reason: "deprecation warning about only_expunge_deletes and max_num_segments added in 7.4"
features: "warnings"
features: "allowed_warnings"
- do:
indices.create:
index: test
- do:
warnings:
allowed_warnings:
- 'setting only_expunge_deletes and max_num_segments at the same time is deprecated and will be rejected in a future version'
indices.forcemerge:
index: test

View File

@ -57,12 +57,12 @@ setup:
"Get field mapping with local is deprecated":
- skip:
features: ["warnings", "node_selector"]
features: ["allowed_warnings", "node_selector"]
- do:
node_selector:
version: "7.8.0 - "
warnings:
allowed_warnings:
- "Use [local] in get field mapping requests is deprecated. The parameter will be removed in the next major version"
indices.get_field_mapping:
fields: text

View File

@ -3,7 +3,7 @@
- skip:
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
features: [warnings, arbitrary_key]
features: [allowed_warnings, arbitrary_key]
- do:
nodes.info:
@ -48,7 +48,7 @@
settings:
index.number_of_replicas: 0
index.merge.scheduler.max_thread_count: 2
warnings:
allowed_warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0"
- do:

View File

@ -3,7 +3,7 @@
- skip:
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
features: [arbitrary_key, warnings]
features: [arbitrary_key, allowed_warnings]
- do:
nodes.info:
@ -50,7 +50,7 @@
index.number_of_replicas: 0
index.number_of_shards: 2
index.merge.scheduler.max_thread_count: 2
warnings:
allowed_warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0"

View File

@ -3,7 +3,7 @@
- skip:
version: " - 7.5.99"
reason: "indices without soft deletes are deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
indices.create:
@ -11,7 +11,7 @@
body:
settings:
soft_deletes.enabled: false
warnings:
allowed_warnings:
- Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
Please do not specify value for setting [index.soft_deletes.enabled] of index [test].
- do:
@ -132,9 +132,9 @@
- skip:
version: " - 7.6.99"
reason: "translog retention settings are deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
warnings:
allowed_warnings:
- Translog retention settings [index.translog.retention.age] and [index.translog.retention.size]
are deprecated and effectively ignored. They will be removed in a future version.
indices.create:
@ -148,7 +148,7 @@
body:
index.number_of_replicas: 0
- do:
warnings:
allowed_warnings:
- Translog retention settings [index.translog.retention.age] and [index.translog.retention.size]
are deprecated and effectively ignored. They will be removed in a future version.
indices.put_settings:
@ -183,7 +183,7 @@
- skip:
version: " - 7.5.99"
reason: "indices without soft deletes are deprecated in 7.6"
features: "warnings"
features: "allowed_warnings"
- do:
indices.create:
@ -192,7 +192,7 @@
settings:
soft_deletes.enabled: false
routing.rebalance.enable: "none" # prevents shard relocations while we are closing an index
warnings:
allowed_warnings:
- Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
Please do not specify value for setting [index.soft_deletes.enabled] of index [test].

View File

@ -145,7 +145,7 @@ setup:
- skip:
version: " - 7.1.99"
reason: _time order deprecated in 6.0, replaced by _key. Calendar_interval added in 7.2
features: "warnings"
features: "allowed_warnings"
- do:
index:
@ -178,7 +178,7 @@ setup:
search:
rest_total_hits_as_int: true
body: { "aggs" : { "histo" : { "date_histogram" : { "field" : "date", "calendar_interval" : "month", "order" : { "_time" : "desc" } } } } }
warnings:
allowed_warnings:
- "Deprecated aggregation order key [_time] used, replaced by [_key]"
- match: { hits.total: 4 }

View File

@ -654,7 +654,7 @@ setup:
- skip:
reason: _term order deprecated in 6.0, replaced by _key
features: "warnings"
features: "allowed_warnings"
- do:
index:
@ -681,7 +681,7 @@ setup:
search:
rest_total_hits_as_int: true
body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "order" : { "_term" : "desc" } } } } }
warnings:
allowed_warnings:
- "Deprecated aggregation order key [_term] used, replaced by [_key]"
- match: { hits.total: 3 }

View File

@ -268,10 +268,10 @@ setup:
- skip:
version: " - 7.1.99"
reason: calendar_interval introduced in 7.2.0
features: warnings
features: allowed_warnings
- do:
warnings:
allowed_warnings:
- '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.'
search:
rest_total_hits_as_int: true
@ -300,7 +300,7 @@ setup:
- match: { aggregations.test.buckets.1.doc_count: 1 }
- do:
warnings:
allowed_warnings:
- '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.'
search:
rest_total_hits_as_int: true

View File

@ -37,11 +37,11 @@ setup:
- skip:
version: " - 7.1.99"
reason: "interval deprecation added in 7.2"
features: "warnings"
features: "allowed_warnings"
- do:
catch: /\[window\] must be a positive, non-zero integer\./
warnings:
allowed_warnings:
- "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."
search:
rest_total_hits_as_int: true

View File

@ -176,9 +176,9 @@ setup:
- skip:
version: " - 6.99.99"
reason: Only triggers warnings on 7.0+
features: warnings
features: allowed_warnings
- do:
warnings:
allowed_warnings:
- "[use_field_mapping] is a special format that was only used to ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore."
search:
body:

View File

@ -36,10 +36,10 @@ setup:
"Indices boost using object":
- skip:
reason: deprecation was added in 5.2.0
features: "warnings"
features: "allowed_warnings"
- do:
warnings:
allowed_warnings:
- 'Object format in indices_boost is deprecated, please use array format instead'
search:
rest_total_hits_as_int: true
@ -52,7 +52,7 @@ setup:
- match: { hits.hits.1._index: test_2 }
- do:
warnings:
allowed_warnings:
- 'Object format in indices_boost is deprecated, please use array format instead'
search:
rest_total_hits_as_int: true

View File

@ -118,10 +118,7 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest<AddVotin
timeout = in.readTimeValue();
if (nodeDescriptions.length > 0) {
deprecationLogger.deprecate(
"voting_config_exclusion",
"nodeDescription is deprecated and will be removed, use nodeIds or nodeNames instead"
);
deprecationLogger.deprecate("voting_config_exclusion", DEPRECATION_MESSAGE);
}
}

View File

@ -51,11 +51,12 @@ import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class TransportGetAliasesAction extends TransportMasterNodeReadAction<GetAliasesRequest, GetAliasesResponse> {
@ -152,7 +153,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
ClusterState state,
ImmutableOpenMap<String, List<AliasMetadata>> aliasesMap
) {
List<String> systemIndicesNames = new ArrayList<>();
Set<String> systemIndicesNames = new HashSet<>();
for (Iterator<String> it = aliasesMap.keysIt(); it.hasNext();) {
String indexName = it.next();
IndexMetadata index = state.metadata().index(indexName);
@ -161,11 +162,13 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
}
}
if (systemIndicesNames.isEmpty() == false) {
deprecationLogger.deprecate(
"open_system_index_access",
"this request accesses system indices: {}, but in a future major version, direct access to system "
systemIndicesNames.forEach(
systemIndexName -> deprecationLogger.deprecate(
"open_system_index_access_" + systemIndexName,
"this request accesses system indices: [{}], but in a future major version, direct access to system "
+ "indices will be prevented by default",
systemIndicesNames
systemIndexName
)
);
} else {
checkSystemAliasAccess(request, systemIndices);

View File

@ -365,11 +365,13 @@ public class IndexNameExpressionResolver {
.sorted() // reliable order for testing
.collect(Collectors.toList());
if (resolvedSystemIndices.isEmpty() == false) {
deprecationLogger.deprecate(
"open_system_index_access",
"this request accesses system indices: {}, but in a future major version, direct access to system "
resolvedSystemIndices.forEach(
systemIndexName -> deprecationLogger.deprecate(
"open_system_index_access_" + systemIndexName,
"this request accesses system indices: [{}], but in a future major version, direct access to system "
+ "indices will be prevented by default",
resolvedSystemIndices
systemIndexName
)
);
}
}

View File

@ -90,7 +90,12 @@ public class Joda {
if (formatName != null && formatName.isCamelCase(input)) {
String msg = "Camel case format name {} is deprecated and will be removed in a future version. "
+ "Use snake case name {} instead.";
getDeprecationLogger().deprecate("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName());
getDeprecationLogger().deprecate(
"camelCaseDateFormat_" + formatName.getCamelCaseName(),
msg,
formatName.getCamelCaseName(),
formatName.getSnakeCaseName()
);
}
DateTimeFormatter formatter;

View File

@ -37,15 +37,31 @@ import java.util.Map;
import org.opensearch.common.Strings;
import org.opensearch.common.collect.MapBuilder;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/**
* A logger message used by {@link DeprecationLogger}.
* Carries x-opaque-id field if provided in the headers. Will populate the x-opaque-id field in JSON logs.
*/
public class DeprecatedMessage extends OpenSearchLogMessage {
public static final String X_OPAQUE_ID_FIELD_NAME = "x-opaque-id";
private static final Set<String> keys = ConcurrentHashMap.newKeySet();
private final String keyWithXOpaqueId;
public DeprecatedMessage(String key, String xOpaqueId, String messagePattern, Object... args) {
super(fieldMap(key, xOpaqueId), messagePattern, args);
this.keyWithXOpaqueId = new StringBuilder().append(key).append(xOpaqueId).toString();
}
/**
* This method is to reset the key set which is used to log unique deprecation logs only.
* The key set helps avoiding the deprecation messages being logged multiple times.
* This method is a utility to reset this set for tests so they can run independent of each other.
* Otherwise, a warning can be logged by some test and the upcoming test can be impacted by it.
*/
public static void resetDeprecatedMessageForTests() {
keys.clear();
}
private static Map<String, Object> fieldMap(String key, String xOpaqueId) {
@ -58,4 +74,8 @@ public class DeprecatedMessage extends OpenSearchLogMessage {
}
return builder.immutableMap();
}
public boolean isAlreadyLogged() {
return !keys.add(keyWithXOpaqueId);
}
}

View File

@ -106,10 +106,10 @@ public class DeprecationLogger {
public class DeprecationLoggerBuilder {
public DeprecationLoggerBuilder withDeprecation(String key, String msg, Object[] params) {
OpenSearchLogMessage deprecationMessage = new DeprecatedMessage(key, HeaderWarning.getXOpaqueId(), msg, params);
DeprecatedMessage deprecationMessage = new DeprecatedMessage(key, HeaderWarning.getXOpaqueId(), msg, params);
if (!deprecationMessage.isAlreadyLogged()) {
logger.log(DEPRECATION, deprecationMessage);
}
return this;
}
}

View File

@ -1971,7 +1971,12 @@ public class DateFormatters {
String msg = "Camel case format name {} is deprecated and will be removed in a future version. "
+ "Use snake case name {} instead.";
deprecationLogger.getOrCompute()
.deprecate("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName());
.deprecate(
"camelCaseDateFormat_" + formatName.getCamelCaseName(),
msg,
formatName.getCamelCaseName(),
formatName.getSnakeCaseName()
);
}
if (FormatNames.ISO8601.matches(input)) {

View File

@ -214,7 +214,7 @@ public class DateUtils {
String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId);
if (deprecatedId != null) {
deprecationLogger.deprecate(
"timezone",
"timezone_" + zoneId,
"Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead"
);
return ZoneId.of(deprecatedId);

View File

@ -90,7 +90,7 @@ public class OpenSearchExecutors {
final int availableProcessors = Runtime.getRuntime().availableProcessors();
if (value > availableProcessors) {
deprecationLogger.deprecate(
"processors",
"processors_" + name,
"setting [{}] to value [{}] which is more than available processors [{}] is deprecated",
name,
value,

View File

@ -65,7 +65,7 @@ public class LoggingDeprecationHandler implements DeprecationHandler {
public void usedDeprecatedName(String parserName, Supplier<XContentLocation> location, String usedName, String modernName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.deprecate(
"deprecated_field",
usedName + "_deprecated_name",
"{}Deprecated field [{}] used, expected [{}] instead",
prefix,
usedName,
@ -76,14 +76,20 @@ public class LoggingDeprecationHandler implements DeprecationHandler {
@Override
public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName, String replacedWith) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.deprecate("deprecated_field", "{}Deprecated field [{}] used, replaced by [{}]", prefix, usedName, replacedWith);
deprecationLogger.deprecate(
usedName + "_deprecated_field",
"{}Deprecated field [{}] used, replaced by [{}]",
prefix,
usedName,
replacedWith
);
}
@Override
public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.deprecate(
"deprecated_field",
usedName + "_deprecated_field",
"{}Deprecated field [{}] used, this field is unused and will be removed entirely",
prefix,
usedName

View File

@ -184,7 +184,7 @@ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisCompone
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
name(),
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;
@ -211,7 +211,10 @@ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisCompone
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(name(), "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0");
DEPRECATION_LOGGER.deprecate(
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this;
}
}

View File

@ -104,8 +104,8 @@ public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters",
"Token filter " + name() + "] will not be usable to parse synonym after v7.0"
name() + "_synonym_tokenfilters",
"Token filter " + name() + "] will not be usable to parse synonyms after v7.0"
);
}
return this;

View File

@ -192,7 +192,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
throw new OpenSearchParseException("Field parameter [{}] is not supported for [{}] field type", fieldName, CONTENT_TYPE);
}
DEPRECATION_LOGGER.deprecate(
"geo_mapper_field_parameter",
"geo_mapper_field_parameter_" + fieldName,
"Field parameter [{}] is deprecated and will be removed in a future version.",
fieldName
);

View File

@ -667,7 +667,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
Parameter<?> parameter = deprecatedParamsMap.get(propName);
if (parameter != null) {
deprecationLogger.deprecate(
propName,
propName + name,
"Parameter [{}] on mapper [{}] is deprecated, use [{}]",
propName,
name,
@ -679,7 +679,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
if (parameter == null) {
if (isDeprecatedParameter(propName, parserContext.indexVersionCreated())) {
deprecationLogger.deprecate(
propName,
propName + type,
"Parameter [{}] has no effect on type [{}] and will be removed in future",
propName,
type
@ -692,7 +692,11 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
);
}
if (Objects.equals("boost", propName)) {
deprecationLogger.deprecate("boost", "Parameter [boost] on field [{}] is deprecated and will be removed in 8.0", name);
deprecationLogger.deprecate(
"boost_" + name,
"Parameter [boost] on field [{}] is deprecated and will be removed in 8.0",
name
);
}
if (propNode == null && parameter.acceptsNull == false) {
throw new MapperParsingException(

View File

@ -449,7 +449,7 @@ public class RootObjectMapper extends ObjectMapper {
} else {
deprecationMessage = message;
}
DEPRECATION_LOGGER.deprecate("invalid_dynamic_template", deprecationMessage);
DEPRECATION_LOGGER.deprecate("invalid_dynamic_template_" + dynamicTemplate.getName(), deprecationMessage);
}
}

View File

@ -148,7 +148,11 @@ public class TypeParsers {
iterator.remove();
} else if (propName.equals("boost")) {
builder.boost(nodeFloatValue(propNode));
deprecationLogger.deprecate("boost", "Parameter [boost] on field [{}] is deprecated and will be removed in 8.0", name);
deprecationLogger.deprecate(
"boost_" + name,
"Parameter [boost] on field [{}] is deprecated and will be removed in 8.0",
name
);
iterator.remove();
} else if (propName.equals("index_options")) {
builder.indexOptions(nodeIndexOptionValue(propNode));

View File

@ -118,7 +118,7 @@ public class RestMultiGetAction extends BaseRestHandler {
for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
if (item.type() != null) {
deprecationLogger.deprecate("multi_get_types_removal", TYPES_DEPRECATION_MESSAGE);
deprecationLogger.deprecate("mget_with_types", TYPES_DEPRECATION_MESSAGE);
break;
}
}

View File

@ -160,7 +160,7 @@ public class DateIntervalWrapper implements ToXContentFragment, Writeable {
/** Get the current interval in milliseconds that is set on this builder. */
@Deprecated
public long interval() {
DEPRECATION_LOGGER.deprecate("date-interval-getter", DEPRECATION_TEXT);
DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) {
return TimeValue.parseTimeValue(dateHistogramInterval.toString(), "interval").getMillis();
}
@ -181,14 +181,14 @@ public class DateIntervalWrapper implements ToXContentFragment, Writeable {
throw new IllegalArgumentException("[interval] must be 1 or greater for aggregation [date_histogram]");
}
setIntervalType(IntervalTypeEnum.LEGACY_INTERVAL);
DEPRECATION_LOGGER.deprecate("date-interval-setter", DEPRECATION_TEXT);
DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
this.dateHistogramInterval = new DateHistogramInterval(interval + "ms");
}
/** Get the current date interval that is set on this builder. */
@Deprecated
public DateHistogramInterval dateHistogramInterval() {
DEPRECATION_LOGGER.deprecate("date-histogram-interval-getter", DEPRECATION_TEXT);
DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
if (intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) {
return dateHistogramInterval;
}
@ -209,7 +209,7 @@ public class DateIntervalWrapper implements ToXContentFragment, Writeable {
throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [date_histogram]");
}
setIntervalType(IntervalTypeEnum.LEGACY_DATE_HISTO);
DEPRECATION_LOGGER.deprecate("date-histogram-interval-setter", DEPRECATION_TEXT);
DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
this.dateHistogramInterval = dateHistogramInterval;
}

View File

@ -121,7 +121,7 @@ public class TransportInfo implements ReportingService.Info {
publishAddressString = hostString + '/' + publishAddress.toString();
} else {
deprecationLogger.deprecate(
"cname_in_publish_address",
"cname_in_publish_address_" + propertyName,
propertyName
+ " was printed as [ip:port] instead of [hostname/ip:port]. "
+ "This format is deprecated and will change to [hostname/ip:port] in a future version. "

View File

@ -252,11 +252,11 @@ public class SettingsUpdaterTests extends OpenSearchTestCase {
final Settings toApplyUnset = Settings.builder().putNull("logger.org.opensearch").build();
final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY, logger);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedSetting });
assertNoDeprecationWarnings();
// we also check that if no settings are changed, deprecation logging still occurs
settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY, logger);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedSetting });
assertNoDeprecationWarnings();
}
public void testUpdateWithUnknownAndSettings() {

View File

@ -599,6 +599,5 @@ public class TransportAnalyzeActionTests extends OpenSearchTestCase {
analyze = TransportAnalyzeAction.analyze(req, registry, mockIndexService(), maxTokenCount);
assertEquals(1, analyze.getTokens().size());
assertWarnings("Using deprecated token filter [deprecated]");
}
}

View File

@ -2202,7 +2202,9 @@ public class IndexNameExpressionResolverTests extends OpenSearchTestCase {
List<String> indexNames = resolveConcreteIndexNameList(state, request);
assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta"));
assertWarnings(
"this request accesses system indices: [.ml-meta, .ml-stuff], but in a future major version, "
"this request accesses system indices: [.ml-meta], but in a future major version, "
+ "direct access to system indices will be prevented by default",
"this request accesses system indices: [.ml-stuff], but in a future major version, "
+ "direct access to system indices will be prevented by default"
);
@ -2230,7 +2232,9 @@ public class IndexNameExpressionResolverTests extends OpenSearchTestCase {
List<String> indexNames = resolveConcreteIndexNameList(state, request);
assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff"));
assertWarnings(
"this request accesses system indices: [.ml-meta, .ml-stuff], but in a future major version, direct access "
"this request accesses system indices: [.ml-meta], but in a future major version, direct access "
+ "to system indices will be prevented by default",
"this request accesses system indices: [.ml-stuff], but in a future major version, direct access "
+ "to system indices will be prevented by default"
);

View File

@ -70,26 +70,26 @@ public class DiscoveryNodeRoleSettingTests extends OpenSearchTestCase {
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() });
assertThat(DiscoveryNode.getRolesFromSettings(legacyTrue), hasItem(role));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() });
assertNoDeprecationWarnings();
final Settings legacyFalse = Settings.builder().put(role.legacySetting().getKey(), false).build();
assertFalse(predicate.test(legacyFalse));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() });
assertNoDeprecationWarnings();
assertThat(DiscoveryNode.getRolesFromSettings(legacyFalse), not(hasItem(role)));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() });
assertNoDeprecationWarnings();
assertTrue(predicate.test(onlyRole(role)));
assertThat(DiscoveryNode.getRolesFromSettings(onlyRole(role)), hasItem(role));
assertNoDeprecationWarnings();
assertFalse(predicate.test(removeRoles(Collections.singleton(role))));
assertThat(DiscoveryNode.getRolesFromSettings(removeRoles(Collections.singleton(role))), not(hasItem(role)));
assertNoDeprecationWarnings();
final Settings settings = Settings.builder().put(onlyRole(role)).put(role.legacySetting().getKey(), randomBoolean()).build();
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DiscoveryNode.getRolesFromSettings(settings));
assertThat(e.getMessage(), startsWith("can not explicitly configure node roles and use legacy role setting"));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() });
assertNoDeprecationWarnings();
}
}

View File

@ -54,4 +54,19 @@ public class DeprecationLoggerTests extends OpenSearchTestCase {
assertThat(numberOfLoggersAfter, equalTo(numberOfLoggersBefore + 1));
}
public void testDuplicateLogMessages() {
    DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DeprecationLoggerTests.class);
    // Emit three distinct deprecation messages several times each, interleaved,
    // to exercise the logger's de-duplication of repeated warnings.
    final int[] emissionOrder = { 1, 2, 3, 2, 1, 3, 1, 3, 2 };
    for (final int id : emissionOrder) {
        deprecationLogger.deprecate("deprecated-message-" + id, "Deprecated message " + id);
    }
    // assert that only unique warnings are logged
    assertWarnings("Deprecated message 1", "Deprecated message 2", "Deprecated message 3");
}
}

View File

@ -524,15 +524,6 @@ public class DateFormattersTests extends OpenSearchTestCase {
assertThat(dateFormatter.pattern(), equalTo(name));
String snakeCaseName = FormatNames.forName(name).getSnakeCaseName();
assertWarnings(
"Camel case format name "
+ name
+ " is deprecated and will be removed in a future version. "
+ "Use snake case name "
+ snakeCaseName
+ " instead."
);
dateFormatter = Joda.forPattern(snakeCaseName);
assertThat(dateFormatter.pattern(), equalTo(snakeCaseName));
}

View File

@ -312,7 +312,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates");
{
mapping.startObject();
mapping.startObject("my_template");
mapping.startObject("my_template1");
mapping.field("match_mapping_type", "string");
mapping.startObject("mapping");
mapping.field("type", "string");
@ -328,7 +328,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
assertThat(mapper.mappingSource().toString(), containsString("\"type\":\"string\""));
assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{\"type\":"
"dynamic template [my_template1] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{\"type\":"
+ "\"string\"}}], caused by [No mapper found for type [string]]"
);
}
@ -341,7 +341,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates");
{
mapping.startObject();
mapping.startObject("my_template");
mapping.startObject("my_template2");
mapping.field("match_mapping_type", "string");
mapping.startObject("mapping");
mapping.field("type", "keyword");
@ -358,9 +358,9 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\""));
assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{"
"dynamic template [my_template2] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{"
+ "\"foo\":\"bar\",\"type\":\"keyword\"}}], "
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template] of type [keyword]]"
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template2] of type [keyword]]"
);
}
@ -372,7 +372,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates");
{
mapping.startObject();
mapping.startObject("my_template");
mapping.startObject("my_template3");
mapping.field("match_mapping_type", "string");
mapping.startObject("mapping");
mapping.field("type", "text");
@ -389,7 +389,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
assertThat(mapper.mappingSource().toString(), containsString("\"analyzer\":\"foobar\""));
assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{"
"dynamic template [my_template3] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{"
+ "\"analyzer\":\"foobar\",\"type\":\"text\"}}], caused by [analyzer [foobar] has not been configured in mappings]"
);
}
@ -405,7 +405,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates");
{
mapping.startObject();
mapping.startObject("my_template");
mapping.startObject("my_template4");
if (randomBoolean()) {
mapping.field("match_mapping_type", "*");
} else {
@ -439,7 +439,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates");
{
mapping.startObject();
mapping.startObject("my_template");
mapping.startObject("my_template4");
if (useMatchMappingType) {
mapping.field("match_mapping_type", "*");
} else {
@ -465,15 +465,15 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\""));
if (useMatchMappingType) {
assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"*\",\"mapping\":{"
"dynamic template [my_template4] has invalid content [{\"match_mapping_type\":\"*\",\"mapping\":{"
+ "\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], "
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template] of type [binary]]"
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template4] of type [binary]]"
);
} else {
assertWarnings(
"dynamic template [my_template] has invalid content [{\"match\":\"string_*\",\"mapping\":{"
"dynamic template [my_template4] has invalid content [{\"match\":\"string_*\",\"mapping\":{"
+ "\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], "
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template] of type [binary]]"
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template4] of type [binary]]"
);
}
}

View File

@ -43,6 +43,8 @@ import org.opensearch.test.AbstractQueryTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.contains;
@ -50,6 +52,8 @@ import static org.hamcrest.Matchers.containsString;
public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder> {
private Set<String> assertedWarnings = new HashSet<>();
@Override
protected IdsQueryBuilder doCreateTestQueryBuilder() {
final String type;
@ -161,8 +165,9 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
assertThat(query, instanceOf(IdsQueryBuilder.class));
IdsQueryBuilder idsQuery = (IdsQueryBuilder) query;
if (idsQuery.types().length > 0) {
if (idsQuery.types().length > 0 && !assertedWarnings.contains(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
assertedWarnings.add(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
}
return query;
}

View File

@ -66,6 +66,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
@ -84,6 +85,8 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
private static Item[] randomLikeItems;
private static Item[] randomUnlikeItems;
private Set<String> assertedWarnings = new HashSet<>();
@Before
public void setup() {
// MLT only supports string fields, unsupported fields are tested below
@ -480,8 +483,9 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class));
MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query;
if (mltQuery.isTypeless() == false) {
if (mltQuery.isTypeless() == false && !assertedWarnings.contains(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
assertWarnings(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE);
assertedWarnings.add(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE);
}
return query;
}

View File

@ -60,8 +60,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
@ -72,6 +74,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
private List<Object> randomTerms;
private String termsPath;
private boolean maybeIncludeType = true;
private Set<String> assertedWarnings = new HashSet<>();
@Before
public void randomTerms() {
@ -380,8 +383,10 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class));
TermsQueryBuilder termsQuery = (TermsQueryBuilder) query;
if (termsQuery.isTypeless() == false) {
assertWarnings("Deprecated field [type] used, this field is unused and will be removed entirely");
String deprecationWarning = "Deprecated field [type] used, this field is unused and will be removed entirely";
if (termsQuery.isTypeless() == false && !assertedWarnings.contains(deprecationWarning)) {
assertWarnings(deprecationWarning);
assertedWarnings.add(deprecationWarning);
}
return query;
} finally {

View File

@ -79,6 +79,7 @@ public class RestGetSourceActionTests extends RestActionTestCase {
* test deprecation is logged if type is used in path
*/
public void testTypeInPath() {
boolean assertWarnings = true;
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
@ -89,7 +90,10 @@ public class RestGetSourceActionTests extends RestActionTestCase {
.build();
dispatchRequest(request);
if (assertWarnings) {
assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
assertWarnings = false;
}
}
}
}
@ -98,6 +102,7 @@ public class RestGetSourceActionTests extends RestActionTestCase {
* test deprecation is logged if type is used as parameter
*/
public void testTypeParameter() {
boolean assertWarnings = true;
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
Map<String, String> params = new HashMap<>();
@ -110,7 +115,10 @@ public class RestGetSourceActionTests extends RestActionTestCase {
.withParams(params)
.build();
dispatchRequest(request);
if (assertWarnings) {
assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
assertWarnings = false;
}
}
}
}

View File

@ -229,7 +229,7 @@ public class ScriptMetadataTests extends AbstractSerializingTestCase<ScriptMetad
BytesReference.bytes(builder).streamInput()
);
ScriptMetadata.fromXContent(parser);
assertWarnings("empty scripts should no longer be used");
assertNoDeprecationWarnings();
builder = XContentFactory.jsonBuilder();
builder.startObject().startObject("script").field("lang", "mustache").field("source", "").endObject().endObject();
@ -240,7 +240,7 @@ public class ScriptMetadataTests extends AbstractSerializingTestCase<ScriptMetad
BytesReference.bytes(builder).streamInput()
);
ScriptMetadata.fromXContent(parser);
assertWarnings("empty templates should no longer be used");
assertNoDeprecationWarnings();
}
public void testOldStyleDropped() throws IOException {

View File

@ -255,7 +255,7 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap());
assertThat(parsed, equalTo(source));
assertWarnings("empty templates should no longer be used");
assertNoDeprecationWarnings();
}
}

View File

@ -80,8 +80,10 @@ import org.opensearch.search.builder.SearchSourceBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import static org.opensearch.search.sort.FieldSortBuilder.getMinMaxOrNull;
import static org.opensearch.search.sort.FieldSortBuilder.getPrimaryFieldSortOrNull;
@ -90,6 +92,8 @@ import static org.hamcrest.Matchers.instanceOf;
public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder> {
private Set<String> assertedWarnings = new HashSet<>();
/**
* {@link #provideMappedFieldType(String)} will return a
*/
@ -694,14 +698,17 @@ public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder
@Override
protected void assertWarnings(FieldSortBuilder testItem) {
List<String> expectedWarnings = new ArrayList<>();
if (testItem.getNestedFilter() != null) {
expectedWarnings.add("[nested_filter] has been deprecated in favour for the [nested] parameter");
String nestedFilterDeprecationWarning = "[nested_filter] has been deprecated in favour for the [nested] parameter";
String nestedPathDeprecationWarning = "[nested_path] has been deprecated in favor of the [nested] parameter";
if (testItem.getNestedFilter() != null && !assertedWarnings.contains(nestedFilterDeprecationWarning)) {
expectedWarnings.add(nestedFilterDeprecationWarning);
}
if (testItem.getNestedPath() != null) {
expectedWarnings.add("[nested_path] has been deprecated in favor of the [nested] parameter");
if (testItem.getNestedPath() != null && !assertedWarnings.contains(nestedPathDeprecationWarning)) {
expectedWarnings.add(nestedPathDeprecationWarning);
}
if (expectedWarnings.isEmpty() == false) {
assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()]));
assertedWarnings.addAll(expectedWarnings);
}
}

View File

@ -64,13 +64,17 @@ import org.opensearch.test.geo.RandomGeoGenerator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.instanceOf;
public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanceSortBuilder> {
private Set<String> assertedWarnings = new HashSet<>();
@Override
protected GeoDistanceSortBuilder createTestItem() {
return randomGeoDistanceSortBuilder();
@ -407,14 +411,17 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
@Override
protected void assertWarnings(GeoDistanceSortBuilder testItem) {
List<String> expectedWarnings = new ArrayList<>();
if (testItem.getNestedFilter() != null) {
expectedWarnings.add("[nested_filter] has been deprecated in favour of the [nested] parameter");
String nestedFilterDeprecationWarning = "[nested_filter] has been deprecated in favour of the [nested] parameter";
String nestedPathDeprecationWarning = "[nested_path] has been deprecated in favour of the [nested] parameter";
if (testItem.getNestedFilter() != null && !assertedWarnings.contains(nestedFilterDeprecationWarning)) {
expectedWarnings.add(nestedFilterDeprecationWarning);
}
if (testItem.getNestedPath() != null) {
expectedWarnings.add("[nested_path] has been deprecated in favour of the [nested] parameter");
if (testItem.getNestedPath() != null && !assertedWarnings.contains(nestedPathDeprecationWarning)) {
expectedWarnings.add(nestedPathDeprecationWarning);
}
if (expectedWarnings.isEmpty() == false) {
assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()]));
assertedWarnings.addAll(expectedWarnings);
}
}

View File

@ -59,6 +59,7 @@ public class SortBuilderTests extends OpenSearchTestCase {
private static final int NUMBER_OF_RUNS = 20;
private static NamedXContentRegistry xContentRegistry;
private Set<String> assertedWarnings = new HashSet<>();
@BeforeClass
public static void init() {
@ -152,11 +153,13 @@ public class SortBuilderTests extends OpenSearchTestCase {
for (SortBuilder<?> builder : testBuilders) {
if (builder instanceof GeoDistanceSortBuilder) {
GeoDistanceSortBuilder gdsb = (GeoDistanceSortBuilder) builder;
if (gdsb.getNestedFilter() != null) {
expectedWarningHeaders.add("[nested_filter] has been deprecated in favour of the [nested] parameter");
String nestedFilterDeprecationWarning = "[nested_filter] has been deprecated in favour of the [nested] parameter";
String nestedPathDeprecationWarning = "[nested_path] has been deprecated in favour of the [nested] parameter";
if (gdsb.getNestedFilter() != null && !assertedWarnings.contains(nestedFilterDeprecationWarning)) {
expectedWarningHeaders.add(nestedFilterDeprecationWarning);
}
if (gdsb.getNestedPath() != null) {
expectedWarningHeaders.add("[nested_path] has been deprecated in favour of the [nested] parameter");
if (gdsb.getNestedPath() != null && !assertedWarnings.contains(nestedPathDeprecationWarning)) {
expectedWarningHeaders.add(nestedPathDeprecationWarning);
}
}
@ -199,6 +202,7 @@ public class SortBuilderTests extends OpenSearchTestCase {
}
if (expectedWarningHeaders.size() > 0) {
assertWarnings(expectedWarningHeaders.toArray(new String[expectedWarningHeaders.size()]));
assertedWarnings.addAll(expectedWarningHeaders);
}
}
}

View File

@ -80,6 +80,7 @@ import org.opensearch.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.joda.JodaDeprecationPatterns;
import org.opensearch.common.logging.DeprecatedMessage;
import org.opensearch.common.logging.HeaderWarning;
import org.opensearch.common.logging.HeaderWarningAppender;
import org.opensearch.common.logging.LogConfigurator;
@ -425,6 +426,8 @@ public abstract class OpenSearchTestCase extends LuceneTestCase {
}
ensureAllSearchContextsReleased();
ensureCheckIndexPassed();
// "clear" the deprecated message set for the next tests to run independently.
DeprecatedMessage.resetDeprecatedMessageForTests();
logger.info("{}after test", getTestParamsForLogging());
}
@ -491,6 +494,15 @@ public abstract class OpenSearchTestCase extends LuceneTestCase {
);
}
/**
* Convenience method to assert same warnings for settings deprecations and general deprecation warnings
* are not logged again.
*/
/**
 * Asserts that no deprecation warnings (settings deprecations or general deprecation
 * warnings) were recorded on the thread context, i.e. warnings already logged once were
 * not logged again. On failure the message includes the offending warning headers.
 */
protected final void assertNoDeprecationWarnings() {
    final List<String> actualWarnings = threadContext.getResponseHeaders().get("Warning");
    // assertNull reports the unexpected warnings directly, which is easier to debug
    // than a bare boolean assertion.
    assertNull("Found duplicate warnings logged: " + actualWarnings, actualWarnings);
}
/**
 * Convenience overload that delegates to {@code assertWarnings(boolean, String...)} with the
 * boolean flag set to {@code true}.
 *
 * @param expectedWarnings the deprecation warnings expected to have been logged
 */
protected final void assertWarnings(String... expectedWarnings) {
    assertWarnings(true, expectedWarnings);
}

View File

@ -103,6 +103,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
@ -127,6 +128,10 @@ public abstract class OpenSearchRestTestCase extends OpenSearchTestCase {
public static final String CLIENT_SOCKET_TIMEOUT = "client.socket.timeout";
public static final String CLIENT_PATH_PREFIX = "client.path.prefix";
// This set will contain the warnings already asserted since we are eliminating logging duplicate warnings.
// This ensures that no matter in what order the tests run, the warning is asserted once.
private static Set<String> assertedWarnings = ConcurrentHashMap.newKeySet();
/**
* Convert the entity from a {@link Response} into a map of maps.
*/
@ -256,6 +261,9 @@ public abstract class OpenSearchRestTestCase extends OpenSearchTestCase {
@Override
public boolean warningsShouldFailRequest(List<String> warnings) {
if (warnings.isEmpty()) {
return false;
}
if (isExclusivelyTargetingCurrentVersionCluster()) {
// absolute equality required in expected and actual.
Set<String> actual = new HashSet<>(warnings);
@ -298,6 +306,18 @@ public abstract class OpenSearchRestTestCase extends OpenSearchTestCase {
return expectVersionSpecificWarnings(consumer -> consumer.current(warnings));
}
/**
 * Creates request options that expect the given deprecation warning only the first time it is
 * requested; for a warning that has already been claimed, plain {@link RequestOptions#DEFAULT}
 * is returned. This mirrors the server-side behavior of logging each deprecation warning once.
 *
 * @param deprecationWarning expected warning
 * @return options expecting the warning on first use, {@link RequestOptions#DEFAULT} otherwise
 */
public static RequestOptions expectWarningsOnce(String deprecationWarning) {
    // Set.add on the ConcurrentHashMap-backed key set is atomic and returns false when the
    // element is already present. Using its return value avoids the check-then-act race that
    // a separate contains()/add() pair would have across concurrently running tests.
    if (assertedWarnings.add(deprecationWarning) == false) {
        return RequestOptions.DEFAULT;
    }
    return expectWarnings(deprecationWarning);
}
/**
* Creates RequestOptions designed to ignore [types removal] warnings but nothing else
* @deprecated this method is only required while we deprecate types and can be removed in 8.0
@ -1252,15 +1272,9 @@ public abstract class OpenSearchRestTestCase extends OpenSearchTestCase {
protected static void performSyncedFlush(String indexName, boolean retryOnConflict) throws Exception {
final Request request = new Request("POST", indexName + "/_flush/synced");
final List<String> expectedWarnings = Collections.singletonList(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE);
if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0))) {
final Builder options = RequestOptions.DEFAULT.toBuilder();
options.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false);
request.setOptions(options);
} else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0))) {
final Builder options = RequestOptions.DEFAULT.toBuilder();
options.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false);
request.setOptions(options);
}
// We have to spin a synced-flush request because we fire the global checkpoint sync for the last write operation.
// A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit.
assertBusy(() -> {

View File

@ -52,7 +52,7 @@ public class VersionSensitiveWarningsHandlerTests extends OpenSearchTestCase {
WarningsHandler handler = expectVersionSpecificWarnings(nodeVersions, (v) -> { v.current("expectedCurrent1"); });
assertFalse(handler.warningsShouldFailRequest(Arrays.asList("expectedCurrent1")));
assertTrue(handler.warningsShouldFailRequest(Arrays.asList("expectedCurrent1", "unexpected")));
assertTrue(handler.warningsShouldFailRequest(Collections.emptyList()));
assertFalse(handler.warningsShouldFailRequest(Collections.emptyList()));
}