Avoid logging duplicate deprecation warnings multiple times (#1660)

* Avoid logging duplicate deprecation warnings multiple times

Signed-off-by: Vacha <vachshah@amazon.com>

* Fixes test failures

Signed-off-by: Vacha <vachshah@amazon.com>

* Adding deprecation logger tests

Signed-off-by: Vacha <vachshah@amazon.com>

* Using ConcurrentHashMap keySet

Signed-off-by: Vacha Shah <vachshah@amazon.com>
Vacha <vachshah@amazon.com> authored on 2021-12-15 15:26:44 -08:00, committed by GitHub
parent 10e51bb4ec, commit e66ea2c4f3
84 changed files with 395 additions and 279 deletions
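
The change keys each deprecation warning and skips logging it again once it has been emitted for a given key and X-Opaque-ID. A minimal sketch of that idea, using the ConcurrentHashMap-backed key set mentioned in the last commit-message bullet (illustrative only, not the actual OpenSearch DeprecationLogger code; the class and method names below are made up):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    class DeduplicatingDeprecationLogger {
        // Thread-safe Set view backed by a ConcurrentHashMap, as referenced in the commit message.
        private static final Set<String> ALREADY_LOGGED = ConcurrentHashMap.newKeySet();

        void deprecate(String key, String opaqueId, String message) {
            // add() returns false when this key/opaque-id pair was already seen,
            // so each distinct pair is logged at most once.
            if (ALREADY_LOGGED.add(key + ":" + opaqueId)) {
                System.err.println("DEPRECATION: " + message);
            }
        }
    }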


@@ -84,6 +84,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
+    @Override
+    protected boolean enableWarningsCheck() {
+        return false;
+    }
+
     private static BulkProcessor.Builder initBulkProcessorBuilder(BulkProcessor.Listener listener) {
         return BulkProcessor.builder(
             (request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
@@ -95,7 +100,7 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
         return BulkProcessor.builder(
             (request, bulkListener) -> highLevelClient().bulkAsync(
                 request,
-                expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
+                expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
                 bulkListener
             ),
             listener
@@ -506,11 +511,6 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
             } else {
                 BytesArray data = bytesBulkRequest(localIndex, localType, i);
                 processor.add(data, globalIndex, globalType, globalPipeline, XContentType.JSON);
-                if (localType != null) {
-                    // If the payload contains types, parsing it into a bulk request results in a warning.
-                    assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE);
-                }
             }
             multiGetRequest.add(localIndex, Integer.toString(i));
         }
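
The test changes in this and the following files swap expectWarnings(...) for an expectWarningsOnce(...) helper, so a request no longer fails when the node has already emitted, and now deduplicates, the deprecation warning for an earlier request. A rough sketch of what such a helper could look like, built only on the RequestOptions/WarningsHandler usage visible later in this diff; the real helper in the shared test infrastructure may differ:

    // Hypothetical helper: accept either no warnings (already deduplicated) or exactly the expected one.
    private static RequestOptions expectWarningsOnce(String expectedWarning) {
        final RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
        // The handler returns true when the returned warnings should fail the request.
        builder.setWarningsHandler(
            warnings -> warnings.isEmpty() == false && (warnings.contains(expectedWarning) == false || warnings.size() != 1)
        );
        return builder.build();
    }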


@@ -210,7 +210,7 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
             request,
             highLevelClient()::bulk,
             highLevelClient()::bulkAsync,
-            expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertFalse(bulkResponse.hasFailures());
         return bulkResponse;


@@ -214,7 +214,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             indexRequest,
             highLevelClient()::index,
             highLevelClient()::indexAsync,
-            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
         );
         DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
@@ -222,7 +222,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             deleteRequest,
             highLevelClient()::delete,
             highLevelClient()::deleteAsync,
-            expectWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestDeleteAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertEquals("index", deleteResponse.getIndex());
@@ -425,7 +425,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             indexRequest,
             highLevelClient()::index,
             highLevelClient()::indexAsync,
-            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
         );
         GetRequest getRequest = new GetRequest("index", "type", "id");
@@ -433,7 +433,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             getRequest,
             highLevelClient()::get,
             highLevelClient()::getAsync,
-            expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertEquals("index", getResponse.getIndex());
@@ -512,7 +512,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
         bulk.add(new IndexRequest("index", "type", "id1").source("{\"field\":\"value1\"}", XContentType.JSON));
         bulk.add(new IndexRequest("index", "type", "id2").source("{\"field\":\"value2\"}", XContentType.JSON));
-        highLevelClient().bulk(bulk, expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
+        highLevelClient().bulk(bulk, expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
         MultiGetRequest multiGetRequest = new MultiGetRequest();
         multiGetRequest.add("index", "id1");
         multiGetRequest.add("index", "type", "id2");
@@ -521,7 +521,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             multiGetRequest,
             highLevelClient()::mget,
             highLevelClient()::mgetAsync,
-            expectWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertEquals(2, response.getResponses().length);
@@ -747,7 +747,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
             indexRequest,
             highLevelClient()::index,
             highLevelClient()::indexAsync,
-            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertEquals(RestStatus.CREATED, indexResponse.status());
         assertEquals("index", indexResponse.getIndex());
@@ -962,7 +962,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
            indexRequest,
            highLevelClient()::index,
            highLevelClient()::indexAsync,
-           expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+           expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
        );
        UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
@@ -971,7 +971,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
            updateRequest,
            highLevelClient()::update,
            highLevelClient()::updateAsync,
-           expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)
+           expectWarningsOnce(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)
        );
        assertEquals(RestStatus.OK, updateResponse.status());


@@ -293,7 +293,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             createIndexRequest,
             highLevelClient().indices()::create,
             highLevelClient().indices()::createAsync,
-            expectWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertTrue(createIndexResponse.isAcknowledged());
@@ -326,7 +326,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             createIndexRequest,
             highLevelClient().indices()::create,
             highLevelClient().indices()::createAsync,
-            expectWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertTrue(createIndexResponse.isAcknowledged());
@@ -505,7 +505,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             getIndexRequest,
             highLevelClient().indices()::get,
             highLevelClient().indices()::getAsync,
-            expectWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE)
         );
         // default settings should be null
@@ -601,7 +601,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             putMappingRequest,
             highLevelClient().indices()::putMapping,
             highLevelClient().indices()::putMappingAsync,
-            expectWarnings(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertTrue(putMappingResponse.isAcknowledged());
@@ -676,7 +676,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             request,
             highLevelClient().indices()::getMapping,
             highLevelClient().indices()::getMappingAsync,
-            expectWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE)
         );
         Map<String, Object> mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap();
@@ -750,7 +750,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             getFieldMappingsRequest,
             highLevelClient().indices()::getFieldMapping,
             highLevelClient().indices()::getFieldMappingAsync,
-            expectWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE)
         );
         final Map<String, org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata> fieldMappingMap =
@@ -1090,7 +1090,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             syncedFlushRequest,
             highLevelClient().indices()::flushSynced,
             highLevelClient().indices()::flushSyncedAsync,
-            expectWarnings(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
+            expectWarningsOnce(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
         );
         assertThat(flushResponse.totalShards(), equalTo(1));
         assertThat(flushResponse.successfulShards(), equalTo(1));
@@ -1106,7 +1106,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
                 syncedFlushRequest,
                 highLevelClient().indices()::flushSynced,
                 highLevelClient().indices()::flushSyncedAsync,
-                expectWarnings(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
+                expectWarningsOnce(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
             )
         );
         assertEquals(RestStatus.NOT_FOUND, exception.status());
@@ -1368,7 +1368,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             rolloverRequest,
             highLevelClient().indices()::rollover,
             highLevelClient().indices()::rolloverAsync,
-            expectWarnings(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertTrue(rolloverResponse.isRolledOver());
         assertFalse(rolloverResponse.isDryRun());
@@ -1782,7 +1782,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             putTemplateRequest,
             highLevelClient().indices()::putTemplate,
             highLevelClient().indices()::putTemplateAsync,
-            expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
@@ -1846,7 +1846,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             putTemplateRequest,
             highLevelClient().indices()::putTemplate,
             highLevelClient().indices()::putTemplateAsync,
-            expectWarnings("Deprecated field [template] used, replaced by [index_patterns]")
+            expectWarningsOnce("Deprecated field [template] used, replaced by [index_patterns]")
         );
         assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
@@ -1916,7 +1916,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             putTemplateRequest,
             highLevelClient().indices()::putTemplate,
             highLevelClient().indices()::putTemplateAsync,
-            expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
@@ -2026,7 +2026,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
                 putTemplate1,
                 client.indices()::putTemplate,
                 client.indices()::putTemplateAsync,
-                expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+                expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
             ).isAcknowledged(),
             equalTo(true)
         );
@@ -2040,7 +2040,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
                 putTemplate2,
                 client.indices()::putTemplate,
                 client.indices()::putTemplateAsync,
-                expectWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+                expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
            ).isAcknowledged(),
            equalTo(true)
        );
@@ -2049,7 +2049,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             new GetIndexTemplatesRequest("template-1"),
             client.indices()::getTemplate,
             client.indices()::getTemplateAsync,
-            expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertThat(getTemplate1.getIndexTemplates(), hasSize(1));
         org.opensearch.cluster.metadata.IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0);
@@ -2062,7 +2062,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             new GetIndexTemplatesRequest("template-2"),
             client.indices()::getTemplate,
             client.indices()::getTemplateAsync,
-            expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertThat(getTemplate2.getIndexTemplates(), hasSize(1));
         org.opensearch.cluster.metadata.IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0);
@@ -2080,7 +2080,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             getBothRequest,
             client.indices()::getTemplate,
             client.indices()::getTemplateAsync,
-            expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertThat(getBoth.getIndexTemplates(), hasSize(2));
         assertThat(
@@ -2093,7 +2093,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
             getAllRequest,
             client.indices()::getTemplate,
             client.indices()::getTemplateAsync,
-            expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+            expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
         );
         assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2));
         assertThat(
@@ -2132,7 +2132,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
                 new GetIndexTemplatesRequest("template-*"),
                 client.indices()::getTemplate,
                 client.indices()::getTemplateAsync,
-                expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+                expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
             ).getIndexTemplates(),
             hasSize(1)
         );
@@ -2141,7 +2141,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
                 new GetIndexTemplatesRequest("template-*"),
                 client.indices()::getTemplate,
                 client.indices()::getTemplateAsync,
-                expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+                expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
             ).getIndexTemplates().get(0).name(),
             equalTo("template-2")
         );
@@ -2157,7 +2157,7 @@ public class IndicesClientIT extends OpenSearchRestHighLevelClientTestCase {
                 new GetIndexTemplatesRequest("template-*"),
                 client.indices()::getTemplate,
                 client.indices()::getTemplateAsync,
-                expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
+                expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE)
             )
         ).status(),
         equalTo(RestStatus.NOT_FOUND)


@@ -126,23 +126,23 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
     public void indexDocuments() throws IOException {
         {
             Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
-            doc1.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+            doc1.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc1.setJsonEntity("{\"type\":\"type1\", \"id\":1, \"num\":10, \"num2\":50}");
             client().performRequest(doc1);
             Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
-            doc2.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+            doc2.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc2.setJsonEntity("{\"type\":\"type1\", \"id\":2, \"num\":20, \"num2\":40}");
             client().performRequest(doc2);
             Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
-            doc3.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+            doc3.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc3.setJsonEntity("{\"type\":\"type1\", \"id\":3, \"num\":50, \"num2\":35}");
             client().performRequest(doc3);
             Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
-            doc4.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+            doc4.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc4.setJsonEntity("{\"type\":\"type2\", \"id\":4, \"num\":100, \"num2\":10}");
             client().performRequest(doc4);
             Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
-            doc5.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+            doc5.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc5.setJsonEntity("{\"type\":\"type2\", \"id\":5, \"num\":100, \"num2\":10}");
             client().performRequest(doc5);
         }
@@ -1445,7 +1445,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
     }
     public void testCountAllIndicesMatchQuery() throws IOException {
         CountRequest countRequest = new CountRequest();
         countRequest.source(new SearchSourceBuilder().query(new MatchQueryBuilder("field", "value1")));
         CountResponse countResponse = execute(countRequest, highLevelClient()::count, highLevelClient()::countAsync);


@@ -114,7 +114,7 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
         }
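
This and the following token filter factories change the deprecation key from the shared literal "synonym_tokenfilters" to name() + "_synonym_tokenfilters". With per-key deduplication, a shared key would let the first filter's warning suppress the warning for every other filter; a per-filter key keeps one warning per filter. A small self-contained illustration of that set behavior (a plain set stands in for the logger's deduplication; the filter names here are made up):

    import java.util.LinkedHashSet;
    import java.util.Set;

    class DeprecationKeyDemo {
        public static void main(String[] args) {
            Set<String> sharedKey = new LinkedHashSet<>();
            Set<String> perFilterKey = new LinkedHashSet<>();
            for (String filter : new String[] { "my_ngram", "my_edge_ngram" }) {
                sharedKey.add("synonym_tokenfilters");              // old key: collapses to one entry
                perFilterKey.add(filter + "_synonym_tokenfilters"); // new key: one entry per filter
            }
            System.out.println(sharedKey.size());    // 1
            System.out.println(perFilterKey.size()); // 2
        }
    }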


@@ -84,7 +84,7 @@ public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory {
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
         }


@@ -106,7 +106,7 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
             return this;


@@ -71,7 +71,7 @@ public class FingerprintTokenFilterFactory extends AbstractTokenFilterFactory {
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
             return this;


@@ -79,7 +79,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
         } else {
             if (preserveOriginal) {
                 DEPRECATION_LOGGER.deprecate(
-                    "synonym_tokenfilters",
+                    name() + "_synonym_tokenfilters",
                     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
                 );
                 return IDENTITY_FILTER;
@@ -147,7 +147,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
         } else {
             if (preserveOriginal) {
                 DEPRECATION_LOGGER.deprecate(
-                    "synonym_tokenfilters",
+                    name() + "_synonym_tokenfilters",
                     "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
                 );
                 return IDENTITY_FILTER;


@@ -92,7 +92,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
             return this;


@@ -123,7 +123,7 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
             return this;


@@ -123,7 +123,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
             return this;


@@ -212,8 +212,8 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
             VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2),
             false
         );
-        doTestCustomTokenizerDeprecation("nGram", "ngram", LegacyESVersion.V_7_6_0, true);
-        doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram", LegacyESVersion.V_7_6_0, true);
+        doTestCustomTokenizerDeprecation("nGram", "ngram", LegacyESVersion.V_7_6_0, false);
+        doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram", LegacyESVersion.V_7_6_0, false);
     }
     public void doTestPrebuiltTokenizerDeprecation(String deprecatedName, String replacement, Version version, boolean expectWarning)


@@ -19,10 +19,10 @@
   - skip:
       version: " - 6.2.99"
       reason: deprecated in 6.3
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
-      warnings:
+      allowed_warnings:
        - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.'
      indices.create:
        index: test_deprecated_htmlstrip
@@ -41,7 +41,7 @@
        analyzer: my_htmlStripWithCharfilter
   - do:
-      warnings:
+      allowed_warnings:
        - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.'
      indices.analyze:
        index: test_deprecated_htmlstrip


@@ -1,7 +1,7 @@
 ---
 "Test common terms query with stacked tokens":
   - skip:
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
       indices.create:
@@ -50,7 +50,7 @@
       refresh: true
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -67,7 +67,7 @@
   - match: { hits.hits.2._id: "3" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -83,7 +83,7 @@
   - match: { hits.hits.1._id: "2" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -99,7 +99,7 @@
   - match: { hits.hits.2._id: "3" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -114,7 +114,7 @@
   - match: { hits.hits.0._id: "2" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -131,7 +131,7 @@
   - match: { hits.hits.1._id: "1" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -147,7 +147,7 @@
   - match: { hits.hits.0._id: "2" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -161,7 +161,7 @@
   - match: { hits.hits.0._id: "2" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -177,7 +177,7 @@
   - match: { hits.hits.2._id: "3" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -193,7 +193,7 @@
   - match: { hits.hits.1._id: "2" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -210,7 +210,7 @@
   - match: { hits.hits.2._id: "3" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true
@@ -226,7 +226,7 @@
   - match: { hits.hits.1._id: "2" }
   - do:
-      warnings:
+      allowed_warnings:
        - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [multi_match] query can skip block of documents efficiently if the total number of hits is not tracked]'
      search:
        rest_total_hits_as_int: true


@@ -83,10 +83,10 @@
   - skip:
       version : "all"
       reason : "tracked at https://github.com/elastic/elasticsearch/issues/52266"
-      features: warnings
+      features: allowed_warnings
   - do:
-      warnings:
+      allowed_warnings:
        - "setting [ecs] to false for non-common schema format is deprecated and will be removed in 8.0, set to true or remove to use the non-deprecated format"
        - "the [os_major] property is deprecated for the user-agent processor"
      ingest.put_pipeline:


@@ -1,7 +1,7 @@
 ---
 "Stored script":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       put_script:


@@ -86,7 +86,7 @@ setup:
 ---
 "date":
   - skip:
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
       search:
@@ -179,7 +179,7 @@ setup:
 ---
 "long":
   - skip:
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
       search:


@@ -288,7 +288,7 @@
   - skip:
       version: " - 7.2.99"
       reason: "deprecation warnings only emitted on 7.3+"
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
@@ -304,7 +304,7 @@
       indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - Deprecated field [size] used, expected [max_docs] instead
      delete_by_query:
        index: twitter


@@ -75,7 +75,7 @@
 ---
 "Rethrottle to -1 which turns off throttling":
   - skip:
-      features: warnings
+      features: allowed_warnings
   # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard
   # and a small batch size on the request
   - do:
@@ -124,7 +124,7 @@
        task_id: $task
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}


@@ -63,7 +63,7 @@
 ---
 "Multiple slices with wait_for_completion=false":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
         index: test
@@ -153,11 +153,11 @@
   # Only the "parent" reindex task wrote its status to the tasks index though
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      search:
        rest_total_hits_as_int: true
@@ -172,7 +172,7 @@
 ---
 "Multiple slices with rethrottle":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
         index: test
@@ -268,11 +268,11 @@
   # Only the "parent" reindex task wrote its status to the tasks index though
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      search:
        rest_total_hits_as_int: true


@@ -36,7 +36,7 @@
   - skip:
       version: " - 7.2.99"
       reason: "size deprecation warnings only emitted on 7.3+, but sort deprecated in 7.6"
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
@@ -52,7 +52,7 @@
       indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - Deprecated field [size] used, expected [max_docs] instead
        - The sort option in reindex is deprecated. Instead consider using query
          filtering to find the desired subset of data.
@@ -127,7 +127,7 @@
   - skip:
       version: " - 7.5.99"
       reason: "max_docs introduced in 7.3.0, but sort deprecated in 7.6"
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
       index:
@@ -143,7 +143,7 @@
       indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - The sort option in reindex is deprecated. Instead consider using query
          filtering to find the desired subset of data.
      reindex:
@@ -174,7 +174,7 @@
   - skip:
       version: " - 7.5.99"
       reason: "sort deprecated in 7.6"
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
       index:
@@ -185,7 +185,7 @@
       indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - The sort option in reindex is deprecated. Instead consider using query
          filtering to find the desired subset of data.
      reindex:


@@ -59,7 +59,7 @@
 ---
 "Multiple slices with wait_for_completion=false":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
         index: source
@@ -162,11 +162,11 @@
   # Only the "parent" reindex task wrote its status to the tasks index though
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      search:
        rest_total_hits_as_int: true
@@ -177,7 +177,7 @@
 ---
 "Multiple slices with rethrottle":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
         index: source
@@ -280,11 +280,11 @@
   # Only the "parent" reindex task wrote its status to the tasks index though
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      search:
        rest_total_hits_as_int: true


@@ -225,7 +225,7 @@
   - skip:
       version: " - 7.2.99"
       reason: "deprecation warnings only emitted on 7.3+"
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
@@ -241,7 +241,7 @@
       indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - Deprecated field [size] used, expected [max_docs] instead
      update_by_query:
        index: twitter


@@ -55,7 +55,7 @@
 ---
 "Multiple slices with wait_for_completion=false":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
         index: test
@@ -145,11 +145,11 @@
   # Only the "parent" reindex task wrote its status to the tasks index though
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      search:
        rest_total_hits_as_int: true
@@ -159,7 +159,7 @@
 ---
 "Multiple slices with rethrottle":
   - skip:
-      features: warnings
+      features: allowed_warnings
   - do:
       index:
         index: test
@@ -254,11 +254,11 @@
   # Only the "parent" reindex task wrote its status to the tasks index though
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      indices.refresh: {}
   - do:
-      warnings:
+      allowed_warnings:
        - "this request accesses system indices: [.tasks], but in a future major version, direct access to system indices will be prevented by default"
      search:
        rest_total_hits_as_int: true


@@ -109,10 +109,10 @@
   - skip:
       version: " - 6.99.99"
       reason: unicodeSetFilter deprecated in 7.0.0, replaced by unicode_set_filter
-      features: "warnings"
+      features: "allowed_warnings"
   - do:
-      warnings:
+      allowed_warnings:
        - "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"
      indices.create:
        index: test
@@ -132,7 +132,7 @@
              type: icu_folding
              unicodeSetFilter: "[^â]"
   - do:
-      warnings:
+      allowed_warnings:
        - "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"
      indices.analyze:
        index: test


@@ -162,7 +162,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
             throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
         } else {
             DEPRECATION_LOGGER.deprecate(
-                "synonym_tokenfilters",
+                name() + "_synonym_tokenfilters",
                 "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
             );
             return this;


@@ -174,7 +174,7 @@ final class Ec2ClientSettings {
         } else {
             if (key.length() == 0) {
                 deprecationLogger.deprecate(
-                    "ec2_invalid_settings",
+                    "ec2_invalid_key_settings",
                     "Setting [{}] is set but [{}] is not, which will be unsupported in future",
                     SECRET_KEY_SETTING.getKey(),
                     ACCESS_KEY_SETTING.getKey()


@@ -145,21 +145,6 @@ public class EvilLoggerTests extends OpenSearchTestCase {
                 }
             }
-            /*
-             * We have to manually check that each thread has the right warning headers in the thread context because the act of doing
-             * this through the test framework on one thread would otherwise clear the thread context and we would be unable to assert
-             * on the other threads.
-             */
-            final List<String> warnings = threadContext.getResponseHeaders().get("Warning");
-            final Set<String> actualWarningValues =
-                warnings.stream().map(s -> HeaderWarning.extractWarningValueFromWarningHeader(s, true))
-                    .collect(Collectors.toSet());
-            for (int j = 0; j < 128; j++) {
-                assertThat(
-                    actualWarningValues,
-                    hasItem(HeaderWarning.escapeAndEncode("This is a maybe logged deprecation message" + j)));
-            }
             try {
                 barrier.await();
             } catch (final BrokenBarrierException | InterruptedException e) {
@@ -209,7 +194,9 @@ public class EvilLoggerTests extends OpenSearchTestCase {
         final int iterations = randomIntBetween(0, 128);
         for (int i = 0; i < iterations; i++) {
             setting.get(settings);
-            assertSettingDeprecationsAndWarnings(new Setting<?>[]{setting});
+            if (i == 0) {
+                assertSettingDeprecationsAndWarnings(new Setting<?>[]{setting});
+            }
         }
         final String deprecationPath =


@@ -289,12 +289,10 @@ public class JsonLoggerTests extends OpenSearchTestCase {
     public void testDuplicateLogMessages() throws Exception {
         final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger("test");
-        // For the same key and X-Opaque-ID deprecation should be once
         withThreadContext(threadContext -> {
             threadContext.putHeader(Task.X_OPAQUE_ID, "ID1");
             deprecationLogger.deprecate("key", "message1");
-            deprecationLogger.deprecate("key", "message2");
-            assertWarnings("message1", "message2");
+            assertWarnings("message1");
             final Path path = PathUtils.get(System.getProperty("opensearch.logs.base_path"),
                 System.getProperty("opensearch.logs.cluster_name") + "_deprecated.json");
@@ -317,12 +315,11 @@ public class JsonLoggerTests extends OpenSearchTestCase {
         });
         // For the same key and different X-Opaque-ID should be multiple times per key/x-opaque-id
-        //continuing with message1-ID1 in logs already, adding a new deprecation log line with message2-ID2
+        //continuing with message1-ID1 in logs already
         withThreadContext(threadContext -> {
             threadContext.putHeader(Task.X_OPAQUE_ID, "ID2");
             deprecationLogger.deprecate("key", "message1");
-            deprecationLogger.deprecate("key", "message2");
-            assertWarnings("message1", "message2");
+            assertWarnings("message1");
             final Path path = PathUtils.get(
                 System.getProperty("opensearch.logs.base_path"),


@ -1,11 +1,11 @@
--- ---
"Create index with joda style index that is incompatible with java.time. (6.0)": "Create index with joda style index that is incompatible with java.time. (6.0)":
- skip: - skip:
features: "warnings" features: "allowed_warnings"
version: "6.8.1 -" version: "6.8.1 -"
reason: change of warning message reason: change of warning message
- do: - do:
warnings: allowed_warnings:
- "Use of 'Y' (year-of-era) will change to 'y' in the next major version of OpenSearch. Prefix your date format with '8' to use the new specifier." - "Use of 'Y' (year-of-era) will change to 'y' in the next major version of OpenSearch. Prefix your date format with '8' to use the new specifier."
indices.create: indices.create:
index: joda_for_range index: joda_for_range
@ -41,11 +41,11 @@
--- ---
"Create index with joda style index that is incompatible with java.time (>6.1)": "Create index with joda style index that is incompatible with java.time (>6.1)":
- skip: - skip:
features: "warnings" features: "allowed_warnings"
version: " - 6.8.0, 7.0.0 -" version: " - 6.8.0, 7.0.0 -"
reason: change of warning message, we skip 7 because this format will be considered java reason: change of warning message, we skip 7 because this format will be considered java
- do: - do:
warnings: allowed_warnings:
- "'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.; 'Z' time zone offset/id fails when parsing 'Z' for Zulu timezone. Consider using 'X'. Prefix your date format with '8' to use the new specifier." - "'Y' year-of-era should be replaced with 'y'. Use 'Y' for week-based-year.; 'Z' time zone offset/id fails when parsing 'Z' for Zulu timezone. Consider using 'X'. Prefix your date format with '8' to use the new specifier."
indices.create: indices.create:
index: joda_for_range index: joda_for_range

View File

@ -57,8 +57,10 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.function.Supplier; import java.util.function.Supplier;
import static org.opensearch.test.rest.OpenSearchRestTestCase.entityAsMap; import static org.opensearch.test.rest.OpenSearchRestTestCase.entityAsMap;
@ -68,6 +70,8 @@ import static org.hamcrest.Matchers.is;
public class SystemIndexRestIT extends HttpSmokeTestCase { public class SystemIndexRestIT extends HttpSmokeTestCase {
private Set<String> assertedWarnings = new HashSet<>();
@Override @Override
protected Collection<Class<? extends Plugin>> nodePlugins() { protected Collection<Class<? extends Plugin>> nodePlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
@ -126,15 +130,20 @@ public class SystemIndexRestIT extends HttpSmokeTestCase {
searchRequest.setJsonEntity("{\"query\": {\"match\": {\"some_field\": \"some_value\"}}}"); searchRequest.setJsonEntity("{\"query\": {\"match\": {\"some_field\": \"some_value\"}}}");
// Disallow no indices to cause an exception if this resolves to zero indices, so that we're sure it resolved the index // Disallow no indices to cause an exception if this resolves to zero indices, so that we're sure it resolved the index
searchRequest.addParameter("allow_no_indices", "false"); searchRequest.addParameter("allow_no_indices", "false");
searchRequest.setOptions(expectWarnings(expectedWarning)); if (!assertedWarnings.contains(expectedWarning)) {
searchRequest.setOptions(expectWarnings(expectedWarning));
assertedWarnings.add(expectedWarning);
}
Response response = getRestClient().performRequest(searchRequest); Response response = getRestClient().performRequest(searchRequest);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
} }
private RequestOptions expectWarnings(String expectedWarning) { private RequestOptions expectWarnings(String expectedWarning) {
final RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); final RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
builder.setWarningsHandler(w -> w.contains(expectedWarning) == false || w.size() != 1); if (!assertedWarnings.contains(expectedWarning)) {
builder.setWarningsHandler(w -> w.contains(expectedWarning) == false || w.size() != 1);
assertedWarnings.add(expectedWarning);
}
return builder.build(); return builder.build();
} }
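
The test above keeps a per-instance set of warnings that have already been asserted, so the strict warnings handler is only installed the first time a given warning is expected. A minimal sketch of that guard, with the class name chosen here purely for illustration:

    import java.util.HashSet;
    import java.util.Set;

    class WarningAssertionGuard {
        private final Set<String> assertedWarnings = new HashSet<>();

        // Returns true only on the first call for a given warning, mirroring the
        // if (!assertedWarnings.contains(expectedWarning)) checks added in the test above.
        boolean shouldAssert(String expectedWarning) {
            return assertedWarnings.add(expectedWarning);
        }
    }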

View File

@ -160,10 +160,10 @@
- skip: - skip:
version: " - 7.5.99" version: " - 7.5.99"
reason: "indices without soft deletes are deprecated in 7.6" reason: "indices without soft deletes are deprecated in 7.6"
features: "warnings" features: "allowed_warnings"
- do: - do:
warnings: allowed_warnings:
- Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions. - Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
Please do not specify value for setting [index.soft_deletes.enabled] of index [test_index]. Please do not specify value for setting [index.soft_deletes.enabled] of index [test_index].
indices.create: indices.create:

View File

@ -3,7 +3,7 @@
- skip: - skip:
version: " - 7.5.99" version: " - 7.5.99"
reason: "synced flush is deprecated in 7.6" reason: "synced flush is deprecated in 7.6"
features: "warnings" features: "allowed_warnings"
- do: - do:
indices.create: indices.create:
index: testing index: testing
@ -16,7 +16,7 @@
cluster.health: cluster.health:
wait_for_status: green wait_for_status: green
- do: - do:
warnings: allowed_warnings:
- Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead. - Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead.
indices.flush_synced: indices.flush_synced:
index: testing index: testing

View File

@ -14,14 +14,14 @@
- skip: - skip:
version: " - 7.3.99" version: " - 7.3.99"
reason: "deprecation warning about only_expunge_deletes and max_num_segments added in 7.4" reason: "deprecation warning about only_expunge_deletes and max_num_segments added in 7.4"
features: "warnings" features: "allowed_warnings"
- do: - do:
indices.create: indices.create:
index: test index: test
- do: - do:
warnings: allowed_warnings:
- 'setting only_expunge_deletes and max_num_segments at the same time is deprecated and will be rejected in a future version' - 'setting only_expunge_deletes and max_num_segments at the same time is deprecated and will be rejected in a future version'
indices.forcemerge: indices.forcemerge:
index: test index: test

View File

@ -57,12 +57,12 @@ setup:
"Get field mapping with local is deprecated": "Get field mapping with local is deprecated":
- skip: - skip:
features: ["warnings", "node_selector"] features: ["allowed_warnings", "node_selector"]
- do: - do:
node_selector: node_selector:
version: "7.8.0 - " version: "7.8.0 - "
warnings: allowed_warnings:
- "Use [local] in get field mapping requests is deprecated. The parameter will be removed in the next major version" - "Use [local] in get field mapping requests is deprecated. The parameter will be removed in the next major version"
indices.get_field_mapping: indices.get_field_mapping:
fields: text fields: text

View File

@ -3,7 +3,7 @@
- skip: - skip:
version: " - 6.9.99" version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send reason: expects warnings that pre-7.0.0 will not send
features: [warnings, arbitrary_key] features: [allowed_warnings, arbitrary_key]
- do: - do:
nodes.info: nodes.info:
@ -48,7 +48,7 @@
settings: settings:
index.number_of_replicas: 0 index.number_of_replicas: 0
index.merge.scheduler.max_thread_count: 2 index.merge.scheduler.max_thread_count: 2
warnings: allowed_warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0" - "parameter [copy_settings] is deprecated and will be removed in 8.0.0"
- do: - do:

View File

@ -3,7 +3,7 @@
- skip: - skip:
version: " - 6.9.99" version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send reason: expects warnings that pre-7.0.0 will not send
features: [arbitrary_key, warnings] features: [arbitrary_key, allowed_warnings]
- do: - do:
nodes.info: nodes.info:
@ -50,7 +50,7 @@
index.number_of_replicas: 0 index.number_of_replicas: 0
index.number_of_shards: 2 index.number_of_shards: 2
index.merge.scheduler.max_thread_count: 2 index.merge.scheduler.max_thread_count: 2
warnings: allowed_warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0" - "parameter [copy_settings] is deprecated and will be removed in 8.0.0"

View File

@ -3,7 +3,7 @@
- skip: - skip:
version: " - 7.5.99" version: " - 7.5.99"
reason: "indices without soft deletes are deprecated in 7.6" reason: "indices without soft deletes are deprecated in 7.6"
features: "warnings" features: "allowed_warnings"
- do: - do:
indices.create: indices.create:
@ -11,7 +11,7 @@
body: body:
settings: settings:
soft_deletes.enabled: false soft_deletes.enabled: false
warnings: allowed_warnings:
- Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions. - Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
Please do not specify value for setting [index.soft_deletes.enabled] of index [test]. Please do not specify value for setting [index.soft_deletes.enabled] of index [test].
- do: - do:
@ -132,9 +132,9 @@
- skip: - skip:
version: " - 7.6.99" version: " - 7.6.99"
reason: "translog retention settings are deprecated in 7.6" reason: "translog retention settings are deprecated in 7.6"
features: "warnings" features: "allowed_warnings"
- do: - do:
warnings: allowed_warnings:
- Translog retention settings [index.translog.retention.age] and [index.translog.retention.size] - Translog retention settings [index.translog.retention.age] and [index.translog.retention.size]
are deprecated and effectively ignored. They will be removed in a future version. are deprecated and effectively ignored. They will be removed in a future version.
indices.create: indices.create:
@ -148,7 +148,7 @@
body: body:
index.number_of_replicas: 0 index.number_of_replicas: 0
- do: - do:
warnings: allowed_warnings:
- Translog retention settings [index.translog.retention.age] and [index.translog.retention.size] - Translog retention settings [index.translog.retention.age] and [index.translog.retention.size]
are deprecated and effectively ignored. They will be removed in a future version. are deprecated and effectively ignored. They will be removed in a future version.
indices.put_settings: indices.put_settings:
@ -183,7 +183,7 @@
- skip: - skip:
version: " - 7.5.99" version: " - 7.5.99"
reason: "indices without soft deletes are deprecated in 7.6" reason: "indices without soft deletes are deprecated in 7.6"
features: "warnings" features: "allowed_warnings"
- do: - do:
indices.create: indices.create:
@ -192,7 +192,7 @@
settings: settings:
soft_deletes.enabled: false soft_deletes.enabled: false
routing.rebalance.enable: "none" # prevents shard relocations while we are closing an index routing.rebalance.enable: "none" # prevents shard relocations while we are closing an index
warnings: allowed_warnings:
- Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions. - Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
Please do not specify value for setting [index.soft_deletes.enabled] of index [test]. Please do not specify value for setting [index.soft_deletes.enabled] of index [test].

View File

@ -145,7 +145,7 @@ setup:
- skip: - skip:
version: " - 7.1.99" version: " - 7.1.99"
reason: _time order deprecated in 6.0, replaced by _key. Calendar_interval added in 7.2 reason: _time order deprecated in 6.0, replaced by _key. Calendar_interval added in 7.2
features: "warnings" features: "allowed_warnings"
- do: - do:
index: index:
@ -178,7 +178,7 @@ setup:
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true
body: { "aggs" : { "histo" : { "date_histogram" : { "field" : "date", "calendar_interval" : "month", "order" : { "_time" : "desc" } } } } } body: { "aggs" : { "histo" : { "date_histogram" : { "field" : "date", "calendar_interval" : "month", "order" : { "_time" : "desc" } } } } }
warnings: allowed_warnings:
- "Deprecated aggregation order key [_time] used, replaced by [_key]" - "Deprecated aggregation order key [_time] used, replaced by [_key]"
- match: { hits.total: 4 } - match: { hits.total: 4 }

View File

@ -654,7 +654,7 @@ setup:
- skip: - skip:
reason: _term order deprecated in 6.0, replaced by _key reason: _term order deprecated in 6.0, replaced by _key
features: "warnings" features: "allowed_warnings"
- do: - do:
index: index:
@ -681,7 +681,7 @@ setup:
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true
body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "order" : { "_term" : "desc" } } } } } body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "order" : { "_term" : "desc" } } } } }
warnings: allowed_warnings:
- "Deprecated aggregation order key [_term] used, replaced by [_key]" - "Deprecated aggregation order key [_term] used, replaced by [_key]"
- match: { hits.total: 3 } - match: { hits.total: 3 }

View File

@ -268,10 +268,10 @@ setup:
- skip: - skip:
version: " - 7.1.99" version: " - 7.1.99"
reason: calendar_interval introduced in 7.2.0 reason: calendar_interval introduced in 7.2.0
features: warnings features: allowed_warnings
- do: - do:
warnings: allowed_warnings:
- '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.'
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true
@ -300,7 +300,7 @@ setup:
- match: { aggregations.test.buckets.1.doc_count: 1 } - match: { aggregations.test.buckets.1.doc_count: 1 }
- do: - do:
warnings: allowed_warnings:
- '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.'
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true

View File

@ -37,11 +37,11 @@ setup:
- skip: - skip:
version: " - 7.1.99" version: " - 7.1.99"
reason: "interval deprecation added in 7.2" reason: "interval deprecation added in 7.2"
features: "warnings" features: "allowed_warnings"
- do: - do:
catch: /\[window\] must be a positive, non-zero integer\./ catch: /\[window\] must be a positive, non-zero integer\./
warnings: allowed_warnings:
- "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future." - "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true

View File

@ -176,9 +176,9 @@ setup:
- skip: - skip:
version: " - 6.99.99" version: " - 6.99.99"
reason: Only triggers warnings on 7.0+ reason: Only triggers warnings on 7.0+
features: warnings features: allowed_warnings
- do: - do:
warnings: allowed_warnings:
- "[use_field_mapping] is a special format that was only used to ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore." - "[use_field_mapping] is a special format that was only used to ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore."
search: search:
body: body:

View File

@ -36,10 +36,10 @@ setup:
"Indices boost using object": "Indices boost using object":
- skip: - skip:
reason: deprecation was added in 5.2.0 reason: deprecation was added in 5.2.0
features: "warnings" features: "allowed_warnings"
- do: - do:
warnings: allowed_warnings:
- 'Object format in indices_boost is deprecated, please use array format instead' - 'Object format in indices_boost is deprecated, please use array format instead'
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true
@ -52,7 +52,7 @@ setup:
- match: { hits.hits.1._index: test_2 } - match: { hits.hits.1._index: test_2 }
- do: - do:
warnings: allowed_warnings:
- 'Object format in indices_boost is deprecated, please use array format instead' - 'Object format in indices_boost is deprecated, please use array format instead'
search: search:
rest_total_hits_as_int: true rest_total_hits_as_int: true

View File

@ -118,10 +118,7 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest<AddVotin
timeout = in.readTimeValue(); timeout = in.readTimeValue();
if (nodeDescriptions.length > 0) { if (nodeDescriptions.length > 0) {
deprecationLogger.deprecate( deprecationLogger.deprecate("voting_config_exclusion", DEPRECATION_MESSAGE);
"voting_config_exclusion",
"nodeDescription is deprecated and will be removed, use nodeIds or nodeNames instead"
);
} }
} }

View File

@ -51,11 +51,12 @@ import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService; import org.opensearch.transport.TransportService;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class TransportGetAliasesAction extends TransportMasterNodeReadAction<GetAliasesRequest, GetAliasesResponse> { public class TransportGetAliasesAction extends TransportMasterNodeReadAction<GetAliasesRequest, GetAliasesResponse> {
@ -152,7 +153,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
ClusterState state, ClusterState state,
ImmutableOpenMap<String, List<AliasMetadata>> aliasesMap ImmutableOpenMap<String, List<AliasMetadata>> aliasesMap
) { ) {
List<String> systemIndicesNames = new ArrayList<>(); Set<String> systemIndicesNames = new HashSet<>();
for (Iterator<String> it = aliasesMap.keysIt(); it.hasNext();) { for (Iterator<String> it = aliasesMap.keysIt(); it.hasNext();) {
String indexName = it.next(); String indexName = it.next();
IndexMetadata index = state.metadata().index(indexName); IndexMetadata index = state.metadata().index(indexName);
@ -161,11 +162,13 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
} }
} }
if (systemIndicesNames.isEmpty() == false) { if (systemIndicesNames.isEmpty() == false) {
deprecationLogger.deprecate( systemIndicesNames.forEach(
"open_system_index_access", systemIndexName -> deprecationLogger.deprecate(
"this request accesses system indices: {}, but in a future major version, direct access to system " "open_system_index_access_" + systemIndexName,
+ "indices will be prevented by default", "this request accesses system indices: [{}], but in a future major version, direct access to system "
systemIndicesNames + "indices will be prevented by default",
systemIndexName
)
); );
} else { } else {
checkSystemAliasAccess(request, systemIndices); checkSystemAliasAccess(request, systemIndices);
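
Splitting the aggregated warning into one deprecate call per system index gives every index its own deduplication key ("open_system_index_access_" + name), so accessing a new system index still emits a warning even after others have already been reported. A hedged sketch of the resulting loop, assuming java.util imports and the surrounding class's deprecationLogger field; the index names are examples only:

    // Illustrative: one deprecation entry per system index, each with a unique key.
    Set<String> systemIndicesNames = new HashSet<>(Arrays.asList(".ml-meta", ".ml-stuff"));
    systemIndicesNames.forEach(
        systemIndexName -> deprecationLogger.deprecate(
            "open_system_index_access_" + systemIndexName,
            "this request accesses system indices: [{}], but in a future major version, direct access to system "
                + "indices will be prevented by default",
            systemIndexName
        )
    );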

View File

@ -365,11 +365,13 @@ public class IndexNameExpressionResolver {
.sorted() // reliable order for testing .sorted() // reliable order for testing
.collect(Collectors.toList()); .collect(Collectors.toList());
if (resolvedSystemIndices.isEmpty() == false) { if (resolvedSystemIndices.isEmpty() == false) {
deprecationLogger.deprecate( resolvedSystemIndices.forEach(
"open_system_index_access", systemIndexName -> deprecationLogger.deprecate(
"this request accesses system indices: {}, but in a future major version, direct access to system " "open_system_index_access_" + systemIndexName,
+ "indices will be prevented by default", "this request accesses system indices: [{}], but in a future major version, direct access to system "
resolvedSystemIndices + "indices will be prevented by default",
systemIndexName
)
); );
} }
} }

View File

@ -90,7 +90,12 @@ public class Joda {
if (formatName != null && formatName.isCamelCase(input)) { if (formatName != null && formatName.isCamelCase(input)) {
String msg = "Camel case format name {} is deprecated and will be removed in a future version. " String msg = "Camel case format name {} is deprecated and will be removed in a future version. "
+ "Use snake case name {} instead."; + "Use snake case name {} instead.";
getDeprecationLogger().deprecate("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName()); getDeprecationLogger().deprecate(
"camelCaseDateFormat_" + formatName.getCamelCaseName(),
msg,
formatName.getCamelCaseName(),
formatName.getSnakeCaseName()
);
} }
DateTimeFormatter formatter; DateTimeFormatter formatter;

View File

@ -37,15 +37,31 @@ import java.util.Map;
import org.opensearch.common.Strings; import org.opensearch.common.Strings;
import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.collect.MapBuilder;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/** /**
* A logger message used by {@link DeprecationLogger}. * A logger message used by {@link DeprecationLogger}.
* Carries x-opaque-id field if provided in the headers. Will populate the x-opaque-id field in JSON logs. * Carries x-opaque-id field if provided in the headers. Will populate the x-opaque-id field in JSON logs.
*/ */
public class DeprecatedMessage extends OpenSearchLogMessage { public class DeprecatedMessage extends OpenSearchLogMessage {
public static final String X_OPAQUE_ID_FIELD_NAME = "x-opaque-id"; public static final String X_OPAQUE_ID_FIELD_NAME = "x-opaque-id";
private static final Set<String> keys = ConcurrentHashMap.newKeySet();
private final String keyWithXOpaqueId;
public DeprecatedMessage(String key, String xOpaqueId, String messagePattern, Object... args) { public DeprecatedMessage(String key, String xOpaqueId, String messagePattern, Object... args) {
super(fieldMap(key, xOpaqueId), messagePattern, args); super(fieldMap(key, xOpaqueId), messagePattern, args);
this.keyWithXOpaqueId = new StringBuilder().append(key).append(xOpaqueId).toString();
}
/**
* Resets the key set that is used to log each unique deprecation message only once.
* The key set is what prevents the same deprecation message from being logged multiple times.
* This utility resets the set for tests so they can run independently of each other;
* otherwise, a warning logged by one test could affect the tests that follow it.
*/
public static void resetDeprecatedMessageForTests() {
keys.clear();
} }
private static Map<String, Object> fieldMap(String key, String xOpaqueId) { private static Map<String, Object> fieldMap(String key, String xOpaqueId) {
@ -58,4 +74,8 @@ public class DeprecatedMessage extends OpenSearchLogMessage {
} }
return builder.immutableMap(); return builder.immutableMap();
} }
public boolean isAlreadyLogged() {
return !keys.add(keyWithXOpaqueId);
}
} }
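
The deduplication itself lives in DeprecatedMessage: a single process-wide concurrent set holds every key + X-Opaque-ID combination that has been logged, and isAlreadyLogged() reports whether the current combination was seen before. A self-contained sketch of that pattern (class and method names here are illustrative, not the OpenSearch API):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    final class DeduplicationSketch {
        // ConcurrentHashMap.newKeySet() gives a thread-safe Set backed by a concurrent map.
        private static final Set<String> SEEN = ConcurrentHashMap.newKeySet();

        // Returns true the first time a key/x-opaque-id pair is seen, false afterwards.
        static boolean firstOccurrence(String key, String xOpaqueId) {
            return SEEN.add(key + xOpaqueId);
        }

        public static void main(String[] args) {
            System.out.println(firstOccurrence("deprecated_field", "ID1")); // true  -> would be logged
            System.out.println(firstOccurrence("deprecated_field", "ID1")); // false -> suppressed as a duplicate
            System.out.println(firstOccurrence("deprecated_field", "ID2")); // true  -> different X-Opaque-ID
        }
    }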

View File

@ -106,10 +106,10 @@ public class DeprecationLogger {
public class DeprecationLoggerBuilder { public class DeprecationLoggerBuilder {
public DeprecationLoggerBuilder withDeprecation(String key, String msg, Object[] params) { public DeprecationLoggerBuilder withDeprecation(String key, String msg, Object[] params) {
OpenSearchLogMessage deprecationMessage = new DeprecatedMessage(key, HeaderWarning.getXOpaqueId(), msg, params); DeprecatedMessage deprecationMessage = new DeprecatedMessage(key, HeaderWarning.getXOpaqueId(), msg, params);
if (!deprecationMessage.isAlreadyLogged()) {
logger.log(DEPRECATION, deprecationMessage); logger.log(DEPRECATION, deprecationMessage);
}
return this; return this;
} }
} }
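
With the builder guarding on isAlreadyLogged(), callers do not change: they keep calling deprecate(key, message) and duplicates are silently dropped. A usage sketch based on the API shown in these diffs; the keys, messages, and import path are assumptions for illustration:

    import org.opensearch.common.logging.DeprecationLogger; // package path assumed

    class DeprecationUsageSketch {
        private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DeprecationUsageSketch.class);

        void emitWarnings() {
            deprecationLogger.deprecate("legacy_param", "Parameter [legacy_param] is deprecated"); // logged
            deprecationLogger.deprecate("legacy_param", "Parameter [legacy_param] is deprecated"); // duplicate key, skipped
            deprecationLogger.deprecate("other_param", "Parameter [other_param] is deprecated");   // new key, logged
        }
    }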

View File

@ -1971,7 +1971,12 @@ public class DateFormatters {
String msg = "Camel case format name {} is deprecated and will be removed in a future version. " String msg = "Camel case format name {} is deprecated and will be removed in a future version. "
+ "Use snake case name {} instead."; + "Use snake case name {} instead.";
deprecationLogger.getOrCompute() deprecationLogger.getOrCompute()
.deprecate("camelCaseDateFormat", msg, formatName.getCamelCaseName(), formatName.getSnakeCaseName()); .deprecate(
"camelCaseDateFormat_" + formatName.getCamelCaseName(),
msg,
formatName.getCamelCaseName(),
formatName.getSnakeCaseName()
);
} }
if (FormatNames.ISO8601.matches(input)) { if (FormatNames.ISO8601.matches(input)) {

View File

@ -214,7 +214,7 @@ public class DateUtils {
String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId); String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId);
if (deprecatedId != null) { if (deprecatedId != null) {
deprecationLogger.deprecate( deprecationLogger.deprecate(
"timezone", "timezone_" + zoneId,
"Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead" "Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead"
); );
return ZoneId.of(deprecatedId); return ZoneId.of(deprecatedId);

View File

@ -90,7 +90,7 @@ public class OpenSearchExecutors {
final int availableProcessors = Runtime.getRuntime().availableProcessors(); final int availableProcessors = Runtime.getRuntime().availableProcessors();
if (value > availableProcessors) { if (value > availableProcessors) {
deprecationLogger.deprecate( deprecationLogger.deprecate(
"processors", "processors_" + name,
"setting [{}] to value [{}] which is more than available processors [{}] is deprecated", "setting [{}] to value [{}] which is more than available processors [{}] is deprecated",
name, name,
value, value,

View File

@ -65,7 +65,7 @@ public class LoggingDeprecationHandler implements DeprecationHandler {
public void usedDeprecatedName(String parserName, Supplier<XContentLocation> location, String usedName, String modernName) { public void usedDeprecatedName(String parserName, Supplier<XContentLocation> location, String usedName, String modernName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] "; String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.deprecate( deprecationLogger.deprecate(
"deprecated_field", usedName + "_deprecated_name",
"{}Deprecated field [{}] used, expected [{}] instead", "{}Deprecated field [{}] used, expected [{}] instead",
prefix, prefix,
usedName, usedName,
@ -76,14 +76,20 @@ public class LoggingDeprecationHandler implements DeprecationHandler {
@Override @Override
public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName, String replacedWith) { public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName, String replacedWith) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] "; String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.deprecate("deprecated_field", "{}Deprecated field [{}] used, replaced by [{}]", prefix, usedName, replacedWith); deprecationLogger.deprecate(
usedName + "_deprecated_field",
"{}Deprecated field [{}] used, replaced by [{}]",
prefix,
usedName,
replacedWith
);
} }
@Override @Override
public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName) { public void usedDeprecatedField(String parserName, Supplier<XContentLocation> location, String usedName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] "; String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.deprecate( deprecationLogger.deprecate(
"deprecated_field", usedName + "_deprecated_field",
"{}Deprecated field [{}] used, this field is unused and will be removed entirely", "{}Deprecated field [{}] used, this field is unused and will be removed entirely",
prefix, prefix,
usedName usedName

View File

@ -184,7 +184,7 @@ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisCompone
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else { } else {
DEPRECATION_LOGGER.deprecate( DEPRECATION_LOGGER.deprecate(
name(), name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0" "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
); );
return this; return this;
@ -211,7 +211,10 @@ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisCompone
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) { if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else { } else {
DEPRECATION_LOGGER.deprecate(name(), "Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"); DEPRECATION_LOGGER.deprecate(
name() + "_synonym_tokenfilters",
"Token filter [" + name() + "] will not be usable to parse synonyms after v7.0"
);
return this; return this;
} }
} }

View File

@ -104,8 +104,8 @@ public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else { } else {
DEPRECATION_LOGGER.deprecate( DEPRECATION_LOGGER.deprecate(
"synonym_tokenfilters", name() + "_synonym_tokenfilters",
"Token filter " + name() + "] will not be usable to parse synonym after v7.0" "Token filter " + name() + "] will not be usable to parse synonyms after v7.0"
); );
} }
return this; return this;

View File

@ -192,7 +192,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
throw new OpenSearchParseException("Field parameter [{}] is not supported for [{}] field type", fieldName, CONTENT_TYPE); throw new OpenSearchParseException("Field parameter [{}] is not supported for [{}] field type", fieldName, CONTENT_TYPE);
} }
DEPRECATION_LOGGER.deprecate( DEPRECATION_LOGGER.deprecate(
"geo_mapper_field_parameter", "geo_mapper_field_parameter_" + fieldName,
"Field parameter [{}] is deprecated and will be removed in a future version.", "Field parameter [{}] is deprecated and will be removed in a future version.",
fieldName fieldName
); );

View File

@ -667,7 +667,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
Parameter<?> parameter = deprecatedParamsMap.get(propName); Parameter<?> parameter = deprecatedParamsMap.get(propName);
if (parameter != null) { if (parameter != null) {
deprecationLogger.deprecate( deprecationLogger.deprecate(
propName, propName + name,
"Parameter [{}] on mapper [{}] is deprecated, use [{}]", "Parameter [{}] on mapper [{}] is deprecated, use [{}]",
propName, propName,
name, name,
@ -679,7 +679,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
if (parameter == null) { if (parameter == null) {
if (isDeprecatedParameter(propName, parserContext.indexVersionCreated())) { if (isDeprecatedParameter(propName, parserContext.indexVersionCreated())) {
deprecationLogger.deprecate( deprecationLogger.deprecate(
propName, propName + type,
"Parameter [{}] has no effect on type [{}] and will be removed in future", "Parameter [{}] has no effect on type [{}] and will be removed in future",
propName, propName,
type type
@ -692,7 +692,11 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
); );
} }
if (Objects.equals("boost", propName)) { if (Objects.equals("boost", propName)) {
deprecationLogger.deprecate("boost", "Parameter [boost] on field [{}] is deprecated and will be removed in 8.0", name); deprecationLogger.deprecate(
"boost_" + name,
"Parameter [boost] on field [{}] is deprecated and will be removed in 8.0",
name
);
} }
if (propNode == null && parameter.acceptsNull == false) { if (propNode == null && parameter.acceptsNull == false) {
throw new MapperParsingException( throw new MapperParsingException(

View File

@ -449,7 +449,7 @@ public class RootObjectMapper extends ObjectMapper {
} else { } else {
deprecationMessage = message; deprecationMessage = message;
} }
DEPRECATION_LOGGER.deprecate("invalid_dynamic_template", deprecationMessage); DEPRECATION_LOGGER.deprecate("invalid_dynamic_template_" + dynamicTemplate.getName(), deprecationMessage);
} }
} }

View File

@ -148,7 +148,11 @@ public class TypeParsers {
iterator.remove(); iterator.remove();
} else if (propName.equals("boost")) { } else if (propName.equals("boost")) {
builder.boost(nodeFloatValue(propNode)); builder.boost(nodeFloatValue(propNode));
deprecationLogger.deprecate("boost", "Parameter [boost] on field [{}] is deprecated and will be removed in 8.0", name); deprecationLogger.deprecate(
"boost_" + name,
"Parameter [boost] on field [{}] is deprecated and will be removed in 8.0",
name
);
iterator.remove(); iterator.remove();
} else if (propName.equals("index_options")) { } else if (propName.equals("index_options")) {
builder.indexOptions(nodeIndexOptionValue(propNode)); builder.indexOptions(nodeIndexOptionValue(propNode));

View File

@ -118,7 +118,7 @@ public class RestMultiGetAction extends BaseRestHandler {
for (MultiGetRequest.Item item : multiGetRequest.getItems()) { for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
if (item.type() != null) { if (item.type() != null) {
deprecationLogger.deprecate("multi_get_types_removal", TYPES_DEPRECATION_MESSAGE); deprecationLogger.deprecate("mget_with_types", TYPES_DEPRECATION_MESSAGE);
break; break;
} }
} }

View File

@ -160,7 +160,7 @@ public class DateIntervalWrapper implements ToXContentFragment, Writeable {
/** Get the current interval in milliseconds that is set on this builder. */ /** Get the current interval in milliseconds that is set on this builder. */
@Deprecated @Deprecated
public long interval() { public long interval() {
DEPRECATION_LOGGER.deprecate("date-interval-getter", DEPRECATION_TEXT); DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) { if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) {
return TimeValue.parseTimeValue(dateHistogramInterval.toString(), "interval").getMillis(); return TimeValue.parseTimeValue(dateHistogramInterval.toString(), "interval").getMillis();
} }
@ -181,14 +181,14 @@ public class DateIntervalWrapper implements ToXContentFragment, Writeable {
throw new IllegalArgumentException("[interval] must be 1 or greater for aggregation [date_histogram]"); throw new IllegalArgumentException("[interval] must be 1 or greater for aggregation [date_histogram]");
} }
setIntervalType(IntervalTypeEnum.LEGACY_INTERVAL); setIntervalType(IntervalTypeEnum.LEGACY_INTERVAL);
DEPRECATION_LOGGER.deprecate("date-interval-setter", DEPRECATION_TEXT); DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
this.dateHistogramInterval = new DateHistogramInterval(interval + "ms"); this.dateHistogramInterval = new DateHistogramInterval(interval + "ms");
} }
/** Get the current date interval that is set on this builder. */ /** Get the current date interval that is set on this builder. */
@Deprecated @Deprecated
public DateHistogramInterval dateHistogramInterval() { public DateHistogramInterval dateHistogramInterval() {
DEPRECATION_LOGGER.deprecate("date-histogram-interval-getter", DEPRECATION_TEXT); DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
if (intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) { if (intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) {
return dateHistogramInterval; return dateHistogramInterval;
} }
@ -209,7 +209,7 @@ public class DateIntervalWrapper implements ToXContentFragment, Writeable {
throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [date_histogram]"); throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [date_histogram]");
} }
setIntervalType(IntervalTypeEnum.LEGACY_DATE_HISTO); setIntervalType(IntervalTypeEnum.LEGACY_DATE_HISTO);
DEPRECATION_LOGGER.deprecate("date-histogram-interval-setter", DEPRECATION_TEXT); DEPRECATION_LOGGER.deprecate("date-histogram-interval", DEPRECATION_TEXT);
this.dateHistogramInterval = dateHistogramInterval; this.dateHistogramInterval = dateHistogramInterval;
} }

View File

@ -121,7 +121,7 @@ public class TransportInfo implements ReportingService.Info {
publishAddressString = hostString + '/' + publishAddress.toString(); publishAddressString = hostString + '/' + publishAddress.toString();
} else { } else {
deprecationLogger.deprecate( deprecationLogger.deprecate(
"cname_in_publish_address", "cname_in_publish_address_" + propertyName,
propertyName propertyName
+ " was printed as [ip:port] instead of [hostname/ip:port]. " + " was printed as [ip:port] instead of [hostname/ip:port]. "
+ "This format is deprecated and will change to [hostname/ip:port] in a future version. " + "This format is deprecated and will change to [hostname/ip:port] in a future version. "

View File

@ -252,11 +252,11 @@ public class SettingsUpdaterTests extends OpenSearchTestCase {
final Settings toApplyUnset = Settings.builder().putNull("logger.org.opensearch").build(); final Settings toApplyUnset = Settings.builder().putNull("logger.org.opensearch").build();
final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY, logger); final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY, logger);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedSetting }); assertNoDeprecationWarnings();
// we also check that if no settings are changed, deprecation logging still occurs // we also check that if no settings are changed, deprecation logging still occurs
settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY, logger); settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY, logger);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedSetting }); assertNoDeprecationWarnings();
} }
public void testUpdateWithUnknownAndSettings() { public void testUpdateWithUnknownAndSettings() {

View File

@ -599,6 +599,5 @@ public class TransportAnalyzeActionTests extends OpenSearchTestCase {
analyze = TransportAnalyzeAction.analyze(req, registry, mockIndexService(), maxTokenCount); analyze = TransportAnalyzeAction.analyze(req, registry, mockIndexService(), maxTokenCount);
assertEquals(1, analyze.getTokens().size()); assertEquals(1, analyze.getTokens().size());
assertWarnings("Using deprecated token filter [deprecated]");
} }
} }

View File

@ -2202,7 +2202,9 @@ public class IndexNameExpressionResolverTests extends OpenSearchTestCase {
List<String> indexNames = resolveConcreteIndexNameList(state, request); List<String> indexNames = resolveConcreteIndexNameList(state, request);
assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta")); assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta"));
assertWarnings( assertWarnings(
"this request accesses system indices: [.ml-meta, .ml-stuff], but in a future major version, " "this request accesses system indices: [.ml-meta], but in a future major version, "
+ "direct access to system indices will be prevented by default",
"this request accesses system indices: [.ml-stuff], but in a future major version, "
+ "direct access to system indices will be prevented by default" + "direct access to system indices will be prevented by default"
); );
@ -2230,7 +2232,9 @@ public class IndexNameExpressionResolverTests extends OpenSearchTestCase {
List<String> indexNames = resolveConcreteIndexNameList(state, request); List<String> indexNames = resolveConcreteIndexNameList(state, request);
assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff"));
assertWarnings( assertWarnings(
"this request accesses system indices: [.ml-meta, .ml-stuff], but in a future major version, direct access " "this request accesses system indices: [.ml-meta], but in a future major version, direct access "
+ "to system indices will be prevented by default",
"this request accesses system indices: [.ml-stuff], but in a future major version, direct access "
+ "to system indices will be prevented by default" + "to system indices will be prevented by default"
); );

View File

@ -70,26 +70,26 @@ public class DiscoveryNodeRoleSettingTests extends OpenSearchTestCase {
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() }); assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() });
assertThat(DiscoveryNode.getRolesFromSettings(legacyTrue), hasItem(role)); assertThat(DiscoveryNode.getRolesFromSettings(legacyTrue), hasItem(role));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() }); assertNoDeprecationWarnings();
final Settings legacyFalse = Settings.builder().put(role.legacySetting().getKey(), false).build(); final Settings legacyFalse = Settings.builder().put(role.legacySetting().getKey(), false).build();
assertFalse(predicate.test(legacyFalse)); assertFalse(predicate.test(legacyFalse));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() }); assertNoDeprecationWarnings();
assertThat(DiscoveryNode.getRolesFromSettings(legacyFalse), not(hasItem(role))); assertThat(DiscoveryNode.getRolesFromSettings(legacyFalse), not(hasItem(role)));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() }); assertNoDeprecationWarnings();
assertTrue(predicate.test(onlyRole(role))); assertTrue(predicate.test(onlyRole(role)));
assertThat(DiscoveryNode.getRolesFromSettings(onlyRole(role)), hasItem(role)); assertNoDeprecationWarnings();
assertFalse(predicate.test(removeRoles(Collections.singleton(role)))); assertFalse(predicate.test(removeRoles(Collections.singleton(role))));
assertThat(DiscoveryNode.getRolesFromSettings(removeRoles(Collections.singleton(role))), not(hasItem(role))); assertNoDeprecationWarnings();
final Settings settings = Settings.builder().put(onlyRole(role)).put(role.legacySetting().getKey(), randomBoolean()).build(); final Settings settings = Settings.builder().put(onlyRole(role)).put(role.legacySetting().getKey(), randomBoolean()).build();
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DiscoveryNode.getRolesFromSettings(settings)); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DiscoveryNode.getRolesFromSettings(settings));
assertThat(e.getMessage(), startsWith("can not explicitly configure node roles and use legacy role setting")); assertThat(e.getMessage(), startsWith("can not explicitly configure node roles and use legacy role setting"));
assertSettingDeprecationsAndWarnings(new Setting<?>[] { role.legacySetting() }); assertNoDeprecationWarnings();
} }
} }

View File

@ -54,4 +54,19 @@ public class DeprecationLoggerTests extends OpenSearchTestCase {
assertThat(numberOfLoggersAfter, equalTo(numberOfLoggersBefore + 1)); assertThat(numberOfLoggersAfter, equalTo(numberOfLoggersBefore + 1));
} }
public void testDuplicateLogMessages() {
DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DeprecationLoggerTests.class);
deprecationLogger.deprecate("deprecated-message-1", "Deprecated message 1");
deprecationLogger.deprecate("deprecated-message-2", "Deprecated message 2");
deprecationLogger.deprecate("deprecated-message-3", "Deprecated message 3");
deprecationLogger.deprecate("deprecated-message-2", "Deprecated message 2");
deprecationLogger.deprecate("deprecated-message-1", "Deprecated message 1");
deprecationLogger.deprecate("deprecated-message-3", "Deprecated message 3");
deprecationLogger.deprecate("deprecated-message-1", "Deprecated message 1");
deprecationLogger.deprecate("deprecated-message-3", "Deprecated message 3");
deprecationLogger.deprecate("deprecated-message-2", "Deprecated message 2");
// assert that only unique warnings are logged
assertWarnings("Deprecated message 1", "Deprecated message 2", "Deprecated message 3");
}
} }

View File

@ -524,15 +524,6 @@ public class DateFormattersTests extends OpenSearchTestCase {
assertThat(dateFormatter.pattern(), equalTo(name)); assertThat(dateFormatter.pattern(), equalTo(name));
String snakeCaseName = FormatNames.forName(name).getSnakeCaseName(); String snakeCaseName = FormatNames.forName(name).getSnakeCaseName();
assertWarnings(
"Camel case format name "
+ name
+ " is deprecated and will be removed in a future version. "
+ "Use snake case name "
+ snakeCaseName
+ " instead."
);
dateFormatter = Joda.forPattern(snakeCaseName); dateFormatter = Joda.forPattern(snakeCaseName);
assertThat(dateFormatter.pattern(), equalTo(snakeCaseName)); assertThat(dateFormatter.pattern(), equalTo(snakeCaseName));
} }

View File

@ -312,7 +312,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates"); mapping.startArray("dynamic_templates");
{ {
mapping.startObject(); mapping.startObject();
mapping.startObject("my_template"); mapping.startObject("my_template1");
mapping.field("match_mapping_type", "string"); mapping.field("match_mapping_type", "string");
mapping.startObject("mapping"); mapping.startObject("mapping");
mapping.field("type", "string"); mapping.field("type", "string");
@ -328,7 +328,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
assertThat(mapper.mappingSource().toString(), containsString("\"type\":\"string\"")); assertThat(mapper.mappingSource().toString(), containsString("\"type\":\"string\""));
assertWarnings( assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{\"type\":" "dynamic template [my_template1] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{\"type\":"
+ "\"string\"}}], caused by [No mapper found for type [string]]" + "\"string\"}}], caused by [No mapper found for type [string]]"
); );
} }
@ -341,7 +341,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates"); mapping.startArray("dynamic_templates");
{ {
mapping.startObject(); mapping.startObject();
mapping.startObject("my_template"); mapping.startObject("my_template2");
mapping.field("match_mapping_type", "string"); mapping.field("match_mapping_type", "string");
mapping.startObject("mapping"); mapping.startObject("mapping");
mapping.field("type", "keyword"); mapping.field("type", "keyword");
@ -358,9 +358,9 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\"")); assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\""));
assertWarnings( assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{" "dynamic template [my_template2] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{"
+ "\"foo\":\"bar\",\"type\":\"keyword\"}}], " + "\"foo\":\"bar\",\"type\":\"keyword\"}}], "
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template] of type [keyword]]" + "caused by [unknown parameter [foo] on mapper [__dynamic__my_template2] of type [keyword]]"
); );
} }
@ -372,7 +372,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates"); mapping.startArray("dynamic_templates");
{ {
mapping.startObject(); mapping.startObject();
mapping.startObject("my_template"); mapping.startObject("my_template3");
mapping.field("match_mapping_type", "string"); mapping.field("match_mapping_type", "string");
mapping.startObject("mapping"); mapping.startObject("mapping");
mapping.field("type", "text"); mapping.field("type", "text");
@ -389,7 +389,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
assertThat(mapper.mappingSource().toString(), containsString("\"analyzer\":\"foobar\"")); assertThat(mapper.mappingSource().toString(), containsString("\"analyzer\":\"foobar\""));
assertWarnings( assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{" "dynamic template [my_template3] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{"
+ "\"analyzer\":\"foobar\",\"type\":\"text\"}}], caused by [analyzer [foobar] has not been configured in mappings]" + "\"analyzer\":\"foobar\",\"type\":\"text\"}}], caused by [analyzer [foobar] has not been configured in mappings]"
); );
} }
@ -405,7 +405,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates"); mapping.startArray("dynamic_templates");
{ {
mapping.startObject(); mapping.startObject();
mapping.startObject("my_template"); mapping.startObject("my_template4");
if (randomBoolean()) { if (randomBoolean()) {
mapping.field("match_mapping_type", "*"); mapping.field("match_mapping_type", "*");
} else { } else {
@ -439,7 +439,7 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
mapping.startArray("dynamic_templates"); mapping.startArray("dynamic_templates");
{ {
mapping.startObject(); mapping.startObject();
mapping.startObject("my_template"); mapping.startObject("my_template4");
if (useMatchMappingType) { if (useMatchMappingType) {
mapping.field("match_mapping_type", "*"); mapping.field("match_mapping_type", "*");
} else { } else {
@ -465,15 +465,15 @@ public class RootObjectMapperTests extends OpenSearchSingleNodeTestCase {
assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\"")); assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\""));
if (useMatchMappingType) { if (useMatchMappingType) {
assertWarnings( assertWarnings(
"dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"*\",\"mapping\":{" "dynamic template [my_template4] has invalid content [{\"match_mapping_type\":\"*\",\"mapping\":{"
+ "\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], " + "\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], "
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template] of type [binary]]" + "caused by [unknown parameter [foo] on mapper [__dynamic__my_template4] of type [binary]]"
); );
} else { } else {
assertWarnings( assertWarnings(
"dynamic template [my_template] has invalid content [{\"match\":\"string_*\",\"mapping\":{" "dynamic template [my_template4] has invalid content [{\"match\":\"string_*\",\"mapping\":{"
+ "\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], " + "\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], "
+ "caused by [unknown parameter [foo] on mapper [__dynamic__my_template] of type [binary]]" + "caused by [unknown parameter [foo] on mapper [__dynamic__my_template4] of type [binary]]"
); );
} }
} }

View File

@ -43,6 +43,8 @@ import org.opensearch.test.AbstractQueryTestCase;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
@ -50,6 +52,8 @@ import static org.hamcrest.Matchers.containsString;
public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder> { public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder> {
private Set<String> assertedWarnings = new HashSet<>();
@Override @Override
protected IdsQueryBuilder doCreateTestQueryBuilder() { protected IdsQueryBuilder doCreateTestQueryBuilder() {
final String type; final String type;
@ -161,8 +165,9 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
assertThat(query, instanceOf(IdsQueryBuilder.class)); assertThat(query, instanceOf(IdsQueryBuilder.class));
IdsQueryBuilder idsQuery = (IdsQueryBuilder) query; IdsQueryBuilder idsQuery = (IdsQueryBuilder) query;
if (idsQuery.types().length > 0) { if (idsQuery.types().length > 0 && !assertedWarnings.contains(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE); assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
assertedWarnings.add(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
} }
return query; return query;
} }

View File

@ -66,6 +66,7 @@ import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.EnumSet; import java.util.EnumSet;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Stream; import java.util.stream.Stream;
@ -84,6 +85,8 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
private static Item[] randomLikeItems; private static Item[] randomLikeItems;
private static Item[] randomUnlikeItems; private static Item[] randomUnlikeItems;
private Set<String> assertedWarnings = new HashSet<>();
@Before @Before
public void setup() { public void setup() {
// MLT only supports string fields, unsupported fields are tested below // MLT only supports string fields, unsupported fields are tested below
@ -480,8 +483,9 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class)); assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class));
MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query; MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query;
if (mltQuery.isTypeless() == false) { if (mltQuery.isTypeless() == false && !assertedWarnings.contains(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
assertWarnings(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE); assertWarnings(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE);
assertedWarnings.add(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE);
} }
return query; return query;
} }

View File

@ -60,8 +60,10 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
@ -72,6 +74,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
private List<Object> randomTerms; private List<Object> randomTerms;
private String termsPath; private String termsPath;
private boolean maybeIncludeType = true; private boolean maybeIncludeType = true;
private Set<String> assertedWarnings = new HashSet<>();
@Before @Before
public void randomTerms() { public void randomTerms() {
@ -380,8 +383,10 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class)); assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class));
TermsQueryBuilder termsQuery = (TermsQueryBuilder) query; TermsQueryBuilder termsQuery = (TermsQueryBuilder) query;
if (termsQuery.isTypeless() == false) { String deprecationWarning = "Deprecated field [type] used, this field is unused and will be removed entirely";
assertWarnings("Deprecated field [type] used, this field is unused and will be removed entirely"); if (termsQuery.isTypeless() == false && !assertedWarnings.contains(deprecationWarning)) {
assertWarnings(deprecationWarning);
assertedWarnings.add(deprecationWarning);
} }
return query; return query;
} finally { } finally {

View File

@@ -79,6 +79,7 @@ public class RestGetSourceActionTests extends RestActionTestCase {
      * test deprecation is logged if type is used in path
      */
     public void testTypeInPath() {
+        boolean assertWarnings = true;
         // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
         verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
         for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
@@ -89,7 +90,10 @@
                 .build();
             dispatchRequest(request);
-            assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+            if (assertWarnings) {
+                assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+                assertWarnings = false;
+            }
         }
     }
 }
@@ -98,6 +102,7 @@
      * test deprecation is logged if type is used as parameter
      */
     public void testTypeParameter() {
+        boolean assertWarnings = true;
         // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
         verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
         Map<String, String> params = new HashMap<>();
@@ -110,7 +115,10 @@
                 .withParams(params)
                 .build();
             dispatchRequest(request);
-            assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+            if (assertWarnings) {
+                assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+                assertWarnings = false;
+            }
         }
     }
 }
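RestGetSourceActionTests uses a slightly different variant because the request is dispatched in a loop inside one test method: a local boolean flag limits the assertion to the first iteration. A rough sketch of that control flow (the method names and loop contents are placeholders, not the real test):

    import java.util.Arrays;
    import java.util.List;

    // Illustrative sketch of the assert-once-in-a-loop flag used by the REST action tests.
    class AssertOnceInLoop {
        void dispatchAll(List<String> methods) {
            boolean assertWarnings = true;
            for (String method : methods) {
                dispatch(method);                // stand-in for dispatchRequest(request)
                if (assertWarnings) {
                    assertDeprecationWarning();  // only the first request asserts the warning
                    assertWarnings = false;
                }
            }
        }

        private void dispatch(String method) { /* placeholder */ }

        private void assertDeprecationWarning() { /* placeholder for assertWarnings(...) */ }

        public static void main(String[] args) {
            new AssertOnceInLoop().dispatchAll(Arrays.asList("GET", "HEAD"));
        }
    }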
@@ -229,7 +229,7 @@ public class ScriptMetadataTests extends AbstractSerializingTestCase<ScriptMetad
             BytesReference.bytes(builder).streamInput()
         );
         ScriptMetadata.fromXContent(parser);
-        assertWarnings("empty scripts should no longer be used");
+        assertNoDeprecationWarnings();

         builder = XContentFactory.jsonBuilder();
         builder.startObject().startObject("script").field("lang", "mustache").field("source", "").endObject().endObject();
@@ -240,7 +240,7 @@
             BytesReference.bytes(builder).streamInput()
         );
         ScriptMetadata.fromXContent(parser);
-        assertWarnings("empty templates should no longer be used");
+        assertNoDeprecationWarnings();
     }

     public void testOldStyleDropped() throws IOException {
@@ -255,7 +255,7 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
             StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap());
             assertThat(parsed, equalTo(source));
-            assertWarnings("empty templates should no longer be used");
+            assertNoDeprecationWarnings();
         }
     }
@@ -80,8 +80,10 @@
 import org.opensearch.search.builder.SearchSourceBuilder;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
+import java.util.Set;

 import static org.opensearch.search.sort.FieldSortBuilder.getMinMaxOrNull;
 import static org.opensearch.search.sort.FieldSortBuilder.getPrimaryFieldSortOrNull;
@@ -90,6 +92,8 @@
 import static org.hamcrest.Matchers.instanceOf;

 public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder> {
+    private Set<String> assertedWarnings = new HashSet<>();

     /**
      * {@link #provideMappedFieldType(String)} will return a
      */
@@ -694,14 +698,17 @@
     @Override
     protected void assertWarnings(FieldSortBuilder testItem) {
         List<String> expectedWarnings = new ArrayList<>();
-        if (testItem.getNestedFilter() != null) {
-            expectedWarnings.add("[nested_filter] has been deprecated in favour for the [nested] parameter");
+        String nestedFilterDeprecationWarning = "[nested_filter] has been deprecated in favour for the [nested] parameter";
+        String nestedPathDeprecationWarning = "[nested_path] has been deprecated in favor of the [nested] parameter";
+        if (testItem.getNestedFilter() != null && !assertedWarnings.contains(nestedFilterDeprecationWarning)) {
+            expectedWarnings.add(nestedFilterDeprecationWarning);
         }
-        if (testItem.getNestedPath() != null) {
-            expectedWarnings.add("[nested_path] has been deprecated in favor of the [nested] parameter");
+        if (testItem.getNestedPath() != null && !assertedWarnings.contains(nestedPathDeprecationWarning)) {
+            expectedWarnings.add(nestedPathDeprecationWarning);
         }
         if (expectedWarnings.isEmpty() == false) {
             assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()]));
+            assertedWarnings.addAll(expectedWarnings);
         }
     }
@@ -64,13 +64,17 @@
 import org.opensearch.test.geo.RandomGeoGenerator;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;

 import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.instanceOf;

 public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanceSortBuilder> {
+    private Set<String> assertedWarnings = new HashSet<>();

     @Override
     protected GeoDistanceSortBuilder createTestItem() {
         return randomGeoDistanceSortBuilder();
@@ -407,14 +411,17 @@
     @Override
     protected void assertWarnings(GeoDistanceSortBuilder testItem) {
         List<String> expectedWarnings = new ArrayList<>();
-        if (testItem.getNestedFilter() != null) {
-            expectedWarnings.add("[nested_filter] has been deprecated in favour of the [nested] parameter");
+        String nestedFilterDeprecationWarning = "[nested_filter] has been deprecated in favour of the [nested] parameter";
+        String nestedPathDeprecationWarning = "[nested_path] has been deprecated in favour of the [nested] parameter";
+        if (testItem.getNestedFilter() != null && !assertedWarnings.contains(nestedFilterDeprecationWarning)) {
+            expectedWarnings.add(nestedFilterDeprecationWarning);
         }
-        if (testItem.getNestedPath() != null) {
-            expectedWarnings.add("[nested_path] has been deprecated in favour of the [nested] parameter");
+        if (testItem.getNestedPath() != null && !assertedWarnings.contains(nestedPathDeprecationWarning)) {
+            expectedWarnings.add(nestedPathDeprecationWarning);
         }
         if (expectedWarnings.isEmpty() == false) {
             assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()]));
+            assertedWarnings.addAll(expectedWarnings);
         }
     }
@@ -59,6 +59,7 @@ public class SortBuilderTests extends OpenSearchTestCase {
     private static final int NUMBER_OF_RUNS = 20;
     private static NamedXContentRegistry xContentRegistry;
+    private Set<String> assertedWarnings = new HashSet<>();

     @BeforeClass
     public static void init() {
@@ -152,11 +153,13 @@
         for (SortBuilder<?> builder : testBuilders) {
             if (builder instanceof GeoDistanceSortBuilder) {
                 GeoDistanceSortBuilder gdsb = (GeoDistanceSortBuilder) builder;
-                if (gdsb.getNestedFilter() != null) {
-                    expectedWarningHeaders.add("[nested_filter] has been deprecated in favour of the [nested] parameter");
+                String nestedFilterDeprecationWarning = "[nested_filter] has been deprecated in favour of the [nested] parameter";
+                String nestedPathDeprecationWarning = "[nested_path] has been deprecated in favour of the [nested] parameter";
+                if (gdsb.getNestedFilter() != null && !assertedWarnings.contains(nestedFilterDeprecationWarning)) {
+                    expectedWarningHeaders.add(nestedFilterDeprecationWarning);
                 }
-                if (gdsb.getNestedPath() != null) {
-                    expectedWarningHeaders.add("[nested_path] has been deprecated in favour of the [nested] parameter");
+                if (gdsb.getNestedPath() != null && !assertedWarnings.contains(nestedPathDeprecationWarning)) {
+                    expectedWarningHeaders.add(nestedPathDeprecationWarning);
                 }
             }
@@ -199,6 +202,7 @@
             }
             if (expectedWarningHeaders.size() > 0) {
                 assertWarnings(expectedWarningHeaders.toArray(new String[expectedWarningHeaders.size()]));
+                assertedWarnings.addAll(expectedWarningHeaders);
             }
         }
     }
@@ -80,6 +80,7 @@
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.Writeable;
 import org.opensearch.common.joda.JodaDeprecationPatterns;
+import org.opensearch.common.logging.DeprecatedMessage;
 import org.opensearch.common.logging.HeaderWarning;
 import org.opensearch.common.logging.HeaderWarningAppender;
 import org.opensearch.common.logging.LogConfigurator;
@@ -425,6 +426,8 @@
         }
         ensureAllSearchContextsReleased();
         ensureCheckIndexPassed();
+        // "clear" the deprecated message set so the following tests run independently.
+        DeprecatedMessage.resetDeprecatedMessageForTests();
         logger.info("{}after test", getTestParamsForLogging());
     }
@@ -491,6 +494,15 @@
         );
     }

+    /**
+     * Convenience method to assert that the same warnings for settings deprecations and general
+     * deprecation warnings are not logged again.
+     */
+    protected final void assertNoDeprecationWarnings() {
+        final List<String> actualWarnings = threadContext.getResponseHeaders().get("Warning");
+        assertTrue("Found duplicate warnings logged", actualWarnings == null);
+    }
+
     protected final void assertWarnings(String... expectedWarnings) {
         assertWarnings(true, expectedWarnings);
     }
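Because the deprecation logger now suppresses duplicate messages, a second occurrence of the same deprecated construct no longer adds a Warning response header; assertNoDeprecationWarnings() checks exactly that, and the reset in the teardown keeps test methods independent of each other. A hedged sketch of how a test might combine the two helpers (the parse step and the stubbed helpers are made up so the sketch compiles on its own):

    // Sketch only: in a real test this class would extend OpenSearchTestCase and the
    // stubs below would not be needed; parseEmptyScript() is a hypothetical trigger
    // for the "empty scripts should no longer be used" deprecation.
    public class DuplicateDeprecationSketchTests {

        public void testWarningLoggedOnlyOnce() throws Exception {
            parseEmptyScript();            // first parse: the deprecation is logged once
            assertWarnings("empty scripts should no longer be used");

            parseEmptyScript();            // second parse: the logger suppresses the duplicate
            assertNoDeprecationWarnings(); // so no Warning header should be present this time
        }

        private void parseEmptyScript() { /* hypothetical */ }

        // Stubs standing in for the OpenSearchTestCase helpers in this sketch.
        private void assertWarnings(String... expected) {}

        private void assertNoDeprecationWarnings() {}
    }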
@@ -103,6 +103,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Consumer;
@@ -127,6 +128,10 @@
     public static final String CLIENT_SOCKET_TIMEOUT = "client.socket.timeout";
     public static final String CLIENT_PATH_PREFIX = "client.path.prefix";

+    // This set will contain the warnings already asserted since we are eliminating logging duplicate warnings.
+    // This ensures that no matter in what order the tests run, the warning is asserted once.
+    private static Set<String> assertedWarnings = ConcurrentHashMap.newKeySet();
+
     /**
      * Convert the entity from a {@link Response} into a map of maps.
      */
@@ -256,6 +261,9 @@
             @Override
             public boolean warningsShouldFailRequest(List<String> warnings) {
+                if (warnings.isEmpty()) {
+                    return false;
+                }
                 if (isExclusivelyTargetingCurrentVersionCluster()) {
                     // absolute equality required in expected and actual.
                     Set<String> actual = new HashSet<>(warnings);
@@ -298,6 +306,18 @@
         return expectVersionSpecificWarnings(consumer -> consumer.current(warnings));
     }

+    /**
+     * Filters out already asserted warnings and calls the expectWarnings method.
+     * @param deprecationWarning expected warning
+     */
+    public static RequestOptions expectWarningsOnce(String deprecationWarning) {
+        if (assertedWarnings.contains(deprecationWarning)) {
+            return RequestOptions.DEFAULT;
+        }
+        assertedWarnings.add(deprecationWarning);
+        return expectWarnings(deprecationWarning);
+    }
+
     /**
      * Creates RequestOptions designed to ignore [types removal] warnings but nothing else
      * @deprecated this method is only required while we deprecate types and can be removed in 8.0
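A REST test uses expectWarningsOnce exactly where it previously used expectWarnings, attaching the returned RequestOptions to the request: the first call for a given message requires the warning header, and later calls fall back to RequestOptions.DEFAULT so the now-suppressed header does not fail the request. A rough usage sketch (the test class, endpoint, and warning text are placeholders, not an actual test in this change):

    import org.opensearch.client.Request;

    // Hypothetical test class extending OpenSearchRestTestCase; endpoint and message are examples.
    public class ExpectWarningsOnceUsageIT extends OpenSearchRestTestCase {
        private static final String WARNING = "Deprecated field [type] used, this field is unused and will be removed entirely";

        public void testDeprecatedEndpointTwice() throws Exception {
            Request first = new Request("POST", "/some_index/some_type/_bulk");
            first.setOptions(expectWarningsOnce(WARNING));   // first use: the warning header is required
            client().performRequest(first);

            Request second = new Request("POST", "/some_index/some_type/_bulk");
            second.setOptions(expectWarningsOnce(WARNING));  // already asserted: falls back to RequestOptions.DEFAULT
            client().performRequest(second);
        }
    }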
@@ -1252,15 +1272,9 @@
     protected static void performSyncedFlush(String indexName, boolean retryOnConflict) throws Exception {
         final Request request = new Request("POST", indexName + "/_flush/synced");
         final List<String> expectedWarnings = Collections.singletonList(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE);
-        if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0))) {
-            final Builder options = RequestOptions.DEFAULT.toBuilder();
-            options.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false);
-            request.setOptions(options);
-        } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0))) {
-            final Builder options = RequestOptions.DEFAULT.toBuilder();
-            options.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false);
-            request.setOptions(options);
-        }
+        final Builder options = RequestOptions.DEFAULT.toBuilder();
+        options.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false);
+        request.setOptions(options);
         // We have to spin a synced-flush request because we fire the global checkpoint sync for the last write operation.
         // A synced-flush request considers the global checkpoint sync as an ongoing operation because it acquires a shard permit.
         assertBusy(() -> {
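The rewritten performSyncedFlush drops the per-node-version branching and keeps a single warnings handler: a response may come back with no warnings at all (the deprecation was already logged earlier in the suite) or with exactly the synced-flush deprecation message; anything else fails the request. The lambda's decision logic, pulled out as a small illustrative predicate:

    import java.util.Collections;
    import java.util.List;

    // Mirrors the lambda passed to setWarningsHandler above; "true" means "fail the request".
    class SyncedFlushWarningsPredicate {
        static boolean shouldFail(List<String> warnings, List<String> expectedWarnings) {
            return warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false;
        }

        public static void main(String[] args) {
            List<String> expected = Collections.singletonList("<synced flush deprecation message>"); // placeholder text
            System.out.println(shouldFail(Collections.emptyList(), expected));                        // false: no warnings is fine
            System.out.println(shouldFail(expected, expected));                                       // false: exactly the expected warning
            System.out.println(shouldFail(Collections.singletonList("something else"), expected));    // true: anything else fails
        }
    }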
@@ -52,7 +52,7 @@ public class VersionSensitiveWarningsHandlerTests extends OpenSearchTestCase {
         WarningsHandler handler = expectVersionSpecificWarnings(nodeVersions, (v) -> { v.current("expectedCurrent1"); });
         assertFalse(handler.warningsShouldFailRequest(Arrays.asList("expectedCurrent1")));
         assertTrue(handler.warningsShouldFailRequest(Arrays.asList("expectedCurrent1", "unexpected")));
-        assertTrue(handler.warningsShouldFailRequest(Collections.emptyList()));
+        assertFalse(handler.warningsShouldFailRequest(Collections.emptyList()));
     }