Merge branch 'master' into ccr

commit b5032eab80

@@ -1,5 +1,5 @@
 Elasticsearch
-Copyright 2009-2017 Elasticsearch
+Copyright 2009-2018 Elasticsearch
 
 This product includes software developed by The Apache Software
 Foundation (http://www.apache.org/).

@@ -167,7 +167,10 @@ class NodeInfo {
         String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
         String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
         if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) {
-            esJavaOpts += " -ea -esa"
+            // put the enable assertions options before other options to allow
+            // flexibility to disable assertions for specific packages or classes
+            // in the cluster-specific options
+            esJavaOpts = String.join(" ", "-ea", "-esa", esJavaOpts)
         }
         env.put('ES_JAVA_OPTS', esJavaOpts)
         for (Map.Entry<String, String> property : System.properties.entrySet()) {
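
The prepending matters because the JVM applies assertion switches left to right, so a later, cluster-specific option can still win. A minimal sketch (the package name is hypothetical, not from this commit):

```java
// Assertions enabled globally, then selectively disabled by a later option;
// the JVM honors the last switch it sees for a given scope.
String clusterSpecific = "-da:org.example.noisy...";  // hypothetical package tree
String esJavaOpts = String.join(" ", "-ea", "-esa", clusterSpecific);
// -> "-ea -esa -da:org.example.noisy...": assertions on everywhere except
//    the org.example.noisy package tree.
```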

@@ -56,14 +56,12 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]TransportPendingClusterTasksAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]Alias.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]TransportIndicesAliasesAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]exists[/\\]TransportAliasesExistAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]get[/\\]BaseAliasesRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]get[/\\]TransportGetAliasesAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]analyze[/\\]TransportAnalyzeAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]cache[/\\]clear[/\\]ClearIndicesCacheRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]cache[/\\]clear[/\\]TransportClearIndicesCacheAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]close[/\\]TransportCloseIndexAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]create[/\\]CreateIndexRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]create[/\\]TransportCreateIndexAction.java" checks="LineLength" />
@@ -77,18 +75,15 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]flush[/\\]TransportSyncedFlushAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]forcemerge[/\\]ForceMergeRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]forcemerge[/\\]TransportForceMergeAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]get[/\\]TransportGetIndexAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]mapping[/\\]get[/\\]GetFieldMappingsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]mapping[/\\]get[/\\]GetMappingsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]mapping[/\\]get[/\\]TransportGetFieldMappingsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]mapping[/\\]get[/\\]TransportGetFieldMappingsIndexAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]mapping[/\\]get[/\\]TransportGetMappingsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]mapping[/\\]put[/\\]TransportPutMappingAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]open[/\\]TransportOpenIndexAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]recovery[/\\]TransportRecoveryAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]refresh[/\\]TransportRefreshAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]segments[/\\]IndexSegments.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]segments[/\\]IndicesSegmentResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]segments[/\\]IndicesSegmentsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]segments[/\\]TransportIndicesSegmentsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]settings[/\\]get[/\\]GetSettingsRequestBuilder.java" checks="LineLength" />
@@ -101,7 +96,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]shards[/\\]TransportIndicesShardStoresAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]stats[/\\]IndexStats.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]stats[/\\]IndicesStatsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]stats[/\\]IndicesStatsResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]stats[/\\]TransportIndicesStatsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]delete[/\\]DeleteIndexTemplateAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]delete[/\\]DeleteIndexTemplateRequestBuilder.java" checks="LineLength" />
@@ -112,19 +106,15 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]get[/\\]IndexUpgradeStatus.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]get[/\\]TransportUpgradeStatusAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]get[/\\]UpgradeStatusRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]get[/\\]UpgradeStatusResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]post[/\\]TransportUpgradeAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]post[/\\]TransportUpgradeSettingsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]post[/\\]UpgradeResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]post[/\\]UpgradeSettingsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]validate[/\\]query[/\\]TransportValidateQueryAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]validate[/\\]query[/\\]ValidateQueryRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]validate[/\\]query[/\\]ValidateQueryResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BackoffPolicy.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkProcessor.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]Retry.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]TransportBulkAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]TransportShardBulkAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]DeleteRequest.java" checks="LineLength" />
@@ -139,12 +129,10 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]GetPipelineRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]GetPipelineTransportAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]PutPipelineTransportAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineTransportAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]MultiSearchRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchPhaseController.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]ShardSearchFailure.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptions.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ToXContentToBytes.java" checks="LineLength" />
@@ -177,7 +165,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]TransportUpdateAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]Bootstrap.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNANatives.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JarHell.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]ElasticsearchClient.java" checks="LineLength" />
@@ -215,7 +202,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AbstractAllocateAllocationCommand.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocateEmptyPrimaryAllocationCommand.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocateReplicaAllocationCommand.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocateStalePrimaryAllocationCommand.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocationCommands.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]MoveAllocationCommand.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]decider[/\\]AllocationDeciders.java" checks="LineLength" />
@@ -314,7 +300,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]UidFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]VersionFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]merge[/\\]MergeStats.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]AbstractQueryBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQuery.java" checks="LineLength" />
@@ -358,7 +343,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]blobstore[/\\]BlobStoreRepository.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]blobstore[/\\]ChecksumBlobStoreFormat.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]fs[/\\]FsRepository.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]RestController.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestIndicesAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
@@ -401,7 +385,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]ShardSearchTransportRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafDocLookup.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]QueryPhase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescorer.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]ContextMapping.java" checks="LineLength" />
@@ -439,7 +422,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineResponseTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]WriteableIngestDocumentTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchRequestBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]TransportActionFilterChainTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]WaitActiveShardCountIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeActionTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]TransportMasterNodeActionTests.java" checks="LineLength" />
@@ -582,7 +564,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoShapeFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IdFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IndexFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]JavaMultiFieldMergeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MapperServiceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MultiFieldCopyToMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MultiFieldTests.java" checks="LineLength" />
@@ -604,7 +585,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanNotQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]functionscore[/\\]FunctionScoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]replication[/\\]ESIndexLevelReplicationTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQueryTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]geo[/\\]GeoUtilsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]AbstractNumberNestedSortingTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]DoubleNestedSortingTests.java" checks="LineLength" />
@@ -733,7 +713,6 @@
<suppress files="plugins[/\\]analysis-phonetic[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PhoneticTokenFilterFactory.java" checks="LineLength" />
<suppress files="plugins[/\\]discovery-ec2[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ec2[/\\]AbstractAwsTestCase.java" checks="LineLength" />
<suppress files="plugins[/\\]discovery-ec2[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ec2[/\\]AmazonEC2Mock.java" checks="LineLength" />
<suppress files="plugins[/\\]discovery-gce[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]gce[/\\]GceNetworkTests.java" checks="LineLength" />
<suppress files="plugins[/\\]mapper-murmur3[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]murmur3[/\\]Murmur3FieldMapper.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsRepository.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsTests.java" checks="LineLength" />
@@ -744,11 +723,9 @@
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]MockDefaultS3OutputStream.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]TestAmazonS3.java" checks="LineLength" />
<suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]BootstrapForTesting.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]MockInternalClusterInfoService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]TestShardRouting.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]ModuleTestCase.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]MockBigArrays.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]script[/\\]NativeSignificanceScoreScriptWithParams.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]BackgroundIndexer.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]CorruptionUtils.java" checks="LineLength" />

@@ -1,5 +1,5 @@
 elasticsearch = 7.0.0-alpha1
-lucene = 7.3.0-snapshot-98a6b3d
+lucene = 7.3.0

 # optional dependencies
 spatial4j = 0.7

@@ -544,8 +544,10 @@ public final class Request {
 
     static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException {
         String endpoint = endpoint(rankEvalRequest.indices(), Strings.EMPTY_ARRAY, "_rank_eval");
+        Params params = Params.builder();
+        params.withIndicesOptions(rankEvalRequest.indicesOptions());
         HttpEntity entity = createEntity(rankEvalRequest.getRankEvalSpec(), REQUEST_BODY_CONTENT_TYPE);
-        return new Request(HttpGet.METHOD_NAME, endpoint, Collections.emptyMap(), entity);
+        return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), entity);
     }
 
     static Request split(ResizeRequest resizeRequest) throws IOException {
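
With the builder in place, the request's indices options travel as query parameters instead of the previously hard-coded empty map. A hedged sketch of the observable effect (the endpoint and parameter values are examples; `lenientExpandOpen` is the standard `IndicesOptions` factory, not shown in this diff):

```java
// Sketch: lenient, open-wildcard options serialize onto the endpoint as
// ignore_unavailable / allow_no_indices / expand_wildcards parameters, e.g.
// GET /index,index2/_rank_eval?ignore_unavailable=true&allow_no_indices=true&expand_wildcards=open
RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, new String[] { "index", "index2" });
rankEvalRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
Request request = Request.rankEval(rankEvalRequest);
// request.getParameters() now holds the three entries shown above.
```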

@@ -21,6 +21,8 @@ package org.elasticsearch.client;
 
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.rankeval.EvalQueryQuality;
 import org.elasticsearch.index.rankeval.PrecisionAtK;
@@ -37,8 +39,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map.Entry;
-import java.util.Set;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;

@@ -55,6 +58,10 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         client().performRequest("PUT", "/index/doc/5", Collections.emptyMap(), doc);
         client().performRequest("PUT", "/index/doc/6", Collections.emptyMap(), doc);
         client().performRequest("POST", "/index/_refresh");
+
+        // add another index to test basic multi index support
+        client().performRequest("PUT", "/index2/doc/7", Collections.emptyMap(), doc);
+        client().performRequest("POST", "/index2/_refresh");
     }
 
     /**
@@ -64,7 +71,9 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
     public void testRankEvalRequest() throws IOException {
         SearchSourceBuilder testQuery = new SearchSourceBuilder();
         testQuery.query(new MatchAllQueryBuilder());
-        RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", createRelevant("index" , "2", "3", "4", "5"), testQuery);
+        List<RatedDocument> amsterdamRatedDocs = createRelevant("index" , "2", "3", "4", "5");
+        amsterdamRatedDocs.addAll(createRelevant("index2", "7"));
+        RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery);
         RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "1"), testQuery);
         List<RatedRequest> specifications = new ArrayList<>();
         specifications.add(amsterdamRequest);
@@ -72,49 +81,46 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         PrecisionAtK metric = new PrecisionAtK(1, false, 10);
         RankEvalSpec spec = new RankEvalSpec(specifications, metric);
 
-        RankEvalResponse response = execute(new RankEvalRequest(spec, new String[] { "index" }), highLevelClient()::rankEval,
+        RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, new String[] { "index", "index2" });
+        RankEvalResponse response = execute(rankEvalRequest, highLevelClient()::rankEval,
                 highLevelClient()::rankEvalAsync);
-        // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the second is 1/6, divided by 2 to get the average
-        double expectedPrecision = (1.0 / 6.0 + 4.0 / 6.0) / 2.0;
+        // the expected Prec@ for the first query is 5/7 and the expected Prec@ for the second is 1/7, divided by 2 to get the average
+        double expectedPrecision = (1.0 / 7.0 + 5.0 / 7.0) / 2.0;
         assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE);
-        Set<Entry<String, EvalQueryQuality>> entrySet = response.getPartialResults().entrySet();
-        assertEquals(2, entrySet.size());
-        for (Entry<String, EvalQueryQuality> entry : entrySet) {
-            EvalQueryQuality quality = entry.getValue();
-            if (entry.getKey() == "amsterdam_query") {
-                assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size());
-                List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
-                assertEquals(6, hitsAndRatings.size());
-                for (RatedSearchHit hit : hitsAndRatings) {
-                    String id = hit.getSearchHit().getId();
-                    if (id.equals("1") || id.equals("6")) {
-                        assertFalse(hit.getRating().isPresent());
-                    } else {
-                        assertEquals(1, hit.getRating().get().intValue());
-                    }
-                }
-            }
-            if (entry.getKey() == "berlin_query") {
-                assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size());
-                List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
-                assertEquals(6, hitsAndRatings.size());
-                for (RatedSearchHit hit : hitsAndRatings) {
-                    String id = hit.getSearchHit().getId();
-                    if (id.equals("1")) {
-                        assertEquals(1, hit.getRating().get().intValue());
-                    } else {
-                        assertFalse(hit.getRating().isPresent());
-                    }
-                }
-            }
-        }
+        Map<String, EvalQueryQuality> partialResults = response.getPartialResults();
+        assertEquals(2, partialResults.size());
+        EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query");
+        assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
+        List<RatedSearchHit> hitsAndRatings = amsterdamQueryQuality.getHitsAndRatings();
+        assertEquals(7, hitsAndRatings.size());
+        for (RatedSearchHit hit : hitsAndRatings) {
+            String id = hit.getSearchHit().getId();
+            if (id.equals("1") || id.equals("6")) {
+                assertFalse(hit.getRating().isPresent());
+            } else {
+                assertEquals(1, hit.getRating().get().intValue());
+            }
+        }
+        EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query");
+        assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size());
+        hitsAndRatings = berlinQueryQuality.getHitsAndRatings();
+        assertEquals(7, hitsAndRatings.size());
+        for (RatedSearchHit hit : hitsAndRatings) {
+            String id = hit.getSearchHit().getId();
+            if (id.equals("1")) {
+                assertEquals(1, hit.getRating().get().intValue());
+            } else {
+                assertFalse(hit.getRating().isPresent());
+            }
+        }
+
+        // now try this when test2 is closed
+        client().performRequest("POST", "index2/_close", Collections.emptyMap());
+        rankEvalRequest.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS));
+        response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync);
     }
 
     private static List<RatedDocument> createRelevant(String indexName, String... docs) {
-        List<RatedDocument> relevant = new ArrayList<>();
-        for (String doc : docs) {
-            relevant.add(new RatedDocument(indexName, doc, 1));
-        }
-        return relevant;
+        return Stream.of(docs).map(s -> new RatedDocument(indexName, s, 1)).collect(Collectors.toList());
     }
 }
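
The updated expectation follows directly from the new fixture: each query now sees seven hits across both indices; the amsterdam query has five rated documents among them (docs 2 through 5 in `index` plus doc 7 in `index2`) and the berlin query has one (doc 1). The arithmetic, spelled out:

```java
double amsterdamPrecision = 5.0 / 7.0; // rated docs 2, 3, 4, 5 and 7 out of 7 hits
double berlinPrecision = 1.0 / 7.0;    // only doc 1 is rated relevant
double average = (amsterdamPrecision + berlinPrecision) / 2.0; // ~0.4286
```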

@@ -1247,6 +1247,8 @@ public class RequestTests extends ESTestCase {
                 new PrecisionAtK());
         String[] indices = randomIndicesNames(0, 5);
         RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, indices);
+        Map<String, String> expectedParams = new HashMap<>();
+        setRandomIndicesOptions(rankEvalRequest::indicesOptions, rankEvalRequest::indicesOptions, expectedParams);
 
         Request request = Request.rankEval(rankEvalRequest);
         StringJoiner endpoint = new StringJoiner("/", "/", "");
@@ -1256,8 +1258,10 @@ public class RequestTests extends ESTestCase {
         }
         endpoint.add(RestRankEvalAction.ENDPOINT);
         assertEquals(endpoint.toString(), request.getEndpoint());
-        assertEquals(Collections.emptyMap(), request.getParameters());
+        assertEquals(3, request.getParameters().size());
+        assertEquals(expectedParams, request.getParameters());
         assertToXContentBody(spec, request.getEntity());
+
     }
 
     public void testSplit() throws IOException {
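
The literal `3` reflects the three parameters `setRandomIndicesOptions` registers; the names below follow the client's standard `IndicesOptions` serialization and are stated here for illustration, not taken from this extract:

```java
// Shape of expectedParams after setRandomIndicesOptions (example values):
Map<String, String> expectedParams = new HashMap<>();
expectedParams.put("ignore_unavailable", "false");
expectedParams.put("allow_no_indices", "true");
expectedParams.put("expand_wildcards", "open");
// request.getParameters() has size 3 and equals expectedParams.
```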

@@ -1,4 +1,4 @@
-Copyright 2013-2016 Elasticsearch <info@elastic.co>
+Copyright 2013-2018 Elasticsearch <info@elastic.co>
 
 License: Apache-2.0
 Licensed under the Apache License, Version 2.0 (the "License");

@@ -1,6 +1,6 @@
 == License
 
-Copyright 2013-2017 Elasticsearch
+Copyright 2013-2018 Elasticsearch
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.

@@ -115,7 +115,7 @@ https://github.com/elastic/elasticsearch-perl/blob/master/CONTRIBUTING.asciidoc[
 
 == Copyright and License
 
-This software is Copyright (c) 2013-2016 by Elasticsearch BV.
+This software is Copyright (c) 2013-2018 by Elasticsearch BV.
 
 This is free software, licensed under:
 https://github.com/elastic/elasticsearch-perl/blob/master/LICENSE.txt[The Apache License Version 2.0].

@@ -120,7 +120,7 @@ some of the more engaging tasks like bulk indexing and reindexing.
 
 === License
 
-Copyright 2013-2017 Elasticsearch
+Copyright 2013-2018 Elasticsearch
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.

@@ -1,20 +1,12 @@
 [[breaking_70_analysis_changes]]
 === Analysis changes
 
-==== The `delimited_payload_filter` is renamed
-
-The `delimited_payload_filter` is renamed to `delimited_payload`, the old name is
-deprecated and will be removed at some point, so it should be replaced by
-`delimited_payload`.
-
-
 ==== Limiting the number of tokens produced by _analyze
 
 To safeguard against out of memory errors, the number of tokens that can be produced
 using the `_analyze` endpoint has been limited to 10000. This default limit can be changed
 for a particular index with the index setting `index.analyze.max_token_count`.
 
 
 ==== Limiting the length of an analyzed text during highlighting
 
 Highlighting a text that was indexed without offsets or term vectors,
@@ -22,4 +14,11 @@ requires analysis of this text in memory real time during the search request.
 For large texts this analysis may take substantial amount of time and memory.
 To protect against this, the maximum number of characters that will be analyzed has been
 limited to 1000000. This default limit can be changed
-for a particular index with the index setting `index.highlight.max_analyzed_offset`.
+for a particular index with the index setting `index.highlight.max_analyzed_offset`.
+
+==== `delimited_payload_filter` renaming
+
+The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2.
+Using it in indices created before 7.0 will issue deprecation warnings. Using the old
+name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload`
+instead.
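
Both safeguards are per-index settings. A sketch of raising them at index creation through the Java API (the index name and values are illustrative; this assumes `org.elasticsearch.common.settings.Settings` and the admin-indices `CreateIndexRequest`):

```java
Settings settings = Settings.builder()
        .put("index.analyze.max_token_count", 20000)            // default: 10000
        .put("index.highlight.max_analyzed_offset", 2_000_000)  // default: 1000000
        .build();
CreateIndexRequest createIndex = new CreateIndexRequest("my-index").settings(settings);
```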

@@ -1,5 +1,5 @@
 == Copyright and License
 
-This software is Copyright (c) 2013-2016 by Elasticsearch BV.
+This software is Copyright (c) 2013-2018 by Elasticsearch BV.
 
 This is free software, licensed under The Apache License Version 2.0.

@@ -26,6 +26,45 @@ apply plugin: 'nebula.maven-scm'
 
 archivesBaseName = 'elasticsearch-core'
 
+// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 9 so we do not include this source set in our IDEs
+if (!isEclipse && !isIdea) {
+    sourceSets {
+        java9 {
+            java {
+                srcDirs = ['src/main/java9']
+            }
+        }
+    }
+
+    configurations {
+        java9Compile.extendsFrom(compile)
+    }
+
+    dependencies {
+        java9Compile sourceSets.main.output
+    }
+
+    compileJava9Java {
+        sourceCompatibility = 9
+        targetCompatibility = 9
+    }
+
+    /* Enable this when forbiddenapis was updated to 2.6.
+     * See: https://github.com/elastic/elasticsearch/issues/29292
+    forbiddenApisJava9 {
+        targetCompatibility = 9
+    }
+    */
+
+    jar {
+        metaInf {
+            into 'versions/9'
+            from sourceSets.java9.output
+        }
+        manifest.attributes('Multi-Release': 'true')
+    }
+}
+
 publishing {
     publications {
         nebula {
@@ -39,6 +78,10 @@ dependencies {
     testCompile "junit:junit:${versions.junit}"
     testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
 
+    if (!isEclipse && !isIdea) {
+        java9Compile sourceSets.main.output
+    }
+
     if (isEclipse == false || project.path == ":libs:elasticsearch-core-tests") {
         testCompile("org.elasticsearch.test:framework:${version}") {
             exclude group: 'org.elasticsearch', module: 'elasticsearch-core'

@@ -66,14 +109,14 @@ if (isEclipse) {
 }
 
 thirdPartyAudit.excludes = [
   // from log4j
   'org/osgi/framework/AdaptPermission',
   'org/osgi/framework/AdminPermission',
   'org/osgi/framework/Bundle',
   'org/osgi/framework/BundleActivator',
   'org/osgi/framework/BundleContext',
   'org/osgi/framework/BundleEvent',
   'org/osgi/framework/SynchronousBundleListener',
   'org/osgi/framework/wiring/BundleWire',
   'org/osgi/framework/wiring/BundleWiring'
 ]
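
Taken together, the `java9` source set, the `META-INF/versions/9` copy step, and the `Multi-Release: true` manifest attribute make this a multi-release jar (JEP 238): class loaders on JDK 9+ prefer the versioned classes, while older JDKs only see the top-level ones. The layout below is assumed for illustration, using the `Streams` class added later in this diff:

```
elasticsearch-core.jar  (assumed layout)
├── org/elasticsearch/core/internal/io/Streams.class        <- JDK 8 buffer-loop copy
└── META-INF/
    ├── MANIFEST.MF                                          (Multi-Release: true)
    └── versions/9/
        └── org/elasticsearch/core/internal/io/Streams.class <- transferTo copy, JDK 9+
```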

@@ -46,7 +46,7 @@ public class Tuple<V1, V2> {
         if (this == o) return true;
         if (o == null || getClass() != o.getClass()) return false;
 
-        Tuple tuple = (Tuple) o;
+        Tuple<?, ?> tuple = (Tuple<?, ?>) o;
 
         if (v1 != null ? !v1.equals(tuple.v1) : tuple.v1 != null) return false;
         if (v2 != null ? !v2.equals(tuple.v2) : tuple.v2 != null) return false;

@@ -41,45 +41,73 @@ public final class IOUtils {
     }
 
     /**
-     * Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are ignored. After everything is closed, the
-     * method either throws the first exception it hit while closing, or completes normally if there were no exceptions.
+     * Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are
+     * ignored. After everything is closed, the method either throws the first exception it hit
+     * while closing with other exceptions added as suppressed, or completes normally if there were
+     * no exceptions.
      *
      * @param objects objects to close
      */
     public static void close(final Closeable... objects) throws IOException {
-        close(Arrays.asList(objects));
+        close(null, Arrays.asList(objects));
     }
 
     /**
-     * Closes all given {@link Closeable}s.
+     * Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are
+     * ignored. After everything is closed, the method adds any exceptions as suppressed to the
+     * original exception, or throws the first exception it hit if {@code Exception} is null. If
+     * no exceptions are encountered and the passed in exception is null, it completes normally.
      *
      * @param objects objects to close
      */
+    public static void close(final Exception e, final Closeable... objects) throws IOException {
+        close(e, Arrays.asList(objects));
+    }
+
+    /**
+     * Closes all given <tt>Closeable</tt>s. Some of the <tt>Closeable</tt>s may be null; they are
+     * ignored. After everything is closed, the method either throws the first exception it hit
+     * while closing with other exceptions added as suppressed, or completes normally if there were
+     * no exceptions.
+     *
+     * @param objects objects to close
+     */
+    public static void close(final Iterable<? extends Closeable> objects) throws IOException {
+        close(null, objects);
+    }
+
+    /**
+     * Closes all given {@link Closeable}s. If a non-null exception is passed in, or closing a
+     * stream causes an exception, throws the exception with other {@link RuntimeException} or
+     * {@link IOException} exceptions added as suppressed.
+     *
+     * @param ex existing Exception to add exceptions occurring during close to
+     * @param objects objects to close
+     *
+     * @see #close(Closeable...)
+     */
-    public static void close(final Iterable<? extends Closeable> objects) throws IOException {
-        Exception ex = null;
-
+    public static void close(final Exception ex, final Iterable<? extends Closeable> objects) throws IOException {
+        Exception firstException = ex;
         for (final Closeable object : objects) {
             try {
                 if (object != null) {
                     object.close();
                 }
             } catch (final IOException | RuntimeException e) {
-                if (ex == null) {
-                    ex = e;
+                if (firstException == null) {
+                    firstException = e;
                 } else {
-                    ex.addSuppressed(e);
+                    firstException.addSuppressed(e);
                 }
             }
         }
 
-        if (ex != null) {
-            if (ex instanceof IOException) {
-                throw (IOException) ex;
+        if (firstException != null) {
+            if (firstException instanceof IOException) {
+                throw (IOException) firstException;
             } else {
                 // since we only assigned an IOException or a RuntimeException to ex above, in this case ex must be a RuntimeException
-                throw (RuntimeException) ex;
+                throw (RuntimeException) firstException;
             }
         }
     }
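
The exception-accepting overloads exist for the copy-loop idiom used by the new `Streams` class below: rethrow the primary failure, and let any `close()` failures ride along as suppressed instead of masking it. A minimal, self-contained sketch of the contract (not from the commit):

```java
import java.io.Closeable;
import java.io.IOException;

// Demonstrates the new contract: the passed-in exception wins, and close()
// failures are attached as suppressed rather than replacing it.
Closeable failingClose = () -> { throw new IOException("close failed"); };
IOException primary = new IOException("write failed");
try {
    IOUtils.close(primary, failingClose);
} catch (IOException e) {
    assert e.getMessage().equals("write failed");                    // primary preserved
    assert e.getSuppressed()[0].getMessage().equals("close failed"); // close() failure attached
}
```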

@@ -0,0 +1,67 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.core.internal.io;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Objects;

/**
 * Simple utility methods for file and stream copying.
 * All copy methods use a block size of 4096 bytes,
 * and close all affected streams when done.
 * <p>
 * Mainly for use within the framework,
 * but also useful for application code.
 */
public class Streams {

    /**
     * Copy the contents of the given InputStream to the given OutputStream.
     * Closes both streams when done.
     *
     * @param in the stream to copy from
     * @param out the stream to copy to
     * @return the number of bytes copied
     * @throws IOException in case of I/O errors
     */
    public static long copy(final InputStream in, final OutputStream out) throws IOException {
        Objects.requireNonNull(in, "No InputStream specified");
        Objects.requireNonNull(out, "No OutputStream specified");
        final byte[] buffer = new byte[8192];
        Exception err = null;
        try {
            long byteCount = 0;
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, bytesRead);
                byteCount += bytesRead;
            }
            out.flush();
            return byteCount;
        } catch (IOException | RuntimeException e) {
            err = e;
            throw e;
        } finally {
            IOUtils.close(err, in, out);
        }
    }
}

@@ -0,0 +1,57 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.core.internal.io;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/**
 * Simple utility methods for file and stream copying.
 * All copy methods close all affected streams when done.
 * <p>
 * Mainly for use within the framework,
 * but also useful for application code.
 */
public abstract class Streams {

    /**
     * Copy the contents of the given InputStream to the given OutputStream.
     * Closes both streams when done.
     *
     * @param in the stream to copy from
     * @param out the stream to copy to
     * @return the number of bytes copied
     * @throws IOException in case of I/O errors
     */
    public static long copy(final InputStream in, final OutputStream out) throws IOException {
        Exception err = null;
        try {
            final long byteCount = in.transferTo(out);
            out.flush();
            return byteCount;
        } catch (IOException | RuntimeException e) {
            err = e;
            throw e;
        } finally {
            IOUtils.close(err, in, out);
        }
    }
}
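
This second `Streams` has the same API but is implemented with `InputStream.transferTo`, which only exists on JDK 9+. A plausible reading (file paths are not shown in this extract and are assumptions): the buffer-loop version above lives in `src/main/java`, while this one lives in the new `src/main/java9` source set, so the multi-release jar serves it only to JDK 9+ runtimes.

```java
// The two implementations agree on the contract; on JDK 9+ this one is loaded:
long byteCount = in.transferTo(out);  // reads until EOF, returns bytes copied
// versus the JDK 8 fallback's manual loop:
// while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); ... }
```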

@@ -0,0 +1,47 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.collect;

import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.equalTo;

public class TupleTests extends ESTestCase {

    public void testTuple() {
        Tuple<Long, String> t1 = new Tuple<>(2L, "foo");
        Tuple<Long, String> t2 = new Tuple<>(2L, "foo");
        Tuple<Long, String> t3 = new Tuple<>(3L, "foo");
        Tuple<Long, String> t4 = new Tuple<>(2L, "bar");
        Tuple<Integer, String> t5 = new Tuple<>(2, "foo");

        assertThat(t1.v1(), equalTo(Long.valueOf(2L)));
        assertThat(t1.v2(), equalTo("foo"));

        assertThat(t1, equalTo(t2));
        assertNotEquals(t1, t3);
        assertNotEquals(t2, t3);
        assertNotEquals(t2, t4);
        assertNotEquals(t3, t4);
        assertNotEquals(t1, t5);

        assertThat(t1.toString(), equalTo("Tuple [v1=2, v2=foo]"));
    }
}

@@ -0,0 +1,42 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.core.internal.io;

import org.elasticsearch.test.ESTestCase;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import static org.hamcrest.Matchers.equalTo;

public class StreamsTests extends ESTestCase {
    public void testCopyFromInputStream() throws IOException {
        byte[] content = "content".getBytes(StandardCharsets.UTF_8);
        ByteArrayInputStream in = new ByteArrayInputStream(content);
        ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
        long count = Streams.copy(in, out);

        assertThat(count, equalTo((long) content.length));
        assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));
    }
}

@@ -34,8 +34,10 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.UncheckedIOException;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Collections;

@@ -74,8 +76,6 @@ public final class Grok {
     private final Map<String, String> patternBank;
     private final boolean namedCaptures;
     private final Regex compiledExpression;
-    private final String expression;
-
 
     public Grok(Map<String, String> patternBank, String grokPattern) {
         this(patternBank, grokPattern, true);

@@ -86,11 +86,59 @@ public final class Grok {
         this.patternBank = patternBank;
         this.namedCaptures = namedCaptures;
 
-        this.expression = toRegex(grokPattern);
+        for (Map.Entry<String, String> entry : patternBank.entrySet()) {
+            String name = entry.getKey();
+            String pattern = entry.getValue();
+            forbidCircularReferences(name, new ArrayList<>(), pattern);
+        }
+
+        String expression = toRegex(grokPattern);
         byte[] expressionBytes = expression.getBytes(StandardCharsets.UTF_8);
         this.compiledExpression = new Regex(expressionBytes, 0, expressionBytes.length, Option.DEFAULT, UTF8Encoding.INSTANCE);
     }
 
+    /**
+     * Checks whether patterns reference each other in a circular manner and if so fail with an exception
+     *
+     * In a pattern, anything between <code>%{</code> and <code>}</code> or <code>:</code> is considered
+     * a reference to another named pattern. This method will navigate to all these named patterns and
+     * check for a circular reference.
+     */
+    private void forbidCircularReferences(String patternName, List<String> path, String pattern) {
+        if (pattern.contains("%{" + patternName + "}") || pattern.contains("%{" + patternName + ":")) {
+            String message;
+            if (path.isEmpty()) {
+                message = "circular reference in pattern [" + patternName + "][" + pattern + "]";
+            } else {
+                message = "circular reference in pattern [" + path.remove(path.size() - 1) + "][" + pattern +
+                    "] back to pattern [" + patternName + "]";
+                // add rest of the path:
+                if (path.isEmpty() == false) {
+                    message += " via patterns [" + String.join("=>", path) + "]";
+                }
+            }
+            throw new IllegalArgumentException(message);
+        }
+
+        for (int i = pattern.indexOf("%{"); i != -1; i = pattern.indexOf("%{", i + 1)) {
+            int begin = i + 2;
+            int brackedIndex = pattern.indexOf('}', begin);
+            int columnIndex = pattern.indexOf(':', begin);
+            int end;
+            if (brackedIndex != -1 && columnIndex == -1) {
+                end = brackedIndex;
+            } else if (columnIndex != -1 && brackedIndex == -1) {
+                end = columnIndex;
+            } else if (brackedIndex != -1 && columnIndex != -1) {
+                end = Math.min(brackedIndex, columnIndex);
+            } else {
+                throw new IllegalArgumentException("pattern [" + pattern + "] has circular references to other pattern definitions");
+            }
+            String otherPatternName = pattern.substring(begin, end);
+            path.add(otherPatternName);
+            forbidCircularReferences(patternName, path, patternBank.get(otherPatternName));
+        }
+    }
+
     public String groupMatch(String name, Region region, String pattern) {
         try {
|
||||
|
@@ -125,10 +173,12 @@ public final class Grok {
         String patternName = groupMatch(PATTERN_GROUP, region, grokPattern);

         String pattern = patternBank.get(patternName);

         if (pattern == null) {
             throw new IllegalArgumentException("Unable to find pattern [" + patternName + "] in Grok's pattern dictionary");
         }
+        if (pattern.contains("%{" + patternName + "}") || pattern.contains("%{" + patternName + ":")) {
+            throw new IllegalArgumentException("circular reference in pattern back [" + patternName + "]");
+        }

         String grokPart;
         if (namedCaptures && subName != null) {
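
Editor's aside: to see the new constructor-time validation in action, here is a hedged usage sketch (the pattern names are invented, and it assumes org.elasticsearch.grok.Grok is on the classpath):

    import java.util.HashMap;
    import java.util.Map;
    import org.elasticsearch.grok.Grok;

    public class GrokCircularReferenceDemo {
        public static void main(String[] args) {
            Map<String, String> bank = new HashMap<>();
            bank.put("NUMBER", "%{PART}"); // NUMBER -> PART -> NUMBER is a cycle
            bank.put("PART", "%{NUMBER}");
            try {
                new Grok(bank, "%{NUMBER}"); // validation now happens eagerly, in the constructor
            } catch (IllegalArgumentException e) {
                // e.g. "circular reference in pattern [PART][%{NUMBER}] back to pattern [NUMBER]"
                System.out.println(e.getMessage());
            }
        }
    }
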

@@ -28,6 +28,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;

 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -205,6 +206,65 @@ public class GrokTests extends ESTestCase {
         assertEquals(expected, actual);
     }

+    public void testCircularReference() {
+        Exception e = expectThrows(IllegalArgumentException.class, () -> {
+            Map<String, String> bank = new HashMap<>();
+            bank.put("NAME", "!!!%{NAME}!!!");
+            String pattern = "%{NAME}";
+            new Grok(bank, pattern, false);
+        });
+        assertEquals("circular reference in pattern [NAME][!!!%{NAME}!!!]", e.getMessage());
+
+        e = expectThrows(IllegalArgumentException.class, () -> {
+            Map<String, String> bank = new HashMap<>();
+            bank.put("NAME", "!!!%{NAME:name}!!!");
+            String pattern = "%{NAME}";
+            new Grok(bank, pattern, false);
+        });
+        assertEquals("circular reference in pattern [NAME][!!!%{NAME:name}!!!]", e.getMessage());
+
+        e = expectThrows(IllegalArgumentException.class, () -> {
+            Map<String, String> bank = new HashMap<>();
+            bank.put("NAME", "!!!%{NAME:name:int}!!!");
+            String pattern = "%{NAME}";
+            new Grok(bank, pattern, false);
+        });
+        assertEquals("circular reference in pattern [NAME][!!!%{NAME:name:int}!!!]", e.getMessage());
+
+        e = expectThrows(IllegalArgumentException.class, () -> {
+            Map<String, String> bank = new TreeMap<>();
+            bank.put("NAME1", "!!!%{NAME2}!!!");
+            bank.put("NAME2", "!!!%{NAME1}!!!");
+            String pattern = "%{NAME1}";
+            new Grok(bank, pattern, false);
+        });
+        assertEquals("circular reference in pattern [NAME2][!!!%{NAME1}!!!] back to pattern [NAME1]", e.getMessage());
+
+        e = expectThrows(IllegalArgumentException.class, () -> {
+            Map<String, String> bank = new TreeMap<>();
+            bank.put("NAME1", "!!!%{NAME2}!!!");
+            bank.put("NAME2", "!!!%{NAME3}!!!");
+            bank.put("NAME3", "!!!%{NAME1}!!!");
+            String pattern = "%{NAME1}";
+            new Grok(bank, pattern, false);
+        });
+        assertEquals("circular reference in pattern [NAME3][!!!%{NAME1}!!!] back to pattern [NAME1] via patterns [NAME2]",
+            e.getMessage());
+
+        e = expectThrows(IllegalArgumentException.class, () -> {
+            Map<String, String> bank = new TreeMap<>();
+            bank.put("NAME1", "!!!%{NAME2}!!!");
+            bank.put("NAME2", "!!!%{NAME3}!!!");
+            bank.put("NAME3", "!!!%{NAME4}!!!");
+            bank.put("NAME4", "!!!%{NAME5}!!!");
+            bank.put("NAME5", "!!!%{NAME1}!!!");
+            String pattern = "%{NAME1}";
+            new Grok(bank, pattern, false);
+        });
+        assertEquals("circular reference in pattern [NAME5][!!!%{NAME1}!!!] back to pattern [NAME1] " +
+            "via patterns [NAME2=>NAME3=>NAME4]", e.getMessage());
+    }
+
     public void testBooleanCaptures() {
         Map<String, String> bank = new HashMap<>();

@@ -21,10 +21,8 @@ package org.elasticsearch.common.xcontent;

 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
-import org.elasticsearch.common.xcontent.json.JsonXContent;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -214,17 +212,6 @@ public abstract class AbstractObjectParser<Value, Context>
         declareField(consumer, (p, c) -> parseArray(p, () -> itemParser.parse(p, c)), field, type);
     }

-    public void declareRawObject(BiConsumer<Value, BytesReference> consumer, ParseField field) {
-        CheckedFunction<XContentParser, BytesReference, IOException> bytesParser = p -> {
-            try (XContentBuilder builder = JsonXContent.contentBuilder()) {
-                builder.prettyPrint();
-                builder.copyCurrentStructure(p);
-                return BytesReference.bytes(builder);
-            }
-        };
-        declareField(consumer, bytesParser, field, ValueType.OBJECT);
-    }
-
     private interface IOSupplier<T> {
         T get() throws IOException;
     }
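
Editor's aside: call sites that relied on declareRawObject can inline the same callback; a hedged sketch of such a migration (MyValue, the field name, and the declaring method are hypothetical stand-ins, not taken from this change):

    import java.io.IOException;
    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    // Hypothetical call site: capture a sub-object as raw bytes by replaying
    // the parser events into a fresh JSON builder, as the removed helper did.
    static void declareRawSource(ObjectParser<MyValue, Void> parser) {
        parser.declareField((value, bytes) -> value.rawSource = bytes, p -> {
            try (XContentBuilder builder = JsonXContent.contentBuilder()) {
                builder.copyCurrentStructure(p); // replay the current object
                return BytesReference.bytes(builder);
            }
        }, new ParseField("raw_source"), ObjectParser.ValueType.OBJECT);
    }
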

@@ -20,7 +20,6 @@
 package org.elasticsearch.common.xcontent;

 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

@@ -161,7 +160,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
         try {
             return parse(parser, context);
         } catch (IOException e) {
-            throw new ParsingException(parser.getTokenLocation(), "[" + objectParser.getName() + "] failed to parse object", e);
+            throw new XContentParseException(parser.getTokenLocation(), "[" + objectParser.getName() + "] failed to parse object", e);
         }
     }

@@ -335,7 +334,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
         try {
             consumer.accept(targetObject, v);
         } catch (Exception e) {
-            throw new ParsingException(location,
+            throw new XContentParseException(location,
                 "[" + objectParser.getName() + "] failed to parse field [" + parseField.getPreferredName() + "]", e);
         }
     });

@@ -413,7 +412,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
     private void queue(Consumer<Value> queueMe) {
         assert targetObject == null: "Don't queue after the targetObject has been built! Just apply the consumer directly.";
         if (queuedFields == null) {
-            @SuppressWarnings("unchecked")
+            @SuppressWarnings({"unchecked", "rawtypes"})
             Consumer<Value>[] queuedFields = new Consumer[numberOfFields];
             this.queuedFields = queuedFields;
         }

@@ -471,11 +470,12 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
                 queuedFieldsCount -= 1;
                 queuedFields[queuedFieldsCount].accept(targetObject);
             }
-        } catch (ParsingException e) {
-            throw new ParsingException(e.getLineNumber(), e.getColumnNumber(),
-                "failed to build [" + objectParser.getName() + "] after last required field arrived", e);
+        } catch (XContentParseException e) {
+            throw new XContentParseException(e.getLocation(),
+                "failed to build [" + objectParser.getName() + "] after last required field arrived", e);
         } catch (Exception e) {
-            throw new ParsingException(null, "Failed to build [" + objectParser.getName() + "] after last required field arrived", e);
+            throw new XContentParseException(null,
+                "Failed to build [" + objectParser.getName() + "] after last required field arrived", e);
         }
     }
 }

@@ -20,7 +20,6 @@ package org.elasticsearch.common.xcontent;

 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParsingException;

 import java.io.IOException;
 import java.lang.reflect.Array;
@@ -147,7 +146,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
         } else {
             token = parser.nextToken();
             if (token != XContentParser.Token.START_OBJECT) {
-                throw new ParsingException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
+                throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
             }
         }

@@ -159,7 +158,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
             fieldParser = getParser(currentFieldName);
         } else {
             if (currentFieldName == null) {
-                throw new ParsingException(parser.getTokenLocation(), "[" + name + "] no field found");
+                throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found");
             }
             if (fieldParser == null) {
                 assert ignoreUnknownFields : "this should only be possible if configured to ignore unknown fields";
@@ -182,7 +181,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
         try {
             return parse(parser, valueSupplier.get(), context);
         } catch (IOException e) {
-            throw new ParsingException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e);
+            throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e);
         }
     }

@@ -233,7 +232,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
         // This creates and parses the named object
         BiFunction<XContentParser, Context, T> objectParser = (XContentParser p, Context c) -> {
             if (p.currentToken() != XContentParser.Token.FIELD_NAME) {
-                throw new ParsingException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
+                throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
                     + "fields or an array where each entry is an object with a single field");
             }
             // This messy exception nesting has the nice side effect of telling the user which field failed to parse
@@ -242,10 +241,10 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
                 try {
                     return namedObjectParser.parse(p, c, name);
                 } catch (Exception e) {
-                    throw new ParsingException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + name + "]", e);
+                    throw new XContentParseException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + name + "]", e);
                 }
             } catch (IOException e) {
-                throw new ParsingException(p.getTokenLocation(), "[" + field + "] error while parsing", e);
+                throw new XContentParseException(p.getTokenLocation(), "[" + field + "] error while parsing", e);
             }
         };
         declareField((XContentParser p, Value v, Context c) -> {
@@ -261,14 +260,14 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
             orderedModeCallback.accept(v);
             while ((token = p.nextToken()) != XContentParser.Token.END_ARRAY) {
                 if (token != XContentParser.Token.START_OBJECT) {
-                    throw new ParsingException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
+                    throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
                         + "fields or an array where each entry is an object with a single field");
                 }
                 p.nextToken(); // Move to the first field in the object
                 fields.add(objectParser.apply(p, c));
                 p.nextToken(); // Move past the object, should be back in the array
                 if (p.currentToken() != XContentParser.Token.END_OBJECT) {
-                    throw new ParsingException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
+                    throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
                         + "fields or an array where each entry is an object with a single field");
                 }
             }
@@ -314,7 +313,8 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
         try {
             fieldParser.parser.parse(parser, value, context);
         } catch (Exception ex) {
-            throw new ParsingException(parser.getTokenLocation(), "[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
+            throw new XContentParseException(parser.getTokenLocation(),
+                "[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
         }
     }

@@ -331,7 +331,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
             case END_OBJECT:
             case END_ARRAY:
             case FIELD_NAME:
-                throw new ParsingException(parser.getTokenLocation(), "[" + name + "]" + token + " is unexpected");
+                throw new XContentParseException(parser.getTokenLocation(), "[" + name + "]" + token + " is unexpected");
             case VALUE_STRING:
             case VALUE_NUMBER:
             case VALUE_BOOLEAN:
@@ -364,11 +364,11 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val

     void assertSupports(String parserName, XContentParser parser, String currentFieldName) {
         if (parseField.match(currentFieldName, parser.getDeprecationHandler()) == false) {
-            throw new ParsingException(parser.getTokenLocation(),
+            throw new XContentParseException(parser.getTokenLocation(),
                 "[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
         }
         if (supportedTokens.contains(parser.currentToken()) == false) {
-            throw new ParsingException(parser.getTokenLocation(),
+            throw new XContentParseException(parser.getTokenLocation(),
                 "[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + parser.currentToken());
         }
     }
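
Editor's aside: the net effect of this migration for callers is that parse failures now surface as XContentParseException, whose location is optional. A hedged sketch of a call site adapting to the new type (SomeObject and its PARSER are stand-ins; only the exception's accessors come from this change):

    // Hypothetical caller updated for the migration.
    try {
        SomeObject parsed = SomeObject.PARSER.apply(parser, null);
    } catch (XContentParseException e) {
        int line = e.getLineNumber();     // -1 when no location was attached
        int column = e.getColumnNumber(); // -1 when no location was attached
        System.err.println("parse failed at " + line + ":" + column + " - " + e.getMessage());
    }
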

@@ -19,6 +19,8 @@

 package org.elasticsearch.common.xcontent;

+import org.elasticsearch.common.Nullable;
+
 import java.util.Optional;

 /**
@@ -37,6 +39,11 @@ public class XContentParseException extends IllegalArgumentException {
         this.location = Optional.ofNullable(location);
     }

+    public XContentParseException(XContentLocation location, String message, Exception cause) {
+        super(message, cause);
+        this.location = Optional.ofNullable(location);
+    }
+
     public int getLineNumber() {
         return location.map(l -> l.lineNumber).orElse(-1);
     }

@@ -45,8 +52,14 @@ public class XContentParseException extends IllegalArgumentException {
         return location.map(l -> l.columnNumber).orElse(-1);
     }

+    @Nullable
+    public XContentLocation getLocation() {
+        return location.orElse(null);
+    }
+
     @Override
     public String getMessage() {
         return location.map(l -> "[" + l.toString() + "] ").orElse("") + super.getMessage();
     }

 }
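
Editor's aside: together these additions let wrapping code keep the original location, which is what ConstructingObjectParser now does when it rebuilds after the last required field. A minimal sketch of that rethrow idiom (doParse() is an assumed parsing step; the constructor and getLocation() are from the diff):

    // Fragment: re-wrap a parse failure without losing where it happened; the
    // new three-argument constructor threads the original location through.
    try {
        doParse();
    } catch (XContentParseException e) {
        throw new XContentParseException(e.getLocation(), "failed to build the object", e);
    }
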

@@ -22,14 +22,12 @@ package org.elasticsearch.common.xcontent;
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ObjectParserTests.NamedObject;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.Matcher;

+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
@@ -38,6 +36,7 @@ import static java.util.Collections.unmodifiableList;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
@@ -79,7 +78,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
         XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
         expected.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder = shuffleXContent(builder);
-        BytesReference bytes = BytesReference.bytes(builder);
+        builder.flush();
+        byte[] bytes = ((ByteArrayOutputStream) builder.getOutputStream()).toByteArray();
         try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) {
             HasCtorArguments parsed = randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null);
             assertEquals(expected.animal, parsed.animal);
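
Editor's aside: this hunk swaps the BytesReference abstraction for plain byte access. The JSON builder in these tests writes into a ByteArrayOutputStream, so after a flush its bytes can be read back directly. A minimal sketch of that round trip in isolation (assuming, as the diff does, that jsonBuilder() is backed by a ByteArrayOutputStream):

    import java.io.ByteArrayOutputStream;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    // Round trip: serialize with a builder, then read the raw bytes back.
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject().field("animal", "cat").endObject();
    builder.flush(); // push buffered output into the underlying stream
    byte[] bytes = ((ByteArrayOutputStream) builder.getOutputStream()).toByteArray();
    // bytes now hold the serialized JSON, ready for createParser(...)
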
@@ -90,9 +90,6 @@ public class ConstructingObjectParserTests extends ESTestCase {
             assertEquals(expected.b, parsed.b);
             assertEquals(expected.c, parsed.c);
             assertEquals(expected.d, parsed.d);
-        } catch (Exception e) {
-            // It is convenient to decorate the error message with the json
-            throw new Exception("Error parsing: [" + Strings.toString(builder) + "]", e);
         }
     }

@@ -175,7 +172,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + " \"vegetable\": 1,\n"
             + " \"vegetable\": 2\n"
             + "}");
-        Throwable e = expectThrows(ParsingException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
+        Throwable e = expectThrows(XContentParseException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
         assertEquals("[has_required_arguments] failed to parse field [vegetable]", e.getMessage());
         e = e.getCause();
         assertThat(e, instanceOf(IllegalArgumentException.class));
@@ -189,8 +186,9 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + " \"vegetable\": 2,\n"
             + " \"a\": \"supercalifragilisticexpialidocious\"\n"
             + "}");
-        ParsingException e = expectThrows(ParsingException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
-        assertEquals("[has_required_arguments] failed to parse field [a]", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class,
+            () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
+        assertThat(e.getMessage(), containsString("[has_required_arguments] failed to parse field [a]"));
         assertEquals(4, e.getLineNumber());
         assertEquals("[a] must be less than 10 characters in length but was [supercalifragilisticexpialidocious]",
             e.getCause().getMessage());
@@ -203,14 +201,15 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + " \"animal\": \"cat\"\n,"
             + " \"vegetable\": 2\n"
             + "}");
-        ParsingException e = expectThrows(ParsingException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
-        assertEquals("[has_required_arguments] failed to parse field [vegetable]", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class,
+            () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
+        assertThat(e.getMessage(), containsString("[has_required_arguments] failed to parse field [vegetable]"));
         assertEquals(4, e.getLineNumber());
-        e = (ParsingException) e.getCause();
-        assertEquals("failed to build [has_required_arguments] after last required field arrived", e.getMessage());
+        e = (XContentParseException) e.getCause();
+        assertThat(e.getMessage(), containsString("failed to build [has_required_arguments] after last required field arrived"));
         assertEquals(2, e.getLineNumber());
-        e = (ParsingException) e.getCause();
-        assertEquals("[has_required_arguments] failed to parse field [a]", e.getMessage());
+        e = (XContentParseException) e.getCause();
+        assertThat(e.getMessage(), containsString("[has_required_arguments] failed to parse field [a]"));
         assertEquals(2, e.getLineNumber());
         assertEquals("[a] must be less than 10 characters in length but was [supercalifragilisticexpialidocious]",
             e.getCause().getMessage());
@@ -465,11 +464,11 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + "],\"named_in_constructor\": [\n"
             + " {\"c\": {}}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
-        assertEquals(
-            "[named] can be a single object with any number of fields or an array where each entry is an object with a single field",
-            e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named] can be a single object with any number of fields " +
+                "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOException {
@@ -479,11 +478,11 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + "],\"named_in_constructor\": [\n"
             + " {\"c\": {}, \"d\": {}}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage());
-        assertEquals(
-            "[named_in_constructor] can be a single object with any number of fields or an array where each entry is an object with a "
-            + "single field", e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named_in_constructor] can be a single object with any number of fields "
+                + "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectNoFieldsInArray() throws IOException {
@@ -493,11 +492,11 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + "],\"named_in_constructor\": [\n"
             + " {\"a\": {}}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
-        assertEquals(
-            "[named] can be a single object with any number of fields or an array where each entry is an object with a single field",
-            e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named] can be a single object with any number of fields " +
+                "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOException {
@@ -507,11 +506,11 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + "],\"named_in_constructor\": [\n"
             + " {}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage());
-        assertEquals(
-            "[named_in_constructor] can be a single object with any number of fields or an array where each entry is an object with a "
-            + "single field", e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named_in_constructor] can be a single object with any number of fields "
+                + "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectJunkInArray() throws IOException {
@@ -521,11 +520,11 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + "],\"named_in_constructor\": [\n"
             + " {\"a\": {}}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
-        assertEquals(
-            "[named] can be a single object with any number of fields or an array where each entry is an object with a single field",
-            e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named] can be a single object with any number of fields " +
+                "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectJunkInArrayConstructorArg() throws IOException {
@@ -535,11 +534,11 @@ public class ConstructingObjectParserTests extends ESTestCase {
             + "],\"named_in_constructor\": [\n"
             + " \"junk\""
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage());
-        assertEquals(
-            "[named_in_constructor] can be a single object with any number of fields or an array where each entry is an object with a "
-            + "single field", e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named_in_constructor] can be a single object with any number of fields "
+                + "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectInOrderNotSupported() throws IOException {
@@ -558,8 +557,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
         objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named"));

         // Now firing the json through it fails
-        ParsingException e = expectThrows(ParsingException.class, () -> objectParser.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
         assertEquals("[named] doesn't support arrays. Use a single object with multiple fields.", e.getCause().getMessage());
     }

@@ -579,9 +578,10 @@ public class ConstructingObjectParserTests extends ESTestCase {
         objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named"));

         // Now firing the json through it fails
-        ParsingException e = expectThrows(ParsingException.class, () -> objectParser.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage());
-        assertEquals("[named_in_constructor] doesn't support arrays. Use a single object with multiple fields.", e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named_in_constructor] doesn't support arrays. Use a single object with multiple fields."));
     }

     static class NamedObjectHolder {

@@ -20,14 +20,13 @@ package org.elasticsearch.common.xcontent;

 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.ESTestCase;

+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.net.URI;
@@ -199,8 +198,8 @@ public class ObjectParserTests extends ESTestCase {
         try {
             objectParser.parse(parser, s, null);
             fail("numeric value expected");
-        } catch (ParsingException ex) {
-            assertEquals(ex.getMessage(), "[the_parser] failed to parse field [test]");
+        } catch (XContentParseException ex) {
+            assertThat(ex.getMessage(), containsString("[the_parser] failed to parse field [test]"));
             assertTrue(ex.getCause() instanceof NumberFormatException);
         }

@@ -235,7 +234,7 @@ public class ObjectParserTests extends ESTestCase {
         TestStruct s = new TestStruct();

         objectParser.declareField((i, c, x) -> c.test = i.text(), new ParseField("numeric_value"), ObjectParser.ValueType.FLOAT);
-        Exception e = expectThrows(ParsingException.class, () -> objectParser.parse(parser, s, null));
+        Exception e = expectThrows(XContentParseException.class, () -> objectParser.parse(parser, s, null));
         assertThat(e.getMessage(), containsString("[foo] numeric_value doesn't support values of type: VALUE_BOOLEAN"));
     }

@@ -478,11 +477,11 @@ public class ObjectParserTests extends ESTestCase {
         "{\"named\": [\n"
             + " {\"a\": {}, \"b\": {}}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
-        assertEquals(
-            "[named] can be a single object with any number of fields or an array where each entry is an object with a single field",
-            e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named] can be a single object with any number of fields " +
+                "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectNoFieldsInArray() throws IOException {
@@ -490,11 +489,11 @@ public class ObjectParserTests extends ESTestCase {
         "{\"named\": [\n"
             + " {}"
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
-        assertEquals(
-            "[named] can be a single object with any number of fields or an array where each entry is an object with a single field",
-            e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named] can be a single object with any number of fields " +
+                "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectJunkInArray() throws IOException {
@@ -502,11 +501,11 @@ public class ObjectParserTests extends ESTestCase {
         "{\"named\": [\n"
             + " \"junk\""
             + "]}");
-        ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
-        assertEquals(
-            "[named] can be a single object with any number of fields or an array where each entry is an object with a single field",
-            e.getCause().getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
+        assertThat(e.getCause().getMessage(),
+            containsString("[named] can be a single object with any number of fields " +
+                "or an array where each entry is an object with a single field"));
     }

     public void testParseNamedObjectInOrderNotSupported() throws IOException {
@@ -521,8 +520,8 @@ public class ObjectParserTests extends ESTestCase {
         objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named"));

         // Now firing the json through it fails
-        ParsingException e = expectThrows(ParsingException.class, () -> objectParser.apply(parser, null));
-        assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null));
+        assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
         assertEquals("[named] doesn't support arrays. Use a single object with multiple fields.", e.getCause().getMessage());
     }

@@ -535,7 +534,9 @@ public class ObjectParserTests extends ESTestCase {
         }
         b.endObject();
         b = shuffleXContent(b);
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b));
+        b.flush();
+        byte[] bytes = ((ByteArrayOutputStream) b.getOutputStream()).toByteArray();
+        XContentParser parser = createParser(JsonXContent.jsonXContent, bytes);

         class TestStruct {
             public String test;

@@ -559,7 +560,9 @@ public class ObjectParserTests extends ESTestCase {
         }
         b.endObject();
         b = shuffleXContent(b);
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b));
+        b.flush();
+        byte[] bytes = ((ByteArrayOutputStream) b.getOutputStream()).toByteArray();
+        XContentParser parser = createParser(JsonXContent.jsonXContent, bytes);

         class TestStruct {
             public String test;
@@ -587,7 +590,9 @@ public class ObjectParserTests extends ESTestCase {
         }
         b.endObject();
         b = shuffleXContent(b);
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b));
+        b.flush();
+        byte[] bytes = ((ByteArrayOutputStream) b.getOutputStream()).toByteArray();
+        XContentParser parser = createParser(JsonXContent.jsonXContent, bytes);
         class TestStruct {
             public String test;
         }
@@ -646,8 +651,8 @@ public class ObjectParserTests extends ESTestCase {
         // Make sure that we didn't break the null handling in arrays that shouldn't support nulls
         XContentParser parser2 = createParser(JsonXContent.jsonXContent, "{\"int_array\": [1, null, 3]}");
         TestStruct s2 = new TestStruct();
-        ParsingException ex = expectThrows(ParsingException.class, () -> objectParser.parse(parser2, s2, null));
-        assertThat(ex.getMessage(), startsWith("[foo] failed to parse field [int_array]"));
+        XContentParseException ex = expectThrows(XContentParseException.class, () -> objectParser.parse(parser2, s2, null));
+        assertThat(ex.getMessage(), containsString("[foo] failed to parse field [int_array]"));
     }

     static class NamedObjectHolder {

@@ -32,6 +32,10 @@ public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTo

     LegacyDelimitedPayloadTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(indexSettings, env, name, settings);
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
+            throw new IllegalArgumentException(
+                "[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
+        }
         if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) {
             DEPRECATION_LOGGER.deprecated("Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
         }
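
Editor's aside: the pattern here is a two-tier compatibility gate; indices created on 7.0+ fail hard, while 6.2+ indices keep working with a deprecation warning. Reduced to its shape (a sketch; the helper method is illustrative, only the Version constants and messages come from the diff):

    static void checkDelimitedPayloadFilterAllowed(Version indexCreated) {
        if (indexCreated.onOrAfter(Version.V_7_0_0_alpha1)) {
            // new indices: fail fast at filter-factory construction time
            throw new IllegalArgumentException(
                "[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
        }
        if (indexCreated.onOrAfter(Version.V_6_2_0)) {
            // existing 6.2+ indices: keep working, but warn
            DEPRECATION_LOGGER.deprecated("Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
        }
    }
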

@@ -1026,15 +1026,13 @@
     - match: { tokens.10.token: ちた }

 ---
-"delimited_payload_filter":
+"delimited_payload_filter_error":
     - skip:
-        version: " - 6.1.99"
-        reason: delimited_payload_filter deprecated in 6.2, replaced by delimited_payload
-        features: "warnings"
+        version: " - 6.99.99"
+        reason: using delimited_payload_filter throws error from 7.0 on

     - do:
-        warnings:
-            - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"
+        catch: /\[delimited_payload_filter\] is not supported for new indices, use \[delimited_payload\] instead/
         indices.create:
             index: test
             body:

@@ -1045,29 +1043,15 @@
                             type: delimited_payload_filter
                             delimiter: ^
                             encoding: identity
-    - do:
-        warnings:
-            - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"
-        indices.analyze:
-            index: test
-            body:
-                text: foo^bar
-                tokenizer: keyword
-                filter: [my_delimited_payload_filter]
-    - length: { tokens: 1 }
-    - match: { tokens.0.token: foo }

     # Test pre-configured token filter too:
     - do:
-        warnings:
-            - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"
+        catch: /\[delimited_payload_filter\] is not supported for new indices, use \[delimited_payload\] instead/
         indices.analyze:
             body:
                 text: foo|5
                 tokenizer: keyword
                 filter: [delimited_payload_filter]
-    - length: { tokens: 1 }
-    - match: { tokens.0.token: foo }

 ---
 "delimited_payload":

@@ -20,6 +20,7 @@
 package org.elasticsearch.ingest.common;

 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.ScriptService;

@@ -30,6 +31,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;

+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Matchers.any;

@@ -80,9 +82,9 @@ public class ScriptProcessorFactoryTests extends ESTestCase {
         configMap.put("source", "bar");
         configMap.put("lang", "mockscript");

-        ElasticsearchException exception = expectThrows(ElasticsearchException.class,
+        XContentParseException exception = expectThrows(XContentParseException.class,
             () -> factory.create(null, randomAlphaOfLength(10), configMap));
-        assertThat(exception.getMessage(), is("[script] failed to parse field [source]"));
+        assertThat(exception.getMessage(), containsString("[script] failed to parse field [source]"));
     }

     public void testFactoryValidationAtLeastOneScriptingType() throws Exception {

@@ -1 +0,0 @@
-38ff5a1f4bcbfb6e1ffacd3263175c2a1ba23e9f

@@ -0,0 +1 @@
+cb82d9db3043bbd25b4d0eb5022ed1e529c936d3

@@ -19,7 +19,7 @@

 package org.elasticsearch.script.mustache;

-import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.script.ScriptType;

@@ -122,7 +122,7 @@ public class SearchTemplateRequestTests extends ESTestCase {

     public void testParseWrongTemplate() {
         // Unclosed template id
-        expectThrows(ParsingException.class, () -> RestSearchTemplateAction.parse(newParser("{'id' : 'another_temp }")));
+        expectThrows(XContentParseException.class, () -> RestSearchTemplateAction.parse(newParser("{'id' : 'another_temp }")));
     }

     /**

@@ -38,6 +38,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SynonymQuery;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.spans.SpanFirstQuery;
 import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanNotQuery;
@@ -235,20 +236,18 @@ final class QueryAnalyzer {
             return new Result(true, Collections.emptySet(), 0);
         }

-        if (version.onOrAfter(Version.V_6_1_0)) {
-            Set<QueryExtraction> extractions = new HashSet<>();
-            for (Term[] termArr : terms) {
-                extractions.addAll(Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet()));
+        // This query has the same problem as boolean queries when it comes to duplicated terms
+        // So to keep things simple, we just rewrite to a boolean query
+        BooleanQuery.Builder builder = new BooleanQuery.Builder();
+        for (Term[] termArr : terms) {
+            BooleanQuery.Builder subBuilder = new BooleanQuery.Builder();
+            for (Term term : termArr) {
+                subBuilder.add(new TermQuery(term), Occur.SHOULD);
             }
-            return new Result(false, extractions, terms.length);
-        } else {
-            Set<QueryExtraction> bestTermArr = null;
-            for (Term[] termArr : terms) {
-                Set<QueryExtraction> queryExtractions = Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet());
-                bestTermArr = selectBestExtraction(bestTermArr, queryExtractions);
-            }
-            return new Result(false, bestTermArr, 1);
+            builder.add(subBuilder.build(), Occur.FILTER);
         }
+        // Make sure to unverify the result
+        return booleanQuery().apply(builder.build(), version).unverify();
     };
 }
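
Editor's aside: the motivation is that a multi-phrase query can mention the same term at several positions, and counting each position toward minimumShouldMatch over-counts documents containing the term once. Rewriting to a boolean query funnels everything through the boolean analysis, which already deduplicates. A hedged illustration of the rewrite shape in plain Lucene (the terms are invented):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;

    // Each phrase position becomes a SHOULD-disjunction wrapped as a FILTER
    // clause, so terms duplicated across positions are handled by the
    // existing boolean-query analysis.
    Term[][] positions = {
        { new Term("field", "quick") },
        { new Term("field", "quick"), new Term("field", "fast") } // duplicate "quick"
    };
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    for (Term[] position : positions) {
        BooleanQuery.Builder sub = new BooleanQuery.Builder();
        for (Term term : position) {
            sub.add(new TermQuery(term), Occur.SHOULD);
        }
        builder.add(sub.build(), Occur.FILTER);
    }
    BooleanQuery rewritten = builder.build();
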

@@ -263,41 +262,35 @@ final class QueryAnalyzer {
         return (query, version) -> {
             SpanNearQuery spanNearQuery = (SpanNearQuery) query;
             if (version.onOrAfter(Version.V_6_1_0)) {
-                Set<Result> results = Arrays.stream(spanNearQuery.getClauses()).map(clause -> analyze(clause, version)).collect(toSet());
-                int msm = 0;
-                Set<QueryExtraction> extractions = new HashSet<>();
-                Set<String> seenRangeFields = new HashSet<>();
-                for (Result result : results) {
-                    QueryExtraction[] t = result.extractions.toArray(new QueryExtraction[1]);
-                    if (result.extractions.size() == 1 && t[0].range != null) {
-                        if (seenRangeFields.add(t[0].range.fieldName)) {
-                            msm += 1;
-                        }
-                    } else {
-                        msm += result.minimumShouldMatch;
-                    }
-                    extractions.addAll(result.extractions);
+                // This has the same problem as boolean queries when it comes to duplicated clauses
+                // so we rewrite to a boolean query to keep things simple.
+                BooleanQuery.Builder builder = new BooleanQuery.Builder();
+                for (SpanQuery clause : spanNearQuery.getClauses()) {
+                    builder.add(clause, Occur.FILTER);
                 }
-                return new Result(false, extractions, msm);
+                // make sure to unverify the result
+                return booleanQuery().apply(builder.build(), version).unverify();
             } else {
-                Set<QueryExtraction> bestClauses = null;
+                Result bestClause = null;
                 for (SpanQuery clause : spanNearQuery.getClauses()) {
                     Result temp = analyze(clause, version);
-                    bestClauses = selectBestExtraction(temp.extractions, bestClauses);
+                    bestClause = selectBestResult(temp, bestClause);
                 }
-                return new Result(false, bestClauses, 1);
+                return bestClause;
             }
         };
     }

     private static BiFunction<Query, Version, Result> spanOrQuery() {
         return (query, version) -> {
-            Set<QueryExtraction> terms = new HashSet<>();
             SpanOrQuery spanOrQuery = (SpanOrQuery) query;
+            // handle it like a boolean query to avoid duplicating logic,
+            // e.g. about duplicated terms
+            BooleanQuery.Builder builder = new BooleanQuery.Builder();
             for (SpanQuery clause : spanOrQuery.getClauses()) {
-                terms.addAll(analyze(clause, version).extractions);
+                builder.add(clause, Occur.SHOULD);
             }
-            return new Result(false, terms, Math.min(1, terms.size()));
+            return booleanQuery().apply(builder.build(), version);
         };
     }

@@ -423,9 +416,13 @@ final class QueryAnalyzer {
                 }
             }
         } else {
-            Set<QueryExtraction> bestClause = null;
+            Result bestClause = null;
             UnsupportedQueryException uqe = null;
+            boolean hasProhibitedClauses = false;
             for (BooleanClause clause : clauses) {
+                if (clause.isProhibited()) {
+                    hasProhibitedClauses = true;
+                }
                 if (clause.isRequired() == false) {
                     // skip must_not clauses, we don't need to remember the things that do *not* match...
                     // skip should clauses, this bq has must clauses, so we don't need to remember should clauses,
@@ -440,17 +437,20 @@ final class QueryAnalyzer {
                     uqe = e;
                     continue;
                 }
-                bestClause = selectBestExtraction(temp.extractions, bestClause);
+                bestClause = selectBestResult(temp, bestClause);
             }
             if (bestClause != null) {
-                return new Result(false, bestClause, 1);
+                if (hasProhibitedClauses || minimumShouldMatch > 0) {
+                    bestClause = bestClause.unverify();
+                }
+                return bestClause;
             } else {
                 if (uqe != null) {
                     // we're unable to select the best clause and an exception occurred, so we bail
                     throw uqe;
                 } else {
                     // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries,
-                    return new Result(true, Collections.emptySet(), 1);
+                    return new Result(true, Collections.emptySet(), 0);
                 }
             }
         }
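
Editor's aside on the changed minimumShouldMatch: with the stricter Result invariant introduced further down, a result with an empty extraction set may not claim a minimumShouldMatch of 1, so the match-all case is built with 0. A tiny fragment showing the invariant (hypothetical snippet, not part of the diff):

    // Fragment: with the stricter invariant, a non-zero minimumShouldMatch
    // over an empty extraction set is rejected.
    Result matchAll = new Result(true, Collections.emptySet(), 0); // fine
    // new Result(true, Collections.emptySet(), 1) would throw IllegalArgumentException
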
@@ -616,22 +616,40 @@ final class QueryAnalyzer {
         }
     }

-    static Set<QueryExtraction> selectBestExtraction(Set<QueryExtraction> extractions1, Set<QueryExtraction> extractions2) {
-        assert extractions1 != null || extractions2 != null;
-        if (extractions1 == null) {
-            return extractions2;
-        } else if (extractions2 == null) {
-            return extractions1;
+    /**
+     * Return an extraction for the conjunction of {@code result1} and {@code result2}
+     * by picking up clauses that look most restrictive and making it unverified if
+     * the other clause is not null and doesn't match all documents. This is used by
+     * 6.0.0 indices which didn't use the terms_set query.
+     */
+    static Result selectBestResult(Result result1, Result result2) {
+        assert result1 != null || result2 != null;
+        if (result1 == null) {
+            return result2;
+        } else if (result2 == null) {
+            return result1;
+        } else if (result1.matchAllDocs) { // conjunction with match_all
+            Result result = result2;
+            if (result1.verified == false) {
+                result = result.unverify();
+            }
+            return result;
+        } else if (result2.matchAllDocs) { // conjunction with match_all
+            Result result = result1;
+            if (result2.verified == false) {
+                result = result.unverify();
+            }
+            return result;
         } else {
             // Prefer term based extractions over range based extractions:
             boolean onlyRangeBasedExtractions = true;
-            for (QueryExtraction clause : extractions1) {
+            for (QueryExtraction clause : result1.extractions) {
                 if (clause.term != null) {
                     onlyRangeBasedExtractions = false;
                     break;
                 }
             }
-            for (QueryExtraction clause : extractions2) {
+            for (QueryExtraction clause : result2.extractions) {
                 if (clause.term != null) {
                     onlyRangeBasedExtractions = false;
                     break;
@@ -639,28 +657,28 @@ final class QueryAnalyzer {
             }

             if (onlyRangeBasedExtractions) {
-                BytesRef extraction1SmallestRange = smallestRange(extractions1);
-                BytesRef extraction2SmallestRange = smallestRange(extractions2);
+                BytesRef extraction1SmallestRange = smallestRange(result1.extractions);
+                BytesRef extraction2SmallestRange = smallestRange(result2.extractions);
                 if (extraction1SmallestRange == null) {
-                    return extractions2;
+                    return result2.unverify();
                 } else if (extraction2SmallestRange == null) {
-                    return extractions1;
+                    return result1.unverify();
                 }

                 // Keep the clause with smallest range, this is likely to be the rarest.
                 if (extraction1SmallestRange.compareTo(extraction2SmallestRange) <= 0) {
-                    return extractions1;
+                    return result1.unverify();
                 } else {
-                    return extractions2;
+                    return result2.unverify();
                 }
             } else {
-                int extraction1ShortestTerm = minTermLength(extractions1);
-                int extraction2ShortestTerm = minTermLength(extractions2);
+                int extraction1ShortestTerm = minTermLength(result1.extractions);
+                int extraction2ShortestTerm = minTermLength(result2.extractions);
                 // keep the clause with longest terms, this is likely to be the rarest.
                 if (extraction1ShortestTerm >= extraction2ShortestTerm) {
-                    return extractions1;
+                    return result1.unverify();
                 } else {
-                    return extractions2;
+                    return result2.unverify();
                 }
             }
         }
@@ -695,6 +713,13 @@ final class QueryAnalyzer {
         return min;
     }

+    /**
+     * Query extraction result. A result is a candidate for a given document either if:
+     * - `matchAllDocs` is true
+     * - `extractions` and the document have `minimumShouldMatch` terms in common
+     * Furthermore, the match doesn't need to be verified if `verified` is true; checking
+     * `matchAllDocs` and `extractions` is enough.
+     */
     static class Result {

         final Set<QueryExtraction> extractions;
@@ -702,24 +727,32 @@ final class QueryAnalyzer {
         final int minimumShouldMatch;
         final boolean matchAllDocs;

-        Result(boolean verified, Set<QueryExtraction> extractions, int minimumShouldMatch) {
+        private Result(boolean matchAllDocs, boolean verified, Set<QueryExtraction> extractions, int minimumShouldMatch) {
+            if (minimumShouldMatch > extractions.size()) {
+                throw new IllegalArgumentException("minimumShouldMatch can't be greater than the number of extractions: "
+                    + minimumShouldMatch + " > " + extractions.size());
+            }
+            this.matchAllDocs = matchAllDocs;
             this.extractions = extractions;
             this.verified = verified;
             this.minimumShouldMatch = minimumShouldMatch;
-            this.matchAllDocs = false;
+        }
+
+        Result(boolean verified, Set<QueryExtraction> extractions, int minimumShouldMatch) {
+            this(false, verified, extractions, minimumShouldMatch);
         }

         Result(boolean matchAllDocs, boolean verified) {
-            this.extractions = Collections.emptySet();
-            this.verified = verified;
-            this.minimumShouldMatch = 0;
-            this.matchAllDocs = matchAllDocs;
+            this(matchAllDocs, verified, Collections.emptySet(), 0);
         }

+        Result unverify() {
+            if (verified) {
+                return new Result(matchAllDocs, false, extractions, minimumShouldMatch);
+            } else {
+                return this;
+            }
+        }
     }

     static class QueryExtraction {

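
Editor's aside: a quick illustration of the new Result contract; a verified result can be downgraded with unverify(), and minimumShouldMatch can never exceed the number of extractions (terms(...) is the test helper seen below; this fragment is illustrative):

    // Fragment: the Result contract after this change.
    Set<QueryExtraction> extractions = terms(new int[]{1, 2}); // two extractions
    Result verified = new Result(true, extractions, 2);  // ok: msm == extractions.size()
    Result relaxed = verified.unverify();                 // same extractions, verified == false
    assert relaxed.verified == false && relaxed.minimumShouldMatch == 2;
    // new Result(true, extractions, 3) would now throw IllegalArgumentException
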
@@ -74,7 +74,7 @@ import java.util.stream.Collectors;

 import static org.elasticsearch.percolator.QueryAnalyzer.UnsupportedQueryException;
 import static org.elasticsearch.percolator.QueryAnalyzer.analyze;
-import static org.elasticsearch.percolator.QueryAnalyzer.selectBestExtraction;
+import static org.elasticsearch.percolator.QueryAnalyzer.selectBestResult;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.sameInstance;
@@ -163,6 +163,20 @@ public class QueryAnalyzerTests extends ESTestCase {
         assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_very_long_term"));
     }

+    public void testExtractQueryMetadata_multiPhraseQuery_dups() {
+        MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder()
+            .add(new Term("_field", "_term1"))
+            .add(new Term[] {new Term("_field", "_term1"), new Term("_field", "_term2")})
+            .build();
+
+        Result result = analyze(multiPhraseQuery, Version.CURRENT);
+        assertFalse(result.matchAllDocs);
+        assertFalse(result.verified);
+        assertTermsEqual(result.extractions, new Term("_field", "_term1"), new Term("_field", "_term2"));
+        assertEquals(1, result.minimumShouldMatch); // because of the dup term
+    }
+
+
     public void testExtractQueryMetadata_booleanQuery() {
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         TermQuery termQuery1 = new TermQuery(new Term("_field", "term0"));
@@ -370,18 +384,28 @@ public class QueryAnalyzerTests extends ESTestCase {
         builder.add(termQuery1, BooleanClause.Occur.MUST_NOT);
         PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2");
         builder.add(phraseQuery, BooleanClause.Occur.SHOULD);

         BooleanQuery booleanQuery = builder.build();

         Result result = analyze(booleanQuery, Version.CURRENT);
         assertThat(result.verified, is(false));
         assertThat(result.minimumShouldMatch, equalTo(2));
-        List<QueryExtraction> terms = new ArrayList<>(result.extractions);
-        assertThat(terms.size(), equalTo(2));
-        terms.sort(Comparator.comparing(qt -> qt.term));
-        assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field()));
-        assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes()));
-        assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[1].field()));
-        assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[1].bytes()));
+        assertTermsEqual(result.extractions, phraseQuery.getTerms());
+
+        builder = new BooleanQuery.Builder();
+        builder.add(termQuery1, BooleanClause.Occur.MUST_NOT);
+        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
+        booleanQuery = builder.build();
+        result = analyze(booleanQuery, Version.CURRENT);
+        assertThat(result.matchAllDocs, is(true));
+        assertThat(result.verified, is(false));
+        assertThat(result.minimumShouldMatch, equalTo(0));
+        assertTermsEqual(result.extractions);
+
+        result = analyze(booleanQuery, Version.V_6_0_0);
+        assertThat(result.matchAllDocs, is(true));
+        assertThat(result.verified, is(false));
+        assertThat(result.minimumShouldMatch, equalTo(0));
+        assertTermsEqual(result.extractions);
     }

     public void testExactMatch_booleanQuery() {
@ -651,7 +675,7 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term"));
|
||||
SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2);
|
||||
Result result = analyze(spanOrQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm());
|
||||
}
|
||||
|
@ -943,64 +967,111 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
assertThat(result.extractions.isEmpty(), is(true));
|
||||
}
|
||||
|
||||
public void testSelectBestExtraction() {
|
||||
public void testSelectBestResult() {
|
||||
Set<QueryExtraction> queryTerms1 = terms(new int[0], "12", "1234", "12345");
|
||||
Result result1 = new Result(true, queryTerms1, 1);
|
||||
Set<QueryAnalyzer.QueryExtraction> queryTerms2 = terms(new int[0], "123", "1234", "12345");
|
||||
Set<QueryExtraction> result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
Result result2 = new Result(true, queryTerms2, 1);
|
||||
Result result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms2, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{1, 2, 3});
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{2, 3, 4});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms1, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{4, 5, 6});
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{1, 2, 3});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms2, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456");
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{2, 3, 4}, "123", "456");
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms1, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{10});
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms2, result.extractions);
|
||||
|
||||
queryTerms1 = terms(new int[]{10}, "123");
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms1, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{10}, "1", "123");
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{1}, "1", "2");
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame(queryTerms1, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456");
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{2, 3, 4}, "1", "456");
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", queryTerms1, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term",
|
||||
queryTerms1, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{});
|
||||
result1 = new Result(false, queryTerms1, 0);
|
||||
queryTerms2 = terms(new int[]{});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("In case query extractions are empty", queryTerms2, result);
|
||||
result2 = new Result(false, queryTerms2, 0);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame("In case query extractions are empty", queryTerms2, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{1});
|
||||
result1 = new Result(true, queryTerms1, 1);
|
||||
queryTerms2 = terms(new int[]{});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("In case query a single extraction is empty", queryTerms1, result);
|
||||
result2 = new Result(false, queryTerms2, 0);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame("In case query a single extraction is empty", queryTerms1, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{});
|
||||
result1 = new Result(false, queryTerms1, 0);
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("In case query a single extraction is empty", queryTerms2, result);
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame("In case query a single extraction is empty", queryTerms2, result.extractions);
|
||||
assertFalse(result.verified);
|
||||
|
||||
result1 = new Result(true, true);
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result2 = new Result(true, queryTerms2, 1);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame("Conjunction with a match_all", result2, result);
|
||||
assertTrue(result.verified);
|
||||
|
||||
queryTerms1 = terms(new int[]{1});
|
||||
result1 = new Result(true, queryTerms2, 1);
|
||||
result2 = new Result(true, true);
|
||||
result = selectBestResult(result1, result2);
|
||||
assertSame("Conjunction with a match_all", result1, result);
|
||||
assertTrue(result.verified);
|
||||
}
|
||||
|
||||
public void testSelectBestExtraction_random() {
|
||||
public void testselectBestResult_random() {
|
||||
Set<QueryExtraction> terms1 = new HashSet<>();
|
||||
int shortestTerms1Length = Integer.MAX_VALUE;
|
||||
int sumTermLength = randomIntBetween(1, 128);
|
||||
|
@ -1021,9 +1092,11 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
sumTermLength -= length;
|
||||
}
|
||||
|
||||
Set<QueryAnalyzer.QueryExtraction> result = selectBestExtraction(terms1, terms2);
|
||||
Result result1 = new Result(true, terms1, 1);
|
||||
Result result2 = new Result(true, terms2, 1);
|
||||
Result result = selectBestResult(result1, result2);
|
||||
Set<QueryExtraction> expected = shortestTerms1Length >= shortestTerms2Length ? terms1 : terms2;
|
||||
assertThat(result, sameInstance(expected));
|
||||
assertThat(result.extractions, sameInstance(expected));
|
||||
}
|
||||
|
||||
public void testPointRangeQuery() {
|
||||
|
|
|
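Condensing the assertions above (an illustration, not part of the commit; terms(...) is the tests' helper):

    Result best = selectBestResult(
        new Result(true, terms(new int[0], "12", "1234"), 1),
        new Result(true, terms(new int[0], "123", "1234"), 1));
    // best.extractions is the second set: comparing each side's shortest term,
    // "123" beats "12", so the second result is judged more selective.
    // best.verified is false: keeping only one side of a conjunction is an
    // approximation, except when the dropped side is a verified match_all.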
@ -19,6 +19,7 @@

package org.elasticsearch.index.rankeval;

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;

@ -109,6 +110,7 @@ public class RestRankEvalAction extends BaseRestHandler {

private static void parseRankEvalRequest(RankEvalRequest rankEvalRequest, RestRequest request, XContentParser parser) {
    rankEvalRequest.indices(Strings.splitStringByCommaToArray(request.param("index")));
    rankEvalRequest.indicesOptions(IndicesOptions.fromRequest(request, rankEvalRequest.indicesOptions()));
    RankEvalSpec spec = RankEvalSpec.parse(parser);
    rankEvalRequest.setRankEvalSpec(spec);
}
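For readers unfamiliar with the helper used above: IndicesOptions.fromRequest reads the standard expand_wildcards, ignore_unavailable and allow_no_indices parameters from the REST request, falling back to the supplied defaults. Roughly equivalent explicit parsing, as a sketch only (not the actual implementation):

    IndicesOptions defaults = rankEvalRequest.indicesOptions();
    IndicesOptions resolved = IndicesOptions.fromParameters(
        request.param("expand_wildcards"),
        request.param("ignore_unavailable"),
        request.param("allow_no_indices"),
        defaults);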
@ -126,7 +126,9 @@ public class TransportRankEvalAction extends HandledTransportAction<RankEvalRequ
    } else {
        ratedSearchSource.fetchSource(summaryFields.toArray(new String[summaryFields.size()]), new String[0]);
    }
    msearchRequest.add(new SearchRequest(request.indices(), ratedSearchSource));
    SearchRequest searchRequest = new SearchRequest(request.indices(), ratedSearchSource);
    searchRequest.indicesOptions(request.indicesOptions());
    msearchRequest.add(searchRequest);
}
assert ratedRequestsInSearch.size() == msearchRequest.requests().size();
client.multiSearch(msearchRequest, new RankEvalActionListener(listener, metric,
@ -20,8 +20,13 @@

package org.elasticsearch.index.rankeval;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.rankeval.PrecisionAtK.Breakdown;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESIntegTestCase;

@ -35,6 +40,7 @@ import java.util.Map.Entry;
import java.util.Set;

import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
import static org.hamcrest.Matchers.instanceOf;

public class RankEvalRequestIT extends ESIntegTestCase {
    @Override

@ -59,6 +65,9 @@ public class RankEvalRequestIT extends ESIntegTestCase {
    client().prepareIndex("test", "testtype").setId("4").setSource("text", "amsterdam", "population", 851573).get();
    client().prepareIndex("test", "testtype").setId("5").setSource("text", "amsterdam", "population", 851573).get();
    client().prepareIndex("test", "testtype").setId("6").setSource("text", "amsterdam", "population", 851573).get();

    // add another index for testing closed indices etc...
    client().prepareIndex("test2", "testtype").setId("7").setSource("text", "amsterdam", "population", 851573).get();
    refresh();
}

@ -244,6 +253,72 @@ public class RankEvalRequestIT extends ESIntegTestCase {
    assertEquals("java.lang.NumberFormatException: For input string: \"noStringOnNumericFields\"", rootCauses[0].getCause().toString());
}

/**
 * test that multiple indices work, setting indices options is possible and works as expected
 */
public void testIndicesOptions() {
    SearchSourceBuilder amsterdamQuery = new SearchSourceBuilder().query(new MatchAllQueryBuilder());
    List<RatedDocument> relevantDocs = createRelevant("2", "3", "4", "5", "6");
    relevantDocs.add(new RatedDocument("test2", "7", TestRatingEnum.RELEVANT.ordinal()));
    List<RatedRequest> specifications = new ArrayList<>();
    specifications.add(new RatedRequest("amsterdam_query", relevantDocs, amsterdamQuery));
    RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK());

    RankEvalRequest request = new RankEvalRequest(task, new String[] { "test", "test2" });
    request.setRankEvalSpec(task);

    RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    Breakdown details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails();
    assertEquals(7, details.getRetrieved());
    assertEquals(6, details.getRelevantRetrieved());

    // test that ignore_unavailable=true works but returns one result less
    assertTrue(client().admin().indices().prepareClose("test2").get().isAcknowledged());

    request.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails();
    assertEquals(6, details.getRetrieved());
    assertEquals(5, details.getRelevantRetrieved());

    // test that ignore_unavailable=false or default settings throw an IndexClosedException
    assertTrue(client().admin().indices().prepareClose("test2").get().isAcknowledged());
    request.indicesOptions(IndicesOptions.fromParameters(null, "false", null, SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    assertEquals(1, response.getFailures().size());
    assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexClosedException.class));

    // test expand_wildcards
    request = new RankEvalRequest(task, new String[] { "tes*" });
    request.indicesOptions(IndicesOptions.fromParameters("none", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails();
    assertEquals(0, details.getRetrieved());

    request.indicesOptions(IndicesOptions.fromParameters("open", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails();
    assertEquals(6, details.getRetrieved());
    assertEquals(5, details.getRelevantRetrieved());

    request.indicesOptions(IndicesOptions.fromParameters("closed", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    assertEquals(1, response.getFailures().size());
    assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexClosedException.class));

    // test allow_no_indices
    request = new RankEvalRequest(task, new String[] { "bad*" });
    request.indicesOptions(IndicesOptions.fromParameters(null, null, "true", SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails();
    assertEquals(0, details.getRetrieved());

    request.indicesOptions(IndicesOptions.fromParameters(null, null, "false", SearchRequest.DEFAULT_INDICES_OPTIONS));
    response = client().execute(RankEvalAction.INSTANCE, request).actionGet();
    assertEquals(1, response.getFailures().size());
    assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexNotFoundException.class));
}

private static List<RatedDocument> createRelevant(String... docs) {
    List<RatedDocument> relevant = new ArrayList<>();
    for (String doc : docs) {
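A quick key to the fromParameters calls above (illustration, not from the diff): the three string arguments map to expand_wildcards, ignore_unavailable and allow_no_indices in that order, and a null leaves the corresponding default untouched.

    IndicesOptions opts = IndicesOptions.fromParameters(
        "open",   // expand wildcards to open indices only
        null,     // keep the default for ignore_unavailable
        null,     // keep the default for allow_no_indices
        SearchRequest.DEFAULT_INDICES_OPTIONS);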
@ -51,6 +51,7 @@ import java.util.function.Supplier;

import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;

public class RankEvalSpecTests extends ESTestCase {

@ -133,7 +134,7 @@ public class RankEvalSpecTests extends ESTestCase {
    BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
    try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
        Exception exception = expectThrows(Exception.class, () -> RankEvalSpec.parse(parser));
        assertThat(exception.getMessage(), startsWith("[rank_eval] failed to parse field"));
        assertThat(exception.getMessage(), containsString("[rank_eval] failed to parse field"));
    }
}
@ -19,6 +19,7 @@

package org.elasticsearch.index.rankeval;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@ -27,6 +28,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@ -51,6 +53,7 @@ import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;

public class RatedRequestsTests extends ESTestCase {

@ -134,11 +137,13 @@ public class RatedRequestsTests extends ESTestCase {
    BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
    try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
        Exception exception = expectThrows(Exception.class, () -> RatedRequest.fromXContent(parser));
        if (exception instanceof IllegalArgumentException) {
            assertThat(exception.getMessage(), startsWith("[request] unknown field"));
        if (exception instanceof XContentParseException) {
            XContentParseException xcpe = (XContentParseException) exception;
            assertThat(ExceptionsHelper.detailedMessage(xcpe), containsString("unknown field"));
            assertThat(ExceptionsHelper.detailedMessage(xcpe), containsString("parser not found"));
        }
        if (exception instanceof ParsingException) {
            assertThat(exception.getMessage(), startsWith("[request] failed to parse field"));
        if (exception instanceof XContentParseException) {
            assertThat(exception.getMessage(), containsString("[request] failed to parse field"));
        }
    }
@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.index.reindex.ScrollableHitSource;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.ResponseException;

@ -199,7 +200,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
    try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY,
            LoggingDeprecationHandler.INSTANCE, content)) {
        parsedResponse = parser.apply(xContentParser, xContentType);
    } catch (ParsingException e) {
    } catch (XContentParseException e) {
        /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it
         * is totally wrong and we're probably not talking to Elasticsearch. */
        throw new ElasticsearchException(
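Condensed illustration of the pattern the hunk above changes (not from the commit; parseFn is a placeholder name): parse failures now surface as XContentParseException rather than ParsingException, so that is the type worth catching when the response body may not be Elasticsearch JSON at all.

    try (XContentParser p = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY,
            LoggingDeprecationHandler.INSTANCE, content)) {
        return parseFn.apply(p, xContentType);
    } catch (XContentParseException e) {
        // Streaming response: no copy available, so just hint that the body was not ES JSON.
        throw new ElasticsearchException("Error parsing the response", e);
    }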
@ -26,6 +26,7 @@ import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

@ -38,6 +39,7 @@ import org.junit.Before;

import java.io.IOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.URL;
import java.util.List;
import java.util.Map;

@ -46,6 +48,7 @@ import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;

public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

@ -72,17 +75,21 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
    Map<String, Object> clusterSettings = entityAsMap(clusterSettingsResponse);

    @SuppressWarnings("unchecked")
    List<String> pathRepo = (List<String>) XContentMapValues.extractValue("defaults.path.repo", clusterSettings);
    assertThat(pathRepo, hasSize(1));
    List<String> pathRepos = (List<String>) XContentMapValues.extractValue("defaults.path.repo", clusterSettings);
    assertThat(pathRepos, notNullValue());
    assertThat(pathRepos, hasSize(1));

    final String pathRepo = pathRepos.get(0);
    final URI pathRepoUri = PathUtils.get(pathRepo).toUri().normalize();

    // Create a FS repository using the path.repo location
    Response createFsRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-fs", emptyMap(),
        buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo.get(0)).build()));
        buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build()));
    assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));

    // Create a URL repository using the file://{path.repo} URL
    Response createFileRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-file", emptyMap(),
        buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", "file://" + pathRepo.get(0)).build()));
        buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", pathRepoUri.toString()).build()));
    assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));

    // Create a URL repository using the http://{fixture} URL
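Why the hunk above builds the URL from a normalized URI instead of concatenating strings (hypothetical values, for illustration): Path#toUri yields a well-formed file: URL on every platform, which naked concatenation does not, notably for Windows paths.

    PathUtils.get("/tmp/repo").toUri().normalize().toString();     // "file:///tmp/repo" on Linux
    PathUtils.get("C:\\tmp\\repo").toUri().normalize().toString(); // "file:///C:/tmp/repo" on Windows
    // whereas "file://" + "C:\\tmp\\repo" produces a malformed URL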
@ -1 +0,0 @@
ece1b4232697fad170c589f0df887efa6e66dd4f

@ -0,0 +1 @@
c09216a18658d5b2912566efff8665e45edc24b4

@ -1 +0,0 @@
a16521e8f7240a9b93ea8ced157298b9d18bca43

@ -0,0 +1 @@
c9d5bbd0affa90b46e173c762c35419a54977c35

@ -1 +0,0 @@
0dc6db8e16bf1ed6ebaa914fcbfbb4970af23747

@ -0,0 +1 @@
4e6c63fa8ae005d81d12f0d88ffa98346b443ac4

@ -1 +0,0 @@
de43b057e8800f6c7b26907035664feb686127af

@ -0,0 +1 @@
37b7ff0a6493f139cb77f5bda965ac0189c8efd1

@ -1 +0,0 @@
c5e6a6d99a04ea5121bfd77470a7818725516ead

@ -0,0 +1 @@
d189185da23b2221c4d532da5e2cacce735f8a0c

@ -1 +0,0 @@
d755dcef8763b783b7cbba7154a62f91e413007c

@ -0,0 +1 @@
74462b51de45afe708f1042cc901fe7370413871
@ -16,6 +16,7 @@
 * specific language governing permissions and limitations
 * under the License.
 */
import org.elasticsearch.gradle.test.AntFixture

esplugin {
    description 'The Azure Repository plugin adds support for Azure storage repositories.'

@ -42,9 +43,28 @@ thirdPartyAudit.excludes = [
    'org.slf4j.LoggerFactory',
]

integTestCluster {
    keystoreSetting 'azure.client.default.account', 'cloudazureresource'
    keystoreSetting 'azure.client.default.key', 'abcdefgh'
    keystoreSetting 'azure.client.secondary.account', 'cloudazureresource'
    keystoreSetting 'azure.client.secondary.key', 'abcdefgh'
forbiddenApisTest {
    // we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage
    bundledSignatures -= 'jdk-non-portable'
    bundledSignatures += 'jdk-internal'
}

/** A task to start the fixture which emulates an Azure Storage service **/
task azureStorageFixture(type: AntFixture) {
    dependsOn compileTestJava
    env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
    executable = new File(project.runtimeJavaHome, 'bin/java')
    args 'org.elasticsearch.repositories.azure.AzureStorageFixture', baseDir, 'container_test'
}

integTestCluster {
    dependsOn azureStorageFixture

    keystoreSetting 'azure.client.integration_test.account', "azure_integration_test_account"
    /* The key is "azure_integration_test_key" encoded using base64 */
    keystoreSetting 'azure.client.integration_test.key', "YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk="
    // Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used
    // in a hacky way to change the protocol and endpoint. We must fix that.
    setting 'azure.client.integration_test.endpoint_suffix',
        "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=http://${ -> azureStorageFixture.addressAndPort }"
}
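For context, not from the diff: the endpoint_suffix value above is spliced into the storage account's connection string, so with the fixture running the Azure client effectively sees something like the following (port hypothetical), pointing all blob traffic at the local emulator:

    ...;DefaultEndpointsProtocol=http;BlobEndpoint=http://127.0.0.1:54321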
@ -0,0 +1,42 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.repositories.azure;

import com.microsoft.azure.storage.StorageException;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.ESBlobStoreTestCase;

import java.io.IOException;
import java.net.URISyntaxException;

public class AzureBlobStoreTests extends ESBlobStoreTestCase {

    @Override
    protected BlobStore newBlobStore() throws IOException {
        try {
            RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY);
            AzureStorageServiceMock client = new AzureStorageServiceMock();
            return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client);
        } catch (URISyntaxException | StorageException e) {
            throw new IOException(e);
        }
    }
}
@ -1,128 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.repositories.azure;

import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;

import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;

/**
 * Azure Repository
 * Main class to easily run Azure from an IDE.
 * It sets all the options to run the Azure plugin and access it from Sense.
 *
 * In order to run this class, configure the following:
 * 1) Set `-Des.path.home=` to a directory containing an ES config directory
 * 2) Set `-Dcloud.azure.storage.my_account.account=account_name`
 * 3) Set `-Dcloud.azure.storage.my_account.key=account_key`
 *
 * Then you can run REST calls like:
 * <pre>
 # Clean test env
 curl -XDELETE localhost:9200/foo?pretty
 curl -XDELETE localhost:9200/_snapshot/my_backup1?pretty
 curl -XDELETE localhost:9200/_snapshot/my_backup2?pretty

 # Create data
 curl -XPUT localhost:9200/foo/bar/1?pretty -d '{
 "foo": "bar"
 }'
 curl -XPOST localhost:9200/foo/_refresh?pretty
 curl -XGET localhost:9200/foo/_count?pretty

 # Create repository using default account
 curl -XPUT localhost:9200/_snapshot/my_backup1?pretty -d '{
 "type": "azure"
 }'

 # Backup
 curl -XPOST "localhost:9200/_snapshot/my_backup1/snap1?pretty&wait_for_completion=true"

 # Remove data
 curl -XDELETE localhost:9200/foo?pretty

 # Restore data
 curl -XPOST "localhost:9200/_snapshot/my_backup1/snap1/_restore?pretty&wait_for_completion=true"
 curl -XGET localhost:9200/foo/_count?pretty
 </pre>
 *
 * If you want to define a secondary repository:
 *
 * 4) Set `-Dcloud.azure.storage.my_account.default=true`
 * 5) Set `-Dcloud.azure.storage.my_account2.account=account_name`
 * 6) Set `-Dcloud.azure.storage.my_account2.key=account_key_secondary`
 *
 * Then you can run REST calls like:
 * <pre>
 # Remove data
 curl -XDELETE localhost:9200/foo?pretty

 # Create repository using account2 (secondary)
 curl -XPUT localhost:9200/_snapshot/my_backup2?pretty -d '{
 "type": "azure",
 "settings": {
 "account" : "my_account2",
 "location_mode": "secondary_only"
 }
 }'

 # Restore data from the secondary endpoint
 curl -XPOST "localhost:9200/_snapshot/my_backup2/snap1/_restore?pretty&wait_for_completion=true"
 curl -XGET localhost:9200/foo/_count?pretty
 </pre>
 */
public class AzureRepositoryF {
    public static void main(String[] args) throws Throwable {
        Settings.Builder settings = Settings.builder();
        settings.put("http.cors.enabled", "true");
        settings.put("http.cors.allow-origin", "*");
        settings.put("cluster.name", AzureRepositoryF.class.getSimpleName());

        // Example for azure repo settings
        // settings.put("cloud.azure.storage.my_account1.account", "account_name");
        // settings.put("cloud.azure.storage.my_account1.key", "account_key");
        // settings.put("cloud.azure.storage.my_account1.default", true);
        // settings.put("cloud.azure.storage.my_account2.account", "account_name");
        // settings.put("cloud.azure.storage.my_account2.key", "account_key_secondary");

        final CountDownLatch latch = new CountDownLatch(1);
        final Node node = new MockNode(settings.build(), Collections.singletonList(AzureRepositoryPlugin.class));
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    IOUtils.close(node);
                } catch (IOException e) {
                    throw new ElasticsearchException(e);
                } finally {
                    latch.countDown();
                }
            }
        });
        node.start();
        latch.await();
    }
}
@ -47,7 +47,6 @@ public class AzureRepositorySettingsTests extends ESTestCase {
        TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, null);
}


public void testReadonlyDefault() throws StorageException, IOException, URISyntaxException {
    assertThat(azureRepository(Settings.EMPTY).isReadOnly(), is(false));
}
@ -0,0 +1,136 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.repositories.azure;

import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.mocksocket.MockHttpServer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.Map;

import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;

/**
 * {@link AzureStorageFixture} is a fixture that emulates an Azure Storage service.
 * <p>
 * It starts an asynchronous socket server that binds to a random local port. The server parses
 * HTTP requests and uses an {@link AzureStorageTestServer} to handle them before returning
 * them to the client as HTTP responses.
 */
public class AzureStorageFixture {

    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 2) {
            throw new IllegalArgumentException("AzureStorageFixture <working directory> <container>");
        }

        final InetSocketAddress socketAddress = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0);
        final HttpServer httpServer = MockHttpServer.createHttp(socketAddress, 0);

        try {
            final Path workingDirectory = workingDir(args[0]);
            // Writes the PID of the current Java process in a `pid` file located in the working directory
            writeFile(workingDirectory, "pid", ManagementFactory.getRuntimeMXBean().getName().split("@")[0]);

            final String addressAndPort = addressToString(httpServer.getAddress());
            // Writes the address and port of the http server in a `ports` file located in the working directory
            writeFile(workingDirectory, "ports", addressAndPort);

            // Emulates Azure
            final String storageUrl = "http://" + addressAndPort;
            final AzureStorageTestServer testServer = new AzureStorageTestServer(storageUrl);
            testServer.createContainer(args[1]);

            httpServer.createContext("/", new ResponseHandler(testServer));
            httpServer.start();

            // Wait to be killed
            Thread.sleep(Long.MAX_VALUE);

        } finally {
            httpServer.stop(0);
        }
    }

    @SuppressForbidden(reason = "Paths#get is fine - we don't have environment here")
    private static Path workingDir(final String dir) {
        return Paths.get(dir);
    }

    private static void writeFile(final Path dir, final String fileName, final String content) throws IOException {
        final Path tempPidFile = Files.createTempFile(dir, null, null);
        Files.write(tempPidFile, singleton(content));
        Files.move(tempPidFile, dir.resolve(fileName), StandardCopyOption.ATOMIC_MOVE);
    }

    private static String addressToString(final SocketAddress address) {
        final InetSocketAddress inetSocketAddress = (InetSocketAddress) address;
        if (inetSocketAddress.getAddress() instanceof Inet6Address) {
            return "[" + inetSocketAddress.getHostString() + "]:" + inetSocketAddress.getPort();
        } else {
            return inetSocketAddress.getHostString() + ":" + inetSocketAddress.getPort();
        }
    }

    static class ResponseHandler implements HttpHandler {

        private final AzureStorageTestServer server;

        private ResponseHandler(final AzureStorageTestServer server) {
            this.server = server;
        }

        @Override
        public void handle(HttpExchange exchange) throws IOException {
            String method = exchange.getRequestMethod();
            String path = server.getEndpoint() + exchange.getRequestURI().getRawPath();
            String query = exchange.getRequestURI().getRawQuery();
            Map<String, List<String>> headers = exchange.getRequestHeaders();
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            Streams.copy(exchange.getRequestBody(), out);

            final AzureStorageTestServer.Response response = server.handle(method, path, query, headers, out.toByteArray());

            Map<String, List<String>> responseHeaders = exchange.getResponseHeaders();
            responseHeaders.put("Content-Type", singletonList(response.contentType));
            response.headers.forEach((k, v) -> responseHeaders.put(k, singletonList(v)));
            exchange.sendResponseHeaders(response.status.getStatus(), response.body.length);
            if (response.body.length > 0) {
                exchange.getResponseBody().write(response.body);
            }
            exchange.close();
        }
    }
}
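A note on the handshake files above (the reader below is hypothetical, not part of the commit): the build's AntFixture support discovers the emulator by reading the `ports` file, and the atomic move in writeFile guarantees a reader never observes a half-written file.

    Path portsFile = workingDirectory.resolve("ports");
    String addressAndPort = Files.readAllLines(portsFile).get(0); // e.g. "127.0.0.1:54321"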
@ -25,8 +25,8 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.Streams;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

@ -66,6 +66,8 @@ public class AzureStorageServiceMock extends AbstractComponent implements AzureS

    @Override
    public void deleteFiles(String account, LocationMode mode, String container, String path) {
        final Map<String, BlobMetaData> blobs = listBlobsByPrefix(account, mode, container, path, null);
        blobs.keySet().forEach(key -> deleteBlob(account, mode, container, key));
    }

    @Override
@ -0,0 +1,425 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.repositories.azure;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.path.PathTrie;
|
||||
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.rest.RestUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.singletonMap;
|
||||
|
||||
/**
|
||||
* {@link AzureStorageTestServer} emulates an Azure Storage service through a {@link #handle(String, String, String, Map, byte[])}
|
||||
* method that provides appropriate responses for specific requests like the real Azure platform would do.
|
||||
* It is based on official documentation available at https://docs.microsoft.com/en-us/rest/api/storageservices/blob-service-rest-api.
|
||||
*/
|
||||
public class AzureStorageTestServer {
|
||||
|
||||
private static byte[] EMPTY_BYTE = new byte[0];
|
||||
|
||||
/** List of the containers stored on this test server **/
|
||||
private final Map<String, Container> containers = ConcurrentCollections.newConcurrentMap();
|
||||
|
||||
/** Request handlers for the requests made by the Azure client **/
|
||||
private final PathTrie<RequestHandler> handlers;
|
||||
|
||||
/** Server endpoint **/
|
||||
private final String endpoint;
|
||||
|
||||
/** Increments for the requests ids **/
|
||||
private final AtomicLong requests = new AtomicLong(0);
|
||||
|
||||
/**
|
||||
* Creates a {@link AzureStorageTestServer} with a custom endpoint
|
||||
*/
|
||||
AzureStorageTestServer(final String endpoint) {
|
||||
this.endpoint = Objects.requireNonNull(endpoint, "endpoint must not be null");
|
||||
this.handlers = defaultHandlers(endpoint, containers);
|
||||
}
|
||||
|
||||
/** Creates a container in the test server **/
|
||||
void createContainer(final String containerName) {
|
||||
containers.put(containerName, new Container(containerName));
|
||||
}
|
||||
|
||||
public String getEndpoint() {
|
||||
return endpoint;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a response for the given request
|
||||
*
|
||||
* @param method the HTTP method of the request
|
||||
* @param path the path of the URL of the request
|
||||
* @param query the queryString of the URL of request
|
||||
* @param headers the HTTP headers of the request
|
||||
* @param body the HTTP request body
|
||||
* @return a {@link Response}
|
||||
* @throws IOException if something goes wrong
|
||||
*/
|
||||
public Response handle(final String method,
|
||||
final String path,
|
||||
final String query,
|
||||
final Map<String, List<String>> headers,
|
||||
byte[] body) throws IOException {
|
||||
|
||||
final long requestId = requests.incrementAndGet();
|
||||
|
||||
final Map<String, String> params = new HashMap<>();
|
||||
if (query != null) {
|
||||
RestUtils.decodeQueryString(query, 0, params);
|
||||
}
|
||||
|
||||
final RequestHandler handler = handlers.retrieve(method + " " + path, params);
|
||||
if (handler != null) {
|
||||
return handler.execute(params, headers, body, requestId);
|
||||
} else {
|
||||
return newInternalError(requestId);
|
||||
}
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
interface RequestHandler {
|
||||
|
||||
/**
|
||||
* Simulates the execution of a Azure Storage request and returns a corresponding response.
|
||||
*
|
||||
* @param params the request's query string parameters
|
||||
* @param headers the request's headers
|
||||
* @param body the request body provided as a byte array
|
||||
* @param requestId a unique id for the incoming request
|
||||
* @return the corresponding response
|
||||
*
|
||||
* @throws IOException if something goes wrong
|
||||
*/
|
||||
Response execute(Map<String, String> params, Map<String, List<String>> headers, byte[] body, long requestId) throws IOException;
|
||||
}
|
||||
|
||||
/** Builds the default request handlers **/
|
||||
private static PathTrie<RequestHandler> defaultHandlers(final String endpoint, final Map<String, Container> containers) {
|
||||
final PathTrie<RequestHandler> handlers = new PathTrie<>(RestUtils.REST_DECODER);
|
||||
|
||||
// Get Blob Properties
|
||||
//
|
||||
// https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties
|
||||
objectsPaths("HEAD " + endpoint + "/{container}").forEach(path ->
|
||||
handlers.insert(path, (params, headers, body, requestId) -> {
|
||||
final String containerName = params.get("container");
|
||||
|
||||
final Container container =containers.get(containerName);
|
||||
if (container == null) {
|
||||
return newContainerNotFoundError(requestId);
|
||||
}
|
||||
|
||||
final String blobName = objectName(params);
|
||||
for (Map.Entry<String, byte[]> object : container.objects.entrySet()) {
|
||||
if (object.getKey().equals(blobName)) {
|
||||
Map<String, String> responseHeaders = new HashMap<>();
|
||||
responseHeaders.put("x-ms-blob-content-length", String.valueOf(object.getValue().length));
|
||||
responseHeaders.put("x-ms-blob-type", "blockblob");
|
||||
return new Response(RestStatus.OK, responseHeaders, "text/plain", EMPTY_BYTE);
|
||||
}
|
||||
}
|
||||
return newBlobNotFoundError(requestId);
|
||||
})
|
||||
);
|
||||
|
||||
// PUT Blob
|
||||
//
|
||||
// https://docs.microsoft.com/en-us/rest/api/storageservices/put-blob
|
||||
objectsPaths("PUT " + endpoint + "/{container}").forEach(path ->
|
||||
handlers.insert(path, (params, headers, body, requestId) -> {
|
||||
final String destContainerName = params.get("container");
|
||||
|
||||
final Container destContainer =containers.get(destContainerName);
|
||||
if (destContainer == null) {
|
||||
return newContainerNotFoundError(requestId);
|
||||
}
|
||||
|
||||
final String destBlobName = objectName(params);
|
||||
|
||||
// Request is a copy request
|
||||
List<String> headerCopySource = headers.getOrDefault("x-ms-copy-source", emptyList());
|
||||
if (headerCopySource.isEmpty() == false) {
|
||||
String srcBlobName = headerCopySource.get(0);
|
||||
|
||||
Container srcContainer = null;
|
||||
for (Container container : containers.values()) {
|
||||
String prefix = endpoint + "/" + container.name + "/";
|
||||
if (srcBlobName.startsWith(prefix)) {
|
||||
srcBlobName = srcBlobName.replaceFirst(prefix, "");
|
||||
srcContainer = container;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (srcContainer == null || srcContainer.objects.containsKey(srcBlobName) == false) {
|
||||
return newBlobNotFoundError(requestId);
|
||||
}
|
||||
|
||||
byte[] bytes = srcContainer.objects.get(srcBlobName);
|
||||
if (bytes != null) {
|
||||
destContainer.objects.put(destBlobName, bytes);
|
||||
return new Response(RestStatus.ACCEPTED, singletonMap("x-ms-copy-status", "success"), "text/plain", EMPTY_BYTE);
|
||||
} else {
|
||||
return newBlobNotFoundError(requestId);
|
||||
}
|
||||
} else {
|
||||
destContainer.objects.put(destBlobName, body);
|
||||
}
|
||||
|
||||
return new Response(RestStatus.CREATED, emptyMap(), "text/plain", EMPTY_BYTE);
|
||||
})
|
||||
);
|
||||
|
||||
// GET Object
|
||||
//
|
||||
// https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
|
||||
objectsPaths("GET " + endpoint + "/{container}").forEach(path ->
|
||||
handlers.insert(path, (params, headers, body, requestId) -> {
|
||||
final String containerName = params.get("container");
|
||||
|
||||
final Container container =containers.get(containerName);
|
||||
if (container == null) {
|
||||
return newContainerNotFoundError(requestId);
|
||||
}
|
||||
|
||||
final String blobName = objectName(params);
|
||||
if (container.objects.containsKey(blobName)) {
|
||||
Map<String, String> responseHeaders = new HashMap<>();
|
||||
responseHeaders.put("x-ms-copy-status", "success");
|
||||
responseHeaders.put("x-ms-blob-type", "blockblob");
|
||||
return new Response(RestStatus.OK, responseHeaders, "application/octet-stream", container.objects.get(blobName));
|
||||
|
||||
}
|
||||
return newBlobNotFoundError(requestId);
|
||||
})
|
||||
);
|
||||
|
||||
// Delete Blob
|
||||
//
|
||||
// https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
|
||||
objectsPaths("DELETE " + endpoint + "/{container}").forEach(path ->
|
||||
handlers.insert(path, (params, headers, body, requestId) -> {
|
||||
final String containerName = params.get("container");
|
||||
|
||||
final Container container =containers.get(containerName);
|
||||
if (container == null) {
|
||||
return newContainerNotFoundError(requestId);
|
||||
}
|
||||
|
||||
final String blobName = objectName(params);
|
||||
if (container.objects.remove(blobName) != null) {
|
||||
return new Response(RestStatus.ACCEPTED, emptyMap(), "text/plain", EMPTY_BYTE);
|
||||
}
|
||||
return newBlobNotFoundError(requestId);
|
||||
})
|
||||
);
|
||||
|
||||
// List Blobs
|
||||
//
|
||||
// https://docs.microsoft.com/en-us/rest/api/storageservices/list-blobs
|
||||
handlers.insert("GET " + endpoint + "/{container}/", (params, headers, body, requestId) -> {
|
||||
final String containerName = params.get("container");
|
||||
|
||||
final Container container =containers.get(containerName);
|
||||
if (container == null) {
|
||||
return newContainerNotFoundError(requestId);
|
||||
}
|
||||
|
||||
final String prefix = params.get("prefix");
|
||||
return newEnumerationResultsResponse(requestId, container, prefix);
|
||||
});
|
||||
|
||||
// Get Container Properties
|
||||
//
|
||||
// https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties
|
||||
handlers.insert("HEAD " + endpoint + "/{container}", (params, headers, body, requestId) -> {
|
||||
String container = params.get("container");
|
||||
            if (Strings.hasText(container) && containers.containsKey(container)) {
                return new Response(RestStatus.OK, emptyMap(), "text/plain", EMPTY_BYTE);
            } else {
                return newContainerNotFoundError(requestId);
            }
        });

        return handlers;
    }

    /**
     * Represents an Azure Storage container.
     */
    static class Container {

        /** Container name **/
        final String name;

        /** Blobs contained in the container **/
        final Map<String, byte[]> objects;

        Container(final String name) {
            this.name = Objects.requireNonNull(name);
            this.objects = ConcurrentCollections.newConcurrentMap();
        }
    }

    /**
     * Represents an HTTP Response.
     */
    static class Response {

        final RestStatus status;
        final Map<String, String> headers;
        final String contentType;
        final byte[] body;

        Response(final RestStatus status, final Map<String, String> headers, final String contentType, final byte[] body) {
            this.status = Objects.requireNonNull(status);
            this.headers = Objects.requireNonNull(headers);
            this.contentType = Objects.requireNonNull(contentType);
            this.body = Objects.requireNonNull(body);
        }
    }

    /**
     * Expands a path like "http://host:port/{bucket}" into 10 derived paths like:
     * - http://host:port/{bucket}/{path0}
     * - http://host:port/{bucket}/{path0}/{path1}
     * - http://host:port/{bucket}/{path0}/{path1}/{path2}
     * - etc
     */
    private static List<String> objectsPaths(final String path) {
        final List<String> paths = new ArrayList<>();
        String p = path;
        for (int i = 0; i < 10; i++) {
            p = p + "/{path" + i + "}";
            paths.add(p);
        }
        return paths;
    }

    /**
     * Retrieves the object name from all derived paths named {pathX} where 0 <= X < 10.
     *
     * This is the counterpart of {@link #objectsPaths(String)}
     */
    private static String objectName(final Map<String, String> params) {
        final StringBuilder name = new StringBuilder();
        for (int i = 0; i < 10; i++) {
            String value = params.getOrDefault("path" + i, null);
            if (value != null) {
                if (name.length() > 0) {
                    name.append('/');
                }
                name.append(value);
            }
        }
        return name.toString();
    }

    /**
     * Azure EnumerationResults Response
     */
    private static Response newEnumerationResultsResponse(final long requestId, final Container container, final String prefix) {
        final String id = Long.toString(requestId);
        final StringBuilder response = new StringBuilder();
        response.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
        response.append("<EnumerationResults ServiceEndpoint=\"http://myaccount.blob.core.windows.net/\"");
        response.append(" ContainerName=\"").append(container.name).append("\">");
        if (prefix != null) {
            response.append("<Prefix>").append(prefix).append("</Prefix>");
        } else {
            response.append("<Prefix/>");
        }
        response.append("<MaxResults>").append(container.objects.size()).append("</MaxResults>");
        response.append("<Blobs>");

        int count = 0;
        for (Map.Entry<String, byte[]> object : container.objects.entrySet()) {
            String objectName = object.getKey();
            if (prefix == null || objectName.startsWith(prefix)) {
                response.append("<Blob>");
                response.append("<Name>").append(objectName).append("</Name>");
                response.append("<Properties>");
                response.append("<Content-Length>").append(object.getValue().length).append("</Content-Length>");
                response.append("<CopyId>").append(count++).append("</CopyId>");
                response.append("<CopyStatus>success</CopyStatus>");
                response.append("<BlobType>BlockBlob</BlobType>");
                response.append("</Properties>");
                response.append("</Blob>");
            }
        }

        response.append("</Blobs>");
        response.append("<NextMarker />");
        response.append("</EnumerationResults>");

        return new Response(RestStatus.OK, singletonMap("x-ms-request-id", id), "application/xml", response.toString().getBytes(UTF_8));
    }

    private static Response newContainerNotFoundError(final long requestId) {
        return newError(requestId, RestStatus.NOT_FOUND, "ContainerNotFound", "The specified container does not exist");
    }

    private static Response newBlobNotFoundError(final long requestId) {
        return newError(requestId, RestStatus.NOT_FOUND, "BlobNotFound", "The specified blob does not exist");
    }

    private static Response newInternalError(final long requestId) {
        return newError(requestId, RestStatus.INTERNAL_SERVER_ERROR, "InternalError", "The server encountered an internal error");
    }

    /**
     * Azure Error
     *
     * https://docs.microsoft.com/en-us/rest/api/storageservices/status-and-error-codes2
     */
    private static Response newError(final long requestId,
                                     final RestStatus status,
                                     final String code,
                                     final String message) {

        final StringBuilder response = new StringBuilder();
        response.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
        response.append("<Error>");
        response.append("<Code>").append(code).append("</Code>");
        response.append("<Message>").append(message).append("</Message>");
        response.append("</Error>");

        final Map<String, String> headers = new HashMap<>(2);
        headers.put("x-ms-request-id", String.valueOf(requestId));
        headers.put("x-ms-error-code", code);

        return new Response(status, headers, "application/xml", response.toString().getBytes(UTF_8));
    }
}
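A note on the objectsPaths/objectName pair above, since the round trip is easy to miss: objectsPaths registers up to ten route templates with {path0}..{path9} placeholders, and objectName reassembles whatever the route parser bound into those placeholders. A minimal sketch (hypothetical values, not part of this change):

    // A request for "container_test/foo/bar" matches the template ending in
    // "/{path0}/{path1}", so the parsed params hold path0=foo and path1=bar,
    // and objectName() rebuilds the blob name "foo/bar".
    final Map<String, String> params = new HashMap<>();
    params.put("path0", "foo");
    params.put("path1", "bar");
    assert "foo/bar".equals(objectName(params));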
@ -1,6 +1,6 @@
# Integration tests for Azure Repository component
# Integration tests for repository-azure
#
"Azure Repository loaded":
"Plugin repository-azure is loaded":
  - do:
      cluster.state: {}

@ -11,3 +11,177 @@
      nodes.info: {}

  - match: { nodes.$master.plugins.0.name: repository-azure }
---
"Snapshot/Restore with repository-azure":

  # Register repository
  - do:
      snapshot.create_repository:
        repository: repository
        body:
          type: azure
          settings:
            container: "container_test"
            client: "integration_test"

  - match: { acknowledged: true }

  # Get repository
  - do:
      snapshot.get_repository:
        repository: repository

  - match: {repository.settings.container : "container_test"}
  - match: {repository.settings.client : "integration_test"}

  # Index documents
  - do:
      bulk:
        refresh: true
        body:
          - index:
              _index: docs
              _type: doc
              _id: 1
          - snapshot: one
          - index:
              _index: docs
              _type: doc
              _id: 2
          - snapshot: one
          - index:
              _index: docs
              _type: doc
              _id: 3
          - snapshot: one

  - do:
      count:
        index: docs

  - match: {count: 3}

  # Create a first snapshot
  - do:
      snapshot.create:
        repository: repository
        snapshot: snapshot-one
        wait_for_completion: true

  - match: { snapshot.snapshot: snapshot-one }
  - match: { snapshot.state : SUCCESS }
  - match: { snapshot.include_global_state: true }
  - match: { snapshot.shards.failed : 0 }

  - do:
      snapshot.status:
        repository: repository
        snapshot: snapshot-one

  - is_true: snapshots
  - match: { snapshots.0.snapshot: snapshot-one }
  - match: { snapshots.0.state : SUCCESS }

  # Index more documents
  - do:
      bulk:
        refresh: true
        body:
          - index:
              _index: docs
              _type: doc
              _id: 4
          - snapshot: two
          - index:
              _index: docs
              _type: doc
              _id: 5
          - snapshot: two
          - index:
              _index: docs
              _type: doc
              _id: 6
          - snapshot: two
          - index:
              _index: docs
              _type: doc
              _id: 7
          - snapshot: two

  - do:
      count:
        index: docs

  - match: {count: 7}

  # Create a second snapshot
  - do:
      snapshot.create:
        repository: repository
        snapshot: snapshot-two
        wait_for_completion: true

  - match: { snapshot.snapshot: snapshot-two }
  - match: { snapshot.state : SUCCESS }
  - match: { snapshot.shards.failed : 0 }

  - do:
      snapshot.get:
        repository: repository
        snapshot: snapshot-one,snapshot-two

  - is_true: snapshots
  - match: { snapshots.0.state : SUCCESS }
  - match: { snapshots.1.state : SUCCESS }

  # Delete the index
  - do:
      indices.delete:
        index: docs

  # Restore the second snapshot
  - do:
      snapshot.restore:
        repository: repository
        snapshot: snapshot-two
        wait_for_completion: true

  - do:
      count:
        index: docs

  - match: {count: 7}

  # Delete the index again
  - do:
      indices.delete:
        index: docs

  # Restore the first snapshot
  - do:
      snapshot.restore:
        repository: repository
        snapshot: snapshot-one
        wait_for_completion: true

  - do:
      count:
        index: docs

  - match: {count: 3}

  # Remove the snapshots
  - do:
      snapshot.delete:
        repository: repository
        snapshot: snapshot-two

  - do:
      snapshot.delete:
        repository: repository
        snapshot: snapshot-one

  # Remove our repository
  - do:
      snapshot.delete_repository:
        repository: repository
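For readers not used to the YAML REST test runner: the registration step above is just the test-runner form of a plain snapshot API call. A sketch of the equivalent request (repository name and settings taken from the test; the endpoint is the standard _snapshot API):

    PUT _snapshot/repository
    {
      "type": "azure",
      "settings": {
        "container": "container_test",
        "client": "integration_test"
      }
    }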
@ -24,12 +24,13 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;

import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;

public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase {

    @Override
    protected BlobStore newBlobStore() {
        String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockStorage.newStorageClient(bucket, getTestName()));
        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>()));
    }
}
@ -27,14 +27,13 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase;
import org.junit.BeforeClass;
import org.junit.AfterClass;

import java.net.SocketPermission;
import java.security.AccessController;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

@ -42,9 +41,9 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos

    private static final String BUCKET = "gcs-repository-test";

    // Static storage client shared among all nodes in order to act like a remote repository service:
    // Static list of blobs shared among all nodes in order to act like a remote repository service:
    // all nodes must see the same content
    private static final AtomicReference<Storage> storage = new AtomicReference<>();
    private static final ConcurrentMap<String, byte[]> blobs = new ConcurrentHashMap<>();

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {

@ -62,15 +61,17 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos
                .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
    }

    @BeforeClass
    public static void setUpStorage() {
        storage.set(MockStorage.newStorageClient(BUCKET, GoogleCloudStorageBlobStoreRepositoryTests.class.getName()));
    @AfterClass
    public static void wipeRepository() {
        blobs.clear();
    }

    public static class MockGoogleCloudStoragePlugin extends GoogleCloudStoragePlugin {

        public MockGoogleCloudStoragePlugin(final Settings settings) {
            super(settings);
        }

        @Override
        protected GoogleCloudStorageService createStorageService(Environment environment) {
            return new MockGoogleCloudStorageService(environment, getClientsSettings());

@ -85,9 +86,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos

        @Override
        public Storage createClient(String clientName) {
            // The actual impl might open a connection. So check we have permission when this call is made.
            AccessController.checkPermission(new SocketPermission("*", "connect"));
            return storage.get();
            return new MockStorage(BUCKET, blobs);
        }
    }
@ -24,12 +24,13 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.ESBlobStoreTestCase;

import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;

public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase {

    @Override
    protected BlobStore newBlobStore() {
        String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockStorage.newStorageClient(bucket, getTestName()));
        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>()));
    }
}
@ -22,7 +22,7 @@ import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.elasticsearch.repositories.gcs.GoogleCloudStorageTestServer.Response;
@ -19,74 +19,289 @@

package org.elasticsearch.repositories.gcs;

import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.AbstractInputStreamContent;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.http.HttpMethods;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpResponseException;
import com.google.api.client.http.LowLevelHttpRequest;
import com.google.api.client.http.LowLevelHttpResponse;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.http.MultipartContent;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpRequest;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.model.Bucket;
import com.google.api.services.storage.model.StorageObject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.rest.RestStatus;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import java.io.InputStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentMap;

import static org.mockito.Mockito.mock;

/**
 * {@link MockStorage} is a utility class that provides {@link Storage} clients that work
 * against an embedded {@link GoogleCloudStorageTestServer}.
 * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs
 * in a given concurrent map.
 */
class MockStorage extends com.google.api.client.testing.http.MockHttpTransport {
class MockStorage extends Storage {

    /**
     * Embedded test server that emulates a Google Cloud Storage service
     **/
    private final GoogleCloudStorageTestServer server = new GoogleCloudStorageTestServer();
    /* A custom HTTP header name used to propagate the name of the blobs to delete in batch requests */
    private static final String DELETION_HEADER = "x-blob-to-delete";

    private MockStorage() {
    private final String bucketName;
    private final ConcurrentMap<String, byte[]> blobs;

    MockStorage(final String bucket, final ConcurrentMap<String, byte[]> blobs) {
        super(new MockedHttpTransport(blobs), mock(JsonFactory.class), mock(HttpRequestInitializer.class));
        this.bucketName = bucket;
        this.blobs = blobs;
    }

    @Override
    public LowLevelHttpRequest buildRequest(String method, String url) throws IOException {
        return new MockLowLevelHttpRequest() {
            @Override
            public LowLevelHttpResponse execute() throws IOException {
                return convert(server.handle(method, url, getHeaders(), getContentAsBytes()));
            }

            /** Returns the LowLevelHttpRequest body as an array of bytes **/
            byte[] getContentAsBytes() throws IOException {
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                if (getStreamingContent() != null) {
                    getStreamingContent().writeTo(out);
                }
                return out.toByteArray();
            }
        };
    public Buckets buckets() {
        return new MockBuckets();
    }

    private static MockLowLevelHttpResponse convert(final GoogleCloudStorageTestServer.Response response) {
        final MockLowLevelHttpResponse lowLevelHttpResponse = new MockLowLevelHttpResponse();
        for (Map.Entry<String, String> header : response.headers.entrySet()) {
            lowLevelHttpResponse.addHeader(header.getKey(), header.getValue());
    @Override
    public Objects objects() {
        return new MockObjects();
    }

    class MockBuckets extends Buckets {

        @Override
        public Get get(String getBucket) {
            return new Get(getBucket) {
                @Override
                public Bucket execute() {
                    if (bucketName.equals(getBucket())) {
                        Bucket bucket = new Bucket();
                        bucket.setId(bucketName);
                        return bucket;
                    } else {
                        return null;
                    }
                }
            };
        }
        lowLevelHttpResponse.setContentType(response.contentType);
        lowLevelHttpResponse.setStatusCode(response.status.getStatus());
        lowLevelHttpResponse.setReasonPhrase(response.status.toString());
        if (response.body != null) {
            lowLevelHttpResponse.setContent(response.body);
            lowLevelHttpResponse.setContentLength(response.body.length);
        }

    class MockObjects extends Objects {

        @Override
        public Get get(String getBucket, String getObject) {
            return new Get(getBucket, getObject) {
                @Override
                public StorageObject execute() throws IOException {
                    if (bucketName.equals(getBucket()) == false) {
                        throw newBucketNotFoundException(getBucket());
                    }
                    if (blobs.containsKey(getObject()) == false) {
                        throw newObjectNotFoundException(getObject());
                    }

                    StorageObject storageObject = new StorageObject();
                    storageObject.setId(getObject());
                    return storageObject;
                }

                @Override
                public InputStream executeMediaAsInputStream() throws IOException {
                    if (bucketName.equals(getBucket()) == false) {
                        throw newBucketNotFoundException(getBucket());
                    }
                    if (blobs.containsKey(getObject()) == false) {
                        throw newObjectNotFoundException(getObject());
                    }
                    return new ByteArrayInputStream(blobs.get(getObject()));
                }
            };
        }
        return lowLevelHttpResponse;

        @Override
        public Insert insert(String insertBucket, StorageObject insertObject, AbstractInputStreamContent insertStream) {
            return new Insert(insertBucket, insertObject) {
                @Override
                public StorageObject execute() throws IOException {
                    if (bucketName.equals(getBucket()) == false) {
                        throw newBucketNotFoundException(getBucket());
                    }

                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    Streams.copy(insertStream.getInputStream(), out);
                    blobs.put(getName(), out.toByteArray());
                    return null;
                }
            };
        }

        @Override
        public List list(String listBucket) {
            return new List(listBucket) {
                @Override
                public com.google.api.services.storage.model.Objects execute() throws IOException {
                    if (bucketName.equals(getBucket()) == false) {
                        throw newBucketNotFoundException(getBucket());
                    }

                    final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects();

                    final java.util.List<StorageObject> storageObjects = new ArrayList<>();
                    for (Map.Entry<String, byte[]> blob : blobs.entrySet()) {
                        if (getPrefix() == null || blob.getKey().startsWith(getPrefix())) {
                            StorageObject storageObject = new StorageObject();
                            storageObject.setId(blob.getKey());
                            storageObject.setName(blob.getKey());
                            storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length));
                            storageObjects.add(storageObject);
                        }
                    }

                    objects.setItems(storageObjects);
                    return objects;
                }
            };
        }

        @Override
        public Delete delete(String deleteBucket, String deleteObject) {
            return new Delete(deleteBucket, deleteObject) {
                @Override
                public Void execute() throws IOException {
                    if (bucketName.equals(getBucket()) == false) {
                        throw newBucketNotFoundException(getBucket());
                    }

                    if (blobs.containsKey(getObject()) == false) {
                        throw newObjectNotFoundException(getObject());
                    }

                    blobs.remove(getObject());
                    return null;
                }

                @Override
                public HttpRequest buildHttpRequest() throws IOException {
                    HttpRequest httpRequest = super.buildHttpRequest();
                    httpRequest.getHeaders().put(DELETION_HEADER, getObject());
                    return httpRequest;
                }
            };
        }

        @Override
        public Copy copy(String srcBucket, String srcObject, String destBucket, String destObject, StorageObject content) {
            return new Copy(srcBucket, srcObject, destBucket, destObject, content) {
                @Override
                public StorageObject execute() throws IOException {
                    if (bucketName.equals(getSourceBucket()) == false) {
                        throw newBucketNotFoundException(getSourceBucket());
                    }
                    if (bucketName.equals(getDestinationBucket()) == false) {
                        throw newBucketNotFoundException(getDestinationBucket());
                    }

                    final byte[] bytes = blobs.get(getSourceObject());
                    if (bytes == null) {
                        throw newObjectNotFoundException(getSourceObject());
                    }
                    blobs.put(getDestinationObject(), bytes);

                    StorageObject storageObject = new StorageObject();
                    storageObject.setId(getDestinationObject());
                    return storageObject;
                }
            };
        }
    }

    private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) {
        HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders());
        return new GoogleJsonResponseException(builder, new GoogleJsonError());
    }

    private static GoogleJsonResponseException newObjectNotFoundException(final String object) {
        HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders());
        return new GoogleJsonResponseException(builder, new GoogleJsonError());
    }

    /**
     * Instantiates a mocked Storage client for tests.
     * {@link MockedHttpTransport} extends the existing testing transport to analyze the content
     * of {@link com.google.api.client.googleapis.batch.BatchRequest} and delete the appropriate
     * blobs. We use this because {@link Storage#batch()} is final and there is no other way to
     * extend batch requests for testing purposes.
     */
    public static Storage newStorageClient(final String bucket, final String applicationName) {
        MockStorage mockStorage = new MockStorage();
        mockStorage.server.createBucket(bucket);
    static class MockedHttpTransport extends MockHttpTransport {

        return new Storage.Builder(mockStorage, JacksonFactory.getDefaultInstance(), null)
                .setApplicationName(applicationName)
                .build();
        private final ConcurrentMap<String, byte[]> blobs;

        MockedHttpTransport(final ConcurrentMap<String, byte[]> blobs) {
            this.blobs = blobs;
        }

        @Override
        public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException {
            // We analyze the content of the Batch request to detect our custom HTTP header,
            // and extract from it the name of the blob to delete. Then we reply a simple
            // batch response so that the client parser is happy.
            //
            // See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch for the
            // format of the batch request body.
            if (HttpMethods.POST.equals(method) && url.endsWith("/batch")) {
                return new MockLowLevelHttpRequest() {
                    @Override
                    public LowLevelHttpResponse execute() throws IOException {
                        final String contentType = new MultipartContent().getType();

                        final StringBuilder builder = new StringBuilder();
                        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
                            getStreamingContent().writeTo(out);

                            Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> {
                                if (line != null && line.startsWith(DELETION_HEADER)) {
                                    builder.append("--__END_OF_PART__\r\n");
                                    builder.append("Content-Type: application/http").append("\r\n");
                                    builder.append("\r\n");
                                    builder.append("HTTP/1.1 ");

                                    final String blobName = line.substring(line.indexOf(':') + 1).trim();
                                    if (blobs.containsKey(blobName)) {
                                        builder.append(RestStatus.OK.getStatus());
                                        blobs.remove(blobName);
                                    } else {
                                        builder.append(RestStatus.NOT_FOUND.getStatus());
                                    }
                                    builder.append("\r\n");
                                    builder.append("Content-Type: application/json; charset=UTF-8").append("\r\n");
                                    builder.append("Content-Length: 0").append("\r\n");
                                    builder.append("\r\n");
                                }
                            });
                            builder.append("\r\n");
                            builder.append("--__END_OF_PART__--");
                        }

                        MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
                        response.setStatusCode(200);
                        response.setContent(builder.toString());
                        response.setContentType(contentType);
                        return response;
                    }
                };
            } else {
                return super.buildRequest(method, url);
            }
        }
    }
}
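For orientation on the batch handling above (a sketch based on the batch format documented at the cloud.google.com link in the code, not on anything added by this commit): each part of the multipart body embeds one HTTP request, so a delete that went through the overridden buildHttpRequest() carries the custom header and looks roughly like:

    --__END_OF_PART__
    Content-Type: application/http

    DELETE /storage/v1/b/bucket/o/blob%2Fname HTTP/1.1
    x-blob-to-delete: blob/name

    --__END_OF_PART__--

The transport only scans for the x-blob-to-delete lines, which is why it can ignore the rest of each part.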
@ -151,8 +151,7 @@ class S3Repository extends BlobStoreRepository {
    /**
     * Constructs an s3 backed repository
     */
    S3Repository(RepositoryMetaData metadata, Settings settings,
                 NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) throws IOException {
    S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) {
        super(metadata, settings, namedXContentRegistry);

        String bucket = BUCKET_SETTING.get(metadata.settings());
@ -20,14 +20,14 @@
package org.elasticsearch.repositories.s3;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.s3.AbstractAmazonS3;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CopyObjectResult;
import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.GetObjectMetadataRequest;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
@ -37,197 +37,163 @@ import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.Objects;
import java.util.concurrent.ConcurrentMap;

import static org.junit.Assert.assertTrue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

class MockAmazonS3 extends AbstractAmazonS3 {

    private final int mockSocketPort;
    private final ConcurrentMap<String, byte[]> blobs;
    private final String bucket;
    private final boolean serverSideEncryption;
    private final String cannedACL;
    private final String storageClass;

    private Map<String, InputStream> blobs = new ConcurrentHashMap<>();

    // in ESBlobStoreContainerTestCase.java, the maximum
    // length of the input data is 100 bytes
    private byte[] byteCounter = new byte[100];


    MockAmazonS3(int mockSocketPort) {
        this.mockSocketPort = mockSocketPort;
    }

    // Simulate a socket connection to check that SocketAccess.doPrivileged() is used correctly.
    // Any method of AmazonS3 might potentially open a socket to the S3 service. Firstly, a call
    // to any method of AmazonS3 has to be wrapped by SocketAccess.doPrivileged().
    // Secondly, each method on the stack from doPrivileged to opening the socket has to be
    // located in a jar that is provided by the plugin.
    // Thirdly, a SocketPermission has to be configured in plugin-security.policy.
    // By opening a socket in each method of MockAmazonS3 it is ensured that in production AmazonS3
    // is able to open a socket to the S3 Service without causing a SecurityException
    private void simulateS3SocketConnection() {
        try (Socket socket = new Socket(InetAddress.getByName("127.0.0.1"), mockSocketPort)) {
            assertTrue(socket.isConnected()); // NOOP to keep static analysis happy
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }


    @Override
    public boolean doesBucketExist(String bucket) {
        return true;
    MockAmazonS3(final ConcurrentMap<String, byte[]> blobs,
                 final String bucket,
                 final boolean serverSideEncryption,
                 final String cannedACL,
                 final String storageClass) {
        this.blobs = Objects.requireNonNull(blobs);
        this.bucket = Objects.requireNonNull(bucket);
        this.serverSideEncryption = serverSideEncryption;
        this.cannedACL = cannedACL;
        this.storageClass = storageClass;
    }

    @Override
    public boolean doesObjectExist(String bucketName, String objectName) throws AmazonServiceException, SdkClientException {
        simulateS3SocketConnection();
    public boolean doesBucketExist(final String bucket) {
        return this.bucket.equalsIgnoreCase(bucket);
    }

    @Override
    public boolean doesObjectExist(final String bucketName, final String objectName) throws SdkClientException {
        assertThat(bucketName, equalTo(bucket));
        return blobs.containsKey(objectName);
    }

    @Override
    public ObjectMetadata getObjectMetadata(
            GetObjectMetadataRequest getObjectMetadataRequest)
            throws AmazonClientException, AmazonServiceException {
        simulateS3SocketConnection();
        String blobName = getObjectMetadataRequest.getKey();
    public PutObjectResult putObject(final PutObjectRequest request) throws AmazonClientException {
        assertThat(request.getBucketName(), equalTo(bucket));
        assertThat(request.getMetadata().getSSEAlgorithm(), serverSideEncryption ? equalTo("AES256") : nullValue());
        assertThat(request.getCannedAcl(), notNullValue());
        assertThat(request.getCannedAcl().toString(), cannedACL != null ? equalTo(cannedACL) : equalTo("private"));
        assertThat(request.getStorageClass(), storageClass != null ? equalTo(storageClass) : equalTo("STANDARD"));

        if (!blobs.containsKey(blobName)) {
            throw new AmazonS3Exception("[" + blobName + "] does not exist.");

        final String blobName = request.getKey();
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            Streams.copy(request.getInputStream(), out);
            blobs.put(blobName, out.toByteArray());
        } catch (IOException e) {
            throw new AmazonClientException(e);
        }

        return new ObjectMetadata(); // nothing is done with it
    }

    @Override
    public PutObjectResult putObject(PutObjectRequest putObjectRequest)
            throws AmazonClientException, AmazonServiceException {
        simulateS3SocketConnection();
        String blobName = putObjectRequest.getKey();

        if (blobs.containsKey(blobName)) {
            throw new AmazonS3Exception("[" + blobName + "] already exists.");
        }

        blobs.put(blobName, putObjectRequest.getInputStream());
        return new PutObjectResult();
    }

    @Override
    public S3Object getObject(GetObjectRequest getObjectRequest)
            throws AmazonClientException, AmazonServiceException {
        simulateS3SocketConnection();
        // in ESBlobStoreContainerTestCase.java, the prefix is empty,
        // so the key and blobName are equivalent to each other
        String blobName = getObjectRequest.getKey();
    public S3Object getObject(final GetObjectRequest request) throws AmazonClientException {
        assertThat(request.getBucketName(), equalTo(bucket));

        if (!blobs.containsKey(blobName)) {
            throw new AmazonS3Exception("[" + blobName + "] does not exist.");
        final String blobName = request.getKey();
        final byte[] content = blobs.get(blobName);
        if (content == null) {
            AmazonS3Exception exception = new AmazonS3Exception("[" + blobName + "] does not exist.");
            exception.setStatusCode(404);
            throw exception;
        }

        // the HTTP request attribute is irrelevant for reading
        S3ObjectInputStream stream = new S3ObjectInputStream(
                blobs.get(blobName), null, false);
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(content.length);

        S3Object s3Object = new S3Object();
        s3Object.setObjectContent(stream);
        s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(content), null, false));
        s3Object.setKey(blobName);
        s3Object.setObjectMetadata(metadata);

        return s3Object;
    }

    @Override
    public ObjectListing listObjects(ListObjectsRequest listObjectsRequest)
            throws AmazonClientException, AmazonServiceException {
        simulateS3SocketConnection();
        MockObjectListing list = new MockObjectListing();
        list.setTruncated(false);
    public ObjectListing listObjects(final ListObjectsRequest request) throws AmazonClientException {
        assertThat(request.getBucketName(), equalTo(bucket));

        String blobName;
        String prefix = listObjectsRequest.getPrefix();
        final ObjectListing listing = new ObjectListing();
        listing.setBucketName(request.getBucketName());
        listing.setPrefix(request.getPrefix());

        ArrayList<S3ObjectSummary> mockObjectSummaries = new ArrayList<>();

        for (Map.Entry<String, InputStream> blob : blobs.entrySet()) {
            blobName = blob.getKey();
            S3ObjectSummary objectSummary = new S3ObjectSummary();

            if (prefix.isEmpty() || blobName.startsWith(prefix)) {
                objectSummary.setKey(blobName);

                try {
                    objectSummary.setSize(getSize(blob.getValue()));
                } catch (IOException e) {
                    throw new AmazonS3Exception("Object listing " +
                            "failed for blob [" + blob.getKey() + "]");
                }

                mockObjectSummaries.add(objectSummary);
        for (Map.Entry<String, byte[]> blob : blobs.entrySet()) {
            if (Strings.isEmpty(request.getPrefix()) || blob.getKey().startsWith(request.getPrefix())) {
                S3ObjectSummary summary = new S3ObjectSummary();
                summary.setBucketName(request.getBucketName());
                summary.setKey(blob.getKey());
                summary.setSize(blob.getValue().length);
                listing.getObjectSummaries().add(summary);
            }
        }

        list.setObjectSummaries(mockObjectSummaries);
        return list;
        return listing;
    }

    @Override
    public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest)
            throws AmazonClientException, AmazonServiceException {
        simulateS3SocketConnection();
        String sourceBlobName = copyObjectRequest.getSourceKey();
        String targetBlobName = copyObjectRequest.getDestinationKey();
    public CopyObjectResult copyObject(final CopyObjectRequest request) throws AmazonClientException {
        assertThat(request.getSourceBucketName(), equalTo(bucket));
        assertThat(request.getDestinationBucketName(), equalTo(bucket));

        if (!blobs.containsKey(sourceBlobName)) {
            throw new AmazonS3Exception("Source blob [" +
                    sourceBlobName + "] does not exist.");
        final String sourceBlobName = request.getSourceKey();

        final byte[] content = blobs.get(sourceBlobName);
        if (content == null) {
            AmazonS3Exception exception = new AmazonS3Exception("[" + sourceBlobName + "] does not exist.");
            exception.setStatusCode(404);
            throw exception;
        }

        if (blobs.containsKey(targetBlobName)) {
            throw new AmazonS3Exception("Target blob [" +
                    targetBlobName + "] already exists.");
        }

        blobs.put(targetBlobName, blobs.get(sourceBlobName));
        return new CopyObjectResult(); // nothing is done with it
        blobs.put(request.getDestinationKey(), content);
        return new CopyObjectResult();
    }

    @Override
    public void deleteObject(DeleteObjectRequest deleteObjectRequest)
            throws AmazonClientException, AmazonServiceException {
        simulateS3SocketConnection();
        String blobName = deleteObjectRequest.getKey();
    public void deleteObject(final DeleteObjectRequest request) throws AmazonClientException {
        assertThat(request.getBucketName(), equalTo(bucket));

        if (!blobs.containsKey(blobName)) {
            throw new AmazonS3Exception("[" + blobName + "] does not exist.");
        final String blobName = request.getKey();
        if (blobs.remove(blobName) == null) {
            AmazonS3Exception exception = new AmazonS3Exception("[" + blobName + "] does not exist.");
            exception.setStatusCode(404);
            throw exception;
        }

        blobs.remove(blobName);
    }

    private int getSize(InputStream stream) throws IOException {
        int size = stream.read(byteCounter);
        stream.reset(); // in case we ever need the size again
        return size;
    }
    @Override
    public DeleteObjectsResult deleteObjects(DeleteObjectsRequest request) throws SdkClientException {
        assertThat(request.getBucketName(), equalTo(bucket));

    private class MockObjectListing extends ObjectListing {
        // the objectSummaries attribute in ObjectListing.java
        // is read-only, but we need to be able to write to it,
        // so we create a mock of it to work around this
        private List<S3ObjectSummary> mockObjectSummaries;

        @Override
        public List<S3ObjectSummary> getObjectSummaries() {
            return mockObjectSummaries;
        }

        private void setObjectSummaries(List<S3ObjectSummary> objectSummaries) {
            mockObjectSummaries = objectSummaries;
        final List<DeleteObjectsResult.DeletedObject> deletions = new ArrayList<>();
        for (DeleteObjectsRequest.KeyVersion key : request.getKeys()) {
            if (blobs.remove(key.getKey()) == null) {
                AmazonS3Exception exception = new AmazonS3Exception("[" + key + "] does not exist.");
                exception.setStatusCode(404);
                throw exception;
            } else {
                DeleteObjectsResult.DeletedObject deletion = new DeleteObjectsResult.DeletedObject();
                deletion.setKey(key.getKey());
                deletions.add(deletion);
            }
        }
        return new DeleteObjectsResult(deletions);
    }
}
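A quick usage sketch of the rewritten mock (hypothetical names and values, not part of the commit): tests can seed the shared blobs map directly and then drive the mock through the ordinary AWS SDK v1 surface:

    // Illustration only: seed a blob, then read it back through the SDK API.
    final ConcurrentMap<String, byte[]> blobs = new ConcurrentHashMap<>();
    blobs.put("path/blob", new byte[]{1, 2, 3});
    final MockAmazonS3 client = new MockAmazonS3(blobs, "bucket", false, null, null);
    assert client.doesObjectExist("bucket", "path/blob");
    assert client.getObject(new GetObjectRequest("bucket", "path/blob"))
            .getObjectMetadata().getContentLength() == 3;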
@ -37,26 +37,19 @@ import com.amazonaws.services.s3.model.UploadPartResult;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.mockito.ArgumentCaptor;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.elasticsearch.repositories.s3.S3BlobStoreTests.randomMockS3BlobStore;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doNothing;

@ -67,36 +60,11 @@ import static org.mockito.Mockito.when;

public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {

    private static ServerSocket mockS3ServerSocket;

    private static Thread mockS3AcceptorThread;

    // Opens a MockSocket to simulate connections to S3 checking that SocketPermissions are set up correctly.
    // See MockAmazonS3.simulateS3SocketConnection.
    @BeforeClass
    public static void openMockSocket() throws IOException {
        mockS3ServerSocket = new MockServerSocket(0, 50, InetAddress.getByName("127.0.0.1"));
        mockS3AcceptorThread = new Thread(() -> {
            while (!mockS3ServerSocket.isClosed()) {
                try {
                    // Accept connections from MockAmazonS3.
                    mockS3ServerSocket.accept();
                } catch (IOException e) {
                }
            }
        });
        mockS3AcceptorThread.start();
    protected BlobStore newBlobStore() {
        return randomMockS3BlobStore();
    }

    protected BlobStore newBlobStore() throws IOException {
        MockAmazonS3 client = new MockAmazonS3(mockS3ServerSocket.getLocalPort());
        String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);

        return new S3BlobStore(Settings.EMPTY, client, bucket, false,
                new ByteSizeValue(10, ByteSizeUnit.MB), "public-read-write", "standard");
    }

    public void testExecuteSingleUploadBlobSizeTooLarge() throws IOException {
    public void testExecuteSingleUploadBlobSizeTooLarge() {
        final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(6, 10));
        final S3BlobStore blobStore = mock(S3BlobStore.class);
        final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore);

@ -106,7 +74,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
        assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage());
    }

    public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() throws IOException {
    public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() {
        final S3BlobStore blobStore = mock(S3BlobStore.class);
        when(blobStore.bufferSizeInBytes()).thenReturn(ByteSizeUnit.MB.toBytes(1));

@ -168,7 +136,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
    }
    }

    public void testExecuteMultipartUploadBlobSizeTooLarge() throws IOException {
    public void testExecuteMultipartUploadBlobSizeTooLarge() {
        final long blobSize = ByteSizeUnit.TB.toBytes(randomIntBetween(6, 10));
        final S3BlobStore blobStore = mock(S3BlobStore.class);
        final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore);

@ -179,7 +147,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
        assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage());
    }

    public void testExecuteMultipartUploadBlobSizeTooSmall() throws IOException {
    public void testExecuteMultipartUploadBlobSizeTooSmall() {
        final long blobSize = ByteSizeUnit.MB.toBytes(randomIntBetween(1, 4));
        final S3BlobStore blobStore = mock(S3BlobStore.class);
        final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore);

@ -291,7 +259,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
        assertEquals(expectedEtags, actualETags);
    }

    public void testExecuteMultipartUploadAborted() throws IOException {
    public void testExecuteMultipartUploadAborted() {
        final String bucketName = randomAlphaOfLengthBetween(1, 10);
        final String blobName = randomAlphaOfLengthBetween(1, 10);
        final BlobPath blobPath = new BlobPath();

@ -418,12 +386,4 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
        assertEquals("Expected number of parts [" + expectedParts + "] but got [" + result.v1() + "]", expectedParts, (long) result.v1());
        assertEquals("Expected remaining [" + expectedRemaining + "] but got [" + result.v2() + "]", expectedRemaining, (long) result.v2());
    }

    @AfterClass
    public static void closeMockSocket() throws IOException, InterruptedException {
        mockS3ServerSocket.close();
        mockS3AcceptorThread.join();
        mockS3AcceptorThread = null;
        mockS3ServerSocket = null;
    }
}
@ -0,0 +1,109 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.repositories.s3;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.StorageClass;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase {

    private static final ConcurrentMap<String, byte[]> blobs = new ConcurrentHashMap<>();
    private static String bucket;
    private static String client;
    private static ByteSizeValue bufferSize;
    private static boolean serverSideEncryption;
    private static String cannedACL;
    private static String storageClass;

    @BeforeClass
    public static void setUpRepositorySettings() {
        bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
        client = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
        bufferSize = new ByteSizeValue(randomIntBetween(5, 50), ByteSizeUnit.MB);
        serverSideEncryption = randomBoolean();
        if (randomBoolean()) {
            cannedACL = randomFrom(CannedAccessControlList.values()).toString();
        }
        if (randomBoolean()) {
            storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString();
        }
    }

    @AfterClass
    public static void wipeRepository() {
        blobs.clear();
    }

    @Override
    protected void createTestRepository(final String name) {
        assertAcked(client().admin().cluster().preparePutRepository(name)
            .setType(S3Repository.TYPE)
            .setSettings(Settings.builder()
                .put(S3Repository.BUCKET_SETTING.getKey(), bucket)
                .put(InternalAwsS3Service.CLIENT_NAME.getKey(), client)
                .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize)
                .put(S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), serverSideEncryption)
                .put(S3Repository.CANNED_ACL_SETTING.getKey(), cannedACL)
                .put(S3Repository.STORAGE_CLASS_SETTING.getKey(), storageClass)));
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singletonList(TestS3RepositoryPlugin.class);
    }

    public static class TestS3RepositoryPlugin extends S3RepositoryPlugin {

        public TestS3RepositoryPlugin(final Settings settings) {
            super(settings);
        }

        @Override
        public Map<String, Repository.Factory> getRepositories(final Environment env, final NamedXContentRegistry registry) {
            return Collections.singletonMap(S3Repository.TYPE, (metadata) ->
                new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings(), emptyMap()) {
                    @Override
                    public synchronized AmazonS3 client(final Settings repositorySettings) {
                        return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass);
                    }
                }));
        }
    }
}
@ -19,18 +19,29 @@

package org.elasticsearch.repositories.s3;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.StorageClass;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.BlobStoreException;
import org.elasticsearch.repositories.s3.S3BlobStore;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.repositories.ESBlobStoreTestCase;

import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;

import static org.hamcrest.Matchers.equalTo;

public class S3BlobStoreTests extends ESTestCase {
    public void testInitCannedACL() throws IOException {
public class S3BlobStoreTests extends ESBlobStoreTestCase {

    @Override
    protected BlobStore newBlobStore() {
        return randomMockS3BlobStore();
    }

    public void testInitCannedACL() {
        String[] aclList = new String[]{
            "private", "public-read", "public-read-write", "authenticated-read",
            "log-delivery-write", "bucket-owner-read", "bucket-owner-full-control"};

@ -52,16 +63,12 @@ public class S3BlobStoreTests extends ESTestCase {
    }
    }

    public void testInvalidCannedACL() throws IOException {
        try {
            S3BlobStore.initCannedACL("test_invalid");
            fail("CannedACL should fail");
        } catch (BlobStoreException ex) {
            assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
        }
    public void testInvalidCannedACL() {
        BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initCannedACL("test_invalid"));
        assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
    }

    public void testInitStorageClass() throws IOException {
    public void testInitStorageClass() {
        // it should default to `standard`
        assertThat(S3BlobStore.initStorageClass(null), equalTo(StorageClass.Standard));
        assertThat(S3BlobStore.initStorageClass(""), equalTo(StorageClass.Standard));

@ -72,25 +79,43 @@ public class S3BlobStoreTests extends ESTestCase {
        assertThat(S3BlobStore.initStorageClass("reduced_redundancy"), equalTo(StorageClass.ReducedRedundancy));
    }

    public void testCaseInsensitiveStorageClass() throws IOException {
    public void testCaseInsensitiveStorageClass() {
        assertThat(S3BlobStore.initStorageClass("sTandaRd"), equalTo(StorageClass.Standard));
        assertThat(S3BlobStore.initStorageClass("sTandaRd_Ia"), equalTo(StorageClass.StandardInfrequentAccess));
        assertThat(S3BlobStore.initStorageClass("reduCED_redundancy"), equalTo(StorageClass.ReducedRedundancy));
    }

    public void testInvalidStorageClass() throws IOException {
        try {
            S3BlobStore.initStorageClass("whatever");
        } catch(BlobStoreException ex) {
            assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class."));
        }
    public void testInvalidStorageClass() {
        BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("whatever"));
        assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class."));
    }

    public void testRejectGlacierStorageClass() throws IOException {
        try {
            S3BlobStore.initStorageClass("glacier");
        } catch(BlobStoreException ex) {
            assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported"));
    public void testRejectGlacierStorageClass() {
        BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("glacier"));
        assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported"));
    }

    /**
     * Creates a new {@link S3BlobStore} with random settings.
     * <p>
     * The blobstore uses a {@link MockAmazonS3} client.
     */
    public static S3BlobStore randomMockS3BlobStore() {
        String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
        ByteSizeValue bufferSize = new ByteSizeValue(randomIntBetween(5, 100), ByteSizeUnit.MB);
        boolean serverSideEncryption = randomBoolean();

        String cannedACL = null;
        if (randomBoolean()) {
            cannedACL = randomFrom(CannedAccessControlList.values()).toString();
        }

        String storageClass = null;
        if (randomBoolean()) {
            storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString();
        }

        AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass);
        return new S3BlobStore(Settings.EMPTY, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
    }
}
@@ -100,13 +100,12 @@ public class QueryBuilderBWCIT extends ESRestTestCase {
             new MatchPhraseQueryBuilder("keyword_field", "value").slop(3)
         );
         addCandidate("\"range\": { \"long_field\": {\"gte\": 1, \"lte\": 9}}", new RangeQueryBuilder("long_field").from(1).to(9));
-        // bug url https://github.com/elastic/elasticsearch/issues/29376
-        /*addCandidate(
+        addCandidate(
             "\"bool\": { \"must_not\": [{\"match_all\": {}}], \"must\": [{\"match_all\": {}}], " +
                 "\"filter\": [{\"match_all\": {}}], \"should\": [{\"match_all\": {}}]}",
             new BoolQueryBuilder().mustNot(new MatchAllQueryBuilder()).must(new MatchAllQueryBuilder())
                 .filter(new MatchAllQueryBuilder()).should(new MatchAllQueryBuilder())
-        );*/
+        );
         addCandidate(
             "\"dis_max\": {\"queries\": [{\"match_all\": {}},{\"match_all\": {}},{\"match_all\": {}}], \"tie_breaker\": 0.01}",
             new DisMaxQueryBuilder().add(new MatchAllQueryBuilder()).add(new MatchAllQueryBuilder()).add(new MatchAllQueryBuilder())
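For context on that hunk: each addCandidate pair in QueryBuilderBWCIT round-trips a raw JSON query through an older cluster and checks it comes back as the equivalent builder, and the bool candidate is re-enabled now that the issue referenced in the removed comment is addressed. A hedged sketch of the builder side, using the public query-builder API to produce the same shape:

    import org.elasticsearch.index.query.BoolQueryBuilder;
    import org.elasticsearch.index.query.MatchAllQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilder;

    public class BoolCandidateSketch {
        public static void main(String[] args) {
            // Every clause list of the bool query gets a match_all, so each of
            // must/must_not/filter/should is exercised by the round trip.
            QueryBuilder query = new BoolQueryBuilder()
                .mustNot(new MatchAllQueryBuilder())
                .must(new MatchAllQueryBuilder())
                .filter(new MatchAllQueryBuilder())
                .should(new MatchAllQueryBuilder());
            System.out.println(query); // QueryBuilder#toString renders the JSON form
        }
    }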
@@ -1,26 +1,36 @@
 {
   "rank_eval": {
     "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html",
-    "methods": ["POST"],
+    "methods": ["GET", "POST"],
     "url": {
       "path": "/_rank_eval",
-      "paths": ["/_rank_eval", "/{index}/_rank_eval", "/{index}/{type}/_rank_eval"],
+      "paths": ["/_rank_eval", "/{index}/_rank_eval"],
       "parts": {
         "index": {
           "type": "list",
           "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices"
-        },
-        "type": {
-          "type" : "list",
-          "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types"
         }
       },
-      "params": {}
+      "params": {
+        "ignore_unavailable": {
+          "type" : "boolean",
+          "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
+        },
+        "allow_no_indices": {
+          "type" : "boolean",
+          "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
+        },
+        "expand_wildcards": {
+          "type" : "enum",
+          "options" : ["open","closed","none","all"],
+          "default" : "open",
+          "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
+        }
+      }
     },
     "body": {
-      "description": "The search definition using the Query DSL and the prototype for the eval request.",
+      "description": "The ranking evaluation search definition, including search requests, document ratings and ranking metric definition.",
      "required": true
     }
   }
 }
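The rank_eval spec change above does three things: it allows GET alongside POST, drops the type-based /{index}/{type}/_rank_eval path (and the type part with it), and documents the standard index-resolution parameters. A hedged sketch of a request the updated spec permits, using the Request API available in newer versions of the low-level Java REST client (host, index name, and body are placeholders; a real call needs rated search requests in the body):

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class RankEvalRequestSketch {
        public static void main(String[] args) throws Exception {
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                // GET is now listed alongside POST; the {type} path variant is gone.
                Request request = new Request("GET", "/my-index/_rank_eval");
                // Index-resolution params newly documented in the spec:
                request.addParameter("ignore_unavailable", "true");
                request.addParameter("expand_wildcards", "open");
                // Placeholder body; real requests carry rated documents and a metric.
                request.setJsonEntity("{\"requests\": [], \"metric\": {\"precision\": {}}}");
                Response response = client.performRequest(request);
                System.out.println(response.getStatusLine());
            }
        }
    }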
@@ -281,7 +281,7 @@ setup:
           sigma: -1

   - do:
-      catch: /parsing_exception/
+      catch: /x_content_parse_exception/
       search:
         body:
           aggs:
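The catch regex changes because this validation failure now surfaces as an x_content_parse_exception rather than a parsing_exception, and the YAML runner matches the regex against the error type string that Elasticsearch derives from the exception class name. A small sketch of that CamelCase-to-snake_case derivation (a hypothetical re-implementation for illustration, not the actual Elasticsearch helper):

    import java.util.Locale;

    public class ErrorTypeSketch {
        // Approximates how a REST error "type" is derived from an exception
        // class name: insert underscores at case boundaries, then lowercase.
        static String errorType(String simpleClassName) {
            return simpleClassName
                .replaceAll("([a-z0-9])([A-Z])", "$1_$2")
                .replaceAll("([A-Z]+)([A-Z][a-z])", "$1_$2")
                .toLowerCase(Locale.ROOT);
        }

        public static void main(String[] args) {
            System.out.println(errorType("ParsingException"));       // parsing_exception
            System.out.println(errorType("XContentParseException")); // x_content_parse_exception
        }
    }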
@@ -1 +0,0 @@
-a731424734fd976b409f1963ba88471caccc18aa
@@ -0,0 +1 @@
+4325a5cdf8d3fa23f326cd86a2297fee2bc844f5

@@ -1 +0,0 @@
-5f8ad8c3f8c404803aa81a43ac6f732e19c00935
@@ -0,0 +1 @@
+3b618a21a924cb35ac1f27d3ca47d9ed04f43588

@@ -1 +0,0 @@
-19b1a1fff6bb077e0660e4f0666807e24dd26865
@@ -0,0 +1 @@
+040e2de30c5e6bad868b144e371730200719ceb3

@@ -1 +0,0 @@
-94dd26d685ae981905b775780e6c824f723b14af
@@ -0,0 +1 @@
+20a5c472a8be9bec7aa40472791389e875b9e1f2

@@ -1 +0,0 @@
-9783a0bb56fb8bbd17280d3def97a656999f6a88
@@ -0,0 +1 @@
+1f92c7d3d9bc2765fe6195bcc4fcb160d11175cc

@@ -1 +0,0 @@
-01eda74d798af85f846ebd74f53ec7a16e6e2ba1
@@ -0,0 +1 @@
+da4af75a7e4fe7843fbfa4b58e6a238b6b706d64

@@ -1 +0,0 @@
-29b8b6324722dc6dda784731e3e918de9715422c
@@ -0,0 +1 @@
+fc45b02a5086ec454e6d6ae81fc2cbe7be1c0902

@@ -1 +0,0 @@
-e1ae49522164a721d67459e59792db6f4dff70fc
@@ -0,0 +1 @@
+b6a2418a94b84c29c4b9fcfe4381f2cc1aa4c214

@@ -1 +0,0 @@
-87595367717ddc9fbf95bbf649216a5d7954d9d7
@@ -0,0 +1 @@
+6292a5579a6ab3423ceca60d2ea41cd86481e7c0

@@ -1 +0,0 @@
-5befbb58ef76c79fc8afebbca781b01320b8ffad
@@ -0,0 +1 @@
+95b2563e5337377dde2eb987b3fce144be5e7a77

@@ -1 +0,0 @@
-3d7aa72ccec38ef902b149da36548fb227eeb58a
@@ -0,0 +1 @@
+1efd2fa7cba1e359e3fbb8b4c11cab37024b2178

@@ -1 +0,0 @@
-ac1755a69f14c53f7846ef7d9b405d44caf53091
@@ -0,0 +1 @@
+93512c2160bdc3e602141329e5945a91918b6752
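The one-line files above are dependency checksums: Elasticsearch keeps a one-line .sha1 file per bundled jar, so each upgraded dependency shows up as a deleted old checksum file and an added new one (the file names themselves are elided by the truncated diff view). For anyone verifying such an upgrade locally, a minimal sketch of recomputing a jar's SHA-1 (file path and expected digest are placeholder arguments):

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;

    public class Sha1Check {
        public static void main(String[] args) throws Exception {
            // Usage: java Sha1Check <jar-path> <expected-sha1>
            byte[] digest = MessageDigest.getInstance("SHA-1")
                .digest(Files.readAllBytes(Paths.get(args[0])));
            StringBuilder hex = new StringBuilder();
            for (byte b : digest) {
                hex.append(String.format("%02x", b)); // lowercase hex, like the .sha1 files
            }
            System.out.println(hex);
            System.out.println(hex.toString().equals(args[1]) ? "OK" : "MISMATCH");
        }
    }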
Some files were not shown because too many files have changed in this diff.