Merge branch 'master' into zen2
commit 93bb24e1f8
@@ -6,3 +6,5 @@
ES_BUILD_JAVA=java11
ES_RUNTIME_JAVA=java8
GRADLE_TASK=build
@@ -7,8 +7,8 @@
<!-- On Windows, Checkstyle matches files using \ path separator -->

<!-- These files are generated by ANTLR so its silly to hold them to our rules. -->
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<suppress files="plugin[/\\]sql[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]sql[/\\]parser[/\\]SqlBase(Base(Listener|Visitor)|Lexer|Listener|Parser|Visitor).java" checks="." />

<!-- JNA requires the no-argument constructor on JNAKernel32Library.SizeT to be public-->
@@ -42,7 +42,7 @@
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]StoredScriptsDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]TasksClientDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]WatcherDocumentationIT.java" id="SnippetLength" />
<suppress files="modules[/\\]reindex[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]ReindexDocumentationIT.jav" id="SnippetLength" />
<suppress files="modules[/\\]reindex[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]ReindexDocumentationIT.java" id="SnippetLength" />

<!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when we the
files start to pass. -->
@@ -154,7 +154,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchPhaseController.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptions.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ToXContentToBytes.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]BroadcastOperationRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]TransportBroadcastAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeAction.java" checks="LineLength" />
@@ -182,7 +181,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNANatives.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JarHell.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]FilterClient.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]support[/\\]AbstractClient.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClient.java" checks="LineLength" />
@@ -237,7 +235,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FilterableTermsEnum.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FreqTermsEnum.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]search[/\\]XMoreLikeThis.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]search[/\\]function[/\\]FiltersFunctionScoreQuery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]search[/\\]function[/\\]FunctionScoreQuery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]store[/\\]ByteArrayIndexInput.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]Cidrs.java" checks="LineLength" />
@@ -259,7 +256,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]LocalAllocateDangledIndices.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicyConfig.java" checks="LineLength" />
@@ -267,7 +263,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisRegistry.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]CustomAnalyzerProvider.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]ShingleTokenFilterFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]StemmerOverrideTokenFilterFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]cache[/\\]bitset[/\\]BitsetFilterCache.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]codec[/\\]PerFieldMappingPostingFormatCodec.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]ElasticsearchConcurrentMergeScheduler.java" checks="LineLength" />
@@ -283,13 +278,10 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]MultiOrdinals.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]OrdinalsBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]SinglePackedOrdinals.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]AbstractAtomicParentChildFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]AbstractIndexOrdinalsFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]AbstractLatLonPointDVIndexFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]BinaryDVIndexFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]IndexIndexFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]PagedBytesIndexFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]ParentChildIndexFieldData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]get[/\\]ShardGetService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentFieldMappers.java" checks="LineLength" />
@@ -307,18 +299,15 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]Mapping.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MetadataFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ObjectMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ParentFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RootObjectMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RoutingFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]SourceFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]TypeFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]UidFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]VersionFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]merge[/\\]MergeStats.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQuery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]CommitPoint.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexEventListener.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexSearcherWrapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexShard.java" checks="LineLength" />
@@ -326,7 +315,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardPath.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardStateMetaData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]StoreRecovery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]TranslogRecoveryPerformer.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]Store.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]termvectors[/\\]TermVectorsService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]BaseTranslogReader.java" checks="LineLength" />
@@ -360,15 +348,11 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestIndicesAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistry.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModes.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptSettings.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]MultiValueMode.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]AggregatorFactories.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]InternalMultiBucketAggregation.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]BucketsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]filters[/\\]FiltersAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]geogrid[/\\]GeoHashGridAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]histogram[/\\]HistogramAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]MissingAggregator.java" checks="LineLength" />
@@ -380,12 +364,10 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]PercentageScore.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]ScriptHeuristic.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]SignificanceHeuristic.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]InternalOrder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]LongTermsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]StringTermsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]TermsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]TermsAggregatorFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]support[/\\]IncludeExclude.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]HyperLogLogPlusPlus.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationPath.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]AggregatedDfs.java" checks="LineLength" />
@@ -435,13 +417,10 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]BroadcastReplicationTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationActionTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]AbstractTermVectorsTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]GetTermVectorsCheckDocFreqIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]GetTermVectorsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsUnitTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JarHellTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RestoreBackwardsCompatIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterInfoServiceIT.java" checks="LineLength" />
@@ -550,18 +529,15 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]AbstractStringFieldDataTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]FieldDataCacheTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]IndexFieldDataServiceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ParentChildFieldDataTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldTypeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CopyToMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentMapperMergeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentParserTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DynamicMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalMetadataMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldNamesFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldTypeTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoPointFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoShapeFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IdFieldMapperTests.java" checks="LineLength" />
@@ -572,11 +548,9 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]NestedObjectMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]NullValueObjectMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ObjectMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ParentFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]PathMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RoutingFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]SourceFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]UpdateMappingOnClusterIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]UpdateMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoolQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoostingQueryBuilderTests.java" checks="LineLength" />
@@ -597,7 +571,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]similarity[/\\]SimilarityTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]CorruptedFileIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]CorruptedTranslogIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]DirectoryUtilsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]IndexStoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]StoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]suggest[/\\]stats[/\\]SuggestStatsIT.java" checks="LineLength" />
@@ -637,9 +610,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponseTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]AliasRoutingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]SimpleRoutingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]FileScriptTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistryTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModesTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptServiceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]MultiValueModeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]SearchWithRejectionsIT.java" checks="LineLength" />
@@ -650,12 +620,10 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]MissingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]NaNSortingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]NestedIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ParentIdAggIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ReverseNestedIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]SamplerIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ShardReduceIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ShardSizeTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]SignificantTermsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsDocCountErrorIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsShardMinDocCountIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]NestedAggregatorTests.java" checks="LineLength" />
@@ -667,7 +635,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomExceptionsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomIOExceptionsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportTwoNodesSearchIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ParentFieldLoadingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoBoundingBoxIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoFilterIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoShapeQueryTests.java" checks="LineLength" />
@@ -684,7 +651,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]GeoDistanceSortBuilderIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]ContextCompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggester.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellCheckerTests.java" checks="LineLength" />
@@ -716,10 +682,6 @@
<suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsRepository.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsTests.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]S3Repository.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AbstractAwsTestCase.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AbstractS3SnapshotRestoreTest.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AmazonS3Wrapper.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]MockDefaultS3OutputStream.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]TestAmazonS3.java" checks="LineLength" />
<suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" />
</suppressions>
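The `[/\\]` character classes above are what the Windows comment at the top of the file refers to: each suppression must match a path whether it uses `/` or `\` as the separator. A standalone sketch (class name invented) of how one of these patterns behaves as a plain `java.util.regex` pattern:

import java.util.regex.Pattern;

// Sketch: a checkstyle-style suppression pattern matching both path separators.
// In the Java string literal, "[/\\\\]" yields the regex character class [/\], i.e. '/' or '\'.
public class SuppressionPatternDemo {
    public static void main(String[] args) {
        Pattern p = Pattern.compile("org[/\\\\]elasticsearch[/\\\\]painless[/\\\\]antlr[/\\\\]PainlessLexer\\.java");
        System.out.println(p.matcher("org/elasticsearch/painless/antlr/PainlessLexer.java").find());    // true
        System.out.println(p.matcher("org\\elasticsearch\\painless\\antlr\\PainlessLexer.java").find()); // true
    }
}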
@@ -85,6 +85,7 @@ integTestRunner {
}

integTestCluster {
  systemProperty 'es.scripting.update.ctx_in_params', 'false'
  setting 'xpack.license.self_generated.type', 'trial'
  setting 'xpack.security.enabled', 'true'
  // Truststore settings are not used since TLS is not enabled. Included for testing the get certificates API
@@ -280,7 +280,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
.startObject()
.startObject("script")
.field("lang", "painless")
.field("code", "ctx._source.field += params.count")
.field("source", "ctx._source.field += params.count")
.endObject()
.endObject()));
Response response = client().performRequest(request);
@@ -991,10 +991,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::update-by-query-request-conflicts
request.setConflicts("proceed"); // <1>
// end::update-by-query-request-conflicts
// tag::update-by-query-request-typeOrQuery
request.setDocTypes("doc"); // <1>
request.setQuery(new TermQueryBuilder("user", "kimchy")); // <2>
// end::update-by-query-request-typeOrQuery
// tag::update-by-query-request-query
request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1>
// end::update-by-query-request-query
// tag::update-by-query-request-size
request.setSize(10); // <1>
// end::update-by-query-request-size
@@ -1110,10 +1109,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::delete-by-query-request-conflicts
request.setConflicts("proceed"); // <1>
// end::delete-by-query-request-conflicts
// tag::delete-by-query-request-typeOrQuery
request.setDocTypes("doc"); // <1>
request.setQuery(new TermQueryBuilder("user", "kimchy")); // <2>
// end::delete-by-query-request-typeOrQuery
// tag::delete-by-query-request-query
request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1>
// end::delete-by-query-request-query
// tag::delete-by-query-request-size
request.setSize(10); // <1>
// end::delete-by-query-request-size
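Assembled, the tagged update/delete-by-query snippets above configure a request like this; a hedged sketch assuming the `org.elasticsearch.index.reindex` request classes these tests appear to use:

import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;

// Sketch assembled from the tagged snippets above; a delete-by-query
// request is configured with the same three calls.
class UpdateByQueryExample {
    static UpdateByQueryRequest build() {
        UpdateByQueryRequest request = new UpdateByQueryRequest();
        request.setConflicts("proceed");                          // keep going on version conflicts
        request.setQuery(new TermQueryBuilder("user", "kimchy")); // only process matching documents
        request.setSize(10);                                      // process at most 10 documents
        return request;
    }
}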
@@ -140,10 +140,9 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
// end::search-request-basic
}
{
// tag::search-request-indices-types
// tag::search-request-indices
SearchRequest searchRequest = new SearchRequest("posts"); // <1>
searchRequest.types("doc"); // <2>
// end::search-request-indices-types
// end::search-request-indices
// tag::search-request-routing
searchRequest.routing("routing"); // <1>
// end::search-request-routing
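After this change the snippet reduces to restricting the request to an index and setting a routing value; a minimal sketch using only the calls shown above:

import org.elasticsearch.action.search.SearchRequest;

// Minimal sketch of the search-request-indices and search-request-routing snippets.
class SearchRequestExample {
    static SearchRequest build() {
        SearchRequest searchRequest = new SearchRequest("posts"); // restrict the request to the "posts" index
        searchRequest.routing("routing");                         // route the request to specific shard copies
        return searchRequest;
    }
}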
@@ -24,14 +24,13 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-con
--------------------------------------------------
<1> Set `proceed` on version conflict

You can limit the documents by adding a type to the source or by adding a query.
You can limit the documents by adding a query.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-typeOrQuery]
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-query]
--------------------------------------------------
<1> Only copy `doc` type
<2> Only copy documents which have field `user` set to `kimchy`
<1> Only copy documents which have field `user` set to `kimchy`

It’s also possible to limit the number of processed documents by setting size.
@@ -25,14 +25,13 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-con
--------------------------------------------------
<1> Set `proceed` on version conflict

You can limit the documents by adding a type to the source or by adding a query.
You can limit the documents by adding a query.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-typeOrQuery]
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-query]
--------------------------------------------------
<1> Only copy `doc` type
<2> Only copy documents which have field `user` set to `kimchy`
<1> Only copy documents which have field `user` set to `kimchy`

It’s also possible to limit the number of processed documents by setting size.
@@ -28,10 +28,9 @@ For example:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices-types]
include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices]
--------------------------------------------------
<1> Restricts the request to an index
<2> Limits the request to a type

[[java-rest-high-multi-search-sync]]
==== Synchronous Execution
@@ -33,10 +33,9 @@ Let's first look at some of the optional arguments of a +{request}+:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-indices-types]
include-tagged::{doc-tests-file}[{api}-request-indices]
--------------------------------------------------
<1> Restricts the request to an index
<2> Limits the request to a type

There are a couple of other interesting optional parameters:
@@ -38,6 +38,10 @@ include::shut-down-node.asciidoc[]
. *Upgrade all nodes.*
+
--
include::remove-xpack.asciidoc[]
--
+
--
include::upgrade-node.asciidoc[]
include::set-paths-tip.asciidoc[]
--
@@ -47,8 +51,6 @@ include::set-paths-tip.asciidoc[]
Use the `elasticsearch-plugin` script to install the upgraded version of each
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
a node.
+
include::remove-xpack.asciidoc[]

. *Start each upgraded node.*
+
@@ -1,4 +1,8 @@
IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3,
remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As
of 6.3, {xpack} is included in the default distribution. The node will fail to
start if the old {xpack} plugin is present.
IMPORTANT: If you are upgrading from a version prior to 6.3 and use {xpack}
then you must remove the {xpack} plugin before upgrading with
`bin/elasticsearch-plugin remove x-pack`. As of 6.3, {xpack} is included in
the default distribution so make sure to upgrade to that one. If you upgrade
without removing the {xpack} plugin first the node will fail to start. If you
did not remove the {xpack} plugin and the node fails to start then you must
downgrade to your previous version, remove {xpack}, and then upgrade again.
In general downgrading is not supported but in this particular case it is.
@@ -44,6 +44,10 @@ include::shut-down-node.asciidoc[]
. *Upgrade the node you shut down.*
+
--
include::remove-xpack.asciidoc[]
--
+
--
include::upgrade-node.asciidoc[]
include::set-paths-tip.asciidoc[]
--
@@ -53,8 +57,6 @@ include::set-paths-tip.asciidoc[]
Use the `elasticsearch-plugin` script to install the upgraded version of each
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
a node.
+
include::remove-xpack.asciidoc[]

. *Start the upgraded node.*
+
@@ -0,0 +1,91 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import java.io.IOException;
import org.apache.lucene.expressions.Bindings;
import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.SimpleBindings;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DoubleValues;
import org.apache.lucene.search.DoubleValuesSource;
import org.elasticsearch.script.GeneralScriptException;
import org.elasticsearch.script.NumberSortScript;

/**
 * A bridge to evaluate an {@link Expression} against {@link Bindings} in the context
 * of a {@link NumberSortScript}.
 */
class ExpressionNumberSortScript implements NumberSortScript.LeafFactory {

    final Expression exprScript;
    final SimpleBindings bindings;
    final DoubleValuesSource source;
    final boolean needsScores;

    ExpressionNumberSortScript(Expression e, SimpleBindings b, boolean needsScores) {
        exprScript = e;
        bindings = b;
        source = exprScript.getDoubleValuesSource(bindings);
        this.needsScores = needsScores;
    }

    @Override
    public NumberSortScript newInstance(final LeafReaderContext leaf) throws IOException {
        return new NumberSortScript() {
            // Fake the scorer until setScorer is called.
            DoubleValues values = source.getValues(leaf, new DoubleValues() {
                @Override
                public double doubleValue() {
                    return 0.0D;
                }

                @Override
                public boolean advanceExact(int doc) {
                    return true;
                }
            });

            @Override
            public double execute() {
                try {
                    return values.doubleValue();
                } catch (Exception exception) {
                    throw new GeneralScriptException("Error evaluating " + exprScript, exception);
                }
            }

            @Override
            public void setDocument(int d) {
                try {
                    values.advanceExact(d);
                } catch (IOException e) {
                    throw new IllegalStateException("Can't advance to doc using " + exprScript, e);
                }
            }
        };
    }

    @Override
    public boolean needs_score() {
        return needsScores;
    }

}
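How this leaf factory is driven can be seen in ExpressionNumberSortScriptTests later in this diff; condensed, with `service`, `lookup`, and a LeafReaderContext `leaf` assumed to be set up as in that test:

NumberSortScript.Factory factory =
    service.compile(null, "doc['field'].value", NumberSortScript.CONTEXT, Collections.emptyMap());
NumberSortScript script = factory.newFactory(Collections.emptyMap(), lookup).newInstance(leaf);
script.setDocument(1);           // advance to document 1 in the segment
double value = script.execute(); // evaluate the expression for that document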
@@ -42,6 +42,7 @@ import org.elasticsearch.script.BucketAggregationScript;
import org.elasticsearch.script.BucketAggregationSelectorScript;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.FilterScript;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
@@ -135,6 +136,9 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
        } else if (context.instanceClazz.equals(AggregationScript.class)) {
            AggregationScript.Factory factory = (p, lookup) -> newAggregationScript(expr, lookup, p);
            return context.factoryClazz.cast(factory);
        } else if (context.instanceClazz.equals(NumberSortScript.class)) {
            NumberSortScript.Factory factory = (p, lookup) -> newSortScript(expr, lookup, p);
            return context.factoryClazz.cast(factory);
        }
        throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]");
    }
@@ -187,7 +191,6 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
                // noop: _value is special for aggregations, and is handled in ExpressionScriptBindings
                // TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a
                // way to know this is for aggregations and so _value is ok to have...

            } else if (vars != null && vars.containsKey(variable)) {
                bindFromParams(vars, bindings, variable);
            } else {
@@ -205,6 +208,33 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
        return new ExpressionSearchScript(expr, bindings, specialValue, needsScores);
    }

    private NumberSortScript.LeafFactory newSortScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) {
        // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
        // instead of complicating SimpleBindings (which should stay simple)
        SimpleBindings bindings = new SimpleBindings();
        boolean needsScores = false;
        for (String variable : expr.variables) {
            try {
                if (variable.equals("_score")) {
                    bindings.add(new SortField("_score", SortField.Type.SCORE));
                    needsScores = true;
                } else if (vars != null && vars.containsKey(variable)) {
                    bindFromParams(vars, bindings, variable);
                } else {
                    // delegate valuesource creation based on field's type
                    // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods.
                    final ValueSource valueSource = getDocValueSource(variable, lookup);
                    needsScores |= valueSource.getSortField(false).needsScores();
                    bindings.add(variable, valueSource.asDoubleValuesSource());
                }
            } catch (Exception e) {
                // we defer "binding" of variables until here: give context for that variable
                throw convertToScriptException("link error", expr.sourceText, variable, e);
            }
        }
        return new ExpressionNumberSortScript(expr, bindings, needsScores);
    }

    private TermsSetQueryScript.LeafFactory newTermsSetQueryScript(Expression expr, SearchLookup lookup,
                                                                   @Nullable Map<String, Object> vars) {
        // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
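newSortScript is plumbing around the stock Lucene expressions API; a standalone sketch of that underlying API (the `price` field is invented, nothing Elasticsearch-specific involved):

import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.SimpleBindings;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.search.DoubleValuesSource;

// Compile a javascript-like expression, bind each variable to a values source,
// and obtain a DoubleValuesSource that can be evaluated per segment.
class ExpressionBindingDemo {
    static DoubleValuesSource compile() throws java.text.ParseException {
        Expression expr = JavascriptCompiler.compile("price * 2");
        SimpleBindings bindings = new SimpleBindings();
        bindings.add("price", DoubleValuesSource.fromDoubleField("price")); // hypothetical numeric doc-values field
        return expr.getDoubleValuesSource(bindings);
    }
}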
@@ -0,0 +1,105 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import java.io.IOException;
import java.text.ParseException;
import java.util.Collections;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;

import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ExpressionNumberSortScriptTests extends ESTestCase {
    private ExpressionScriptEngine service;
    private SearchLookup lookup;

    @Override
    public void setUp() throws Exception {
        super.setUp();

        NumberFieldType fieldType = new NumberFieldType(NumberType.DOUBLE);
        MapperService mapperService = mock(MapperService.class);
        when(mapperService.fullName("field")).thenReturn(fieldType);
        when(mapperService.fullName("alias")).thenReturn(fieldType);

        SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class);
        when(doubleValues.advanceExact(anyInt())).thenReturn(true);
        when(doubleValues.nextValue()).thenReturn(2.718);

        AtomicNumericFieldData atomicFieldData = mock(AtomicNumericFieldData.class);
        when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues);

        IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class);
        when(fieldData.getFieldName()).thenReturn("field");
        when(fieldData.load(anyObject())).thenReturn(atomicFieldData);

        service = new ExpressionScriptEngine(Settings.EMPTY);
        lookup = new SearchLookup(mapperService, ignored -> fieldData, null);
    }

    private NumberSortScript.LeafFactory compile(String expression) {
        NumberSortScript.Factory factory =
            service.compile(null, expression, NumberSortScript.CONTEXT, Collections.emptyMap());
        return factory.newFactory(Collections.emptyMap(), lookup);
    }

    public void testCompileError() {
        ScriptException e = expectThrows(ScriptException.class, () -> {
            compile("doc['field'].value * *@#)(@$*@#$ + 4");
        });
        assertTrue(e.getCause() instanceof ParseException);
    }

    public void testLinkError() {
        ScriptException e = expectThrows(ScriptException.class, () -> {
            compile("doc['nonexistent'].value * 5");
        });
        assertTrue(e.getCause() instanceof ParseException);
    }

    public void testFieldAccess() throws IOException {
        NumberSortScript script = compile("doc['field'].value").newInstance(null);
        script.setDocument(1);

        double result = script.execute();
        assertEquals(2.718, result, 0.0);
    }

    public void testFieldAccessWithFieldAlias() throws IOException {
        NumberSortScript script = compile("doc['alias'].value").newInstance(null);
        script.setDocument(1);

        double result = script.execute();
        assertEquals(2.718, result, 0.0);
    }
}
@@ -23,8 +23,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -43,25 +43,25 @@ public class NeedsScoreTests extends ESSingleNodeTestCase {
        IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double");

        Map<ScriptContext<?>, List<Whitelist>> contexts = new HashMap<>();
        contexts.put(SearchScript.CONTEXT, Whitelist.BASE_WHITELISTS);
        contexts.put(NumberSortScript.CONTEXT, Whitelist.BASE_WHITELISTS);
        PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY, contexts);

        QueryShardContext shardContext = index.newQueryShardContext(0, null, () -> 0, null);
        SearchLookup lookup = new SearchLookup(index.mapperService(), shardContext::getForField, null);

        SearchScript.Factory factory = service.compile(null, "1.2", SearchScript.CONTEXT, Collections.emptyMap());
        SearchScript.LeafFactory ss = factory.newFactory(Collections.emptyMap(), lookup);
        NumberSortScript.Factory factory = service.compile(null, "1.2", NumberSortScript.CONTEXT, Collections.emptyMap());
        NumberSortScript.LeafFactory ss = factory.newFactory(Collections.emptyMap(), lookup);
        assertFalse(ss.needs_score());

        factory = service.compile(null, "doc['d'].value", SearchScript.CONTEXT, Collections.emptyMap());
        factory = service.compile(null, "doc['d'].value", NumberSortScript.CONTEXT, Collections.emptyMap());
        ss = factory.newFactory(Collections.emptyMap(), lookup);
        assertFalse(ss.needs_score());

        factory = service.compile(null, "1/_score", SearchScript.CONTEXT, Collections.emptyMap());
        factory = service.compile(null, "1/_score", NumberSortScript.CONTEXT, Collections.emptyMap());
        ss = factory.newFactory(Collections.emptyMap(), lookup);
        assertTrue(ss.needs_score());

        factory = service.compile(null, "doc['d'].value * _score", SearchScript.CONTEXT, Collections.emptyMap());
        factory = service.compile(null, "doc['d'].value * _score", NumberSortScript.CONTEXT, Collections.emptyMap());
        ss = factory.newFactory(Collections.emptyMap(), lookup);
        assertTrue(ss.needs_score());
    }
@@ -0,0 +1,111 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.script;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;

abstract class AbstractSortScript implements ScorerAware {

    private static final Map<String, String> DEPRECATIONS;

    static {
        Map<String, String> deprecations = new HashMap<>();
        deprecations.put(
            "doc",
            "Accessing variable [doc] via [params.doc] from within a sort-script " +
                "is deprecated in favor of directly accessing [doc]."
        );
        deprecations.put(
            "_doc",
            "Accessing variable [doc] via [params._doc] from within a sort-script " +
                "is deprecated in favor of directly accessing [doc]."
        );
        DEPRECATIONS = Collections.unmodifiableMap(deprecations);
    }

    /**
     * The generic runtime parameters for the script.
     */
    private final Map<String, Object> params;

    /** A scorer that will return the score for the current document when the script is run. */
    private Scorable scorer;

    /**
     * A leaf lookup for the bound segment this script will operate on.
     */
    private final LeafSearchLookup leafLookup;

    AbstractSortScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
        this.leafLookup = lookup.getLeafSearchLookup(leafContext);
        Map<String, Object> parameters = new HashMap<>(params);
        parameters.putAll(leafLookup.asMap());
        this.params = new ParameterMap(parameters, DEPRECATIONS);
    }

    protected AbstractSortScript() {
        this.params = null;
        this.leafLookup = null;
    }

    /**
     * Return the parameters for this script.
     */
    public Map<String, Object> getParams() {
        return params;
    }

    @Override
    public void setScorer(Scorable scorer) {
        this.scorer = scorer;
    }

    /** Return the score of the current document. */
    public double get_score() {
        try {
            return scorer.score();
        } catch (IOException e) {
            throw new ElasticsearchException("couldn't lookup score", e);
        }
    }

    /**
     * The doc lookup for the Lucene segment this script was created for.
     */
    public Map<String, ScriptDocValues<?>> getDoc() {
        return leafLookup.doc();
    }

    /**
     * Set the current document to run the script on next.
     */
    public void setDocument(int docid) {
        leafLookup.setDocument(docid);
    }
}
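
Note: the DEPRECATIONS map above is handed to ParameterMap, whose implementation is not part of this diff. A minimal, purely illustrative sketch of the wrapping idea (this class and its logging are stand-ins, not the real ParameterMap):

import java.util.HashMap;
import java.util.Map;

// Illustrative stand-in: reads of a deprecated key still succeed, but warn the
// first time, so legacy params.doc / params._doc access keeps working.
final class DeprecationWarningParams {
    private final Map<String, Object> wrapped;
    private final Map<String, String> deprecations;
    private final Map<String, Boolean> warned = new HashMap<>();

    DeprecationWarningParams(Map<String, Object> wrapped, Map<String, String> deprecations) {
        this.wrapped = wrapped;
        this.deprecations = deprecations;
    }

    Object get(String key) {
        String message = deprecations.get(key);
        if (message != null && warned.put(key, Boolean.TRUE) == null) {
            System.err.println(message); // the real class routes this through a DeprecationLogger
        }
        return wrapped.get(key);
    }
}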
@@ -0,0 +1,60 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.script;

import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.lookup.SearchLookup;

public abstract class NumberSortScript extends AbstractSortScript {

    public static final String[] PARAMETERS = {};

    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("number_sort", Factory.class);

    public NumberSortScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
        super(params, lookup, leafContext);
    }

    protected NumberSortScript() {
        super();
    }

    public abstract double execute();

    /**
     * A factory to construct {@link NumberSortScript} instances.
     */
    public interface LeafFactory {
        NumberSortScript newInstance(LeafReaderContext ctx) throws IOException;

        /**
         * Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise.
         */
        boolean needs_score();
    }

    /**
     * A factory to construct stateful {@link NumberSortScript} factories for a specific index.
     */
    public interface Factory {
        LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
    }
}
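
Note: the Factory/LeafFactory split mirrors the compile-once, bind-per-segment lifecycle. A minimal hand-rolled sketch of the chain, assuming a real SearchLookup and LeafReaderContext at runtime (the doc field name "field" is a placeholder, not part of this change):

import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.NumberSortScript;

class NumberSortScriptSketch {
    // Compiled once per request; captures params and the SearchLookup.
    static final NumberSortScript.Factory FACTORY = (params, lookup) -> new NumberSortScript.LeafFactory() {
        @Override
        public NumberSortScript newInstance(LeafReaderContext ctx) {
            // Bound to a single segment; getDoc()/setDocument() come from AbstractSortScript.
            return new NumberSortScript(params, lookup, ctx) {
                @Override
                public double execute() {
                    return ((Number) getDoc().get("field").get(0)).doubleValue();
                }
            };
        }

        @Override
        public boolean needs_score() {
            return false; // this script never reads _score
        }
    };
}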
@@ -43,7 +43,8 @@ public class ScriptModule {
        SearchScript.CONTEXT,
        AggregationScript.CONTEXT,
        ScoreScript.CONTEXT,
        SearchScript.SCRIPT_SORT_CONTEXT,
        NumberSortScript.CONTEXT,
        StringSortScript.CONTEXT,
        TermsSetQueryScript.CONTEXT,
        UpdateScript.CONTEXT,
        BucketAggregationScript.CONTEXT,

@@ -139,7 +139,4 @@ public abstract class SearchScript implements ScorerAware {

    /** The context used to compile {@link SearchScript} factories. */
    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("search", Factory.class);
    // TODO: remove these contexts when it has its own interface
    // Can return a double. (For ScriptSortType#NUMBER only, for ScriptSortType#STRING normal CONTEXT should be used)
    public static final ScriptContext<Factory> SCRIPT_SORT_CONTEXT = new ScriptContext<>("sort", Factory.class);
}
@@ -0,0 +1,51 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.script;

import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.lookup.SearchLookup;

public abstract class StringSortScript extends AbstractSortScript {

    public static final String[] PARAMETERS = {};

    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("string_sort", Factory.class);

    public StringSortScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
        super(params, lookup, leafContext);
    }

    public abstract String execute();

    /**
     * A factory to construct {@link StringSortScript} instances.
     */
    public interface LeafFactory {
        StringSortScript newInstance(LeafReaderContext ctx) throws IOException;
    }

    /**
     * A factory to construct stateful {@link StringSortScript} factories for a specific index.
     */
    public interface Factory {
        LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
    }
}
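
Note: because this LeafFactory has a single method (no needs_score), the whole chain collapses to lambdas, exactly as the MockScriptEngine change later in this diff does. A sketch (the field name "category" is a placeholder):

import org.elasticsearch.script.StringSortScript;

class StringSortScriptSketch {
    static final StringSortScript.Factory FACTORY = (params, lookup) -> (StringSortScript.LeafFactory) ctx ->
        new StringSortScript(params, lookup, ctx) {
            @Override
            public String execute() {
                return String.valueOf(getDoc().get("category").get(0));
            }
        };
}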
@@ -32,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -51,7 +50,8 @@ import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.StringSortScript;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;

@@ -305,9 +305,6 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {

    @Override
    public SortFieldAndFormat build(QueryShardContext context) throws IOException {
        final SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.SCRIPT_SORT_CONTEXT);
        final SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());

        MultiValueMode valueMode = null;
        if (sortMode != null) {
            valueMode = MultiValueMode.fromString(sortMode.toString());

@@ -336,8 +333,10 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
        final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
        switch (type) {
            case STRING:
                final StringSortScript.Factory factory = context.getScriptService().compile(script, StringSortScript.CONTEXT);
                final StringSortScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());
                fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
                    SearchScript leafScript;
                    StringSortScript leafScript;
                    @Override
                    protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException {
                        leafScript = searchScript.newInstance(context);

@@ -350,9 +349,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
                    }
                    @Override
                    public BytesRef binaryValue() {
                        final Object run = leafScript.run();
                        CollectionUtils.ensureNoSelfReferences(run, "ScriptSortBuilder leaf script");
                        spare.copyChars(run.toString());
                        spare.copyChars(leafScript.execute());
                        return spare.get();
                    }
                };

@@ -365,11 +362,13 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
                };
                break;
            case NUMBER:
                final NumberSortScript.Factory numberSortFactory = context.getScriptService().compile(script, NumberSortScript.CONTEXT);
                final NumberSortScript.LeafFactory numberSortScript = numberSortFactory.newFactory(script.getParams(), context.lookup());
                fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
                    SearchScript leafScript;
                    NumberSortScript leafScript;
                    @Override
                    protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException {
                        leafScript = searchScript.newInstance(context);
                        leafScript = numberSortScript.newInstance(context);
                        final NumericDoubleValues values = new NumericDoubleValues() {
                            @Override
                            public boolean advanceExact(int doc) throws IOException {

@@ -378,7 +377,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
                    }
                    @Override
                    public double doubleValue() {
                        return leafScript.runAsDouble();
                        return leafScript.execute();
                    }
                };
                return FieldData.singleton(values);
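
Note: on the request side nothing changes; a script sort is built exactly as before, and the sort type now simply selects which dedicated context the script is compiled against. A sketch using the regular builder API (the script body is illustrative):

import org.elasticsearch.script.Script;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

class ScriptSortRequestSketch {
    // NUMBER now compiles against NumberSortScript.CONTEXT, STRING against StringSortScript.CONTEXT.
    static final SearchSourceBuilder SOURCE = new SearchSourceBuilder()
        .sort(SortBuilders.scriptSort(new Script("doc['d'].value * _score"), ScriptSortType.NUMBER)
            .order(SortOrder.DESC));
}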
@@ -274,6 +274,7 @@ public class RolloverIT extends ESIntegTestCase {

    public void testRolloverWithDateMath() {
        ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
        assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear());
        String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
        String dateMathExp = "<test-{now/d}-1>";
        assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
@@ -19,6 +19,7 @@

package org.elasticsearch.discovery;

import java.nio.file.Path;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;

@@ -33,7 +34,8 @@ import org.elasticsearch.discovery.zen.ZenPing;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.discovery.TestZenDiscovery;
import org.elasticsearch.test.disruption.NetworkDisruption;
import org.elasticsearch.test.disruption.NetworkDisruption.Bridge;

@@ -52,9 +54,9 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

@@ -62,7 +64,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {

    static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places.

    private ClusterDiscoveryConfiguration discoveryConfig;
    private NodeConfigurationSource discoveryConfig;

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {

@@ -116,18 +118,14 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
        }
    }

    List<String> startCluster(int numberOfNodes) throws ExecutionException, InterruptedException {
    List<String> startCluster(int numberOfNodes) {
        return startCluster(numberOfNodes, -1);
    }

    List<String> startCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException {
        return startCluster(numberOfNodes, minimumMasterNode, null);
    }

    List<String> startCluster(int numberOfNodes, int minimumMasterNode, @Nullable int[] unicastHostsOrdinals) throws
        ExecutionException, InterruptedException {
        configureCluster(numberOfNodes, unicastHostsOrdinals, minimumMasterNode);
        List<String> nodes = internalCluster().startNodes(numberOfNodes);
    List<String> startCluster(int numberOfNodes, int minimumMasterNode) {
        configureCluster(numberOfNodes, minimumMasterNode);
        InternalTestCluster internalCluster = internalCluster();
        List<String> nodes = internalCluster.startNodes(numberOfNodes);
        ensureStableCluster(numberOfNodes);

        // TODO: this is a temporary solution so that nodes will not base their reaction to a partition based on previous successful results

@@ -154,20 +152,11 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
        return Arrays.asList(MockTransportService.TestPlugin.class);
    }

    void configureCluster(
        int numberOfNodes,
        @Nullable int[] unicastHostsOrdinals,
        int minimumMasterNode
    ) throws ExecutionException, InterruptedException {
        configureCluster(DEFAULT_SETTINGS, numberOfNodes, unicastHostsOrdinals, minimumMasterNode);
    void configureCluster(int numberOfNodes, int minimumMasterNode) {
        configureCluster(DEFAULT_SETTINGS, numberOfNodes, minimumMasterNode);
    }

    void configureCluster(
        Settings settings,
        int numberOfNodes,
        @Nullable int[] unicastHostsOrdinals,
        int minimumMasterNode
    ) throws ExecutionException, InterruptedException {
    void configureCluster(Settings settings, int numberOfNodes, int minimumMasterNode) {
        if (minimumMasterNode < 0) {
            minimumMasterNode = numberOfNodes / 2 + 1;
        }

@@ -177,14 +166,21 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
            .put(settings)
            .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numberOfNodes)
            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode)
            .putList(DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file")
            .build();

        if (discoveryConfig == null) {
            if (unicastHostsOrdinals == null) {
                discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings);
            } else {
                discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings, unicastHostsOrdinals);
            }
            discoveryConfig = new NodeConfigurationSource() {
                @Override
                public Settings nodeSettings(final int nodeOrdinal) {
                    return nodeSettings;
                }

                @Override
                public Path nodeConfigPath(final int nodeOrdinal) {
                    return null;
                }
            };
        }
    }
@@ -363,7 +363,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
     */
    public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Exception {
        // don't use DEFAULT settings (which can cause node disconnects on a slow CI machine)
        configureCluster(Settings.EMPTY, 3, null, 1);
        configureCluster(Settings.EMPTY, 3, 1);
        internalCluster().startMasterOnlyNode();
        final String node_1 = internalCluster().startDataOnlyNode();

@@ -390,7 +390,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {

    public void testIndexImportedFromDataOnlyNodesIfMasterLostDataFolder() throws Exception {
        // test for https://github.com/elastic/elasticsearch/issues/8823
        configureCluster(2, null, 1);
        configureCluster(2, 1);
        String masterNode = internalCluster().startMasterOnlyNode(Settings.EMPTY);
        internalCluster().startDataOnlyNode(Settings.EMPTY);

@@ -421,7 +421,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
            .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed
            .build();
        final String idxName = "test";
        configureCluster(settings, 3, null, 2);
        configureCluster(settings, 3, 2);
        final List<String> allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(2);
        final String dataNode = internalCluster().startDataOnlyNode();
        ensureStableCluster(3);
@@ -59,7 +59,8 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {

    public void testIsolatedUnicastNodes() throws Exception {
        List<String> nodes = startCluster(4, -1, new int[]{0});
        internalCluster().setHostsListContainsOnlyFirstNode(true);
        List<String> nodes = startCluster(4, -1);
        // Figure out what is the elected master node
        final String unicastTarget = nodes.get(0);

@@ -98,7 +99,8 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
     * The rejoining node should take this master node and connect.
     */
    public void testUnicastSinglePingResponseContainsMaster() throws Exception {
        List<String> nodes = startCluster(4, -1, new int[]{0});
        internalCluster().setHostsListContainsOnlyFirstNode(true);
        List<String> nodes = startCluster(4, -1);
        // Figure out what is the elected master node
        final String masterNode = internalCluster().getMasterName();
        logger.info("---> legit elected master node={}", masterNode);

@@ -194,7 +196,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
    }

    public void testClusterFormingWithASlowNode() throws Exception {
        configureCluster(3, null, 2);
        configureCluster(3, 2);

        SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(random(), 0, 0, 1000, 2000);

@@ -210,7 +212,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
    }

    public void testElectMasterWithLatestVersion() throws Exception {
        configureCluster(3, null, 2);
        configureCluster(3, 2);
        final Set<String> nodes = new HashSet<>(internalCluster().startNodes(3));
        ensureStableCluster(3);
        ServiceDisruptionScheme isolateAllNodes =
@@ -60,7 +60,7 @@ public class SnapshotDisruptionIT extends AbstractDisruptionTestCase {
            .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed
            .build();
        final String idxName = "test";
        configureCluster(settings, 4, null, 2);
        configureCluster(settings, 4, 2);
        final List<String> allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(3);
        final String dataNode = internalCluster().startDataOnlyNode();
        ensureStableCluster(4);
@@ -105,6 +105,47 @@ public class MockScriptEngine implements ScriptEngine {
                }
            };
            return context.factoryClazz.cast(factory);
        } else if (context.instanceClazz.equals(NumberSortScript.class)) {
            NumberSortScript.Factory factory = (parameters, lookup) -> new NumberSortScript.LeafFactory() {
                @Override
                public NumberSortScript newInstance(final LeafReaderContext ctx) {
                    return new NumberSortScript(parameters, lookup, ctx) {
                        @Override
                        public double execute() {
                            Map<String, Object> vars = new HashMap<>(parameters);
                            vars.put("params", parameters);
                            vars.put("doc", getDoc());
                            return ((Number) script.apply(vars)).doubleValue();
                        }
                    };
                }

                @Override
                public boolean needs_score() {
                    return false;
                }
            };
            return context.factoryClazz.cast(factory);
        } else if (context.instanceClazz.equals(StringSortScript.class)) {
            StringSortScript.Factory factory = (parameters, lookup) -> (StringSortScript.LeafFactory) ctx
                -> new StringSortScript(parameters, lookup, ctx) {
                @Override
                public String execute() {
                    Map<String, Object> vars = new HashMap<>(parameters);
                    vars.put("params", parameters);
                    vars.put("doc", getDoc());
                    return String.valueOf(script.apply(vars));
                }
            };
            return context.factoryClazz.cast(factory);
        } else if (context.instanceClazz.equals(IngestScript.class)) {
            IngestScript.Factory factory = vars -> new IngestScript(vars) {
                @Override
                public void execute(Map<String, Object> ctx) {
                    script.apply(ctx);
                }
            };
            return context.factoryClazz.cast(factory);
        } else if(context.instanceClazz.equals(AggregationScript.class)) {
            AggregationScript.Factory factory = (parameters, lookup) -> new AggregationScript.LeafFactory() {
                @Override
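
Note: every branch above funnels into the same generic script function over a vars map, so a test script for either sort context is just a Function. A sketch of what such a function sees (the field name "field" is a placeholder):

import java.util.Map;
import java.util.function.Function;
import org.elasticsearch.index.fielddata.ScriptDocValues;

class MockSortScriptSketch {
    // "params" and "doc" are put into vars by the branches above; the return value
    // is then coerced per context (Number.doubleValue() for NumberSortScript,
    // String.valueOf(...) for StringSortScript).
    static final Function<Map<String, Object>, Object> SCRIPT = vars -> {
        @SuppressWarnings("unchecked")
        Map<String, ScriptDocValues<?>> doc = (Map<String, ScriptDocValues<?>>) vars.get("doc");
        return ((Number) doc.get("field").get(0)).doubleValue() * 2;
    };
}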
@@ -232,6 +232,9 @@ public final class InternalTestCluster extends TestCluster {
    private ServiceDisruptionScheme activeDisruptionScheme;
    private Function<Client, Client> clientWrapper;

    // If set to true only the first node in the cluster will be made a unicast node
    private boolean hostsListContainsOnlyFirstNode;

    public InternalTestCluster(
        final long clusterSeed,
        final Path baseDir,

@@ -1613,12 +1616,17 @@ public final class InternalTestCluster extends TestCluster {

    private final Object discoveryFileMutex = new Object();

    private void rebuildUnicastHostFiles(Collection<NodeAndClient> newNodes) {
    private void rebuildUnicastHostFiles(List<NodeAndClient> newNodes) {
        // cannot be a synchronized method since it's called on other threads from within synchronized startAndPublishNodesAndClients()
        synchronized (discoveryFileMutex) {
            try {
                List<String> discoveryFileContents = Stream.concat(nodes.values().stream(), newNodes.stream())
                    .map(nac -> nac.node.injector().getInstance(TransportService.class)).filter(Objects::nonNull)
                Stream<NodeAndClient> unicastHosts = Stream.concat(nodes.values().stream(), newNodes.stream());
                if (hostsListContainsOnlyFirstNode) {
                    unicastHosts = unicastHosts.limit(1L);
                }
                List<String> discoveryFileContents = unicastHosts.map(
                    nac -> nac.node.injector().getInstance(TransportService.class)
                ).filter(Objects::nonNull)
                    .map(TransportService::getLocalNode).filter(Objects::nonNull).filter(DiscoveryNode::isMasterNode)
                    .map(n -> n.getAddress().toString())
                    .distinct().collect(Collectors.toList());

@@ -2050,6 +2058,9 @@ public final class InternalTestCluster extends TestCluster {
        return filterNodes(nodes, NodeAndClient::isMasterEligible).size();
    }

    public void setHostsListContainsOnlyFirstNode(boolean hostsListContainsOnlyFirstNode) {
        this.hostsListContainsOnlyFirstNode = hostsListContainsOnlyFirstNode;
    }

    public void setDisruptionScheme(ServiceDisruptionScheme scheme) {
        assert activeDisruptionScheme == null :
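
Note: this flag pairs with the file-based hosts provider wired up in AbstractDisruptionTestCase above, replacing the per-ordinal unicast lists of the ClusterDiscoveryConfiguration class deleted below. A sketch of the replacement pattern, as used in the DiscoveryDisruptionIT hunks earlier (inside a subclass of AbstractDisruptionTestCase):

public void testIsolatedFirstNodeSketch() throws Exception {
    internalCluster().setHostsListContainsOnlyFirstNode(true); // only node 0 lands in the hosts file
    List<String> nodes = startCluster(4, -1);                  // old equivalent: startCluster(4, -1, new int[]{0})
}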
@@ -1,185 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.test.discovery;

import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.SysGlobals;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.transport.TcpTransport;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Set;

public class ClusterDiscoveryConfiguration extends NodeConfigurationSource {

    /**
     * The number of ports in the range used for this JVM
     */
    private static final int PORTS_PER_JVM = 100;

    private static final int JVM_ORDINAL = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0"));

    /**
     * a per-JVM unique offset to be used for calculating unique port ranges.
     */
    private static final int JVM_BASE_PORT_OFFSET = PORTS_PER_JVM * (JVM_ORDINAL + 1);


    static Settings DEFAULT_NODE_SETTINGS = Settings.EMPTY;
    private static final String IP_ADDR = "127.0.0.1";

    final int numOfNodes;
    final Settings nodeSettings;
    final Settings transportClientSettings;

    public ClusterDiscoveryConfiguration(int numOfNodes, Settings extraSettings) {
        this.numOfNodes = numOfNodes;
        this.nodeSettings = Settings.builder().put(DEFAULT_NODE_SETTINGS).put(extraSettings).build();
        this.transportClientSettings = Settings.builder().put(extraSettings).build();
    }

    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        return nodeSettings;
    }

    @Override
    public Path nodeConfigPath(int nodeOrdinal) {
        return null;
    }

    @Override
    public Settings transportClientSettings() {
        return transportClientSettings;
    }

    public static class UnicastZen extends ClusterDiscoveryConfiguration {

        // this variable is incremented on each bind attempt and will maintain the next port that should be tried
        private static int nextPort = calcBasePort();

        private final int[] unicastHostOrdinals;
        private final int[] unicastHostPorts;

        public UnicastZen(int numOfNodes, Settings extraSettings) {
            this(numOfNodes, numOfNodes, extraSettings);
        }

        public UnicastZen(int numOfNodes, int numOfUnicastHosts, Settings extraSettings) {
            super(numOfNodes, extraSettings);
            if (numOfUnicastHosts == numOfNodes) {
                unicastHostOrdinals = new int[numOfNodes];
                for (int i = 0; i < numOfNodes; i++) {
                    unicastHostOrdinals[i] = i;
                }
            } else {
                Set<Integer> ordinals = new HashSet<>(numOfUnicastHosts);
                while (ordinals.size() != numOfUnicastHosts) {
                    ordinals.add(RandomizedTest.randomInt(numOfNodes - 1));
                }
                unicastHostOrdinals = CollectionUtils.toArray(ordinals);
            }
            this.unicastHostPorts = unicastHostPorts(numOfNodes);
            assert unicastHostOrdinals.length <= unicastHostPorts.length;
        }

        public UnicastZen(int numOfNodes, int[] unicastHostOrdinals) {
            this(numOfNodes, Settings.EMPTY, unicastHostOrdinals);
        }

        public UnicastZen(int numOfNodes, Settings extraSettings, int[] unicastHostOrdinals) {
            super(numOfNodes, extraSettings);
            this.unicastHostOrdinals = unicastHostOrdinals;
            this.unicastHostPorts = unicastHostPorts(numOfNodes);
            assert unicastHostOrdinals.length <= unicastHostPorts.length;
        }

        private static int calcBasePort() {
            return 30000 + JVM_BASE_PORT_OFFSET;
        }

        @Override
        public Settings nodeSettings(int nodeOrdinal) {
            Settings.Builder builder = Settings.builder().put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numOfNodes);

            String[] unicastHosts = new String[unicastHostOrdinals.length];
            if (nodeOrdinal >= unicastHostPorts.length) {
                throw new ElasticsearchException("nodeOrdinal [" + nodeOrdinal + "] is greater than the number unicast ports ["
                    + unicastHostPorts.length + "]");
            } else {
                // we need to pin the node port & host so we'd know where to point things
                builder.put(TcpTransport.PORT.getKey(), unicastHostPorts[nodeOrdinal]);
                builder.put(TcpTransport.HOST.getKey(), IP_ADDR); // only bind on one IF we use v4 here by default
                for (int i = 0; i < unicastHostOrdinals.length; i++) {
                    unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]);
                }
            }
            builder.putList("discovery.zen.ping.unicast.hosts", unicastHosts);
            return builder.put(super.nodeSettings(nodeOrdinal)).build();
        }

        @SuppressForbidden(reason = "we know we pass a IP address")
        protected static synchronized int[] unicastHostPorts(int numHosts) {
            int[] unicastHostPorts = new int[numHosts];

            final int basePort = calcBasePort();
            final int maxPort = basePort + PORTS_PER_JVM;
            int tries = 0;
            for (int i = 0; i < unicastHostPorts.length; i++) {
                boolean foundPortInRange = false;
                while (tries < PORTS_PER_JVM && !foundPortInRange) {
                    try (ServerSocket serverSocket = new MockServerSocket()) {
                        // Set SO_REUSEADDR as we may bind here and not be able to reuse the address immediately without it.
                        serverSocket.setReuseAddress(NetworkUtils.defaultReuseAddress());
                        serverSocket.bind(new InetSocketAddress(IP_ADDR, nextPort));
                        // bind was a success
                        foundPortInRange = true;
                        unicastHostPorts[i] = nextPort;
                    } catch (IOException e) {
                        // Do nothing
                    }

                    nextPort++;
                    if (nextPort >= maxPort) {
                        // Roll back to the beginning of the range and do not go into another JVM's port range
                        nextPort = basePort;
                    }
                    tries++;
                }

                if (!foundPortInRange) {
                    throw new ElasticsearchException("could not find enough open ports in range [" + basePort + "-" + maxPort
                        + "]. required [" + unicastHostPorts.length + "] ports");
                }
            }
            return unicastHostPorts;
        }
    }
}
@@ -110,22 +110,12 @@ public class ChainIT extends ESRestTestCase {
        assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh")));
    }

    private static void resumeFollow(String leaderIndex, String followIndex) throws IOException {
        final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow");
        request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}");
        assertOK(client().performRequest(request));
    }

    private static void followIndex(String leaderIndex, String followIndex) throws IOException {
        final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow");
        request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}");
        assertOK(client().performRequest(request));
    }

    private static void pauseFollow(String followIndex) throws IOException {
        assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow")));
    }

    private static void verifyDocuments(String index, int expectedNumDocs) throws IOException {
        verifyDocuments(index, expectedNumDocs, client());
    }
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.rollup;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;

@@ -15,7 +16,9 @@ import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilde
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@@ -34,8 +37,19 @@ public class RollupField {
    public static final String TYPE_NAME = "_doc";
    public static final String AGG = "agg";
    public static final String ROLLUP_MISSING = "ROLLUP_MISSING_40710B25931745D4B0B8B310F6912A69";
    public static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
    public static final List<String> SUPPORTED_NUMERIC_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
        SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
    public static final List<String> SUPPORTED_DATE_METRICS = Arrays.asList(MaxAggregationBuilder.NAME,
        MinAggregationBuilder.NAME,
        ValueCountAggregationBuilder.NAME);

    // a set of ALL our supported metrics, to be a union of all other supported metric types (numeric, date, etc.)
    public static final Set<String> SUPPORTED_METRICS;
    static {
        SUPPORTED_METRICS = new HashSet<>();
        SUPPORTED_METRICS.addAll(SUPPORTED_NUMERIC_METRICS);
        SUPPORTED_METRICS.addAll(SUPPORTED_DATE_METRICS);
    }

    // these mapper types are used by the configs (metric, histo, etc) to validate field mappings
    public static final List<String> NUMERIC_FIELD_MAPPER_TYPES;

@@ -47,6 +61,8 @@ public class RollupField {
        NUMERIC_FIELD_MAPPER_TYPES = types;
    }

    public static final String DATE_FIELD_MAPPER_TYPE = DateFieldMapper.CONTENT_TYPE;

    /**
     * Format to the appropriate Rollup field name convention
     *
@@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.rollup.RollupField;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

@@ -108,18 +109,24 @@ public class MetricConfig implements Writeable, ToXContentObject {
        Map<String, FieldCapabilities> fieldCaps = fieldCapsResponse.get(field);
        if (fieldCaps != null && fieldCaps.isEmpty() == false) {
            fieldCaps.forEach((key, value) -> {
                if (value.isAggregatable() == false) {
                    validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
                        "but is not.");
                }
                if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) {
                    if (value.isAggregatable() == false) {
                        validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
                            "but is not.");
                    // nothing to do as all metrics are supported by SUPPORTED_NUMERIC_METRICS currently
                } else if (RollupField.DATE_FIELD_MAPPER_TYPE.equals(key)) {
                    if (RollupField.SUPPORTED_DATE_METRICS.containsAll(metrics) == false) {
                        validationException.addValidationError(
                            buildSupportedMetricError("date", RollupField.SUPPORTED_DATE_METRICS));
                    }
                } else {
                    validationException.addValidationError("The field referenced by a metric group must be a [numeric] type, but found " +
                        fieldCaps.keySet().toString() + " for field [" + field + "]");
                    validationException.addValidationError("The field referenced by a metric group must be a [numeric] or [date] type, " +
                        "but found " + fieldCaps.keySet().toString() + " for field [" + field + "]");
                }
            });
        } else {
            validationException.addValidationError("Could not find a [numeric] field with name [" + field + "] in any of the " +
            validationException.addValidationError("Could not find a [numeric] or [date] field with name [" + field + "] in any of the " +
                "indices matching the index pattern.");
        }
    }

@@ -166,4 +173,11 @@ public class MetricConfig implements Writeable, ToXContentObject {
    public static MetricConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private String buildSupportedMetricError(String type, List<String> supportedMetrics) {
        List<String> unsupportedMetrics = new ArrayList<>(metrics);
        unsupportedMetrics.removeAll(supportedMetrics);
        return "Only the metrics " + supportedMetrics + " are supported for [" + type + "] types," +
            " but unsupported metrics " + unsupportedMetrics + " supplied for field [" + field + "]";
    }
}
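
Note: after this change a date-backed metric group validates as a plain subset check against SUPPORTED_DATE_METRICS. A hypothetical helper mirroring the tests below ("timestamp" is a placeholder field name, and responseMap is assumed to map it to a "date" field capability):

import java.util.Arrays;
import java.util.Map;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;

class DateMetricValidationSketch {
    static void validateDateConfig(Map<String, Map<String, FieldCapabilities>> responseMap) {
        ActionRequestValidationException e = new ActionRequestValidationException();
        // max, min and value_count are all in SUPPORTED_DATE_METRICS, so this passes;
        // adding "avg" or "sum" would produce the buildSupportedMetricError message above.
        MetricConfig config = new MetricConfig("timestamp", Arrays.asList("max", "min", "value_count"));
        config.validateMappings(responseMap, e);
        assert e.validationErrors().isEmpty();
    }
}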
@@ -16,7 +16,6 @@ public class APMSystemUser extends User {
    public static final String NAME = UsernamesField.APM_NAME;
    public static final String ROLE_NAME = UsernamesField.APM_ROLE;
    public static final Version DEFINED_SINCE = Version.V_6_5_0;
    public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE);

    public APMSystemUser(boolean enabled) {
        super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled);

@@ -16,7 +16,6 @@ public class BeatsSystemUser extends User {
    public static final String NAME = UsernamesField.BEATS_NAME;
    public static final String ROLE_NAME = UsernamesField.BEATS_ROLE;
    public static final Version DEFINED_SINCE = Version.V_6_3_0;
    public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE);

    public BeatsSystemUser(boolean enabled) {
        super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled);
@@ -1,38 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.Version;

/**
 * BuiltinUserInfo provides common user meta data for newly introduced pre defined System Users.
 */
public class BuiltinUserInfo {
    private final String name;
    private final String role;
    private final Version definedSince;

    public BuiltinUserInfo(String name, String role, Version definedSince) {
        this.name = name;
        this.role = role;
        this.definedSince = definedSince;
    }

    /** Get the builtin users name. */
    public String getName() {
        return name;
    }

    /** Get the builtin users default role name. */
    public String getRole() {
        return role;
    }

    /** Get version the builtin user was introduced with. */
    public Version getDefinedSince() {
        return definedSince;
    }
}
@@ -11,14 +11,17 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
import org.elasticsearch.xpack.core.rollup.RollupField;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.isIn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@@ -45,8 +48,8 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<Me

        MetricConfig config = new MetricConfig("my_field", singletonList("max"));
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] field with name [my_field] in any of the " +
            "indices matching the index pattern."));
        assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] or [date] field with name [my_field] in any" +
            " of the indices matching the index pattern."));
    }

    public void testValidateNomatchingField() {

@@ -59,8 +62,8 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<Me

        MetricConfig config = new MetricConfig("my_field", singletonList("max"));
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] field with name [my_field] in any of the " +
            "indices matching the index pattern."));
        assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] or [date] field with name [my_field] in any" +
            " of the indices matching the index pattern."));
    }

    public void testValidateFieldWrongType() {

@@ -73,8 +76,8 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<Me

        MetricConfig config = new MetricConfig("my_field", singletonList("max"));
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("The field referenced by a metric group must be a [numeric] type, " +
            "but found [keyword] for field [my_field]"));
        assertThat("The field referenced by a metric group must be a [numeric] or [date] type," +
            " but found [keyword] for field [my_field]", isIn(e.validationErrors()));
    }

    public void testValidateFieldMatchingNotAggregatable() {

@@ -91,6 +94,21 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<Me
        assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
    }

    public void testValidateDateFieldUnsupportedMetric() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        when(fieldCaps.isAggregatable()).thenReturn(true);
        responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

        MetricConfig config = new MetricConfig("my_field", Arrays.asList("avg", "max"));
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("Only the metrics " + RollupField.SUPPORTED_DATE_METRICS.toString() +
            " are supported for [date] types, but unsupported metrics [avg] supplied for field [my_field]"));
    }

    public void testValidateMatchingField() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

@@ -153,6 +171,13 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<Me
        config = new MetricConfig("my_field", singletonList("max"));
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().size(), equalTo(0));

        fieldCaps = mock(FieldCapabilities.class);
        when(fieldCaps.isAggregatable()).thenReturn(true);
        responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));
        config = new MetricConfig("my_field", singletonList("max"));
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().size(), equalTo(0));
    }

}
@@ -11,6 +11,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;

@@ -60,6 +61,7 @@ final class ExpiredTokenRemover extends AbstractRunnable {
            .filter(QueryBuilders.boolQuery()
                .should(QueryBuilders.rangeQuery("expiration_time").lte(now.toEpochMilli()))
                .should(QueryBuilders.rangeQuery("creation_time").lte(now.minus(24L, ChronoUnit.HOURS).toEpochMilli()))));
        logger.trace(() -> new ParameterizedMessage("Removing old tokens: [{}]", Strings.toString(expiredDbq)));
        executeAsyncWithOrigin(client, SECURITY_ORIGIN, DeleteByQueryAction.INSTANCE, expiredDbq,
            ActionListener.wrap(r -> {
                debugDbqResponse(r);
@ -8,11 +8,8 @@ package org.elasticsearch.xpack.security.authc;
|
|||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.apache.lucene.util.UnicodeUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.Version;
|
||||
|
@ -36,9 +33,12 @@ import org.elasticsearch.action.update.UpdateResponse;
|
|||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
||||
import org.elasticsearch.cluster.ack.AckedRequest;
|
||||
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.UUIDs;
|
||||
import org.elasticsearch.common.cache.Cache;
|
||||
|
@ -60,15 +60,16 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
|
|||
import org.elasticsearch.common.util.iterable.Iterables;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.elasticsearch.index.engine.DocumentMissingException;
|
||||
import org.elasticsearch.index.engine.VersionConflictEngineException;
|
||||
import org.elasticsearch.index.query.BoolQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.xpack.core.XPackField;
|
||||
import org.elasticsearch.xpack.core.XPackPlugin;
|
||||
import org.elasticsearch.xpack.core.XPackSettings;
|
||||
import org.elasticsearch.xpack.core.XPackField;
|
||||
import org.elasticsearch.xpack.core.security.ScrollHelper;
|
||||
import org.elasticsearch.xpack.core.security.authc.Authentication;
|
||||
import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp;
|
||||
|
@ -114,6 +115,7 @@ import java.util.concurrent.ExecutionException;
|
|||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException;
|
||||
|
@ -159,6 +161,7 @@ public final class TokenService extends AbstractComponent {
|
|||
static final String INVALIDATED_TOKEN_DOC_TYPE = "invalidated-token";
|
||||
static final int MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1;
|
||||
private static final int MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * MINIMUM_BYTES) / 3)).intValue();
|
||||
private static final int MAX_RETRY_ATTEMPTS = 5;
|
||||
|
||||
private final SecureRandom secureRandom = new SecureRandom();
|
||||
private final ClusterService clusterService;
|
||||
|
@ -217,9 +220,10 @@ public final class TokenService extends AbstractComponent {
|
|||
boolean includeRefreshToken) throws IOException {
|
||||
ensureEnabled();
|
||||
if (authentication == null) {
|
||||
listener.onFailure(new IllegalArgumentException("authentication must be provided"));
|
||||
listener.onFailure(traceLog("create token", null, new IllegalArgumentException("authentication must be provided")));
|
||||
} else if (originatingClientAuth == null) {
|
||||
listener.onFailure(new IllegalArgumentException("originating client authentication must be provided"));
|
||||
listener.onFailure(traceLog("create token", null,
|
||||
new IllegalArgumentException("originating client authentication must be provided")));
|
||||
} else {
|
||||
final Instant created = clock.instant();
|
||||
final Instant expiration = getExpirationTime(created);
|
||||
|
@ -252,16 +256,17 @@ public final class TokenService extends AbstractComponent {
|
|||
.field("realm", authentication.getAuthenticatedBy().getName())
|
||||
.endObject();
|
||||
builder.endObject();
|
||||
final String documentId = getTokenDocumentId(userToken);
|
||||
IndexRequest request =
|
||||
client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken))
|
||||
client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, documentId)
|
||||
.setOpType(OpType.CREATE)
|
||||
.setSource(builder)
|
||||
.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
|
||||
.request();
|
||||
securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
|
||||
executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request,
|
||||
ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)),
|
||||
listener::onFailure))
|
||||
securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", documentId, ex)),
|
||||
() -> executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request,
|
||||
ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)),
|
||||
listener::onFailure))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -323,7 +328,7 @@ public final class TokenService extends AbstractComponent {
|
|||
Instant currentTime = clock.instant();
|
||||
if (currentTime.isAfter(userToken.getExpirationTime())) {
|
||||
// token expired
|
||||
listener.onFailure(expiredTokenException());
|
||||
listener.onFailure(traceLog("decode token", token, expiredTokenException()));
|
||||
} else {
|
||||
checkIfTokenIsRevoked(userToken, listener);
|
||||
}
|
||||
|
@@ -361,43 +366,50 @@ public final class TokenService extends AbstractComponent {
            final Cipher cipher = getDecryptionCipher(iv, decodeKey, version, decodedSalt);
            if (version.onOrAfter(Version.V_6_2_0)) {
                // we only have the id and need to get the token from the doc!
                decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId ->
                    securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
                        final GetRequest getRequest =
                            client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE,
                decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> {
                    if (securityIndex.isAvailable() == false) {
                        logger.warn("failed to get token [{}] since index is not available", tokenId);
                        listener.onResponse(null);
                    } else {
                        securityIndex.checkIndexVersionThenExecute(
                            ex -> listener.onFailure(traceLog("prepare security index", tokenId, ex)),
                            () -> {
                                final GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE,
                                    getTokenDocumentId(tokenId)).request();
                        executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest,
                            ActionListener.<GetResponse>wrap(response -> {
                                if (response.isExists()) {
                                    Map<String, Object> accessTokenSource =
                                Consumer<Exception> onFailure = ex -> listener.onFailure(traceLog("decode token", tokenId, ex));
                                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest,
                                    ActionListener.<GetResponse>wrap(response -> {
                                        if (response.isExists()) {
                                            Map<String, Object> accessTokenSource =
                                                (Map<String, Object>) response.getSource().get("access_token");
                                    if (accessTokenSource == null) {
                                        listener.onFailure(new IllegalStateException("token document is missing " +
                                            "the access_token field"));
                                    } else if (accessTokenSource.containsKey("user_token") == false) {
                                        listener.onFailure(new IllegalStateException("token document is missing " +
                                            "the user_token field"));
                                    } else {
                                        Map<String, Object> userTokenSource =
                                            if (accessTokenSource == null) {
                                                onFailure.accept(new IllegalStateException(
                                                    "token document is missing the access_token field"));
                                            } else if (accessTokenSource.containsKey("user_token") == false) {
                                                onFailure.accept(new IllegalStateException(
                                                    "token document is missing the user_token field"));
                                            } else {
                                                Map<String, Object> userTokenSource =
                                                    (Map<String, Object>) accessTokenSource.get("user_token");
                                        listener.onResponse(UserToken.fromSourceMap(userTokenSource));
                                    }
                                } else {
                                    listener.onFailure(
                                                listener.onResponse(UserToken.fromSourceMap(userTokenSource));
                                            }
                                        } else {
                                            onFailure.accept(
                                                new IllegalStateException("token document is missing and must be present"));
                                        }
                                    }, e -> {
                                        // if the index or the shard is not there / available we assume that
                                        // the token is not valid
                                        if (isShardNotAvailableException(e)) {
                                            logger.warn("failed to get token [{}] since index is not available", tokenId);
                                            listener.onResponse(null);
                                        } else {
                                            logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e);
                                            listener.onFailure(e);
                                        }
                                    }), client::get);
                            }), listener::onFailure));
                        }
                    }, e -> {
                        // if the index or the shard is not there / available we assume that
                        // the token is not valid
                        if (isShardNotAvailableException(e)) {
                            logger.warn("failed to get token [{}] since index is not available", tokenId);
                            listener.onResponse(null);
                        } else {
                            logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e);
                            listener.onFailure(e);
                        }
                    }), client::get);
                });
                    }
                }, listener::onFailure));
            } else {
                decryptToken(in, cipher, version, listener);
            }
@@ -461,13 +473,14 @@ public final class TokenService extends AbstractComponent {
    public void invalidateAccessToken(String tokenString, ActionListener<Boolean> listener) {
        ensureEnabled();
        if (Strings.isNullOrEmpty(tokenString)) {
            logger.trace("No token-string provided");
            listener.onFailure(new IllegalArgumentException("token must be provided"));
        } else {
            maybeStartTokenRemover();
            try {
                decodeToken(tokenString, ActionListener.wrap(userToken -> {
                    if (userToken == null) {
                        listener.onFailure(malformedTokenException());
                        listener.onFailure(traceLog("invalidate token", tokenString, malformedTokenException()));
                    } else {
                        final long expirationEpochMilli = getExpirationTime().toEpochMilli();
                        indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli);
@@ -488,6 +501,7 @@ public final class TokenService extends AbstractComponent {
    public void invalidateAccessToken(UserToken userToken, ActionListener<Boolean> listener) {
        ensureEnabled();
        if (userToken == null) {
            logger.trace("No access token provided");
            listener.onFailure(new IllegalArgumentException("token must be provided"));
        } else {
            maybeStartTokenRemover();
@@ -499,6 +513,7 @@ public final class TokenService extends AbstractComponent {
    public void invalidateRefreshToken(String refreshToken, ActionListener<Boolean> listener) {
        ensureEnabled();
        if (Strings.isNullOrEmpty(refreshToken)) {
            logger.trace("No refresh token provided");
            listener.onFailure(new IllegalArgumentException("refresh token must be provided"));
        } else {
            maybeStartTokenRemover();
@@ -521,7 +536,8 @@ public final class TokenService extends AbstractComponent {
     */
    private void indexBwcInvalidation(UserToken userToken, ActionListener<Boolean> listener, AtomicInteger attemptCount,
                                      long expirationEpochMilli) {
        if (attemptCount.get() > 5) {
        if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
            logger.warn("Failed to invalidate token [{}] after [{}] attempts", userToken.getId(), attemptCount.get());
            listener.onFailure(invalidGrantException("failed to invalidate token"));
        } else {
            final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken);
@@ -532,14 +548,15 @@ public final class TokenService extends AbstractComponent {
                    .request();
            final String tokenDocId = getTokenDocumentId(userToken);
            final Version version = userToken.getVersion();
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest,
            securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)),
                () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest,
                    ActionListener.<IndexResponse>wrap(indexResponse -> {
                        ActionListener<Boolean> wrappedListener =
                            ActionListener.wrap(ignore -> listener.onResponse(true), listener::onFailure);
                        indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L);
                    }, e -> {
                        Throwable cause = ExceptionsHelper.unwrapCause(e);
                        traceLog("(bwc) invalidate token", tokenDocId, cause);
                        if (cause instanceof VersionConflictEngineException) {
                            // expected since something else could have invalidated
                            ActionListener<Boolean> wrappedListener =
@@ -566,7 +583,8 @@ public final class TokenService extends AbstractComponent {
     */
    private void indexInvalidation(String tokenDocId, Version version, ActionListener<Boolean> listener, AtomicInteger attemptCount,
                                   String srcPrefix, long documentVersion) {
        if (attemptCount.get() > 5) {
        if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
            logger.warn("Failed to invalidate token [{}] after [{}] attempts", tokenDocId, attemptCount.get());
            listener.onFailure(invalidGrantException("failed to invalidate token"));
        } else {
            UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId)
@@ -574,75 +592,79 @@ public final class TokenService extends AbstractComponent {
                .setVersion(documentVersion)
                .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
                .request();
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                    ActionListener.<UpdateResponse>wrap(updateResponse -> {
                        if (updateResponse.getGetResult() != null
                                && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix)
                                && ((Map<String, Object>) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
                                    .containsKey("invalidated")) {
                            final boolean prevInvalidated = (boolean)
                                ((Map<String, Object>) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
                                    .get("invalidated");
                            listener.onResponse(prevInvalidated == false);
                        } else {
                            listener.onResponse(true);
                        }
                    }, e -> {
                        Throwable cause = ExceptionsHelper.unwrapCause(e);
                        if (cause instanceof DocumentMissingException) {
                            if (version.onOrAfter(Version.V_6_2_0)) {
                                // the document should always be there!
                                listener.onFailure(e);
                            } else {
                                listener.onResponse(false);
                            }
                        } else if (cause instanceof VersionConflictEngineException
                                || isShardNotAvailableException(cause)) {
                            attemptCount.incrementAndGet();
                            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                                client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(),
                                ActionListener.<GetResponse>wrap(getResult -> {
                                    if (getResult.isExists()) {
                                        Map<String, Object> source = getResult.getSource();
                                        Map<String, Object> accessTokenSource =
                                            (Map<String, Object>) source.get("access_token");
                                        if (accessTokenSource == null) {
                                            listener.onFailure(new IllegalArgumentException("token document is " +
                                                "missing access_token field"));
                                        } else {
                                            Boolean invalidated = (Boolean) accessTokenSource.get("invalidated");
                                            if (invalidated == null) {
                                                listener.onFailure(new IllegalStateException(
                                                    "token document missing invalidated value"));
                                            } else if (invalidated) {
                                                listener.onResponse(false);
                                            } else {
                                                indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
                                                    getResult.getVersion());
                                            }
                                        }
                                    } else if (version.onOrAfter(Version.V_6_2_0)) {
                                        logger.warn("could not find token document [{}] but there should " +
                                            "be one as token has version [{}]", tokenDocId, version);
                                        listener.onFailure(invalidGrantException("could not invalidate the token"));
                                    } else {
                                        listener.onResponse(false);
                                    }
                                },
                                e1 -> {
                                    if (isShardNotAvailableException(e1)) {
                                        // don't increment count; call again
                                        indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
                                            documentVersion);
                                    } else {
                                        listener.onFailure(e1);
                                    }
                                }), client::get);
                        } else {
            securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)),
                () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                    ActionListener.<UpdateResponse>wrap(updateResponse -> {
                        logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, tokenDocId);
                        if (updateResponse.getGetResult() != null
                                && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix)
                                && ((Map<String, Object>) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
                                    .containsKey("invalidated")) {
                            final boolean prevInvalidated = (boolean)
                                ((Map<String, Object>) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
                                    .get("invalidated");
                            listener.onResponse(prevInvalidated == false);
                        } else {
                            listener.onResponse(true);
                        }
                    }, e -> {
                        Throwable cause = ExceptionsHelper.unwrapCause(e);
                        traceLog("invalidate token", tokenDocId, cause);
                        if (cause instanceof DocumentMissingException) {
                            if (version.onOrAfter(Version.V_6_2_0)) {
                                // the document should always be there!
                                listener.onFailure(e);
                            } else {
                                listener.onResponse(false);
                            }
                        }), client::update));
                        } else if (cause instanceof VersionConflictEngineException
                                || isShardNotAvailableException(cause)) {
                            attemptCount.incrementAndGet();
                            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                                client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(),
                                ActionListener.<GetResponse>wrap(getResult -> {
                                    if (getResult.isExists()) {
                                        Map<String, Object> source = getResult.getSource();
                                        Map<String, Object> accessTokenSource = (Map<String, Object>) source.get("access_token");
                                        Consumer<Exception> onFailure = ex -> listener.onFailure(traceLog("get token", tokenDocId, ex));
                                        if (accessTokenSource == null) {
                                            onFailure.accept(new IllegalArgumentException(
                                                "token document is missing access_token field"));
                                        } else {
                                            Boolean invalidated = (Boolean) accessTokenSource.get("invalidated");
                                            if (invalidated == null) {
                                                onFailure.accept(new IllegalStateException(
                                                    "token document missing invalidated value"));
                                            } else if (invalidated) {
                                                logger.trace("Token [{}] is already invalidated", tokenDocId);
                                                listener.onResponse(false);
                                            } else {
                                                indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
                                                    getResult.getVersion());
                                            }
                                        }
                                    } else if (version.onOrAfter(Version.V_6_2_0)) {
                                        logger.warn("could not find token document [{}] but there should " +
                                            "be one as token has version [{}]", tokenDocId, version);
                                        listener.onFailure(invalidGrantException("could not invalidate the token"));
                                    } else {
                                        listener.onResponse(false);
                                    }
                                },
                                e1 -> {
                                    traceLog("get token", tokenDocId, e1);
                                    if (isShardNotAvailableException(e1)) {
                                        // don't increment count; call again
                                        indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
                                            documentVersion);
                                    } else {
                                        listener.onFailure(e1);
                                    }
                                }), client::get);
                        } else {
                            listener.onFailure(e);
                        }
                    }), client::update));
        }
    }
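// Editor's sketch (not part of this diff): the optimistic-concurrency loop indexInvalidation
// implements. The update is attempted at an expected document version; on a version conflict the
// current state is re-read before deciding to report "already invalidated" or retry at the fresh
// version. The VersionedStore interface and method names are illustrative stand-ins for the
// security index client calls above.
import java.util.concurrent.atomic.AtomicInteger;

class InvalidationSketch {
    interface VersionedStore {
        long currentVersion(String docId);
        boolean isInvalidated(String docId);
        // returns false on a version conflict (someone else wrote first)
        boolean markInvalidated(String docId, long expectedVersion);
    }

    static final int MAX_RETRY_ATTEMPTS = 5;

    boolean invalidate(VersionedStore store, String docId, long expectedVersion, AtomicInteger attempts) {
        if (attempts.get() > MAX_RETRY_ATTEMPTS) {
            throw new IllegalStateException("failed to invalidate [" + docId + "]");
        }
        if (store.markInvalidated(docId, expectedVersion)) {
            return true; // this caller performed the invalidation
        }
        attempts.incrementAndGet();
        if (store.isInvalidated(docId)) {
            return false; // a concurrent request invalidated it first
        }
        return invalidate(store, docId, store.currentVersion(docId), attempts); // retry at the fresh version
    }
}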
@@ -663,7 +685,8 @@ public final class TokenService extends AbstractComponent {

    private void findTokenFromRefreshToken(String refreshToken, ActionListener<Tuple<SearchResponse, AtomicInteger>> listener,
                                           AtomicInteger attemptCount) {
        if (attemptCount.get() > 5) {
        if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
            logger.warn("Failed to find token for refresh token [{}] after [{}] attempts", refreshToken, attemptCount.get());
            listener.onFailure(invalidGrantException("could not refresh the requested token"));
        } else {
            SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
@@ -673,30 +696,37 @@ public final class TokenService extends AbstractComponent {
                .setVersion(true)
                .request();

            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
            if (securityIndex.isAvailable() == false) {
                logger.debug("security index is not available to find token from refresh token, retrying");
                attemptCount.incrementAndGet();
                findTokenFromRefreshToken(refreshToken, listener, attemptCount);
            } else {
                Consumer<Exception> onFailure = ex -> listener.onFailure(traceLog("find by refresh token", refreshToken, ex));
                securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                    executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                        ActionListener.<SearchResponse>wrap(searchResponse -> {
                            if (searchResponse.isTimedOut()) {
                                attemptCount.incrementAndGet();
                                findTokenFromRefreshToken(refreshToken, listener, attemptCount);
                            } else if (searchResponse.getHits().getHits().length < 1) {
                                logger.info("could not find token document with refresh_token [{}]", refreshToken);
                                listener.onFailure(invalidGrantException("could not refresh the requested token"));
                            } else if (searchResponse.getHits().getHits().length > 1) {
                                listener.onFailure(new IllegalStateException("multiple tokens share the same refresh token"));
                            } else {
                                listener.onResponse(new Tuple<>(searchResponse, attemptCount));
                            }
                        }, e -> {
                            if (isShardNotAvailableException(e)) {
                                logger.debug("failed to search for token document, retrying", e);
                                attemptCount.incrementAndGet();
                                findTokenFromRefreshToken(refreshToken, listener, attemptCount);
                            } else {
                                listener.onFailure(e);
                            }
                        }),
                        client::search));
                        ActionListener.<SearchResponse>wrap(searchResponse -> {
                            if (searchResponse.isTimedOut()) {
                                attemptCount.incrementAndGet();
                                findTokenFromRefreshToken(refreshToken, listener, attemptCount);
                            } else if (searchResponse.getHits().getHits().length < 1) {
                                logger.info("could not find token document with refresh_token [{}]", refreshToken);
                                onFailure.accept(invalidGrantException("could not refresh the requested token"));
                            } else if (searchResponse.getHits().getHits().length > 1) {
                                onFailure.accept(new IllegalStateException("multiple tokens share the same refresh token"));
                            } else {
                                listener.onResponse(new Tuple<>(searchResponse, attemptCount));
                            }
                        }, e -> {
                            if (isShardNotAvailableException(e)) {
                                logger.debug("failed to search for token document, retrying", e);
                                attemptCount.incrementAndGet();
                                findTokenFromRefreshToken(refreshToken, listener, attemptCount);
                            } else {
                                onFailure.accept(e);
                            }
                        }),
                        client::search));
            }
        }
    }
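// Editor's sketch (illustrative only): the decision table the refresh-token search above encodes.
// Timed-out searches and shard-not-available failures are transient, so they increment the shared
// attempt counter and re-run; zero hits means an invalid grant; multiple hits violate the
// uniqueness invariant for refresh tokens.
class RefreshLookupOutcome {
    enum Outcome { RETRY, INVALID_GRANT, ILLEGAL_STATE, PROCEED }

    static Outcome classify(boolean timedOut, boolean shardUnavailable, int hits) {
        if (timedOut || shardUnavailable) {
            return Outcome.RETRY;          // transient: findTokenFromRefreshToken calls itself again
        } else if (hits < 1) {
            return Outcome.INVALID_GRANT;  // no token document owns this refresh token
        } else if (hits > 1) {
            return Outcome.ILLEGAL_STATE;  // a refresh token must map to exactly one token document
        } else {
            return Outcome.PROCEED;        // exactly one hit: hand the response to the caller
        }
    }
}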
@@ -707,62 +737,64 @@ public final class TokenService extends AbstractComponent {
     */
    private void innerRefresh(String tokenDocId, Authentication userAuth, ActionListener<Tuple<UserToken, String>> listener,
                              AtomicInteger attemptCount) {
        if (attemptCount.getAndIncrement() > 5) {
        if (attemptCount.getAndIncrement() > MAX_RETRY_ATTEMPTS) {
            logger.warn("Failed to refresh token for doc [{}] after [{}] attempts", tokenDocId, attemptCount.get());
            listener.onFailure(invalidGrantException("could not refresh the requested token"));
        } else {
            Consumer<Exception> onFailure = ex -> listener.onFailure(traceLog("refresh token", tokenDocId, ex));
            GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request();
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest,
                ActionListener.<GetResponse>wrap(response -> {
                    if (response.isExists()) {
                        final Map<String, Object> source = response.getSource();
                        final Optional<ElasticsearchSecurityException> invalidSource = checkTokenDocForRefresh(source, userAuth);
                ActionListener.<GetResponse>wrap(response -> {
                    if (response.isExists()) {
                        final Map<String, Object> source = response.getSource();
                        final Optional<ElasticsearchSecurityException> invalidSource = checkTokenDocForRefresh(source, userAuth);

                        if (invalidSource.isPresent()) {
                            listener.onFailure(invalidSource.get());
                        } else {
                            final Map<String, Object> userTokenSource = (Map<String, Object>)
                                ((Map<String, Object>) source.get("access_token")).get("user_token");
                            final String authString = (String) userTokenSource.get("authentication");
                            final Integer version = (Integer) userTokenSource.get("version");
                            final Map<String, Object> metadata = (Map<String, Object>) userTokenSource.get("metadata");
                        if (invalidSource.isPresent()) {
                            onFailure.accept(invalidSource.get());
                        } else {
                            final Map<String, Object> userTokenSource = (Map<String, Object>)
                                ((Map<String, Object>) source.get("access_token")).get("user_token");
                            final String authString = (String) userTokenSource.get("authentication");
                            final Integer version = (Integer) userTokenSource.get("version");
                            final Map<String, Object> metadata = (Map<String, Object>) userTokenSource.get("metadata");

                            Version authVersion = Version.fromId(version);
                            try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(authString))) {
                                in.setVersion(authVersion);
                                Authentication authentication = new Authentication(in);
                                UpdateRequest updateRequest =
                                    client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId)
                                        .setVersion(response.getVersion())
                                        .setDoc("refresh_token", Collections.singletonMap("refreshed", true))
                                        .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
                                        .request();
                                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest,
                                    ActionListener.<UpdateResponse>wrap(
                                        updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true),
                                        e -> {
                                            Throwable cause = ExceptionsHelper.unwrapCause(e);
                                            if (cause instanceof VersionConflictEngineException ||
                                                isShardNotAvailableException(e)) {
                                                innerRefresh(tokenDocId, userAuth,
                                                    listener, attemptCount);
                                            } else {
                                                listener.onFailure(e);
                                            }
                                        }),
                                    client::update);
                            }
                            Version authVersion = Version.fromId(version);
                            try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(authString))) {
                                in.setVersion(authVersion);
                                Authentication authentication = new Authentication(in);
                                UpdateRequest updateRequest =
                                    client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId)
                                        .setVersion(response.getVersion())
                                        .setDoc("refresh_token", Collections.singletonMap("refreshed", true))
                                        .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
                                        .request();
                                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest,
                                    ActionListener.<UpdateResponse>wrap(
                                        updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true),
                                        e -> {
                                            Throwable cause = ExceptionsHelper.unwrapCause(e);
                                            if (cause instanceof VersionConflictEngineException ||
                                                isShardNotAvailableException(e)) {
                                                innerRefresh(tokenDocId, userAuth,
                                                    listener, attemptCount);
                                            } else {
                                                onFailure.accept(e);
                                            }
                                        }),
                                    client::update);
                            }
                        }
                    } else {
                        logger.info("could not find token document [{}] for refresh", tokenDocId);
                        listener.onFailure(invalidGrantException("could not refresh the requested token"));
                    }
                }, e -> {
                    if (isShardNotAvailableException(e)) {
                        innerRefresh(tokenDocId, userAuth, listener, attemptCount);
                    } else {
                        listener.onFailure(e);
                    }
                }), client::get);
                    } else {
                        logger.info("could not find token document [{}] for refresh", tokenDocId);
                        onFailure.accept(invalidGrantException("could not refresh the requested token"));
                    }
                }, e -> {
                    if (isShardNotAvailableException(e)) {
                        innerRefresh(tokenDocId, userAuth, listener, attemptCount);
                    } else {
                        listener.onFailure(e);
                    }
                }), client::get);
        }
    }
@@ -831,22 +863,22 @@ public final class TokenService extends AbstractComponent {

        if (Strings.isNullOrEmpty(realmName)) {
            listener.onFailure(new IllegalArgumentException("Realm name is required"));
            return;
        }

        final Instant now = clock.instant();
        final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
        } else if (securityIndex.isAvailable() == false) {
            listener.onResponse(Collections.emptyList());
        } else {
            final Instant now = clock.instant();
            final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
                .filter(QueryBuilders.termQuery("doc_type", "token"))
                .filter(QueryBuilders.termQuery("access_token.realm", realmName))
                .filter(QueryBuilders.boolQuery()
                    .should(QueryBuilders.boolQuery()
                        .must(QueryBuilders.termQuery("access_token.invalidated", false))
                        .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli()))
                    )
                    .should(QueryBuilders.termQuery("refresh_token.invalidated", false))
                    .should(QueryBuilders.boolQuery()
                        .must(QueryBuilders.termQuery("access_token.invalidated", false))
                        .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli()))
                    )
                    .should(QueryBuilders.termQuery("refresh_token.invalidated", false))
                );

            final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
                .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                .setQuery(boolQuery)
                .setVersion(false)
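// Editor's note (not part of this diff): for readers less familiar with QueryBuilders, the bool
// query assembled above serializes to roughly the JSON held in the string constant below. This is
// a sketch; field order will differ and <realmName>/<now> are placeholders for runtime values.
class ActiveRealmTokensQuerySketch {
    static final String APPROXIMATE_JSON =
        "{ \"bool\": { \"filter\": [" +
        "  { \"term\": { \"doc_type\": \"token\" } }," +
        "  { \"term\": { \"access_token.realm\": \"<realmName>\" } }," +
        "  { \"bool\": { \"should\": [" +
        "    { \"bool\": { \"must\": [" +
        "      { \"term\": { \"access_token.invalidated\": false } }," +
        "      { \"range\": { \"access_token.user_token.expiration_time\": { \"gte\": \"<now>\" } } }" +
        "    ] } }," +
        "    { \"term\": { \"refresh_token.invalidated\": false } }" +
        "  ] } }" +
        "] } }";
}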
@@ -854,9 +886,10 @@ public final class TokenService extends AbstractComponent {
                .setFetchSource(true)
                .request();

            final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
                ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit));
            final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit));
        }
    }

    private Tuple<UserToken, String> parseHit(SearchHit hit) {
@@ -923,14 +956,17 @@ public final class TokenService extends AbstractComponent {
     */
    private void checkIfTokenIsRevoked(UserToken userToken, ActionListener<UserToken> listener) {
        if (securityIndex.indexExists() == false) {
            // index doesn't exist so the token is considered valid.
            // index doesn't exist so the token is considered valid. it is important to note that
            // we do not use isAvailable as the lack of a shard being available is not equivalent
            // to the index not existing in the case of revocation checking.
            listener.onResponse(userToken);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                MultiGetRequest mGetRequest = client.prepareMultiGet()
                    .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken))
                    .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken))
                    .request();
                Consumer<Exception> onFailure = ex -> listener.onFailure(traceLog("check token state", userToken.getId(), ex));
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    mGetRequest,
                    new ActionListener<MultiGetResponse>() {
@@ -941,26 +977,26 @@ public final class TokenService extends AbstractComponent {
                        if (itemResponse[0].isFailed()) {
                            onFailure(itemResponse[0].getFailure().getFailure());
                        } else if (itemResponse[0].getResponse().isExists()) {
                            listener.onFailure(expiredTokenException());
                            onFailure.accept(expiredTokenException());
                        } else if (itemResponse[1].isFailed()) {
                            onFailure(itemResponse[1].getFailure().getFailure());
                        } else if (itemResponse[1].getResponse().isExists()) {
                            Map<String, Object> source = itemResponse[1].getResponse().getSource();
                            Map<String, Object> accessTokenSource = (Map<String, Object>) source.get("access_token");
                            if (accessTokenSource == null) {
                                listener.onFailure(new IllegalStateException("token document is missing access_token field"));
                                onFailure.accept(new IllegalStateException("token document is missing access_token field"));
                            } else {
                                Boolean invalidated = (Boolean) accessTokenSource.get("invalidated");
                                if (invalidated == null) {
                                    listener.onFailure(new IllegalStateException("token document is missing invalidated field"));
                                    onFailure.accept(new IllegalStateException("token document is missing invalidated field"));
                                } else if (invalidated) {
                                    listener.onFailure(expiredTokenException());
                                    onFailure.accept(expiredTokenException());
                                } else {
                                    listener.onResponse(userToken);
                                }
                            }
                        } else if (userToken.getVersion().onOrAfter(Version.V_6_2_0)) {
                            listener.onFailure(new IllegalStateException("token document is missing and must be present"));
                            onFailure.accept(new IllegalStateException("token document is missing and must be present"));
                        } else {
                            listener.onResponse(userToken);
                        }
@@ -1122,11 +1158,31 @@ public final class TokenService extends AbstractComponent {
     */
    private static ElasticsearchSecurityException invalidGrantException(String detail) {
        ElasticsearchSecurityException e =
            new ElasticsearchSecurityException("invalid_grant", RestStatus.BAD_REQUEST);
        e.addHeader("error_description", detail);
        return e;
    }

    /**
     * Logs an exception at TRACE level (if enabled)
     */
    private <E extends Throwable> E traceLog(String action, String identifier, E exception) {
        if (logger.isTraceEnabled()) {
            if (exception instanceof ElasticsearchException) {
                final ElasticsearchException esEx = (ElasticsearchException) exception;
                final Object detail = esEx.getHeader("error_description");
                if (detail != null) {
                    logger.trace("Failure in [{}] for id [{}] - [{}] [{}]", action, identifier, detail, esEx.getDetailedMessage());
                } else {
                    logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, esEx.getDetailedMessage());
                }
            } else {
                logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, exception.toString());
            }
        }
        return exception;
    }

    boolean isExpiredTokenException(ElasticsearchSecurityException e) {
        final List<String> headers = e.getHeader("WWW-Authenticate");
        return headers != null && headers.stream().anyMatch(EXPIRED_TOKEN_WWW_AUTH_VALUE::equals);
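// Editor's sketch (not part of this diff): the pattern traceLog enables. Because it returns the
// exception it was given, a failure can be logged and propagated in a single expression, and
// wrapping a Consumer<Exception> once at the top of an async chain keeps every error branch
// consistent. The names below are illustrative stand-ins, not the shipped implementation.
import java.util.function.Consumer;

class TraceLogSketch {
    private boolean traceEnabled = true;

    private <E extends Throwable> E traceLog(String action, String id, E exception) {
        if (traceEnabled) {
            System.err.println("Failure in [" + action + "] for id [" + id + "] - [" + exception + "]");
        }
        return exception; // returned unchanged so callers can log-and-fail in one expression
    }

    void example(Consumer<Exception> listenerOnFailure) {
        // one wrapper reused by every failure branch of an async operation
        Consumer<Exception> onFailure = ex -> listenerOnFailure.accept(traceLog("decode token", "token-id", ex));
        onFailure.accept(new IllegalStateException("token document is missing the access_token field"));
    }
}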
@@ -118,8 +118,7 @@ public class NativeUsersStore extends AbstractComponent {
            }
        };

        if (securityIndex.indexExists() == false) {
            // TODO remove this short circuiting and fix tests that fail without this!
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(Collections.emptyList());
        } else if (userNames.length == 1) { // optimization for single user lookup
            final String username = userNames[0];
@@ -127,7 +126,7 @@ public class NativeUsersStore extends AbstractComponent {
                (uap) -> listener.onResponse(uap == null ? Collections.emptyList() : Collections.singletonList(uap.user())),
                handleException));
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                final QueryBuilder query;
                if (userNames == null || userNames.length == 0) {
                    query = QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE);
@@ -155,10 +154,10 @@ public class NativeUsersStore extends AbstractComponent {
    }

    void getUserCount(final ActionListener<Long> listener) {
        if (securityIndex.indexExists() == false) {
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(0L);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareSearch(SECURITY_INDEX_NAME)
                        .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE))
@@ -182,11 +181,10 @@ public class NativeUsersStore extends AbstractComponent {
     * Async method to retrieve a user and their password
     */
    private void getUserAndPassword(final String user, final ActionListener<UserAndPassword> listener) {
        if (securityIndex.indexExists() == false) {
            // TODO remove this short circuiting and fix tests that fail without this!
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(null);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareGet(SECURITY_INDEX_NAME,
                        INDEX_TYPE, getIdForUser(USER_DOC_TYPE, user)).request(),
@@ -459,16 +457,19 @@ public class NativeUsersStore extends AbstractComponent {
    }

    public void deleteUser(final DeleteUserRequest deleteUserRequest, final ActionListener<Boolean> listener) {
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME,
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(false);
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME,
                    INDEX_TYPE, getIdForUser(USER_DOC_TYPE, deleteUserRequest.username())).request();
            request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy());
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy());
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                    new ActionListener<DeleteResponse>() {
                        @Override
                        public void onResponse(DeleteResponse deleteResponse) {
                            clearRealmCache(deleteUserRequest.username(), listener,
                                deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
                        }

                        @Override
@@ -476,7 +477,8 @@ public class NativeUsersStore extends AbstractComponent {
                            listener.onFailure(e);
                        }
                    }, client::delete);
        });
            });
        }
    }

    /**
@@ -498,11 +500,10 @@ public class NativeUsersStore extends AbstractComponent {
    }

    void getReservedUserInfo(String username, ActionListener<ReservedUserInfo> listener) {
        if (securityIndex.indexExists() == false) {
            // TODO remove this short circuiting and fix tests that fail without this!
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(null);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareGet(SECURITY_INDEX_NAME, INDEX_TYPE,
                        getIdForUser(RESERVED_USER_TYPE, username)).request(),
@@ -541,49 +542,53 @@ public class NativeUsersStore extends AbstractComponent {
    }

    void getAllReservedUserInfo(ActionListener<Map<String, ReservedUserInfo>> listener) {
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                client.prepareSearch(SECURITY_INDEX_NAME)
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(Collections.emptyMap());
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareSearch(SECURITY_INDEX_NAME)
                        .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE))
                        .setFetchSource(true).request(),
                new ActionListener<SearchResponse>() {
                    @Override
                    public void onResponse(SearchResponse searchResponse) {
                        Map<String, ReservedUserInfo> userInfos = new HashMap<>();
                        assert searchResponse.getHits().getTotalHits() <= 10 :
                    new ActionListener<SearchResponse>() {
                        @Override
                        public void onResponse(SearchResponse searchResponse) {
                            Map<String, ReservedUserInfo> userInfos = new HashMap<>();
                            assert searchResponse.getHits().getTotalHits() <= 10 :
                                "there are more than 10 reserved users we need to change this to retrieve them all!";
                        for (SearchHit searchHit : searchResponse.getHits().getHits()) {
                            Map<String, Object> sourceMap = searchHit.getSourceAsMap();
                            String password = (String) sourceMap.get(Fields.PASSWORD.getPreferredName());
                            Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName());
                            final String id = searchHit.getId();
                            assert id != null && id.startsWith(RESERVED_USER_TYPE) :
                            for (SearchHit searchHit : searchResponse.getHits().getHits()) {
                                Map<String, Object> sourceMap = searchHit.getSourceAsMap();
                                String password = (String) sourceMap.get(Fields.PASSWORD.getPreferredName());
                                Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName());
                                final String id = searchHit.getId();
                                assert id != null && id.startsWith(RESERVED_USER_TYPE) :
                                    "id [" + id + "] does not start with reserved-user prefix";
                            final String username = id.substring(RESERVED_USER_TYPE.length() + 1);
                            if (password == null) {
                                listener.onFailure(new IllegalStateException("password hash must not be null!"));
                                return;
                            } else if (enabled == null) {
                                listener.onFailure(new IllegalStateException("enabled must not be null!"));
                                return;
                                final String username = id.substring(RESERVED_USER_TYPE.length() + 1);
                                if (password == null) {
                                    listener.onFailure(new IllegalStateException("password hash must not be null!"));
                                    return;
                                } else if (enabled == null) {
                                    listener.onFailure(new IllegalStateException("enabled must not be null!"));
                                    return;
                                } else {
                                    userInfos.put(username, new ReservedUserInfo(password.toCharArray(), enabled, false));
                                }
                            }
                            listener.onResponse(userInfos);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            if (e instanceof IndexNotFoundException) {
                                logger.trace("could not retrieve built in users since security index does not exist", e);
                                listener.onResponse(Collections.emptyMap());
                            } else {
                                userInfos.put(username, new ReservedUserInfo(password.toCharArray(), enabled, false));
                                logger.error("failed to retrieve built in users", e);
                                listener.onFailure(e);
                            }
                        }
                            listener.onResponse(userInfos);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            if (e instanceof IndexNotFoundException) {
                                logger.trace("could not retrieve built in users since security index does not exist", e);
                                listener.onResponse(Collections.emptyMap());
                            } else {
                                logger.error("failed to retrieve built in users", e);
                                listener.onFailure(e);
                            }
                        }
                    }, client::search));
                    }, client::search));
        }
    }

    private <Response> void clearRealmCache(String username, ActionListener<Response> listener, Response response) {

@@ -220,32 +220,32 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol
        });
    }

    private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) throws IOException {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                "Security index is not on the current version - the native realm will not be operational until " +
                "the upgrade API is run on the security index"));
            return;
        }
        executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
            client.prepareDelete(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(request.getName()))
    private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(false);
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareDelete(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(request.getName()))
                        .setRefreshPolicy(request.getRefreshPolicy())
                        .request(),
                    new ActionListener<DeleteResponse>() {
                    new ActionListener<DeleteResponse>() {

                        @Override
                        public void onResponse(DeleteResponse deleteResponse) {
                            boolean deleted = deleteResponse.getResult() == DELETED;
                            listener.onResponse(deleted);
                        }
                        @Override
                        public void onResponse(DeleteResponse deleteResponse) {
                            boolean deleted = deleteResponse.getResult() == DELETED;
                            listener.onResponse(deleted);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e);
                            listener.onFailure(e);
                        @Override
                        public void onFailure(Exception e) {
                            logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e);
                            listener.onFailure(e);

                        }
                    }, client::delete);
                        }
                    }, client::delete);
            });
        }
    }

    /**
@@ -301,7 +301,7 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol
     * </ul>
     */
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        if (securityIndex.indexExists() == false) {
        if (securityIndex.isAvailable() == false) {
            reportStats(listener, Collections.emptyList());
        } else {
            getMappings(ActionListener.wrap(mappings -> reportStats(listener, mappings), listener::onFailure));

@@ -88,13 +88,15 @@ public class NativePrivilegeStore extends AbstractComponent {

    public void getPrivileges(Collection<String> applications, Collection<String> names,
                              ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener) {
        if (applications != null && applications.size() == 1 && names != null && names.size() == 1) {
        if (securityIndexManager.isAvailable() == false) {
            listener.onResponse(Collections.emptyList());
        } else if (applications != null && applications.size() == 1 && names != null && names.size() == 1) {
            getPrivilege(Objects.requireNonNull(Iterables.get(applications, 0)), Objects.requireNonNull(Iterables.get(names, 0)),
                ActionListener.wrap(privilege ->
                    listener.onResponse(privilege == null ? Collections.emptyList() : Collections.singletonList(privilege)),
                    listener::onFailure));
        } else {
            securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, () -> {
                final QueryBuilder query;
                final TermQueryBuilder typeQuery = QueryBuilders
                    .termQuery(ApplicationPrivilegeDescriptor.Fields.TYPE.getPreferredName(), DOC_TYPE_VALUE);
@@ -134,33 +136,37 @@ public class NativePrivilegeStore extends AbstractComponent {
        return collection == null || collection.isEmpty();
    }

    public void getPrivilege(String application, String name, ActionListener<ApplicationPrivilegeDescriptor> listener) {
        securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure,
            () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                client.prepareGet(SECURITY_INDEX_NAME, "doc", toDocId(application, name)).request(),
                new ActionListener<GetResponse>() {
                    @Override
                    public void onResponse(GetResponse response) {
                        if (response.isExists()) {
                            listener.onResponse(buildPrivilege(response.getId(), response.getSourceAsBytesRef()));
                        } else {
                            listener.onResponse(null);
    void getPrivilege(String application, String name, ActionListener<ApplicationPrivilegeDescriptor> listener) {
        if (securityIndexManager.isAvailable() == false) {
            listener.onResponse(null);
        } else {
            securityIndexManager.checkIndexVersionThenExecute(listener::onFailure,
                () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareGet(SECURITY_INDEX_NAME, "doc", toDocId(application, name)).request(),
                    new ActionListener<GetResponse>() {
                        @Override
                        public void onResponse(GetResponse response) {
                            if (response.isExists()) {
                                listener.onResponse(buildPrivilege(response.getId(), response.getSourceAsBytesRef()));
                            } else {
                                listener.onResponse(null);
                            }
                        }
                    }

                    @Override
                    public void onFailure(Exception e) {
                        // if the index or the shard is not there / available we just claim the privilege is not there
                        if (TransportActions.isShardNotAvailableException(e)) {
                            logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name), e);
                            listener.onResponse(null);
                        } else {
                            logger.error(new ParameterizedMessage("failed to load privilege [{}]", name), e);
                            listener.onFailure(e);
                        @Override
                        public void onFailure(Exception e) {
                            // if the index or the shard is not there / available we just claim the privilege is not there
                            if (TransportActions.isShardNotAvailableException(e)) {
                                logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name), e);
                                listener.onResponse(null);
                            } else {
                                logger.error(new ParameterizedMessage("failed to load privilege [{}]", name), e);
                                listener.onFailure(e);
                            }
                        }
                    }
                },
                client::get));
                    },
                    client::get));
        }
    }

    public void putPrivileges(Collection<ApplicationPrivilegeDescriptor> privileges, WriteRequest.RefreshPolicy refreshPolicy,
@@ -200,23 +206,27 @@ public class NativePrivilegeStore extends AbstractComponent {

    public void deletePrivileges(String application, Collection<String> names, WriteRequest.RefreshPolicy refreshPolicy,
                                 ActionListener<Map<String, List<String>>> listener) {
        securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            ActionListener<DeleteResponse> groupListener = new GroupedActionListener<>(
                ActionListener.wrap(responses -> {
                    final Map<String, List<String>> deletedNames = responses.stream()
                        .filter(r -> r.getResult() == DocWriteResponse.Result.DELETED)
                        .map(r -> r.getId())
                        .map(NativePrivilegeStore::nameFromDocId)
                        .collect(TUPLES_TO_MAP);
                    clearRolesCache(listener, deletedNames);
                }, listener::onFailure), names.size(), Collections.emptyList());
            for (String name : names) {
                ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareDelete(SECURITY_INDEX_NAME, "doc", toDocId(application, name))
                        .setRefreshPolicy(refreshPolicy)
                        .request(), groupListener, client::delete);
            }
        });
        if (securityIndexManager.isAvailable() == false) {
            listener.onResponse(Collections.emptyMap());
        } else {
            securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, () -> {
                ActionListener<DeleteResponse> groupListener = new GroupedActionListener<>(
                    ActionListener.wrap(responses -> {
                        final Map<String, List<String>> deletedNames = responses.stream()
                            .filter(r -> r.getResult() == DocWriteResponse.Result.DELETED)
                            .map(r -> r.getId())
                            .map(NativePrivilegeStore::nameFromDocId)
                            .collect(TUPLES_TO_MAP);
                        clearRolesCache(listener, deletedNames);
                    }, listener::onFailure), names.size(), Collections.emptyList());
                for (String name : names) {
                    ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                        client.prepareDelete(SECURITY_INDEX_NAME, "doc", toDocId(application, name))
                            .setRefreshPolicy(refreshPolicy)
                            .request(), groupListener, client::delete);
                }
            });
        }
    }
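// Editor's sketch (not part of this diff): the fan-out/collect pattern deletePrivileges relies on.
// One grouped listener waits for N individual delete responses and fires the caller's callback
// exactly once, with the aggregated results, after the last response arrives. Plain-Java stand-ins
// are used here instead of the real GroupedActionListener; all names are illustrative.
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

class GroupedListenerSketch<T> {
    private final int expected;
    private final List<T> collected = new CopyOnWriteArrayList<>();
    private final AtomicInteger received = new AtomicInteger();
    private final Consumer<List<T>> onAllResponses;

    GroupedListenerSketch(int expected, Consumer<List<T>> onAllResponses) {
        this.expected = expected;
        this.onAllResponses = onAllResponses;
    }

    // invoked once per delete response; the final arrival triggers the aggregate callback
    void onResponse(T response) {
        collected.add(response);
        if (received.incrementAndGet() == expected) {
            onAllResponses.accept(collected);
        }
    }
}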

    private <T> void clearRolesCache(ActionListener<T> listener, T value) {

@@ -116,7 +116,7 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer<Se
        } else if (names != null && names.size() == 1) {
            getRoleDescriptor(Objects.requireNonNull(names.iterator().next()), listener);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                QueryBuilder query;
                if (names == null || names.isEmpty()) {
                    query = QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE);
@@ -144,16 +144,19 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer<Se
    }

    public void deleteRole(final DeleteRoleRequest deleteRoleRequest, final ActionListener<Boolean> listener) {
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            DeleteRequest request = client.prepareDelete(SecurityIndexManager.SECURITY_INDEX_NAME,
        if (securityIndex.isAvailable() == false) {
            listener.onResponse(false);
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                DeleteRequest request = client.prepareDelete(SecurityIndexManager.SECURITY_INDEX_NAME,
                    ROLE_DOC_TYPE, getIdForUser(deleteRoleRequest.name())).request();
            request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
                    new ActionListener<DeleteResponse>() {
                        @Override
                        public void onResponse(DeleteResponse deleteResponse) {
                            clearRoleCache(deleteRoleRequest.name(), listener,
                                deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
                        }

                        @Override
@@ -162,7 +165,8 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer<Se
                            listener.onFailure(e);
                        }
                    }, client::delete);
        });
            });
        }
    }

    public void putRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener<Boolean> listener) {
@@ -210,13 +214,13 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer<Se

    public void usageStats(ActionListener<Map<String, Object>> listener) {
        Map<String, Object> usageStats = new HashMap<>(3);
        if (securityIndex.indexExists() == false) {
        if (securityIndex.isAvailable() == false) {
            usageStats.put("size", 0L);
            usageStats.put("fls", false);
            usageStats.put("dls", false);
            listener.onResponse(usageStats);
        } else {
            securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareMultiSearch()
                        .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
@@ -298,7 +302,7 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer<Se
    }

    private void executeGetRoleRequest(String role, ActionListener<GetResponse> listener) {
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
        securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                client.prepareGet(SECURITY_INDEX_NAME,
                    ROLE_DOC_TYPE, getIdForUser(role)).request(),

@@ -104,6 +104,7 @@ public final class RestGetTokenAction extends SecurityBaseRestHandler {

        @Override
        public void onFailure(Exception e) {
            logger.debug("Failed to create token", e);
            if (e instanceof ActionRequestValidationException) {
                ActionRequestValidationException validationException = (ActionRequestValidationException) e;
                final TokenRequestError error;

@@ -253,6 +253,23 @@ public class SecurityIndexManager extends AbstractComponent implements ClusterSt
        }
    }

    /**
     * Validates that the security index is up to date and does not need to be migrated. If it is not, the
     * consumer is called with an exception. If the security index is up to date, the runnable will
     * be executed. <b>NOTE:</b> this method does not check the availability of the index; this check
     * is left to the caller so that this condition can be handled appropriately.
     */
    public void checkIndexVersionThenExecute(final Consumer<Exception> consumer, final Runnable andThen) {
        final State indexState = this.indexState; // use a local copy so all checks execute against the same state!
        if (indexState.indexExists && indexState.isIndexUpToDate == false) {
            consumer.accept(new IllegalStateException(
                "Security index is not on the current version. Security features relying on the index will not be available until " +
                "the upgrade API is run on the security index"));
        } else {
            andThen.run();
        }
    }
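// Editor's sketch of the caller contract the javadoc above describes (illustrative names only):
// availability and version are deliberately separate checks, because an unavailable shard should
// usually degrade to an empty/null response while an out-of-date index must fail loudly until the
// upgrade API has been run.
import java.util.function.Consumer;

class SecurityIndexCallerSketch {
    interface Index {
        boolean isAvailable();
        void checkIndexVersionThenExecute(Consumer<Exception> onFailure, Runnable andThen);
    }

    void lookup(Index securityIndex, Consumer<String> onResult, Consumer<Exception> onFailure) {
        if (securityIndex.isAvailable() == false) {
            onResult.accept(null); // degrade gracefully: treat "no shard" as "nothing found"
        } else {
            // fails the listener if the index exists but is not on the current mapping version
            securityIndex.checkIndexVersionThenExecute(onFailure, () -> onResult.accept("doc"));
        }
    }
}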

    /**
     * Prepares the index by creating it if it doesn't exist or updating the mappings if the mappings are
     * out of date. After any tasks have been executed, the runnable is then executed.

@@ -152,7 +152,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase {
    public static void initDefaultSettings() {
        if (SECURITY_DEFAULT_SETTINGS == null) {
            SECURITY_DEFAULT_SETTINGS =
                new SecuritySettingsSource(defaultMaxNumberOfNodes(), randomBoolean(), createTempDir(), Scope.SUITE);
                new SecuritySettingsSource(randomBoolean(), createTempDir(), Scope.SUITE);
        }
    }

@@ -367,7 +367,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase {
    private class CustomSecuritySettingsSource extends SecuritySettingsSource {

        private CustomSecuritySettingsSource(boolean sslEnabled, Path configDir, Scope scope) {
            super(maxNumberOfNodes(), sslEnabled, configDir, scope);
            super(sslEnabled, configDir, scope);
        }

        @Override

@ -17,7 +17,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
|
|||
import org.elasticsearch.index.reindex.ReindexPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase.Scope;
|
||||
import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration;
|
||||
import org.elasticsearch.transport.Netty4Plugin;
|
||||
import org.elasticsearch.xpack.core.XPackClientPlugin;
|
||||
import org.elasticsearch.xpack.core.XPackSettings;
@@ -49,13 +48,10 @@ import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile;

 /**
  * {@link org.elasticsearch.test.NodeConfigurationSource} subclass that allows to set all needed settings for x-pack security.
- * Unicast discovery is configured through {@link org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration.UnicastZen},
- * also x-pack is installed with all the needed configuration and files.
+ * X-pack is installed with all the needed configuration and files.
  * To avoid conflicts, every cluster should have its own instance of this class as some configuration files need to be created.
  */
-public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.UnicastZen {
-
-    public static final Settings DEFAULT_SETTINGS = Settings.EMPTY;
+public class SecuritySettingsSource extends NodeConfigurationSource {

     public static final String TEST_USER_NAME = "test_user";
     public static final String TEST_PASSWORD_HASHED =
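As the class javadoc stresses, every test cluster needs its own SecuritySettingsSource instance because configuration files are written under the given parent folder. A sketch of the intended usage with the new, node-count-free constructor (argument values are illustrative only):

    SecuritySettingsSource mainClusterSource = new SecuritySettingsSource(true, createTempDir(), Scope.SUITE);
    SecuritySettingsSource otherClusterSource = new SecuritySettingsSource(true, createTempDir(), Scope.SUITE); // separate instance and config dir per cluster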
@@ -93,13 +89,11 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
     /**
      * Creates a new {@link org.elasticsearch.test.NodeConfigurationSource} for the security configuration.
      *
-     * @param numOfNodes the number of nodes for proper unicast configuration (can be more than actually available)
      * @param sslEnabled whether ssl is enabled
      * @param parentFolder the parent folder that will contain all of the configuration files that need to be created
      * @param scope the scope of the test that is requiring an instance of SecuritySettingsSource
      */
-    public SecuritySettingsSource(int numOfNodes, boolean sslEnabled, Path parentFolder, Scope scope) {
-        super(numOfNodes, DEFAULT_SETTINGS);
+    public SecuritySettingsSource(boolean sslEnabled, Path parentFolder, Scope scope) {
         this.parentFolder = parentFolder;
         this.subfolderPrefix = scope.name();
         this.sslEnabled = sslEnabled;
@@ -129,7 +123,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
         writeFile(xpackConf, "users", configUsers());
         writeFile(xpackConf, "users_roles", configUsersRoles());

-        Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal))
+        Settings.Builder builder = Settings.builder()
                 .put(XPackSettings.SECURITY_ENABLED.getKey(), true)
                 .put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
                 .put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
@@ -156,10 +150,9 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas

     @Override
     public Settings transportClientSettings() {
-        Settings superSettings = super.transportClientSettings();
-        Settings.Builder builder = Settings.builder().put(superSettings);
+        Settings.Builder builder = Settings.builder();
         addClientSSLSettings(builder, "");
-        addDefaultSecurityTransportType(builder, superSettings);
+        addDefaultSecurityTransportType(builder, Settings.EMPTY);

         if (randomBoolean()) {
             builder.put(SecurityField.USER_SETTING.getKey(),
@@ -253,16 +246,6 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
         }
     }

-    /**
-     * Returns the configuration settings given the location of a certificate and its password
-     *
-     * @param resourcePathToStore the location of the keystore or truststore
-     * @param password the password
-     */
-    public static void addSSLSettingsForStore(Settings.Builder builder, String resourcePathToStore, String password) {
-        addSSLSettingsForStore(builder, "", resourcePathToStore, password, true, true, true);
-    }
-
     private static void addSSLSettingsForStore(Settings.Builder builder, String prefix, String resourcePathToStore, String password,
                                                boolean sslEnabled, boolean hostnameVerificationEnabled,
                                                boolean transportClient) {
@@ -65,7 +65,7 @@ public abstract class SecuritySingleNodeTestCase extends ESSingleNodeTestCase {
     public static void initDefaultSettings() {
         if (SECURITY_DEFAULT_SETTINGS == null) {
             SECURITY_DEFAULT_SETTINGS =
-                    new SecuritySettingsSource(1, randomBoolean(), createTempDir(), ESIntegTestCase.Scope.SUITE);
+                    new SecuritySettingsSource(randomBoolean(), createTempDir(), ESIntegTestCase.Scope.SUITE);
         }
     }
@@ -235,7 +235,7 @@ public abstract class SecuritySingleNodeTestCase extends ESSingleNodeTestCase {
     private class CustomSecuritySettingsSource extends SecuritySettingsSource {

         private CustomSecuritySettingsSource(boolean sslEnabled, Path configDir, ESIntegTestCase.Scope scope) {
-            super(1, sslEnabled, configDir, scope);
+            super(sslEnabled, configDir, scope);
         }

         @Override
@@ -163,6 +163,11 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase {
             ((Runnable) inv.getArguments()[1]).run();
             return null;
         }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
+        doAnswer(inv -> {
+            ((Runnable) inv.getArguments()[1]).run();
+            return null;
+        }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
+        when(securityIndex.isAvailable()).thenReturn(true);

         final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
         tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService);
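The stubbing pattern above (immediately run the Runnable passed as the second argument, so the asynchronous check-then-execute call completes inline) recurs in every test class touched by this commit. A hypothetical helper the repeated hunks could be collapsed into (not part of this commit):

    private static void stubSecurityIndexCallbacks(SecurityIndexManager securityIndex) {
        // run the andThen callback synchronously for both the write and the read entry points
        doAnswer(inv -> {
            ((Runnable) inv.getArguments()[1]).run();
            return null;
        }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
        doAnswer(inv -> {
            ((Runnable) inv.getArguments()[1]).run();
            return null;
        }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
    }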
@@ -178,6 +178,11 @@ public class TransportSamlLogoutActionTests extends SamlTestCase {
             ((Runnable) inv.getArguments()[1]).run();
             return null;
         }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
+        doAnswer(inv -> {
+            ((Runnable) inv.getArguments()[1]).run();
+            return null;
+        }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
+        when(securityIndex.isAvailable()).thenReturn(true);

         final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
         tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService);
@@ -178,7 +178,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
         logger.info("--> remote indexing enabled. security enabled: [{}], SSL enabled: [{}], nodes: [{}]", useSecurity, useSSL,
                 numNodes);
         SecuritySettingsSource cluster2SettingsSource =
-                new SecuritySettingsSource(numNodes, useSSL, createTempDir(), Scope.SUITE) {
+                new SecuritySettingsSource(useSSL, createTempDir(), Scope.SUITE) {
                     @Override
                     public Settings nodeSettings(int nodeOrdinal) {
                         Settings.Builder builder = Settings.builder()
@@ -95,7 +95,7 @@ public class RemoteIndexAuditTrailStartingTests extends SecurityIntegTestCase {
         // Setup a second test cluster with a single node, security enabled, and SSL
         final int numNodes = 1;
         SecuritySettingsSource cluster2SettingsSource =
-                new SecuritySettingsSource(numNodes, sslEnabled, createTempDir(), Scope.TEST) {
+                new SecuritySettingsSource(sslEnabled, createTempDir(), Scope.TEST) {
                     @Override
                     public Settings nodeSettings(int nodeOrdinal) {
                         Settings.Builder builder = Settings.builder()
@@ -87,6 +87,7 @@ import java.util.function.Consumer;

 import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException;
 import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError;
+import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockCheckTokenInvalidationFromId;
 import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockGetTokenFromId;
 import static org.hamcrest.Matchers.arrayContaining;
 import static org.hamcrest.Matchers.contains;
@@ -187,6 +188,11 @@ public class AuthenticationServiceTests extends ESTestCase {
             runnable.run();
             return null;
         }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
+        doAnswer(invocationOnMock -> {
+            Runnable runnable = (Runnable) invocationOnMock.getArguments()[1];
+            runnable.run();
+            return null;
+        }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
         ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
         tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService);
         service = new AuthenticationService(settings, realms, auditTrail,
@@ -898,7 +904,11 @@ public class AuthenticationServiceTests extends ESTestCase {
             tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap(), true);
         }
         String token = tokenService.getUserTokenString(tokenFuture.get().v1());
+        when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE));
         mockGetTokenFromId(tokenFuture.get().v1(), client);
+        mockCheckTokenInvalidationFromId(tokenFuture.get().v1(), client);
+        when(securityIndex.isAvailable()).thenReturn(true);
+        when(securityIndex.indexExists()).thenReturn(true);
         try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
             threadContext.putHeader("Authorization", "Bearer " + token);
             service.authenticate("_action", message, (User)null, ActionListener.wrap(result -> {
@@ -137,6 +137,13 @@ public class TokenServiceTests extends ESTestCase {
             runnable.run();
             return null;
         }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
+        doAnswer(invocationOnMock -> {
+            Runnable runnable = (Runnable) invocationOnMock.getArguments()[1];
+            runnable.run();
+            return null;
+        }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
+        when(securityIndex.indexExists()).thenReturn(true);
+        when(securityIndex.isAvailable()).thenReturn(true);
         this.clusterService = ClusterServiceUtils.createClusterService(threadPool);
     }
@@ -161,6 +168,7 @@ public class TokenServiceTests extends ESTestCase {
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
         mockGetTokenFromId(token);
+        mockCheckTokenInvalidationFromId(token);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getUserTokenString(token));
@@ -207,6 +215,7 @@ public class TokenServiceTests extends ESTestCase {
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
         mockGetTokenFromId(token);
+        mockCheckTokenInvalidationFromId(token);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -266,6 +275,7 @@ public class TokenServiceTests extends ESTestCase {
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
         mockGetTokenFromId(token);
+        mockCheckTokenInvalidationFromId(token);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -296,6 +306,7 @@ public class TokenServiceTests extends ESTestCase {
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
         mockGetTokenFromId(token);
+        mockCheckTokenInvalidationFromId(token);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -357,6 +368,7 @@ public class TokenServiceTests extends ESTestCase {
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
         mockGetTokenFromId(token);
+        mockCheckTokenInvalidationFromId(token);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -454,6 +466,7 @@ public class TokenServiceTests extends ESTestCase {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         mockGetTokenFromId(token);
+        mockCheckTokenInvalidationFromId(token);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -577,14 +590,25 @@ public class TokenServiceTests extends ESTestCase {
         try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
             PlainActionFuture<UserToken> future = new PlainActionFuture<>();
             tokenService.getAndValidateToken(requestContext, future);
-            UserToken serialized = future.get();
-            assertEquals(authentication, serialized.getAuthentication());
+            assertNull(future.get());

             when(securityIndex.isAvailable()).thenReturn(false);
             when(securityIndex.indexExists()).thenReturn(true);
             future = new PlainActionFuture<>();
             tokenService.getAndValidateToken(requestContext, future);
             assertNull(future.get());
+
+            when(securityIndex.indexExists()).thenReturn(false);
+            future = new PlainActionFuture<>();
+            tokenService.getAndValidateToken(requestContext, future);
+            assertNull(future.get());
+
+            when(securityIndex.isAvailable()).thenReturn(true);
+            when(securityIndex.indexExists()).thenReturn(true);
+            mockCheckTokenInvalidationFromId(token);
+            future = new PlainActionFuture<>();
+            tokenService.getAndValidateToken(requestContext, future);
+            assertEquals(token.getAuthentication(), future.get().getAuthentication());
         }
     }
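Read together, the rewritten assertions pin down how getAndValidateToken is expected to degrade (a reading of the test's expectations, not of the TokenService implementation itself):

    // security index unavailable              -> future resolves to null, no exception
    // security index does not exist           -> future resolves to null, no exception
    // index available and token not invalidated -> future resolves to the stored UserToken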
@@ -625,4 +649,38 @@ public class TokenServiceTests extends ESTestCase {
             return Void.TYPE;
         }).when(client).get(any(GetRequest.class), any(ActionListener.class));
     }
+
+    private void mockCheckTokenInvalidationFromId(UserToken userToken) {
+        mockCheckTokenInvalidationFromId(userToken, client);
+    }
+
+    public static void mockCheckTokenInvalidationFromId(UserToken userToken, Client client) {
+        doAnswer(invocationOnMock -> {
+            MultiGetRequest request = (MultiGetRequest) invocationOnMock.getArguments()[0];
+            ActionListener<MultiGetResponse> listener = (ActionListener<MultiGetResponse>) invocationOnMock.getArguments()[1];
+            MultiGetResponse response = mock(MultiGetResponse.class);
+            MultiGetItemResponse[] responses = new MultiGetItemResponse[2];
+            when(response.getResponses()).thenReturn(responses);
+            GetResponse legacyResponse = mock(GetResponse.class);
+            responses[0] = new MultiGetItemResponse(legacyResponse, null);
+            when(legacyResponse.isExists()).thenReturn(false);
+            GetResponse tokenResponse = mock(GetResponse.class);
+            if (userToken.getId().equals(request.getItems().get(1).id().replace("token_", ""))) {
+                when(tokenResponse.isExists()).thenReturn(true);
+                Map<String, Object> sourceMap = new HashMap<>();
+                try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
+                    userToken.toXContent(builder, ToXContent.EMPTY_PARAMS);
+                    Map<String, Object> accessTokenMap = new HashMap<>();
+                    accessTokenMap.put("user_token",
+                            XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false));
+                    accessTokenMap.put("invalidated", false);
+                    sourceMap.put("access_token", accessTokenMap);
+                }
+                when(tokenResponse.getSource()).thenReturn(sourceMap);
+            }
+            responses[1] = new MultiGetItemResponse(tokenResponse, null);
+            listener.onResponse(response);
+            return Void.TYPE;
+        }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class));
+    }
 }
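The helper above answers the client's multiGet with two items: a legacy invalidation document that is always reported absent, and the token document itself, matched by stripping the token_ prefix from the requested id. A small sketch of the id convention the mock relies on (inferred from the replace("token_", "") call above; the real constant is not shown in this diff):

    // hypothetical restatement of the document-id scheme assumed by the mock
    static String tokenDocumentId(String tokenId) {
        return "token_" + tokenId;
    }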
@@ -242,6 +242,11 @@ public class NativeUsersStoreTests extends ESTestCase {
            action.run();
            return null;
        }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
+       doAnswer((i) -> {
+           Runnable action = (Runnable) i.getArguments()[1];
+           action.run();
+           return null;
+       }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
        return new NativeUsersStore(Settings.EMPTY, client, securityIndex);
    }
@@ -96,6 +96,12 @@ public class NativePrivilegeStoreTests extends ESTestCase {
            ((Runnable) invocationOnMock.getArguments()[1]).run();
            return null;
        }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));
+       Mockito.doAnswer(invocationOnMock -> {
+           assertThat(invocationOnMock.getArguments().length, equalTo(2));
+           assertThat(invocationOnMock.getArguments()[1], instanceOf(Runnable.class));
+           ((Runnable) invocationOnMock.getArguments()[1]).run();
+           return null;
+       }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));
        store = new NativePrivilegeStore(Settings.EMPTY, client, securityIndex);
    }
@@ -57,3 +57,11 @@ artifacts {
     nodeps nodepsJar
     archives shadowJar
 }
+
+publishing {
+    publications {
+        nebula {
+            artifactId = archivesBaseName
+        }
+    }
+}
@@ -17,6 +17,8 @@ import org.elasticsearch.script.SearchScript;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import org.elasticsearch.script.NumberSortScript;
+import org.elasticsearch.script.StringSortScript;

 import static java.util.Collections.singletonList;
@@ -31,7 +33,8 @@ public class SqlPainlessExtension implements PainlessExtension {
         whitelist.put(FilterScript.CONTEXT, list);
         whitelist.put(AggregationScript.CONTEXT, list);
         whitelist.put(SearchScript.CONTEXT, list);
-        whitelist.put(SearchScript.SCRIPT_SORT_CONTEXT, list);
+        whitelist.put(NumberSortScript.CONTEXT, list);
+        whitelist.put(StringSortScript.CONTEXT, list);
         whitelist.put(BucketAggregationSelectorScript.CONTEXT, list);
         return whitelist;
     }
@@ -196,7 +196,7 @@ setup:
 "Validation failures":

   - do:
-      catch: /Could not find a \[numeric\] field with name \[field_doesnt_exist\] in any of the indices matching the index pattern/
+      catch: /Could not find a \[numeric\] or \[date\] field with name \[field_doesnt_exist\] in any of the indices matching the index pattern/
       headers:
         Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
       xpack.rollup.put_job:
@@ -123,12 +123,6 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase {
         }
     }

-    private MockSecureSettings newSecureSettings(String key, String value) {
-        MockSecureSettings secureSettings = new MockSecureSettings();
-        secureSettings.setString(key, value);
-        return secureSettings;
-    }
-
     private LdapSession session(SessionFactory factory, String username, SecureString password) {
         PlainActionFuture<LdapSession> future = new PlainActionFuture<>();
         factory.session(username, password, future);