Merge branch 'master' into feature/rank-eval
commit 4385b29f80
@@ -17,6 +17,8 @@ request block and provide responses for all of the below items.

**Elasticsearch version**:

**Plugins installed**: []

**JVM version**:

**OS version**:
@@ -17,16 +17,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]postingshighlight[/\\]CustomPostingsHighlighter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]vectorhighlight[/\\]CustomFieldQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]Action.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ActionModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ActionRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]ClusterHealthRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]TransportClusterHealthAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]hotthreads[/\\]NodesHotThreadsRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]hotthreads[/\\]TransportNodesHotThreadsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]info[/\\]NodeInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]info[/\\]TransportNodesInfoAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]stats[/\\]NodesStatsRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]stats[/\\]TransportNodesStatsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]DeleteRepositoryRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]TransportDeleteRepositoryAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]get[/\\]GetRepositoriesRequestBuilder.java" checks="LineLength" />

@@ -49,11 +45,9 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]delete[/\\]DeleteSnapshotRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]delete[/\\]TransportDeleteSnapshotAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]get[/\\]GetSnapshotsRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]get[/\\]TransportGetSnapshotsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]restore[/\\]RestoreSnapshotRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]restore[/\\]TransportRestoreSnapshotAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]status[/\\]SnapshotsStatusRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]status[/\\]TransportNodesSnapshotsStatus.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]status[/\\]TransportSnapshotsStatusAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]state[/\\]ClusterStateRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]state[/\\]TransportClusterStateAction.java" checks="LineLength" />

@@ -185,7 +179,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ActionFilter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]AutoCreateIndex.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]HandledTransportAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptions.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ToXContentToBytes.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]BroadcastOperationRequestBuilder.java" checks="LineLength" />

@@ -202,7 +195,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]ClusterInfoRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]TransportClusterInfoAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]nodes[/\\]NodesOperationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]nodes[/\\]TransportNodesAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]ReplicationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]TransportBroadcastReplicationAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]InstanceShardOperationRequestBuilder.java" checks="LineLength" />

@@ -244,7 +236,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]IncompatibleClusterStateVersionException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]InternalClusterInfoService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]LocalNodeMasterListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]SnapshotsInProgress.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]index[/\\]NodeIndexDeletedAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]index[/\\]NodeMappingRefreshAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]shard[/\\]ShardStateAction.java" checks="LineLength" />

@@ -273,18 +264,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingTable.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]ShardRouting.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]UnassignedInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]AllocationService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedRerouteAllocation.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]RoutingAllocation.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]StartedRerouteAllocation.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]allocator[/\\]BalancedShardsAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AbstractAllocateAllocationCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocateEmptyPrimaryAllocationCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocateReplicaAllocationCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocateStalePrimaryAllocationCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocationCommands.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]CancelAllocationCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]MoveAllocationCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]decider[/\\]AllocationDeciders.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]service[/\\]InternalClusterService.java" checks="LineLength" />
@@ -300,7 +285,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]collect[/\\]ImmutableOpenIntMap.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]geo[/\\]GeoDistance.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]DefaultConstructionProxyFactory.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]InjectorImpl.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]internal[/\\]ConstructionContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]multibindings[/\\]MapBinder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]spi[/\\]InjectionPoint.java" checks="LineLength" />

@@ -319,7 +303,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]NetworkModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]NetworkService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]recycler[/\\]Recyclers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]rounding[/\\]Rounding.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]ByteSizeValue.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]BigArrays.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]CancellableThreads.java" checks="LineLength" />

@@ -354,7 +337,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]LocalAllocateDangledIndices.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]TransportNodesListGatewayMetaState.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />

@@ -376,7 +358,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngine.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]LiveVersionMap.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]ShadowEngine.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]VersionValue.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]IndexFieldDataCache.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]IndexFieldDataService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]fieldcomparator[/\\]DoubleValuesComparatorSource.java" checks="LineLength" />

@@ -407,7 +388,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MapperService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]Mapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MetadataFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ParseContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ParsedDocument.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]CompletionFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyDateFieldMapper.java" checks="LineLength" />

@@ -494,7 +474,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]DummyPluginInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]RemovePluginCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoriesModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoriesService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]Repository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoryModule.java" checks="LineLength" />
@@ -520,13 +499,10 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]search[/\\]RestClearScrollAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]search[/\\]RestSearchScrollAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]suggest[/\\]RestSuggestAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]support[/\\]RestActions.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]termvectors[/\\]RestTermVectorsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]AbstractScriptParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptEngineRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModes.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptParameterParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptSettings.java" checks="LineLength" />

@@ -551,7 +527,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]InternalMissing.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]MissingAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]InternalReverseNested.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]NestedAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]ReverseNestedAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]range[/\\]RangeAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]DiversifiedBytesHashSamplerAggregator.java" checks="LineLength" />

@@ -569,7 +544,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]ScriptHeuristic.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]SignificanceHeuristic.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]AbstractTermsParametersParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]DoubleTermsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]GlobalOrdinalsStringTermsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]InternalOrder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]LongTermsAggregator.java" checks="LineLength" />

@@ -600,7 +574,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSearchResult.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhaseContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhaseParseElement.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]explain[/\\]ExplainFetchSubPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]fielddata[/\\]FieldDataFieldsParseElement.java" checks="LineLength" />

@@ -614,7 +587,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]PostingsHighlighter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]vectorhighlight[/\\]SimpleFragmentsBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]vectorhighlight[/\\]SourceScoreOrderFragmentsBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]vectorhighlight[/\\]SourceSimpleFragmentsBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]DefaultSearchContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]FilteredSearchContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]InternalSearchHit.java" checks="LineLength" />

@@ -676,7 +648,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineRequestParsingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineResponseTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]WriteableIngestDocumentTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]MultiSearchRequestTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchRequestBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]AutoCreateIndexTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptionsTests.java" checks="LineLength" />
@@ -696,8 +667,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]BasicAnalysisBackwardCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]BasicBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]GetIndexBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]OldIndexBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RecoveryWithUnsupportedIndicesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RestoreBackwardsCompatIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />

@@ -731,9 +700,7 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]AllocationIdTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]DelayedAllocationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]PrimaryAllocationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingTableTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]ShardRoutingHelper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]ShardRoutingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]UnassignedInfoTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]AddIncrementallyTests.java" checks="LineLength" />

@@ -785,9 +752,7 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]hash[/\\]MessageDigestsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]ModuleTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FreqTermsEnumTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]uid[/\\]VersionsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]CidrsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]rounding[/\\]TimeZoneRoundingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]DistanceUnitTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]FuzzinessTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]BigArraysTests.java" checks="LineLength" />

@@ -795,10 +760,7 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]LongObjectHashMapTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]EsExecutorsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]PrioritizedExecutorsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]XContentFactoryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]builder[/\\]XContentBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]cbor[/\\]JsonVsCborTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]smile[/\\]JsonVsSmileTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]support[/\\]filtering[/\\]FilterPathGeneratorFilteringTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]deps[/\\]joda[/\\]SimpleJodaTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]BlockingClusterStatePublishResponseHandlerTests.java" checks="LineLength" />

@@ -912,7 +874,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]RangeQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanMultiTermQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanNotQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]plugin[/\\]CustomQueryParserIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]QueryInnerHitsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQueryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]geo[/\\]GeoUtilsTests.java" checks="LineLength" />

@@ -951,7 +912,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]SimpleGetFieldMappingsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]SimpleGetMappingsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]UpdateMappingIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]memory[/\\]breaker[/\\]CircuitBreakerServiceIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]memory[/\\]breaker[/\\]CircuitBreakerUnitTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]memory[/\\]breaker[/\\]RandomExceptionCircuitBreakerIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]IndexPrimaryRelocationIT.java" checks="LineLength" />

@@ -982,18 +942,15 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponseTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]CorsRegexDefaultIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]CorsRegexIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]RestControllerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]AliasResolveRoutingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]AliasRoutingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]SimpleRoutingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]FileScriptTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]NativeScriptTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModesTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptParameterParserTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]MultiValueModeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]SearchWithRejectionsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]MissingValueIT.java" checks="LineLength" />
@@ -1009,13 +966,10 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ShardReduceIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]ShardSizeTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]SignificantTermsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]SignificantTermsSignificanceScoreIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsDocCountErrorIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsShardMinDocCountIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]NestedAggregatorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]AbstractGeoTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]AvgIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]SumIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]TopHitsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]ValueCountIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]ExtendedStatsBucketIT.java" checks="LineLength" />

@@ -1062,26 +1016,21 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SharedClusterSnapshotRestoreIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotUtilsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]mockstore[/\\]MockRepository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ESBlobStoreRepositoryIntegTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]geo[/\\]RandomShapeGenerator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]hamcrest[/\\]ElasticsearchGeoAssertions.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]SimpleThreadPoolIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPoolSerializationTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]UpdateThreadPoolSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]timestamp[/\\]SimpleTimestampIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]tribe[/\\]TribeIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]ttl[/\\]SimpleTTLIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]update[/\\]UpdateIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]validate[/\\]SimpleValidateQueryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]versioning[/\\]SimpleVersioningIT.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionPlugin.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionScriptEngineService.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionSearchScript.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]IndexedExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyPlugin.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptEngineService.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovySecurityTests.java" checks="LineLength" />

@@ -1109,13 +1058,11 @@
<suppress files="plugins[/\\]discovery-ec2[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]aws[/\\]AbstractAwsTestCase.java" checks="LineLength" />
<suppress files="plugins[/\\]discovery-ec2[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ec2[/\\]AmazonEC2Mock.java" checks="LineLength" />
<suppress files="plugins[/\\]discovery-gce[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]gce[/\\]GceNetworkTests.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugin[/\\]javascript[/\\]JavaScriptPlugin.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]JavaScriptScriptEngineService.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]JavaScriptScriptEngineTests.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]JavaScriptScriptMultiThreadedTests.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]JavaScriptSecurityTests.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]SimpleBench.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-python[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugin[/\\]python[/\\]PythonPlugin.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-python[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]python[/\\]PythonScriptEngineTests.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-python[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]python[/\\]PythonScriptMultiThreadedTests.java" checks="LineLength" />
<suppress files="plugins[/\\]lang-python[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]python[/\\]PythonSecurityTests.java" checks="LineLength" />

@@ -1138,7 +1085,6 @@
<suppress files="plugins[/\\]repository-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]azure[/\\]AbstractAzureWithThirdPartyTestCase.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]azure[/\\]storage[/\\]AzureStorageServiceMock.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]azure[/\\]AzureSnapshotRestoreServiceTests.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]azure[/\\]AzureSnapshotRestoreTests.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsRepository.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsTests.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]aws[/\\]blobstore[/\\]DefaultS3OutputStream.java" checks="LineLength" />

@@ -1168,7 +1114,6 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ExternalNode.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ExternalTestCluster.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]IndexSettingsModule.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]InternalSettingsPlugin.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]InternalTestCluster.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]MockIndexEventListener.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]TestSearchContext.java" checks="LineLength" />

@@ -1183,7 +1128,6 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]junit[/\\]listeners[/\\]LoggingListener.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSDirectoryService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSIndexStore.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]test[/\\]InternalTestClusterTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cli[/\\]CliTool.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]admin[/\\]indices[/\\]settings[/\\]RestGetSettingsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]tribe[/\\]TribeService.java" checks="LineLength" />
@@ -4,7 +4,8 @@ lucene = 6.1.0
# optional dependencies
spatial4j = 0.6
jts = 1.13
-jackson = 2.7.1
+jackson = 2.8.1
snakeyaml = 1.15
log4j = 1.2.17
slf4j = 1.6.2
jna = 4.2.2
@@ -1 +0,0 @@
-4127b62db028f981e81caa248953c0899d720f98
@@ -0,0 +1 @@
+fd13b1c033741d48291315c6370f7d475a42dccf
@@ -21,7 +21,6 @@ package org.elasticsearch.transport.client;

import io.netty.util.ThreadDeathWatcher;
import io.netty.util.concurrent.GlobalEventExecutor;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Setting;
@@ -57,7 +56,6 @@ public class PreBuiltTransportClient extends TransportClient {
Arrays.asList(
Netty3Plugin.class,
Netty4Plugin.class,
-TransportPlugin.class,
ReindexPlugin.class,
PercolatorPlugin.class,
MustachePlugin.class));
@@ -71,24 +69,6 @@ public class PreBuiltTransportClient extends TransportClient {
super(settings, Settings.EMPTY, addPlugins(plugins, PRE_INSTALLED_PLUGINS));
}

-public static final class TransportPlugin extends Plugin {
-
-private static final Setting<Boolean> ASSERT_NETTY_BUGLEVEL =
-Setting.boolSetting("netty.assert.buglevel", true, Setting.Property.NodeScope);
-
-@Override
-public List<Setting<?>> getSettings() {
-return Collections.singletonList(ASSERT_NETTY_BUGLEVEL);
-}
-
-@Override
-public Settings additionalSettings() {
-return Settings.builder().put("netty.assert.buglevel", true)
-.build();
-}
-
-}
-
@Override
public void close() {
super.close();
@@ -69,6 +69,7 @@ dependencies {
compile 'org.joda:joda-convert:1.2'

// json and yaml
compile "org.yaml:snakeyaml:${versions.snakeyaml}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}"
@@ -177,7 +177,17 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
return source(buildFromSimplifiedDef(type, source));
}

+/**
+* @param type the mapping type
+* @param source consisting of field/properties pairs (e.g. "field1",
+* "type=string,store=true"). If the number of arguments is not
+* divisible by two an {@link IllegalArgumentException} is thrown
+* @return the mappings definition
+*/
public static XContentBuilder buildFromSimplifiedDef(String type, Object... source) {
+if (source.length % 2 != 0) {
+throw new IllegalArgumentException("mapping source must be pairs of fieldnames and properties definition.");
+}
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
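The new javadoc and argument check above document the pairs convention for buildFromSimplifiedDef. A minimal usage sketch (hypothetical type and field names, assuming only the API shown above):

    // Arguments after the type alternate: field name, then properties definition.
    XContentBuilder mapping = PutMappingRequest.buildFromSimplifiedDef("tweet",
        "message", "type=string,store=true",
        "user", "type=keyword");
    // An odd number of arguments now fails fast:
    // buildFromSimplifiedDef("tweet", "message") throws IllegalArgumentException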
@@ -30,11 +30,11 @@ import java.io.IOException;
*/
public class PutMappingResponse extends AcknowledgedResponse {

-PutMappingResponse() {
+protected PutMappingResponse() {

}

-PutMappingResponse(boolean acknowledged) {
+protected PutMappingResponse(boolean acknowledged) {
super(acknowledged);
}
@@ -29,10 +29,10 @@ import java.io.IOException;
*/
public class PutIndexTemplateResponse extends AcknowledgedResponse {

-PutIndexTemplateResponse() {
+protected PutIndexTemplateResponse() {
}

-PutIndexTemplateResponse(boolean acknowledged) {
+protected PutIndexTemplateResponse(boolean acknowledged) {
super(acknowledged);
}
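Both response classes widen their constructors from package-private to protected, permitting subclasses outside the original package. A sketch of what this enables (hypothetical subclass, for illustration only):

    // Compiles only now that the constructor is protected, not package-private.
    public class CustomPutMappingResponse extends PutMappingResponse {
        public CustomPutMappingResponse(boolean acknowledged) {
            super(acknowledged);
        }
    }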
@@ -46,6 +46,7 @@ import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.ShardSearchTransportRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
+import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.List;
@@ -74,7 +75,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
protected final AtomicArray<FirstResult> firstResults;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final Object shardFailuresMutex = new Object();
-protected volatile ScoreDoc[] sortedShardList;
+protected volatile ScoreDoc[] sortedShardDocs;

protected AbstractSearchAsyncAction(ESLogger logger, SearchTransportService searchTransportService, ClusterService clusterService,
IndexNameExpressionResolver indexNameExpressionResolver,
@@ -321,8 +322,11 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
// we only release search context that we did not fetch from if we are not scrolling
if (request.scroll() == null) {
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults.asList()) {
-final TopDocs topDocs = entry.value.queryResult().queryResult().topDocs();
-if (topDocs != null && topDocs.scoreDocs.length > 0 // the shard had matches
+QuerySearchResult queryResult = entry.value.queryResult().queryResult();
+final TopDocs topDocs = queryResult.topDocs();
+final Suggest suggest = queryResult.suggest();
+if (((topDocs != null && topDocs.scoreDocs.length > 0) // the shard had matches
+||suggest != null && suggest.hasScoreDocs()) // or had suggest docs
&& docIdsToLoad.get(entry.index) == null) { // but none of them made it to the global top docs
try {
DiscoveryNode node = nodes.get(entry.value.queryResult().shardTarget().nodeId());
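The widened predicate frees a shard's search context when the shard produced candidates, via top docs or via suggestions carrying score docs, but none survived the global reduce. A sketch of the condition in isolation (not the exact source):

    // Contexts are released only for non-scroll searches, when a shard had
    // candidates (top docs or suggest score docs) yet contributed nothing
    // to the global top docs that the fetch phase will load.
    boolean hadCandidates = (topDocs != null && topDocs.scoreDocs.length > 0)
            || (suggest != null && suggest.hasScoreDocs());
    boolean nothingToFetch = docIdsToLoad.get(entry.index) == null;
    if (hadCandidates && nothingToFetch) {
        // send a free-context request for this shard
    }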
@ -343,12 +347,8 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
|
|||
|
||||
protected ShardFetchSearchRequest createFetchRequest(QuerySearchResult queryResult, AtomicArray.Entry<IntArrayList> entry,
|
||||
ScoreDoc[] lastEmittedDocPerShard) {
|
||||
if (lastEmittedDocPerShard != null) {
|
||||
ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[entry.index];
|
||||
return new ShardFetchSearchRequest(request, queryResult.id(), entry.value, lastEmittedDoc);
|
||||
} else {
|
||||
return new ShardFetchSearchRequest(request, queryResult.id(), entry.value);
|
||||
}
|
||||
final ScoreDoc lastEmittedDoc = (lastEmittedDocPerShard != null) ? lastEmittedDocPerShard[entry.index] : null;
|
||||
return new ShardFetchSearchRequest(request, queryResult.id(), entry.value, lastEmittedDoc);
|
||||
}
|
||||
|
||||
protected abstract void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request,
|
||||
|
|
|
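
The reworked release check above now treats suggest hits like query hits when deciding whether a shard's search context may still be needed. A minimal standalone sketch of that predicate, using the types from the hunk:

    // A shard's context stays relevant if either its top docs or its
    // suggestions produced score docs; only then can the fetch phase need it.
    static boolean shardHadScoreDocs(TopDocs topDocs, Suggest suggest) {
        return (topDocs != null && topDocs.scoreDocs.length > 0)
                || (suggest != null && suggest.hasScoreDocs());
    }
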
@@ -118,8 +118,8 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
         threadPool.executor(ThreadPool.Names.SEARCH).execute(new ActionRunnable<SearchResponse>(listener) {
             @Override
             public void doRun() throws IOException {
-                sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults);
-                final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults,
+                sortedShardDocs = searchPhaseController.sortDocs(true, queryFetchResults);
+                final InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, queryFetchResults,
                     queryFetchResults);
                 String scrollId = null;
                 if (request.scroll() != null) {
@@ -135,18 +135,17 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
     }
 
     void innerExecuteFetchPhase() throws Exception {
-        boolean useScroll = request.scroll() != null;
-        sortedShardList = searchPhaseController.sortDocs(useScroll, queryResults);
-        searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
+        final boolean isScrollRequest = request.scroll() != null;
+        sortedShardDocs = searchPhaseController.sortDocs(isScrollRequest, queryResults);
+        searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardDocs);
 
         if (docIdsToLoad.asList().isEmpty()) {
             finishHim();
             return;
         }
 
-        final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(
-            request, sortedShardList, firstResults.length()
-        );
+        final ScoreDoc[] lastEmittedDocPerShard = (request.scroll() != null) ?
+            searchPhaseController.getLastEmittedDocPerShard(queryResults.asList(), sortedShardDocs, firstResults.length()) : null;
         final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
         for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
             QuerySearchResult queryResult = queryResults.get(entry.index);
@@ -196,12 +195,10 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
         threadPool.executor(ThreadPool.Names.SEARCH).execute(new ActionRunnable<SearchResponse>(listener) {
             @Override
             public void doRun() throws IOException {
-                final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults,
+                final boolean isScrollRequest = request.scroll() != null;
+                final InternalSearchResponse internalResponse = searchPhaseController.merge(isScrollRequest, sortedShardDocs, queryResults,
                     fetchResults);
-                String scrollId = null;
-                if (request.scroll() != null) {
-                    scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults);
-                }
+                String scrollId = isScrollRequest ? TransportSearchHelper.buildScrollId(request.searchType(), firstResults) : null;
                 listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successfulOps.get(),
                     buildTookInMillis(), buildShardFailures()));
                 releaseIrrelevantSearchContexts(queryResults, docIdsToLoad);
@@ -60,14 +60,11 @@ class SearchQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<QueryFetc
         threadPool.executor(ThreadPool.Names.SEARCH).execute(new ActionRunnable<SearchResponse>(listener) {
             @Override
             public void doRun() throws IOException {
-                boolean useScroll = request.scroll() != null;
-                sortedShardList = searchPhaseController.sortDocs(useScroll, firstResults);
-                final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults,
+                final boolean isScrollRequest = request.scroll() != null;
+                sortedShardDocs = searchPhaseController.sortDocs(isScrollRequest, firstResults);
+                final InternalSearchResponse internalResponse = searchPhaseController.merge(isScrollRequest, sortedShardDocs, firstResults,
                     firstResults);
-                String scrollId = null;
-                if (request.scroll() != null) {
-                    scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults);
-                }
+                String scrollId = isScrollRequest ? TransportSearchHelper.buildScrollId(request.searchType(), firstResults) : null;
                 listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successfulOps.get(),
                     buildTookInMillis(), buildShardFailures()));
             }
@@ -68,18 +68,17 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
 
     @Override
     protected void moveToSecondPhase() throws Exception {
-        boolean useScroll = request.scroll() != null;
-        sortedShardList = searchPhaseController.sortDocs(useScroll, firstResults);
-        searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
+        final boolean isScrollRequest = request.scroll() != null;
+        sortedShardDocs = searchPhaseController.sortDocs(isScrollRequest, firstResults);
+        searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardDocs);
 
         if (docIdsToLoad.asList().isEmpty()) {
             finishHim();
             return;
         }
 
-        final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(
-            request, sortedShardList, firstResults.length()
-        );
+        final ScoreDoc[] lastEmittedDocPerShard = isScrollRequest ?
+            searchPhaseController.getLastEmittedDocPerShard(firstResults.asList(), sortedShardDocs, firstResults.length()) : null;
         final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
         for (AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
             QuerySearchResultProvider queryResult = firstResults.get(entry.index);
@@ -129,12 +128,10 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
         threadPool.executor(ThreadPool.Names.SEARCH).execute(new ActionRunnable<SearchResponse>(listener) {
             @Override
             public void doRun() throws IOException {
-                final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults,
+                final boolean isScrollRequest = request.scroll() != null;
+                final InternalSearchResponse internalResponse = searchPhaseController.merge(isScrollRequest, sortedShardDocs, firstResults,
                     fetchResults);
-                String scrollId = null;
-                if (request.scroll() != null) {
-                    scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults);
-                }
+                String scrollId = isScrollRequest ? TransportSearchHelper.buildScrollId(request.searchType(), firstResults) : null;
                 listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps,
                     successfulOps.get(), buildTookInMillis(), buildShardFailures()));
                 releaseIrrelevantSearchContexts(firstResults, docIdsToLoad);
@@ -168,8 +168,8 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction {
     }
 
     private void innerFinishHim() throws Exception {
-        ScoreDoc[] sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults);
-        final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults,
+        ScoreDoc[] sortedShardDocs = searchPhaseController.sortDocs(true, queryFetchResults);
+        final InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, queryFetchResults,
             queryFetchResults);
         String scrollId = null;
         if (request.scroll() != null) {
@@ -53,7 +53,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
     private volatile AtomicArray<ShardSearchFailure> shardFailures;
     final AtomicArray<QuerySearchResult> queryResults;
    final AtomicArray<FetchSearchResult> fetchResults;
-    private volatile ScoreDoc[] sortedShardList;
+    private volatile ScoreDoc[] sortedShardDocs;
     private final AtomicInteger successfulOps;
 
     SearchScrollQueryThenFetchAsyncAction(ESLogger logger, ClusterService clusterService,
@@ -165,9 +165,9 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
     }
 
     private void executeFetchPhase() throws Exception {
-        sortedShardList = searchPhaseController.sortDocs(true, queryResults);
+        sortedShardDocs = searchPhaseController.sortDocs(true, queryResults);
         AtomicArray<IntArrayList> docIdsToLoad = new AtomicArray<>(queryResults.length());
-        searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
+        searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardDocs);
 
         if (docIdsToLoad.asList().isEmpty()) {
             finishHim();
@@ -175,7 +175,8 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
         }
 
-        final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(sortedShardList, queryResults.length());
+        final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(queryResults.asList(),
+            sortedShardDocs, queryResults.length());
         final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
         for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
             IntArrayList docIds = entry.value;
@@ -216,7 +217,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
     }
 
     private void innerFinishHim() {
-        InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
+        InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, queryResults, fetchResults);
         String scrollId = null;
         if (request.scroll() != null) {
             scrollId = request.scrollId();
@@ -124,6 +124,9 @@ public abstract class TransportClient extends AbstractClient {
             List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
             entries.addAll(networkModule.getNamedWriteables());
             entries.addAll(searchModule.getNamedWriteables());
+            entries.addAll(pluginsService.filterPlugins(Plugin.class).stream()
+                .flatMap(p -> p.getNamedWriteables().stream())
+                .collect(Collectors.toList()));
             NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries);
 
             ModulesBuilder modules = new ModulesBuilder();
@@ -167,7 +170,7 @@ public abstract class TransportClient extends AbstractClient {
             transportService.start();
             transportService.acceptIncomingRequests();
 
-            ClientTemplate transportClient = new ClientTemplate(injector, pluginLifecycleComponents, nodesService, proxy);
+            ClientTemplate transportClient = new ClientTemplate(injector, pluginLifecycleComponents, nodesService, proxy, namedWriteableRegistry);
             resourcesToClose.clear();
             return transportClient;
         } finally {
@@ -180,12 +183,15 @@ public abstract class TransportClient extends AbstractClient {
         private final List<LifecycleComponent> pluginLifecycleComponents;
         private final TransportClientNodesService nodesService;
         private final TransportProxyClient proxy;
+        private final NamedWriteableRegistry namedWriteableRegistry;
 
-        private ClientTemplate(Injector injector, List<LifecycleComponent> pluginLifecycleComponents, TransportClientNodesService nodesService, TransportProxyClient proxy) {
+        private ClientTemplate(Injector injector, List<LifecycleComponent> pluginLifecycleComponents,
+                TransportClientNodesService nodesService, TransportProxyClient proxy, NamedWriteableRegistry namedWriteableRegistry) {
             this.injector = injector;
             this.pluginLifecycleComponents = pluginLifecycleComponents;
             this.nodesService = nodesService;
             this.proxy = proxy;
+            this.namedWriteableRegistry = namedWriteableRegistry;
         }
 
         Settings getSettings() {
@@ -200,6 +206,7 @@ public abstract class TransportClient extends AbstractClient {
     public static final String CLIENT_TYPE = "transport";
 
     final Injector injector;
+    final NamedWriteableRegistry namedWriteableRegistry;
 
     private final List<LifecycleComponent> pluginLifecycleComponents;
     private final TransportClientNodesService nodesService;
@@ -228,6 +235,7 @@ public abstract class TransportClient extends AbstractClient {
         this.pluginLifecycleComponents = Collections.unmodifiableList(template.pluginLifecycleComponents);
         this.nodesService = template.nodesService;
         this.proxy = template.proxy;
        this.namedWriteableRegistry = template.namedWriteableRegistry;
     }
 
     /**
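
For context, a hedged sketch of the plugin side of this change; `MyPlugin` and `MyQueryBuilder` are hypothetical, but an override of this shape is what the stream above collects:

    public class MyPlugin extends Plugin {
        @Override
        public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
            // Each entry binds a category class and a name to a stream reader,
            // so the transport client can deserialize plugin-defined objects.
            return Collections.singletonList(
                    new NamedWriteableRegistry.Entry(QueryBuilder.class, "my_query", MyQueryBuilder::new));
        }
    }
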
@@ -33,9 +33,11 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.file.DirectoryStream;
 import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.FileVisitResult;
 import java.nio.file.Files;
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
 import java.nio.file.StandardCopyOption;
 import java.nio.file.StandardOpenOption;
 import java.nio.file.attribute.BasicFileAttributes;
@@ -89,7 +91,19 @@ public class FsBlobContainer extends AbstractBlobContainer {
     @Override
     public void deleteBlob(String blobName) throws IOException {
         Path blobPath = path.resolve(blobName);
-        Files.delete(blobPath);
+        if (Files.isDirectory(blobPath)) {
+            // delete directory recursively as long as it is empty (only contains empty directories),
+            // which is the reason we aren't deleting any files, only the directories on the post-visit
+            Files.walkFileTree(blobPath, new SimpleFileVisitor<Path>() {
+                @Override
+                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
+                    Files.delete(dir);
+                    return FileVisitResult.CONTINUE;
+                }
+            });
+        } else {
+            Files.delete(blobPath);
+        }
     }
 
     @Override
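
A self-contained sketch of the deletion technique used above: `postVisitDirectory` runs after a directory's children have been visited, so the tree is removed bottom-up, and `Files.delete` still fails fast (with DirectoryNotEmptyException) if any file is encountered, which is the intended safety property.

    import java.io.IOException;
    import java.nio.file.*;

    public class DeleteEmptyTree {
        public static void main(String[] args) throws IOException {
            Path root = Files.createTempDirectory("blobs");
            Files.createDirectories(root.resolve("a/b/c")); // empty nested dirs
            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                    Files.delete(dir); // children already visited, so dir must be empty
                    return FileVisitResult.CONTINUE;
                }
            });
        }
    }
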
@@ -41,7 +41,9 @@ import java.util.Locale;
 public enum GeoDistance implements Writeable {
     /**
      * Calculates distance as points on a plane. Faster, but less accurate than {@link #ARC}.
+     * @deprecated use {@link GeoUtils#planeDistance}
      */
+    @Deprecated
     PLANE {
         @Override
         public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
@@ -63,7 +65,11 @@ public enum GeoDistance implements Writeable {
 
     /**
      * Calculates distance factor.
+     * Note: {@code calculate} is simply returning the RHS of the spherical law of cosines from 2 lat,lon points.
+     * {@code normalize} also returns the RHS of the spherical law of cosines for a given distance
+     * @deprecated use {@link SloppyMath#haversinMeters} to get distance in meters, law of cosines is being removed
      */
+    @Deprecated
     FACTOR {
         @Override
         public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
@@ -85,7 +91,9 @@ public enum GeoDistance implements Writeable {
     },
     /**
      * Calculates distance as points on a globe.
+     * @deprecated use {@link GeoUtils#arcDistance}
      */
+    @Deprecated
     ARC {
         @Override
         public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
@@ -143,6 +151,7 @@ public enum GeoDistance implements Writeable {
      * Default {@link GeoDistance} function. This method should be used, If no specific function has been selected.
      * This is an alias for <code>SLOPPY_ARC</code>
      */
+    @Deprecated
     public static final GeoDistance DEFAULT = SLOPPY_ARC;
 
     public abstract double normalize(double distance, DistanceUnit unit);
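
A hedged migration sketch for callers of the now-deprecated constants (the coordinates are placeholders); the replacements are the static helpers added to GeoUtils later in this diff:

    // Before (deprecated): unit-aware enum call.
    double d = GeoDistance.ARC.calculate(52.52, 13.40, 48.86, 2.35, DistanceUnit.DEFAULT);
    // After: meters from the static helper; convert explicitly when needed.
    double meters = GeoUtils.arcDistance(52.52, 13.40, 48.86, 2.35);
    double km = DistanceUnit.KILOMETERS.fromMeters(meters);
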
@@ -1,18 +1,20 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
 *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
 *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
 */
 package org.elasticsearch.common.geo;
 
@@ -300,4 +302,14 @@ public class GeoHashUtils {
 
         return neighbors;
     }
+
+    /** returns the latitude value from the string based geohash */
+    public static final double decodeLatitude(final String geohash) {
+        return GeoPointField.decodeLatitude(mortonEncode(geohash));
+    }
+
+    /** returns the longitude value from the string based geohash */
+    public static final double decodeLongitude(final String geohash) {
+        return GeoPointField.decodeLongitude(mortonEncode(geohash));
+    }
 }
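
A hedged round-trip sketch for the new decode helpers (the coordinates are illustrative, and recovered values are only accurate to the geohash cell center):

    String geohash = GeoHashUtils.stringEncode(13.40, 52.52); // lon, lat
    double lat = GeoHashUtils.decodeLatitude(geohash);        // ~52.52
    double lon = GeoHashUtils.decodeLongitude(geohash);       // ~13.40
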
@@ -478,6 +478,21 @@ public class GeoUtils {
         return SloppyMath.haversinMeters(centerLat, centerLon, centerLat, (MAX_LON + centerLon) % 360);
     }
 
+    /** Return the distance (in meters) between 2 lat,lon geo points using the haversine method implemented by lucene */
+    public static double arcDistance(double lat1, double lon1, double lat2, double lon2) {
+        return SloppyMath.haversinMeters(lat1, lon1, lat2, lon2);
+    }
+
+    /**
+     * Return the distance (in meters) between 2 lat,lon geo points using a simple tangential plane
+     * this provides a faster alternative to {@link GeoUtils#arcDistance} when points are within 5 km
+     */
+    public static double planeDistance(double lat1, double lon1, double lat2, double lon2) {
+        double x = (lon2 - lon1) * SloppyMath.TO_RADIANS * Math.cos((lat2 + lat1) / 2.0 * SloppyMath.TO_RADIANS);
+        double y = (lat2 - lat1) * SloppyMath.TO_RADIANS;
+        return Math.sqrt(x * x + y * y) * EARTH_MEAN_RADIUS;
+    }
+
     private GeoUtils() {
     }
 }
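
A quick numeric sanity check of the two helpers (illustrative coordinates roughly 400 m apart): at such separations the tangential-plane result should agree with haversine to well under a meter, and the gap grows with distance, which is why the javadoc suggests a ~5 km limit.

    double arc = GeoUtils.arcDistance(52.5200, 13.4050, 52.5236, 13.4050);
    double plane = GeoUtils.planeDistance(52.5200, 13.4050, 52.5236, 13.4050);
    // 0.0036 deg of latitude * (pi/180) * EARTH_MEAN_RADIUS (~6371008.8 m) ~= 400 m
    System.out.printf("arc=%.2f m, plane=%.2f m%n", arc, plane);
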
@@ -24,6 +24,7 @@ import com.fasterxml.jackson.core.JsonStreamContext;
 import com.fasterxml.jackson.core.base.GeneratorBase;
 import com.fasterxml.jackson.core.filter.FilteringGeneratorDelegate;
 import com.fasterxml.jackson.core.io.SerializedString;
+import com.fasterxml.jackson.core.json.JsonWriteContext;
 import com.fasterxml.jackson.core.util.DefaultIndenter;
 import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -271,7 +272,9 @@ public class JsonXContentGenerator implements XContentGenerator {
     public void writeEndRaw() {
         assert base != null : "JsonGenerator should be of instance GeneratorBase but was: " + generator.getClass();
         if (base != null) {
-            base.getOutputContext().writeValue();
+            JsonStreamContext context = base.getOutputContext();
+            assert (context instanceof JsonWriteContext) : "Expected an instance of JsonWriteContext but was: " + context.getClass();
+            ((JsonWriteContext) context).writeValue();
         }
     }
 
@@ -87,11 +87,6 @@ public class JsonXContentParser extends AbstractXContentParser {
 
     @Override
     public BytesRef utf8Bytes() throws IOException {
-        // Tentative workaround for https://github.com/elastic/elasticsearch/issues/8629
-        // TODO: Remove this when we upgrade jackson to 2.6.x.
-        if (parser.getTextLength() == 0) {
-            return new BytesRef();
-        }
         return new BytesRef(CharBuffer.wrap(parser.getTextCharacters(), parser.getTextOffset(), parser.getTextLength()));
     }
 
@@ -22,8 +22,9 @@ package org.elasticsearch.index.fielddata;
 
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.geo.GeoDistance;
+import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.joda.time.DateTimeZone;
 import org.joda.time.MutableDateTime;
@@ -190,7 +191,7 @@ public interface ScriptDocValues<T> extends List<T> {
         }
     }
 
-    public static class GeoPoints extends AbstractList<GeoPoint> implements ScriptDocValues<GeoPoint> {
+    class GeoPoints extends AbstractList<GeoPoint> implements ScriptDocValues<GeoPoint> {
 
         private final MultiGeoPointValues values;
 
@@ -253,124 +254,41 @@ public interface ScriptDocValues<T> extends List<T> {
             return values.count();
         }
 
-        public double factorDistance(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
-        }
-
-        public double factorDistanceWithDefault(double lat, double lon, double defaultValue) {
-            if (isEmpty()) {
-                return defaultValue;
-            }
-            GeoPoint point = getValue();
-            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
-        }
-
-        public double factorDistance02(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT) + 1;
-        }
-
-        public double factorDistance13(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT) + 2;
-        }
-
         public double arcDistance(double lat, double lon) {
             GeoPoint point = getValue();
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
+            return GeoUtils.arcDistance(point.lat(), point.lon(), lat, lon);
         }
 
         public double arcDistanceWithDefault(double lat, double lon, double defaultValue) {
             if (isEmpty()) {
                 return defaultValue;
             }
-            GeoPoint point = getValue();
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
+            return arcDistance(lat, lon);
         }
 
-        public double arcDistanceInKm(double lat, double lon) {
+        public double planeDistance(double lat, double lon) {
             GeoPoint point = getValue();
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
+            return GeoUtils.planeDistance(point.lat(), point.lon(), lat, lon);
        }
 
-        public double arcDistanceInKmWithDefault(double lat, double lon, double defaultValue) {
+        public double planeDistanceWithDefault(double lat, double lon, double defaultValue) {
             if (isEmpty()) {
                 return defaultValue;
             }
-            GeoPoint point = getValue();
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
-        }
-
-        public double arcDistanceInMiles(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
-        }
-
-        public double arcDistanceInMilesWithDefault(double lat, double lon, double defaultValue) {
-            if (isEmpty()) {
-                return defaultValue;
-            }
-            GeoPoint point = getValue();
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
-        }
-
-        public double distance(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
-        }
-
-        public double distanceWithDefault(double lat, double lon, double defaultValue) {
-            if (isEmpty()) {
-                return defaultValue;
-            }
-            GeoPoint point = getValue();
-            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
-        }
-
-        public double distanceInKm(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
-        }
-
-        public double distanceInKmWithDefault(double lat, double lon, double defaultValue) {
-            if (isEmpty()) {
-                return defaultValue;
-            }
-            GeoPoint point = getValue();
-            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
-        }
-
-        public double distanceInMiles(double lat, double lon) {
-            GeoPoint point = getValue();
-            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
-        }
-
-        public double distanceInMilesWithDefault(double lat, double lon, double defaultValue) {
-            if (isEmpty()) {
-                return defaultValue;
-            }
-            GeoPoint point = getValue();
-            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
+            return planeDistance(lat, lon);
         }
 
         public double geohashDistance(String geohash) {
             GeoPoint point = getValue();
-            GeoPoint p = new GeoPoint().resetFromGeoHash(geohash);
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), p.lat(), p.lon(), DistanceUnit.DEFAULT);
+            return GeoUtils.arcDistance(point.lat(), point.lon(), GeoHashUtils.decodeLatitude(geohash),
+                GeoHashUtils.decodeLongitude(geohash));
         }
 
-        public double geohashDistanceInKm(String geohash) {
-            GeoPoint point = getValue();
-            GeoPoint p = new GeoPoint().resetFromGeoHash(geohash);
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), p.lat(), p.lon(), DistanceUnit.KILOMETERS);
+        public double geohashDistanceWithDefault(String geohash, double defaultValue) {
+            if (isEmpty()) {
+                return defaultValue;
+            }
+            return geohashDistance(geohash);
         }
 
-        public double geohashDistanceInMiles(String geohash) {
-            GeoPoint point = getValue();
-            GeoPoint p = new GeoPoint().resetFromGeoHash(geohash);
-            return GeoDistance.ARC.calculate(point.lat(), point.lon(), p.lat(), p.lon(), DistanceUnit.MILES);
-        }
-
     }
 }
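
For orientation, a hedged sketch of how the slimmed-down GeoPoints API surfaces in scripts (the field name is illustrative; the unit-suffixed variants are gone and everything now returns meters):

    // doc['location'].arcDistance(52.52, 13.40)    -> meters, haversine
    // doc['location'].planeDistance(52.52, 13.40)  -> meters, fast approximation
    // doc['location'].geohashDistance("u33db")     -> meters to the decoded geohash point
    // The *WithDefault variants return the fallback for documents with no value:
    // doc['location'].arcDistanceWithDefault(52.52, 13.40, 0.0)
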
@@ -238,6 +238,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         }
     }
 
+    private final Version indexCreatedVersion;
     protected MappedFieldType fieldType;
     protected final MappedFieldType defaultFieldType;
     protected MultiFields multiFields;
@@ -246,6 +247,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
     protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
         super(simpleName);
         assert indexSettings != null;
+        this.indexCreatedVersion = Version.indexCreated(indexSettings);
         fieldType.freeze();
         this.fieldType = fieldType;
         defaultFieldType.freeze();
@@ -283,7 +285,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             if (!customBoost()
                     // don't set boosts eg. on dv fields
                     && field.fieldType().indexOptions() != IndexOptions.NONE
-                    && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
+                    && indexCreatedVersion.before(Version.V_5_0_0_alpha1)) {
                 field.setBoost(fieldType().boost());
             }
             context.doc().add(field);
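
The FieldMapper change is a straightforward hoist: the index-creation version is invariant for a mapper instance, so it is now derived once in the constructor rather than re-parsed from settings on every indexed field. A generic sketch of the pattern, with hypothetical names:

    import java.util.Map;

    class HotPathComponent {
        private final int parsedOnce; // invariant for the object's lifetime

        HotPathComponent(Map<String, String> settings) {
            // derive the value a single time in the constructor...
            this.parsedOnce = Integer.parseInt(settings.getOrDefault("version", "0"));
        }

        boolean isLegacy() {
            return parsedOnce < 5; // ...and reuse it on every hot-path call
        }
    }
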
@@ -102,7 +102,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
             throw new IllegalArgumentException("field name is null or empty");
         }
         if (text == null) {
-            throw new IllegalArgumentException("text cannot be null.");
+            throw new IllegalArgumentException("text cannot be null");
         }
         this.fieldName = fieldName;
         this.text = text;
@@ -265,11 +265,8 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
 
     public static Optional<CommonTermsQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
-        XContentParser.Token token = parser.nextToken();
-        if (token != XContentParser.Token.FIELD_NAME) {
-            throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query malformed, no field");
-        }
-        String fieldName = parser.currentName();
-
+
+        String fieldName = null;
         Object text = null;
         float boost = AbstractQueryBuilder.DEFAULT_BOOST;
         String analyzer = null;
@@ -280,78 +277,79 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
         Operator lowFreqOperator = CommonTermsQueryBuilder.DEFAULT_LOW_FREQ_OCCUR;
         float cutoffFrequency = CommonTermsQueryBuilder.DEFAULT_CUTOFF_FREQ;
         String queryName = null;
-        token = parser.nextToken();
-        if (token == XContentParser.Token.START_OBJECT) {
-            String currentFieldName = null;
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                if (token == XContentParser.Token.FIELD_NAME) {
-                    currentFieldName = parser.currentName();
-                } else if (token == XContentParser.Token.START_OBJECT) {
-                    if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
-                        String innerFieldName = null;
-                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                            if (token == XContentParser.Token.FIELD_NAME) {
-                                innerFieldName = parser.currentName();
-                            } else if (token.isValue()) {
-                                if (parseContext.getParseFieldMatcher().match(innerFieldName, LOW_FREQ_FIELD)) {
-                                    lowFreqMinimumShouldMatch = parser.text();
-                                } else if (parseContext.getParseFieldMatcher().match(innerFieldName, HIGH_FREQ_FIELD)) {
-                                    highFreqMinimumShouldMatch = parser.text();
-                                } else {
-                                    throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
-                                            "] query does not support [" + innerFieldName
-                                            + "] for [" + currentFieldName + "]");
-                                }
-                            } else {
-                                throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
-                                        "] unexpected token type [" + token
-                                        + "] after [" + innerFieldName + "]");
-                            }
-                        }
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
-                                "] query does not support [" + currentFieldName + "]");
-                    }
-                } else if (token.isValue()) {
-                    if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
-                        text = parser.objectText();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
-                        analyzer = parser.text();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
-                        disableCoord = parser.booleanValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
-                        boost = parser.floatValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, HIGH_FREQ_OPERATOR_FIELD)) {
-                        highFreqOperator = Operator.fromString(parser.text());
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOW_FREQ_OPERATOR_FIELD)) {
-                        lowFreqOperator = Operator.fromString(parser.text());
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
-                        lowFreqMinimumShouldMatch = parser.text();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
-                        cutoffFrequency = parser.floatValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
-                        queryName = parser.text();
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
-                                "] query does not support [" + currentFieldName + "]");
-                    }
-                }
-            }
-            parser.nextToken();
-        } else {
-            text = parser.objectText();
-            // move to the next token
-            token = parser.nextToken();
-            if (token != XContentParser.Token.END_OBJECT) {
-                throw new ParsingException(parser.getTokenLocation(),
-                        "[common] query parsed in simplified form, with direct field name, but included more options than just " +
-                        "the field name, possibly use its 'options' form, with 'query' element?");
-            }
-        }
+        XContentParser.Token token;
+        String currentFieldName = null;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            if (token == XContentParser.Token.FIELD_NAME) {
+                currentFieldName = parser.currentName();
+            } else if (parseContext.isDeprecatedSetting(currentFieldName)) {
+                // skip
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (fieldName != null) {
+                    throw new ParsingException(parser.getTokenLocation(), "[common] query doesn't support multiple fields, found ["
+                            + fieldName + "] and [" + currentFieldName + "]");
+                }
+                fieldName = currentFieldName;
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    if (token == XContentParser.Token.FIELD_NAME) {
+                        currentFieldName = parser.currentName();
+                    } else if (token == XContentParser.Token.START_OBJECT) {
+                        if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
+                            String innerFieldName = null;
+                            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                                if (token == XContentParser.Token.FIELD_NAME) {
+                                    innerFieldName = parser.currentName();
+                                } else if (token.isValue()) {
+                                    if (parseContext.getParseFieldMatcher().match(innerFieldName, LOW_FREQ_FIELD)) {
+                                        lowFreqMinimumShouldMatch = parser.text();
+                                    } else if (parseContext.getParseFieldMatcher().match(innerFieldName, HIGH_FREQ_FIELD)) {
+                                        highFreqMinimumShouldMatch = parser.text();
+                                    } else {
+                                        throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
+                                                "] query does not support [" + innerFieldName
+                                                + "] for [" + currentFieldName + "]");
+                                    }
+                                } else {
+                                    throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
+                                            "] unexpected token type [" + token
+                                            + "] after [" + innerFieldName + "]");
+                                }
+                            }
+                        } else {
+                            throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
+                                    "] query does not support [" + currentFieldName + "]");
+                        }
+                    } else if (token.isValue()) {
+                        if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
+                            text = parser.objectText();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
+                            analyzer = parser.text();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
+                            disableCoord = parser.booleanValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
+                            boost = parser.floatValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, HIGH_FREQ_OPERATOR_FIELD)) {
+                            highFreqOperator = Operator.fromString(parser.text());
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOW_FREQ_OPERATOR_FIELD)) {
+                            lowFreqOperator = Operator.fromString(parser.text());
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
+                            lowFreqMinimumShouldMatch = parser.text();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
+                            cutoffFrequency = parser.floatValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
+                            queryName = parser.text();
+                        } else {
+                            throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
+                                    "] query does not support [" + currentFieldName + "]");
+                        }
+                    }
+                }
+            } else {
+                fieldName = parser.currentName();
+                text = parser.objectText();
+            }
+        }
 
         if (text == null) {
             throw new ParsingException(parser.getTokenLocation(), "No text specified for text query");
         }
         return Optional.of(new CommonTermsQueryBuilder(fieldName, text)
                 .lowFreqMinimumShouldMatch(lowFreqMinimumShouldMatch)
                 .highFreqMinimumShouldMatch(highFreqMinimumShouldMatch)
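
A hedged sketch of what the rewritten parser accepts (field and text are illustrative). The loop now discovers the field name anywhere in the object and rejects a second one:

    // Verbose form:
    //   { "common": { "body": { "query": "nelly the elephant", "cutoff_frequency": 0.001 } } }
    // Shorthand form:
    //   { "common": { "body": "nelly the elephant" } }
    // A second field object now fails with "doesn't support multiple fields".
    CommonTermsQueryBuilder q = new CommonTermsQueryBuilder("body", "nelly the elephant")
            .cutoffFrequency(0.001f);
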
@@ -152,7 +152,7 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> i
      */
     public FuzzyQueryBuilder(String fieldName, Object value) {
         if (Strings.isEmpty(fieldName)) {
-            throw new IllegalArgumentException("field name cannot be null or empty.");
+            throw new IllegalArgumentException("field name cannot be null or empty");
         }
         if (value == null) {
             throw new IllegalArgumentException("query value cannot be null");
@@ -258,63 +258,60 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> i
 
     public static Optional<FuzzyQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
 
-        XContentParser.Token token = parser.nextToken();
-        if (token != XContentParser.Token.FIELD_NAME) {
-            throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query malformed, no field");
-        }
-
-        String fieldName = parser.currentName();
+        String fieldName = null;
         Object value = null;
 
         Fuzziness fuzziness = FuzzyQueryBuilder.DEFAULT_FUZZINESS;
         int prefixLength = FuzzyQueryBuilder.DEFAULT_PREFIX_LENGTH;
         int maxExpansions = FuzzyQueryBuilder.DEFAULT_MAX_EXPANSIONS;
         boolean transpositions = FuzzyQueryBuilder.DEFAULT_TRANSPOSITIONS;
         String rewrite = null;
 
         String queryName = null;
         float boost = AbstractQueryBuilder.DEFAULT_BOOST;
 
-        token = parser.nextToken();
-        if (token == XContentParser.Token.START_OBJECT) {
-            String currentFieldName = null;
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                if (token == XContentParser.Token.FIELD_NAME) {
-                    currentFieldName = parser.currentName();
-                } else {
-                    if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
-                        value = parser.objectBytes();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
-                        value = parser.objectBytes();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
-                        boost = parser.floatValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
-                        fuzziness = Fuzziness.parse(parser);
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
-                        prefixLength = parser.intValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
-                        maxExpansions = parser.intValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TRANSPOSITIONS_FIELD)) {
-                        transpositions = parser.booleanValue();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
-                        rewrite = parser.textOrNull();
-                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
-                        queryName = parser.text();
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query does not support [" + currentFieldName + "]");
-                    }
-                }
-            }
-            parser.nextToken();
-        } else {
-            value = parser.objectBytes();
-            // move to the next token
-            parser.nextToken();
-        }
+        String currentFieldName = null;
+        XContentParser.Token token;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            if (token == XContentParser.Token.FIELD_NAME) {
+                currentFieldName = parser.currentName();
+            } else if (parseContext.isDeprecatedSetting(currentFieldName)) {
+                // skip
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (fieldName != null) {
+                    throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query doesn't support multiple fields, found ["
+                            + fieldName + "] and [" + currentFieldName + "]");
+                }
+                fieldName = currentFieldName;
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    if (token == XContentParser.Token.FIELD_NAME) {
+                        currentFieldName = parser.currentName();
+                    } else {
+                        if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
+                            value = parser.objectBytes();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
+                            value = parser.objectBytes();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
+                            boost = parser.floatValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
+                            fuzziness = Fuzziness.parse(parser);
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
+                            prefixLength = parser.intValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
+                            maxExpansions = parser.intValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TRANSPOSITIONS_FIELD)) {
+                            transpositions = parser.booleanValue();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
+                            rewrite = parser.textOrNull();
+                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
+                            queryName = parser.text();
+                        } else {
+                            throw new ParsingException(parser.getTokenLocation(),
+                                    "[fuzzy] query does not support [" + currentFieldName + "]");
+                        }
+                    }
+                }
+            } else {
+                fieldName = parser.currentName();
+                value = parser.objectBytes();
+            }
+        }
 
         if (value == null) {
             throw new ParsingException(parser.getTokenLocation(), "no value specified for fuzzy query");
         }
         return Optional.of(new FuzzyQueryBuilder(fieldName, value)
                 .fuzziness(fuzziness)
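
As a usage sketch (field and value are illustrative), the rewritten parser accepts both request shapes, and the builder equivalent looks like this:

    // Verbose form:   { "fuzzy": { "user": { "value": "ki", "fuzziness": "AUTO" } } }
    // Shorthand form: { "fuzzy": { "user": "ki" } }
    // Two top-level field objects now fail with "doesn't support multiple fields".
    FuzzyQueryBuilder q = new FuzzyQueryBuilder("user", "ki")
            .fuzziness(Fuzziness.AUTO)
            .prefixLength(1);
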
@@ -359,9 +359,12 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
                 fieldName = currentFieldName;
                 GeoUtils.parseGeoPoint(parser, point);
             } else if (token == XContentParser.Token.START_OBJECT) {
+                if (fieldName != null) {
+                    throw new ParsingException(parser.getTokenLocation(), "[geo_distance] query doesn't support multiple fields, found ["
+                            + fieldName + "] and [" + currentFieldName + "]");
+                }
                 // the json in the format of -> field : { lat : 30, lon : 12 }
-                String currentName = parser.currentName();
+                assert currentFieldName != null;
                 fieldName = currentFieldName;
                 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                     if (token == XContentParser.Token.FIELD_NAME) {
@ -192,62 +192,55 @@ public class MatchPhrasePrefixQueryBuilder extends AbstractQueryBuilder<MatchPhr
|
|||
|
||||
public static Optional<MatchPhrasePrefixQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query malformed, no field");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
|
||||
String fieldName = null;
|
||||
Object value = null;
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String analyzer = null;
|
||||
int slop = MatchQuery.DEFAULT_PHRASE_SLOP;
|
||||
int maxExpansion = FuzzyQuery.defaultMaxExpansions;
|
||||
String queryName = null;
|
||||
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
|
||||
value = parser.objectText();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
|
||||
analyzer = parser.text();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchPhraseQueryBuilder.SLOP_FIELD)) {
|
||||
slop = parser.intValue();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
|
||||
maxExpansion = parser.intValue();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
|
||||
// skip
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (fieldName != null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[match_phrase_prefix] query doesn't support multiple " +
|
||||
"fields, found [" + fieldName + "] and [" + currentFieldName + "]");
|
||||
}
|
||||
fieldName = currentFieldName;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
|
||||
value = parser.objectText();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
|
||||
analyzer = parser.text();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchPhraseQueryBuilder.SLOP_FIELD)) {
|
||||
slop = parser.intValue();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
|
||||
maxExpansion = parser.intValue();
|
||||
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[" + NAME + "] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[" + NAME + "] query does not support [" + currentFieldName + "]");
|
||||
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
fieldName = parser.currentName();
|
||||
value = parser.objectText();
|
||||
}
|
||||
parser.nextToken();
|
||||
} else {
|
||||
value = parser.objectText();
|
||||
// move to the next token
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.END_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + NAME
|
||||
+ "] query parsed in simplified form, with direct field name, "
|
||||
+ "but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
|
||||
}
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "No text specified for text query");
|
||||
}
|
||||
|
||||
MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder(fieldName, value);
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -49,7 +50,7 @@ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder<MatchPhraseQue
|
|||
private int slop = MatchQuery.DEFAULT_PHRASE_SLOP;
|
||||
|
||||
public MatchPhraseQueryBuilder(String fieldName, Object value) {
|
||||
if (fieldName == null) {
|
||||
if (Strings.isEmpty(fieldName)) {
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires fieldName");
|
||||
}
|
||||
if (value == null) {
|
||||
|
@ -163,59 +164,52 @@ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder<MatchPhraseQue
|
|||
|
||||
public static Optional<MatchPhraseQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query malformed, no field");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
|
||||
String fieldName = null;
|
||||
Object value = null;
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String analyzer = null;
|
||||
int slop = MatchQuery.DEFAULT_PHRASE_SLOP;
|
||||
String queryName = null;
|
||||
|
||||
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[match_phrase] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] query does not support [" + currentFieldName + "]");
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
} else {
fieldName = parser.currentName();
value = parser.objectText();
}
parser.nextToken();
} else {
value = parser.objectText();
// move to the next token
token = parser.nextToken();
if (token != XContentParser.Token.END_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "[" + NAME
+ "] query parsed in simplified form, with direct field name, "
+ "but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
}
}

if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "No text specified for text query");
}

MatchPhraseQueryBuilder matchQuery = new MatchPhraseQueryBuilder(fieldName, value);

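Aside: the two request shapes the match_phrase parser above accepts, sketched as builder calls. A hedged example, assuming the module's MatchPhraseQueryBuilder setters; the field name and values are illustrative only.

    // Object form: { "match_phrase" : { "message" : { "query" : "this is a test", "slop" : 2 } } }
    MatchPhraseQueryBuilder withOptions = new MatchPhraseQueryBuilder("message", "this is a test");
    withOptions.slop(2);               // the "slop" element
    withOptions.analyzer("standard");  // the "analyzer" element
    withOptions.boost(1.2f);           // the "boost" element

    // Simplified form: { "match_phrase" : { "message" : "this is a test" } }
    MatchPhraseQueryBuilder simplified = new MatchPhraseQueryBuilder("message", "this is a test");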
@ -510,13 +510,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
public static Optional<MatchQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query malformed, no field");
}
String fieldName = parser.currentName();

String fieldName = null;
MatchQuery.Type type = MatchQuery.Type.BOOLEAN;
Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;

@ -533,80 +527,84 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
Float cutOffFrequency = null;
ZeroTermsQuery zeroTermsQuery = MatchQuery.DEFAULT_ZERO_TERMS_QUERY;
String queryName = null;

token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
String tStr = parser.text();
if ("boolean".equals(tStr)) {
type = MatchQuery.Type.BOOLEAN;
} else if ("phrase".equals(tStr)) {
type = MatchQuery.Type.PHRASE;
} else if ("phrase_prefix".equals(tStr) || ("phrasePrefix".equals(tStr))) {
type = MatchQuery.Type.PHRASE_PREFIX;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query does not support type " + tStr);
}
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
prefixLength = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
maxExpansion = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPERATOR_FIELD)) {
operator = Operator.fromString(parser.text());
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
fuzzyRewrite = parser.textOrNull();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_TRANSPOSITIONS_FIELD)) {
fuzzyTranspositions = parser.booleanValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
lenient = parser.booleanValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
cutOffFrequency = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ZERO_TERMS_QUERY_FIELD)) {
String zeroTermsDocs = parser.text();
if ("none".equalsIgnoreCase(zeroTermsDocs)) {
zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
} else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[match] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
String tStr = parser.text();
if ("boolean".equals(tStr)) {
type = MatchQuery.Type.BOOLEAN;
} else if ("phrase".equals(tStr)) {
type = MatchQuery.Type.PHRASE;
} else if ("phrase_prefix".equals(tStr) || ("phrasePrefix".equals(tStr))) {
type = MatchQuery.Type.PHRASE_PREFIX;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query does not support type " + tStr);
}
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
prefixLength = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
maxExpansion = parser.intValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPERATOR_FIELD)) {
operator = Operator.fromString(parser.text());
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
fuzzyRewrite = parser.textOrNull();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_TRANSPOSITIONS_FIELD)) {
fuzzyTranspositions = parser.booleanValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
lenient = parser.booleanValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
cutOffFrequency = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ZERO_TERMS_QUERY_FIELD)) {
String zeroTermsDocs = parser.text();
if ("none".equalsIgnoreCase(zeroTermsDocs)) {
zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
} else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL;
} else {
throw new ParsingException(parser.getTokenLocation(),
"Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
}
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
"Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
"[" + NAME + "] query does not support [" + currentFieldName + "]");
}
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] query does not support [" + currentFieldName + "]");
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
parser.nextToken();
} else {
value = parser.objectText();
// move to the next token
token = parser.nextToken();
if (token != XContentParser.Token.END_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "[match] query parsed in simplified form, with direct field name, "
+ "but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
} else {
fieldName = parser.currentName();
value = parser.objectText();
}
}

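Aside: how the options read by this match parser surface on the builder side. A hedged sketch, assuming the standard MatchQueryBuilder setters; the field and values are illustrative.

    MatchQueryBuilder match = new MatchQueryBuilder("message", "quick brown fox");
    match.operator(Operator.AND);                         // "operator"
    match.fuzziness(Fuzziness.AUTO);                      // "fuzziness"
    match.prefixLength(1);                                // "prefix_length"
    match.maxExpansions(25);                              // "max_expansions"
    match.minimumShouldMatch("2");                        // "minimum_should_match"
    match.zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);  // "zero_terms_query"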
@ -64,7 +64,7 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
throw new IllegalArgumentException("field name is null or empty");
}
if (value == null) {
throw new IllegalArgumentException("value cannot be null.");
throw new IllegalArgumentException("value cannot be null");
}
this.fieldName = fieldName;
this.value = value;

@ -120,7 +120,7 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
public static Optional<PrefixQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = parser.currentName();
String fieldName = null;
String value = null;
String rewrite = null;

@ -134,6 +134,10 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[prefix] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {

@ -149,19 +153,16 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
rewrite = parser.textOrNull();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[regexp] query does not support [" + currentFieldName + "]");
"[prefix] query does not support [" + currentFieldName + "]");
}
}
}
} else {
fieldName = currentFieldName;
value = parser.textOrNull();
fieldName = currentFieldName;
value = parser.textOrNull();
}
}

if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "No value specified for prefix query");
}
return Optional.of(new PrefixQueryBuilder(fieldName, value)
.rewrite(rewrite)
.boost(boost)

@ -115,10 +115,11 @@ public class QueryParseContext implements ParseFieldMatcherSupplier {
@SuppressWarnings("unchecked")
Optional<QueryBuilder> result = (Optional<QueryBuilder>) indicesQueriesRegistry.lookup(queryName, parseFieldMatcher,
parser.getTokenLocation()).fromXContent(this);
if (parser.currentToken() == XContentParser.Token.END_OBJECT) {
// if we are at END_OBJECT, move to the next one...
parser.nextToken();
if (parser.currentToken() != XContentParser.Token.END_OBJECT) {
throw new ParsingException(parser.getTokenLocation(),
"[" + queryName + "] malformed query, expected [END_OBJECT] but found [" + parser.currentToken() + "]");
}
parser.nextToken();
return result;
}

@ -318,6 +318,10 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[range] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {

@ -77,7 +77,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
throw new IllegalArgumentException("field name is null or empty");
}
if (value == null) {
throw new IllegalArgumentException("value cannot be null.");
throw new IllegalArgumentException("value cannot be null");
}
this.fieldName = fieldName;
this.value = value;

@ -180,10 +180,8 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
public static Optional<RegexpQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = parser.currentName();
String fieldName = null;
String rewrite = null;

String value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
int flagsValue = RegexpQueryBuilder.DEFAULT_FLAGS_VALUE;

@ -197,6 +195,10 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[regexp] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {

@ -233,9 +235,6 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
}
}

if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "No value specified for regexp query");
}
return Optional.of(new RegexpQueryBuilder(fieldName, value)
.flags(flagsValue)
.maxDeterminizedStates(maxDeterminizedStates)

@ -186,7 +186,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
/** Add a field to run the query against. */
public SimpleQueryStringBuilder field(String field) {
if (Strings.isEmpty(field)) {
throw new IllegalArgumentException("supplied field is null or empty.");
throw new IllegalArgumentException("supplied field is null or empty");
}
this.fieldsAndWeights.put(field, AbstractQueryBuilder.DEFAULT_BOOST);
return this;

@ -195,7 +195,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
/** Add a field to run the query against with a specific boost. */
public SimpleQueryStringBuilder field(String field, float boost) {
if (Strings.isEmpty(field)) {
throw new IllegalArgumentException("supplied field is null or empty.");
throw new IllegalArgumentException("supplied field is null or empty");
}
this.fieldsAndWeights.put(field, boost);
return this;

@ -94,49 +94,43 @@ public class SpanTermQueryBuilder extends BaseTermQueryBuilder<SpanTermQueryBuil
public static Optional<SpanTermQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();

XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_OBJECT) {
token = parser.nextToken();
}

assert token == XContentParser.Token.FIELD_NAME;
String fieldName = parser.currentName();

String fieldName = null;
Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BaseTermQueryBuilder.VALUE_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[span_term] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[span_term] query does not support [" + currentFieldName + "]");
if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BaseTermQueryBuilder.VALUE_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[span_term] query does not support [" + currentFieldName + "]");
}
}
}
} else {
fieldName = parser.currentName();
value = parser.objectBytes();
}
parser.nextToken();
} else {
value = parser.objectBytes();
// move to the next token
parser.nextToken();
}

if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "No value specified for term query");
}

SpanTermQueryBuilder result = new SpanTermQueryBuilder(fieldName, value);

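Aside: the two span_term request shapes this parser accepts, as a hedged builder sketch (the field and value are illustrative):

    // Object form: { "span_term" : { "user" : { "value" : "kimchy", "boost" : 2.0 } } }
    SpanTermQueryBuilder withOptions = new SpanTermQueryBuilder("user", "kimchy");
    withOptions.boost(2.0f);            // the "boost" element
    withOptions.queryName("my_query");  // the "_name" element

    // Simplified form: { "span_term" : { "user" : "kimchy" } }
    SpanTermQueryBuilder simplified = new SpanTermQueryBuilder("user", "kimchy");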
@ -75,7 +75,7 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
throw new IllegalArgumentException("field name is null or empty");
}
if (value == null) {
throw new IllegalArgumentException("value cannot be null.");
throw new IllegalArgumentException("value cannot be null");
}
this.fieldName = fieldName;
this.value = value;

@ -135,49 +135,50 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
public static Optional<WildcardQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(), "[wildcard] query malformed, no field");
}
String fieldName = parser.currentName();
String fieldName = null;
String rewrite = null;

String value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.getParseFieldMatcher().match(currentFieldName, WILDCARD_FIELD)) {
value = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
value = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[wildcard] query doesn't support multiple fields, found ["
+ fieldName + "] and [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[wildcard] query does not support [" + currentFieldName + "]");
if (parseContext.getParseFieldMatcher().match(currentFieldName, WILDCARD_FIELD)) {
value = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
value = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[wildcard] query does not support [" + currentFieldName + "]");
}
}
}
} else {
fieldName = parser.currentName();
value = parser.text();
}
parser.nextToken();
} else {
value = parser.text();
parser.nextToken();
}

if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "No value specified for wildcard query");
}
return Optional.of(new WildcardQueryBuilder(fieldName, value)
.rewrite(rewrite)
.boost(boost)

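Aside: the corresponding wildcard builder calls, as a hedged sketch (the field and pattern are illustrative):

    // Object form: { "wildcard" : { "user" : { "wildcard" : "ki*y", "boost" : 2.0 } } }
    WildcardQueryBuilder withOptions = new WildcardQueryBuilder("user", "ki*y");
    withOptions.rewrite("constant_score");  // the "rewrite" element
    withOptions.boost(2.0f);                // the "boost" element

    // Simplified form: { "wildcard" : { "user" : "ki*y" } }
    WildcardQueryBuilder simplified = new WildcardQueryBuilder("user", "ki*y");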
@ -45,6 +45,10 @@ import static java.util.Collections.emptyMap;
* }
* }
* }</pre>
*
* Elasticsearch doesn't have any automatic mechanism to share these components between indexes. If any component is heavy enough to warrant
* such sharing then it is the Plugin's responsibility to do it in their {@link AnalysisProvider} implementation. We recommend against doing
* this unless absolutely necessary because it can be difficult to get the caching right given things like behavior changes across versions.
*/
public interface AnalysisPlugin {
/**

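Aside: a minimal sketch of the sharing that javadoc describes, assuming the interface's getTokenFilters extension point; HeavyDictionary and MyTokenFilterFactory are hypothetical plugin classes, not part of this commit.

    import java.util.Collections;
    import java.util.Map;
    import org.elasticsearch.index.analysis.TokenFilterFactory;
    import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
    import org.elasticsearch.plugins.AnalysisPlugin;
    import org.elasticsearch.plugins.Plugin;

    public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
        // one instance per node, handed to every index that uses the filter;
        // the plugin, not Elasticsearch, owns and maintains this cache
        private static final HeavyDictionary DICTIONARY = HeavyDictionary.load();

        @Override
        public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
            return Collections.singletonMap("my_filter",
                    (indexSettings, environment, name, settings) ->
                            new MyTokenFilterFactory(indexSettings, name, settings, DICTIONARY));
        }
    }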
@ -38,15 +38,19 @@ public final class IndexId implements Writeable, ToXContent {
private final String name;
private final String id;
private final int hashCode;

public IndexId(final String name, final String id) {
this.name = name;
this.id = id;
this.hashCode = computeHashCode();

}

public IndexId(final StreamInput in) throws IOException {
this.name = in.readString();
this.id = in.readString();
this.hashCode = computeHashCode();
}

/**

@ -90,6 +94,10 @@ public final class IndexId implements Writeable, ToXContent {
@Override
public int hashCode() {
return hashCode;
}

private int computeHashCode() {
return Objects.hash(name, id);
}

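Aside: the precompute-once hashing pattern used above, in isolation. A JDK-only sketch; it is safe because every field is final and the hash is derived solely from them.

    import java.util.Objects;

    public final class CachedHashKey {
        private final String name;
        private final String id;
        private final int hashCode; // computed once in the constructor

        public CachedHashKey(String name, String id) {
            this.name = name;
            this.id = id;
            this.hashCode = Objects.hash(name, id);
        }

        @Override
        public int hashCode() {
            return hashCode; // no recomputation on every map lookup
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other instanceof CachedHashKey == false) {
                return false;
            }
            CachedHashKey that = (CachedHashKey) other;
            return name.equals(that.name) && id.equals(that.id);
        }
    }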
@ -101,6 +101,7 @@ import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.Collection;

@ -406,7 +407,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
}
try {
// Delete snapshot from the index file, since it is the maintainer of truth of active snapshots
writeIndexGen(repositoryData.removeSnapshot(snapshotId));
final RepositoryData updatedRepositoryData = repositoryData.removeSnapshot(snapshotId);
writeIndexGen(updatedRepositoryData);

// delete the snapshot file
safeSnapshotBlobDelete(snapshot, snapshotId.getUUID());

@ -436,6 +438,27 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
}
}
}

// cleanup indices that are no longer part of the repository
final Collection<IndexId> indicesToCleanUp = Sets.newHashSet(repositoryData.getIndices().values());
indicesToCleanUp.removeAll(updatedRepositoryData.getIndices().values());
final BlobContainer indicesBlobContainer = blobStore().blobContainer(basePath().add("indices"));
for (final IndexId indexId : indicesToCleanUp) {
try {
indicesBlobContainer.deleteBlob(indexId.getId());
} catch (DirectoryNotEmptyException dnee) {
// if the directory isn't empty for some reason, it will fail to clean up;
// we'll ignore that and accept that cleanup didn't fully succeed.
// since we are using UUIDs for path names, this won't be an issue for
// snapshotting indices of the same name
logger.debug("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
"its index folder due to the directory not being empty.", dnee, metadata.name(), indexId);
} catch (IOException ioe) {
// a different IOException occurred while trying to delete - will just log the issue for now
logger.debug("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
"its index folder.", ioe, metadata.name(), indexId);
}
}
} catch (IOException ex) {
throw new RepositoryException(metadata.name(), "failed to update snapshot in repository", ex);
}

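Aside: the cleanup above reduces to a set difference between what the repository referenced before and after the delete. A JDK-only sketch with hypothetical maps standing in for the two RepositoryData index views:

    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    static Set<String> staleIndexIds(Map<String, String> oldIndices, Map<String, String> updatedIndices) {
        Set<String> stale = new HashSet<>(oldIndices.values()); // referenced before the delete
        stale.removeAll(updatedIndices.values());               // minus what is still referenced
        return stale;                                           // folders eligible for cleanup
    }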
@ -21,6 +21,7 @@ package org.elasticsearch.search;
import com.carrotsearch.hppc.ObjectFloatHashMap;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;

@ -87,6 +88,8 @@ import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names;

@ -94,6 +97,7 @@ import org.elasticsearch.threadpool.ThreadPool.Names;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;

@ -265,7 +269,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
loadOrExecuteQueryPhase(request, context);

if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
if (hasHits(context.queryResult()) == false && context.scrollContext() == null) {
freeContext(context.id());
} else {
contextProcessedSuccessfully(context);

@ -320,7 +324,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
operationListener.onPreQueryPhase(context);
long time = System.nanoTime();
queryPhase.execute(context);
if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
if (hasHits(context.queryResult()) == false && context.scrollContext() == null) {
// no hits, we can release the context since there will be no fetch phase
freeContext(context.id());
} else {

@ -811,40 +815,55 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
}

private static final int[] EMPTY_DOC_IDS = new int[0];

/**
* Shortcut ids to load, we load only "from" and up to "size". The phase controller
* handles this as well since the result is always size * shards for Q_A_F
*/
private void shortcutDocIdsToLoad(SearchContext context) {
final int[] docIdsToLoad;
int docsOffset = 0;
final Suggest suggest = context.queryResult().suggest();
int numSuggestDocs = 0;
final List<CompletionSuggestion> completionSuggestions;
if (suggest != null && suggest.hasScoreDocs()) {
completionSuggestions = suggest.filter(CompletionSuggestion.class);
for (CompletionSuggestion completionSuggestion : completionSuggestions) {
numSuggestDocs += completionSuggestion.getOptions().size();
}
} else {
completionSuggestions = Collections.emptyList();
}
if (context.request().scroll() != null) {
TopDocs topDocs = context.queryResult().topDocs();
int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
docIdsToLoad = new int[topDocs.scoreDocs.length + numSuggestDocs];
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
docIdsToLoad[docsOffset++] = topDocs.scoreDocs[i].doc;
}
context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
} else {
TopDocs topDocs = context.queryResult().topDocs();
if (topDocs.scoreDocs.length < context.from()) {
// no more docs...
context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);
return;
}
int totalSize = context.from() + context.size();
int[] docIdsToLoad = new int[Math.min(topDocs.scoreDocs.length - context.from(), context.size())];
int counter = 0;
for (int i = context.from(); i < totalSize; i++) {
if (i < topDocs.scoreDocs.length) {
docIdsToLoad[counter] = topDocs.scoreDocs[i].doc;
} else {
break;
docIdsToLoad = new int[numSuggestDocs];
} else {
int totalSize = context.from() + context.size();
docIdsToLoad = new int[Math.min(topDocs.scoreDocs.length - context.from(), context.size()) +
numSuggestDocs];
for (int i = context.from(); i < Math.min(totalSize, topDocs.scoreDocs.length); i++) {
docIdsToLoad[docsOffset++] = topDocs.scoreDocs[i].doc;
}
counter++;
}
context.docIdsToLoad(docIdsToLoad, 0, counter);
}
for (CompletionSuggestion completionSuggestion : completionSuggestions) {
for (CompletionSuggestion.Entry.Option option : completionSuggestion.getOptions()) {
docIdsToLoad[docsOffset++] = option.getDoc().doc;
}
}
context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
}

private static boolean hasHits(final QuerySearchResult searchResult) {
return searchResult.topDocs().scoreDocs.length > 0 ||
(searchResult.suggest() != null && searchResult.suggest().hasScoreDocs());
}

private void processScroll(InternalScrollSearchRequest request, SearchContext context) {

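Aside: the array layout the new shortcutDocIdsToLoad produces, shown as a hypothetical JDK-only helper: ranked search docs first, completion-suggest docs appended after them.

    static int[] packDocIds(int[] searchDocs, int[] suggestDocs) {
        int[] packed = new int[searchDocs.length + suggestDocs.length];
        int offset = 0;
        for (int doc : searchDocs) {
            packed[offset++] = doc;  // top search hits occupy the head of the array
        }
        for (int doc : suggestDocs) {
            packed[offset++] = doc;  // suggest option docs follow
        }
        return packed;
    }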
@ -30,7 +30,6 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.component.AbstractComponent;

@ -53,18 +52,22 @@ import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.profile.ProfileShardResult;
import org.elasticsearch.search.profile.SearchProfileShardResults;
import org.elasticsearch.search.profile.query.QueryProfileShardResult;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

@ -154,6 +157,10 @@ public class SearchPhaseController extends AbstractComponent {
}

/**
* Returns a score doc array of top N search docs across all shards, followed by top suggest docs for each
* named completion suggestion across all shards. If more than one named completion suggestion is specified in the
* request, the suggest docs for a named suggestion are ordered by the suggestion name.
*
* @param ignoreFrom Whether to ignore the from and sort all hits in each shard result.
* Enabled only for scroll search, because that only retrieves hits of length 'size' in the query phase.
* @param resultsArr Shard result holder

@ -191,19 +198,40 @@ public class SearchPhaseController extends AbstractComponent {
offset = 0;
}
ScoreDoc[] scoreDocs = result.topDocs().scoreDocs;
ScoreDoc[] docs;
int numSuggestDocs = 0;
final Suggest suggest = result.queryResult().suggest();
final List<CompletionSuggestion> completionSuggestions;
if (suggest != null) {
completionSuggestions = suggest.filter(CompletionSuggestion.class);
for (CompletionSuggestion suggestion : completionSuggestions) {
numSuggestDocs += suggestion.getOptions().size();
}
} else {
completionSuggestions = Collections.emptyList();
}
int docsOffset = 0;
if (scoreDocs.length == 0 || scoreDocs.length < offset) {
return EMPTY_DOCS;
docs = new ScoreDoc[numSuggestDocs];
} else {
int resultDocsSize = result.size();
if ((scoreDocs.length - offset) < resultDocsSize) {
resultDocsSize = scoreDocs.length - offset;
}
docs = new ScoreDoc[resultDocsSize + numSuggestDocs];
for (int i = 0; i < resultDocsSize; i++) {
ScoreDoc scoreDoc = scoreDocs[offset + i];
scoreDoc.shardIndex = shardIndex;
docs[i] = scoreDoc;
docsOffset++;
}
}

int resultDocsSize = result.size();
if ((scoreDocs.length - offset) < resultDocsSize) {
resultDocsSize = scoreDocs.length - offset;
}
ScoreDoc[] docs = new ScoreDoc[resultDocsSize];
for (int i = 0; i < resultDocsSize; i++) {
ScoreDoc scoreDoc = scoreDocs[offset + i];
scoreDoc.shardIndex = shardIndex;
docs[i] = scoreDoc;
for (CompletionSuggestion suggestion: completionSuggestions) {
for (CompletionSuggestion.Entry.Option option : suggestion.getOptions()) {
ScoreDoc doc = option.getDoc();
doc.shardIndex = shardIndex;
docs[docsOffset++] = doc;
}
}
return docs;
}

@ -213,13 +241,7 @@ public class SearchPhaseController extends AbstractComponent {
Arrays.sort(sortedResults, QUERY_RESULT_ORDERING);
QuerySearchResultProvider firstResult = sortedResults[0].value;

int topN = firstResult.queryResult().size();
if (firstResult.includeFetch()) {
// if we did both query and fetch on the same go, we have fetched all the docs from each shards already, use them...
// this is also important since we shortcut and fetch only docs from "from" and up to "size"
topN *= sortedResults.length;
}

int topN = topN(results);
int from = firstResult.queryResult().from();
if (ignoreFrom) {
from = 0;

@ -258,40 +280,86 @@ public class SearchPhaseController extends AbstractComponent {
}
mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs);
}
return mergedTopDocs.scoreDocs;
}

public ScoreDoc[] getLastEmittedDocPerShard(SearchRequest request, ScoreDoc[] sortedShardList, int numShards) {
if (request.scroll() != null) {
return getLastEmittedDocPerShard(sortedShardList, numShards);
} else {
return null;
ScoreDoc[] scoreDocs = mergedTopDocs.scoreDocs;
final Map<String, List<Suggestion<CompletionSuggestion.Entry>>> groupedCompletionSuggestions = new HashMap<>();
// group suggestions and assign shard index
for (AtomicArray.Entry<? extends QuerySearchResultProvider> sortedResult : sortedResults) {
Suggest shardSuggest = sortedResult.value.queryResult().suggest();
if (shardSuggest != null) {
for (CompletionSuggestion suggestion : shardSuggest.filter(CompletionSuggestion.class)) {
suggestion.setShardIndex(sortedResult.index);
List<Suggestion<CompletionSuggestion.Entry>> suggestions =
groupedCompletionSuggestions.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>());
suggestions.add(suggestion);
}
}
}
if (groupedCompletionSuggestions.isEmpty() == false) {
int numSuggestDocs = 0;
List<Suggestion<? extends Entry<? extends Entry.Option>>> completionSuggestions =
new ArrayList<>(groupedCompletionSuggestions.size());
for (List<Suggestion<CompletionSuggestion.Entry>> groupedSuggestions : groupedCompletionSuggestions.values()) {
final CompletionSuggestion completionSuggestion = CompletionSuggestion.reduceTo(groupedSuggestions);
assert completionSuggestion != null;
numSuggestDocs += completionSuggestion.getOptions().size();
completionSuggestions.add(completionSuggestion);
}
scoreDocs = new ScoreDoc[mergedTopDocs.scoreDocs.length + numSuggestDocs];
System.arraycopy(mergedTopDocs.scoreDocs, 0, scoreDocs, 0, mergedTopDocs.scoreDocs.length);
int offset = mergedTopDocs.scoreDocs.length;
Suggest suggestions = new Suggest(completionSuggestions);
for (CompletionSuggestion completionSuggestion : suggestions.filter(CompletionSuggestion.class)) {
for (CompletionSuggestion.Entry.Option option : completionSuggestion.getOptions()) {
scoreDocs[offset++] = option.getDoc();
}
}
}
return scoreDocs;
}

public ScoreDoc[] getLastEmittedDocPerShard(ScoreDoc[] sortedShardList, int numShards) {
public ScoreDoc[] getLastEmittedDocPerShard(List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults,
ScoreDoc[] sortedScoreDocs, int numShards) {
ScoreDoc[] lastEmittedDocPerShard = new ScoreDoc[numShards];
for (ScoreDoc scoreDoc : sortedShardList) {
lastEmittedDocPerShard[scoreDoc.shardIndex] = scoreDoc;
if (queryResults.isEmpty() == false) {
long fetchHits = 0;
for (AtomicArray.Entry<? extends QuerySearchResultProvider> queryResult : queryResults) {
fetchHits += queryResult.value.queryResult().topDocs().scoreDocs.length;
}
// from is always zero as when we use scroll, we ignore from
long size = Math.min(fetchHits, topN(queryResults));
for (int sortedDocsIndex = 0; sortedDocsIndex < size; sortedDocsIndex++) {
ScoreDoc scoreDoc = sortedScoreDocs[sortedDocsIndex];
lastEmittedDocPerShard[scoreDoc.shardIndex] = scoreDoc;
}
}
return lastEmittedDocPerShard;

}

/**
* Builds an array, with potential null elements, with docs to load.
*/
public void fillDocIdsToLoad(AtomicArray<IntArrayList> docsIdsToLoad, ScoreDoc[] shardDocs) {
public void fillDocIdsToLoad(AtomicArray<IntArrayList> docIdsToLoad, ScoreDoc[] shardDocs) {
for (ScoreDoc shardDoc : shardDocs) {
IntArrayList list = docsIdsToLoad.get(shardDoc.shardIndex);
if (list == null) {
list = new IntArrayList(); // can't be shared!, uses unsafe on it later on
docsIdsToLoad.set(shardDoc.shardIndex, list);
IntArrayList shardDocIdsToLoad = docIdsToLoad.get(shardDoc.shardIndex);
if (shardDocIdsToLoad == null) {
shardDocIdsToLoad = new IntArrayList(); // can't be shared!, uses unsafe on it later on
docIdsToLoad.set(shardDoc.shardIndex, shardDocIdsToLoad);
}
list.add(shardDoc.doc);
shardDocIdsToLoad.add(shardDoc.doc);
}
}

public InternalSearchResponse merge(ScoreDoc[] sortedDocs, AtomicArray<? extends QuerySearchResultProvider> queryResultsArr,
/**
* Enriches search hits and completion suggestion hits from <code>sortedDocs</code> using <code>fetchResultsArr</code>,
* merges suggestions, aggregations and profile results
*
* Expects sortedDocs to have top search docs across all shards, optionally followed by top suggest docs for each named
* completion suggestion ordered by suggestion name
*/
public InternalSearchResponse merge(boolean ignoreFrom, ScoreDoc[] sortedDocs,
AtomicArray<? extends QuerySearchResultProvider> queryResultsArr,
AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr) {

List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults = queryResultsArr.asList();

@ -317,6 +385,7 @@ public class SearchPhaseController extends AbstractComponent {
// count the total (we use the query result provider here, since we might not get any hits (we scrolled past them))
long totalHits = 0;
long fetchHits = 0;
float maxScore = Float.NEGATIVE_INFINITY;
boolean timedOut = false;
Boolean terminatedEarly = null;

@ -333,6 +402,7 @@ public class SearchPhaseController extends AbstractComponent {
}
}
totalHits += result.topDocs().totalHits;
fetchHits += result.topDocs().scoreDocs.length;
if (!Float.isNaN(result.topDocs().getMaxScore())) {
maxScore = Math.max(maxScore, result.topDocs().getMaxScore());
}

@ -345,11 +415,13 @@ public class SearchPhaseController extends AbstractComponent {
for (AtomicArray.Entry<? extends FetchSearchResultProvider> entry : fetchResults) {
entry.value.fetchResult().initCounter();
}

int from = ignoreFrom ? 0 : firstResult.queryResult().from();
int numSearchHits = (int) Math.min(fetchHits - from, topN(queryResults));
// merge hits
List<InternalSearchHit> hits = new ArrayList<>();
if (!fetchResults.isEmpty()) {
for (ScoreDoc shardDoc : sortedDocs) {
for (int i = 0; i < numSearchHits; i++) {
ScoreDoc shardDoc = sortedDocs[i];
FetchSearchResultProvider fetchResultProvider = fetchResultsArr.get(shardDoc.shardIndex);
if (fetchResultProvider == null) {
continue;

@ -360,7 +432,6 @@ public class SearchPhaseController extends AbstractComponent {
InternalSearchHit searchHit = fetchResult.hits().internalHits()[index];
searchHit.score(shardDoc.score);
searchHit.shard(fetchResult.shardTarget());

if (sorted) {
FieldDoc fieldDoc = (FieldDoc) shardDoc;
searchHit.sortValues(fieldDoc.fields, firstResult.sortValueFormats());

@ -368,7 +439,6 @@ public class SearchPhaseController extends AbstractComponent {
searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue());
}
}

hits.add(searchHit);
}
}

@ -376,38 +446,72 @@ public class SearchPhaseController extends AbstractComponent {
// merge suggest results
Suggest suggest = null;
if (!queryResults.isEmpty()) {
final Map<String, List<Suggest.Suggestion>> groupedSuggestions = new HashMap<>();
boolean hasSuggestions = false;
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
Suggest shardResult = entry.value.queryResult().queryResult().suggest();

if (shardResult == null) {
continue;
if (firstResult.suggest() != null) {
final Map<String, List<Suggestion>> groupedSuggestions = new HashMap<>();
for (AtomicArray.Entry<? extends QuerySearchResultProvider> queryResult : queryResults) {
Suggest shardSuggest = queryResult.value.queryResult().suggest();
if (shardSuggest != null) {
for (Suggestion<? extends Suggestion.Entry<? extends Suggestion.Entry.Option>> suggestion : shardSuggest) {
List<Suggestion> suggestionList = groupedSuggestions.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>());
suggestionList.add(suggestion);
}
}
}
if (groupedSuggestions.isEmpty() == false) {
suggest = new Suggest(Suggest.reduce(groupedSuggestions));
if (!fetchResults.isEmpty()) {
int currentOffset = numSearchHits;
for (CompletionSuggestion suggestion : suggest.filter(CompletionSuggestion.class)) {
final List<CompletionSuggestion.Entry.Option> suggestionOptions = suggestion.getOptions();
for (int scoreDocIndex = currentOffset; scoreDocIndex < currentOffset + suggestionOptions.size(); scoreDocIndex++) {
ScoreDoc shardDoc = sortedDocs[scoreDocIndex];
FetchSearchResultProvider fetchSearchResultProvider = fetchResultsArr.get(shardDoc.shardIndex);
if (fetchSearchResultProvider == null) {
continue;
}
FetchSearchResult fetchResult = fetchSearchResultProvider.fetchResult();
int fetchResultIndex = fetchResult.counterGetAndIncrement();
if (fetchResultIndex < fetchResult.hits().internalHits().length) {
InternalSearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex];
CompletionSuggestion.Entry.Option suggestOption =
suggestionOptions.get(scoreDocIndex - currentOffset);
hit.score(shardDoc.score);
hit.shard(fetchResult.shardTarget());
suggestOption.setHit(hit);
}
}
currentOffset += suggestionOptions.size();
}
assert currentOffset == sortedDocs.length : "expected no more score doc slices";
}
hasSuggestions = true;
Suggest.group(groupedSuggestions, shardResult);
}

suggest = hasSuggestions ? new Suggest(Suggest.reduce(groupedSuggestions)) : null;
}

// merge addAggregation
// merge Aggregation
InternalAggregations aggregations = null;
if (!queryResults.isEmpty()) {
if (firstResult.aggregations() != null && firstResult.aggregations().asList() != null) {
List<InternalAggregations> aggregationsList = new ArrayList<>(queryResults.size());
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations());
if (firstResult.aggregations() != null && firstResult.aggregations().asList() != null) {
List<InternalAggregations> aggregationsList = new ArrayList<>(queryResults.size());
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations());
}
ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService, clusterService.state());
aggregations = InternalAggregations.reduce(aggregationsList, reduceContext);
List<SiblingPipelineAggregator> pipelineAggregators = firstResult.pipelineAggregators();
if (pipelineAggregators != null) {
List<InternalAggregation> newAggs = StreamSupport.stream(aggregations.spliterator(), false)
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), reduceContext);
newAggs.add(newAgg);
}
ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService, clusterService.state());
aggregations = InternalAggregations.reduce(aggregationsList, reduceContext);
aggregations = new InternalAggregations(newAggs);
}
}

//Collect profile results
SearchProfileShardResults shardResults = null;
if (!queryResults.isEmpty() && firstResult.profileResults() != null) {
if (firstResult.profileResults() != null) {
Map<String, ProfileShardResult> profileResults = new HashMap<>(queryResults.size());
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
String key = entry.value.queryResult().shardTarget().toString();

@ -416,24 +520,22 @@ public class SearchPhaseController extends AbstractComponent {
shardResults = new SearchProfileShardResults(profileResults);
}

if (aggregations != null) {
List<SiblingPipelineAggregator> pipelineAggregators = firstResult.pipelineAggregators();
if (pipelineAggregators != null) {
List<InternalAggregation> newAggs = StreamSupport.stream(aggregations.spliterator(), false).map((p) -> {
return (InternalAggregation) p;
}).collect(Collectors.toList());
for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService, clusterService.state());
InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), reduceContext);
newAggs.add(newAgg);
}
aggregations = new InternalAggregations(newAggs);
}
}

InternalSearchHits searchHits = new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), totalHits, maxScore);

return new InternalSearchResponse(searchHits, aggregations, suggest, shardResults, timedOut, terminatedEarly);
}

/**
* returns the number of top results to be considered across all shards
*/
private static int topN(List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults) {
QuerySearchResultProvider firstResult = queryResults.get(0).value;
int topN = firstResult.queryResult().size();
if (firstResult.includeFetch()) {
// if we did both query and fetch on the same go, we have fetched all the docs from each shards already, use them...
// this is also important since we shortcut and fetch only docs from "from" and up to "size"
topN *= queryResults.size();
}
return topN;
}
}

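Aside: a worked example of the extracted topN() above. With size 10 and 5 shards, query-then-fetch considers 10 docs across all shards, while query-and-fetch has already fetched 10 docs per shard, so the merge window widens to 50. A JDK-only sketch:

    static int topN(int size, int shards, boolean includeFetch) {
        // query-and-fetch returns `size` docs from every shard, so widen the window
        return includeFetch ? size * shards : size;
    }
    // topN(10, 5, false) == 10; topN(10, 5, true) == 50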
@ -39,10 +39,7 @@ public class ShardFetchSearchRequest extends ShardFetchRequest implements Indice
private OriginalIndices originalIndices;

public ShardFetchSearchRequest() {
}

public ShardFetchSearchRequest(SearchRequest request, long id, IntArrayList list) {
this(request, id, list, null);
}

public ShardFetchSearchRequest(SearchRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {

@ -43,7 +43,9 @@ public final class MatchedQueriesFetchSubPhase implements FetchSubPhase {
@Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
if (hits.length == 0) {
if (hits.length == 0 ||
// in case the request has only suggest, parsed query is null
context.parsedQuery() == null) {
return;
}
hits = hits.clone(); // don't modify the incoming hits

@ -415,8 +415,8 @@ public class InternalSearchHit implements SearchHit {
static final String INNER_HITS = "inner_hits";
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
// public because we render hit as part of completion suggestion option
public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
List<SearchHitField> metaFields = new ArrayList<>();
List<SearchHitField> otherFields = new ArrayList<>();
if (fields != null && !fields.isEmpty()) {

@ -432,7 +432,6 @@ public class InternalSearchHit implements SearchHit {
}
}

builder.startObject();
// For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
// Even if this was included in the inner_hit hits this would be the same, so better leave it out.
if (explanation() != null && shard != null) {

@ -516,7 +515,6 @@ public class InternalSearchHit implements SearchHit {
|
|||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
@ -533,6 +531,15 @@ public class InternalSearchHit implements SearchHit {
|
|||
builder.endArray();
|
||||
}
|
||||
builder.endObject();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
toInnerXContent(builder, params);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static InternalSearchHit readSearchHit(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
|
||||
|
|
|
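The toXContent/toInnerXContent split above is a wrapper pattern: the outer method owns the enclosing object, the inner one renders only the body, so another renderer (here: completion suggestion options) can splice a hit into its own object. A minimal sketch of the same shape, using a hypothetical HitLike class rather than the real InternalSearchHit:

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;

// Hypothetical renderer following the same split as InternalSearchHit.
class HitLike implements ToXContent {
    private final String id;

    HitLike(String id) {
        this.id = id;
    }

    // Body only: callers embedding a hit call this inside their own object.
    public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field("_id", id);
        return builder;
    }

    // Standalone rendering: wrap the body in its own object.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        toInnerXContent(builder, params);
        builder.endObject();
        return builder;
    }
}
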
@@ -40,6 +40,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Top level suggest result, containing the result for each suggestion.

@@ -48,18 +49,16 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex

    private static final String NAME = "suggest";

    private static final Comparator<Option> COMPARATOR = new Comparator<Suggest.Suggestion.Entry.Option>() {
        @Override
        public int compare(Option first, Option second) {
            int cmp = Float.compare(second.getScore(), first.getScore());
            if (cmp != 0) {
                return cmp;
            }
            return first.getText().compareTo(second.getText());
        }
    };
    public static final Comparator<Option> COMPARATOR = (first, second) -> {
        int cmp = Float.compare(second.getScore(), first.getScore());
        if (cmp != 0) {
            return cmp;
        }
        return first.getText().compareTo(second.getText());
    };

    private List<Suggestion<? extends Entry<? extends Option>>> suggestions;
    private boolean hasScoreDocs;

    private Map<String, Suggestion<? extends Entry<? extends Option>>> suggestMap;

@@ -68,7 +67,12 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
    }

    public Suggest(List<Suggestion<? extends Entry<? extends Option>>> suggestions) {
        // we sort suggestions by their names to ensure iteration over suggestions is consistent
        // this is needed as we need to fill in suggestion docs in SearchPhaseController#sortDocs
        // in the same order as we enrich the suggestions with fetch results in SearchPhaseController#merge
        suggestions.sort((o1, o2) -> o1.getName().compareTo(o2.getName()));
        this.suggestions = suggestions;
        this.hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
    }

    @Override

@@ -97,6 +101,13 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
        return (T) suggestMap.get(name);
    }

    /**
     * Whether any suggestions had query hits
     */
    public boolean hasScoreDocs() {
        return hasScoreDocs;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        final int size = in.readVInt();

@@ -125,6 +136,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
            suggestion.readFrom(in);
            suggestions.add(suggestion);
        }
        hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
    }

    @Override

@@ -160,18 +172,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
        return result;
    }

    public static Map<String, List<Suggest.Suggestion>> group(Map<String, List<Suggest.Suggestion>> groupedSuggestions, Suggest suggest) {
        for (Suggestion<? extends Entry<? extends Option>> suggestion : suggest) {
            List<Suggestion> list = groupedSuggestions.get(suggestion.getName());
            if (list == null) {
                list = new ArrayList<>();
                groupedSuggestions.put(suggestion.getName(), list);
            }
            list.add(suggestion);
        }
        return groupedSuggestions;
    }

    public static List<Suggestion<? extends Entry<? extends Option>>> reduce(Map<String, List<Suggest.Suggestion>> groupedSuggestions) {
        List<Suggestion<? extends Entry<? extends Option>>> reduced = new ArrayList<>(groupedSuggestions.size());
        for (java.util.Map.Entry<String, List<Suggestion>> unmergedResults : groupedSuggestions.entrySet()) {

@@ -193,6 +193,16 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
        return reduced;
    }

    /**
     * @return only suggestions of type <code>suggestionType</code> contained in this {@link Suggest} instance
     */
    public <T extends Suggestion> List<T> filter(Class<T> suggestionType) {
        return suggestions.stream()
            .filter(suggestion -> suggestion.getClass() == suggestionType)
            .map(suggestion -> (T) suggestion)
            .collect(Collectors.toList());
    }

    /**
     * The suggestion responses corresponding with the suggestions in the request.
     */

@@ -238,6 +248,13 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
        return name;
    }

    /**
     * @return the requested number of suggestion options
     */
    public int getSize() {
        return size;
    }

    /**
     * Merges the result of another suggestion into this suggestion.
     * For internal usage.

@@ -331,7 +348,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
        return builder;
    }

    /**
     * Represents a part from the suggest text with suggested options.
     */

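The lambda form of COMPARATOR above orders options by descending score and breaks ties on the option text. A standalone sketch of the same ordering, using a simplified stand-in for the Option class rather than the real one:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

class ComparatorSketch {
    // Simplified stand-in for Suggest.Suggestion.Entry.Option.
    static class Option {
        final String text;
        final float score;
        Option(String text, float score) { this.text = text; this.score = score; }
        @Override public String toString() { return text + "(" + score + ")"; }
    }

    // Highest score first; ties fall back to lexicographic text order,
    // mirroring the COMPARATOR lambda in the diff above.
    static final Comparator<Option> BY_SCORE_THEN_TEXT = (first, second) -> {
        int cmp = Float.compare(second.score, first.score);
        if (cmp != 0) {
            return cmp;
        }
        return first.text.compareTo(second.text);
    };

    public static void main(String[] args) {
        List<Option> options = new ArrayList<>();
        options.add(new Option("beta", 1.0f));
        options.add(new Option("alpha", 1.0f));
        options.add(new Option("gamma", 2.0f));
        options.sort(BY_SCORE_THEN_TEXT);
        System.out.println(options); // [gamma(2.0), alpha(1.0), beta(1.0)]
    }
}
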
@@ -109,7 +109,7 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
                }
            }
            if (numResult++ < suggestionContext.getSize()) {
                CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(
                CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(suggestDoc.doc,
                    new Text(suggestDoc.key.toString()), suggestDoc.score, contexts, payload);
                completionSuggestEntry.addOption(option);
            } else {

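The extra suggestDoc.doc argument above threads the shard-local Lucene doc id into each option. A tiny hedged sketch of what that enables (the values are made up):

import org.apache.lucene.search.ScoreDoc;

class ScoreDocSketch {
    public static void main(String[] args) {
        // The option now wraps its shard-local Lucene doc id in a ScoreDoc, so the
        // coordinating node can later fetch the document like a regular search hit.
        ScoreDoc doc = new ScoreDoc(42, 1.5f); // doc id within the shard, suggestion score
        doc.shardIndex = 3;                    // assigned during the reduce phase via setShardIndex
        System.out.println(doc);
    }
}
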
@@ -18,11 +18,16 @@
 */
package org.elasticsearch.search.suggest.completion;

import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.suggest.Lookup;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.InternalSearchHits.StreamContext.ShardTargetType;
import org.elasticsearch.search.suggest.Suggest;

import java.io.IOException;

@@ -35,6 +40,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.elasticsearch.search.suggest.Suggest.COMPARATOR;

/**
 * Suggestion response for {@link CompletionSuggester} results
 *

@@ -62,6 +69,25 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
        super(name, size);
    }

    /**
     * @return the result options for the suggestion
     */
    public List<Entry.Option> getOptions() {
        if (entries.isEmpty() == false) {
            assert entries.size() == 1 : "CompletionSuggestion must have only one entry";
            return entries.get(0).getOptions();
        } else {
            return Collections.emptyList();
        }
    }

    /**
     * @return whether there are any hits for the suggestion
     */
    public boolean hasScoreDocs() {
        return getOptions().size() > 0;
    }

    private static final class OptionPriorityQueue extends org.apache.lucene.util.PriorityQueue<Entry.Option> {

        private final Comparator<Suggest.Suggestion.Entry.Option> comparator;

@@ -90,30 +116,54 @@
        }
    }

    @Override
    public Suggest.Suggestion<Entry> reduce(List<Suggest.Suggestion<Entry>> toReduce) {
        if (toReduce.size() == 1) {
            return toReduce.get(0);
    /**
     * Reduces suggestions to a single suggestion containing at most
     * top {@link CompletionSuggestion#getSize()} options across <code>toReduce</code>
     */
    public static CompletionSuggestion reduceTo(List<Suggest.Suggestion<Entry>> toReduce) {
        if (toReduce.isEmpty()) {
            return null;
        } else {
            // combine suggestion entries from participating shards on the coordinating node
            // the global top <code>size</code> entries are collected from the shard results
            // using a priority queue
            OptionPriorityQueue priorityQueue = new OptionPriorityQueue(size, sortComparator());
            for (Suggest.Suggestion<Entry> entries : toReduce) {
                assert entries.getEntries().size() == 1 : "CompletionSuggestion must have only one entry";
                for (Entry.Option option : entries.getEntries().get(0)) {
                    if (option == priorityQueue.insertWithOverflow(option)) {
                        // if the current option has overflown from pq,
                        // we can assume all of the successive options
                        // from this shard result will be overflown as well
                        break;
            final CompletionSuggestion leader = (CompletionSuggestion) toReduce.get(0);
            final Entry leaderEntry = leader.getEntries().get(0);
            final String name = leader.getName();
            if (toReduce.size() == 1) {
                return leader;
            } else {
                // combine suggestion entries from participating shards on the coordinating node
                // the global top <code>size</code> entries are collected from the shard results
                // using a priority queue
                OptionPriorityQueue priorityQueue = new OptionPriorityQueue(leader.getSize(), COMPARATOR);
                for (Suggest.Suggestion<Entry> suggestion : toReduce) {
                    assert suggestion.getName().equals(name) : "name should be identical across all suggestions";
                    for (Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) {
                        if (option == priorityQueue.insertWithOverflow(option)) {
                            // if the current option has overflown from pq,
                            // we can assume all of the successive options
                            // from this shard result will be overflown as well
                            break;
                        }
                    }
                }
                final CompletionSuggestion suggestion = new CompletionSuggestion(leader.getName(), leader.getSize());
                final Entry entry = new Entry(leaderEntry.getText(), leaderEntry.getOffset(), leaderEntry.getLength());
                Collections.addAll(entry.getOptions(), priorityQueue.get());
                suggestion.addTerm(entry);
                return suggestion;
            }
        }
    }

    @Override
    public Suggest.Suggestion<Entry> reduce(List<Suggest.Suggestion<Entry>> toReduce) {
        return reduceTo(toReduce);
    }

    public void setShardIndex(int shardIndex) {
        if (entries.isEmpty() == false) {
            for (Entry.Option option : getOptions()) {
                option.setShardIndex(shardIndex);
            }
            Entry options = this.entries.get(0);
            options.getOptions().clear();
            Collections.addAll(options.getOptions(), priorityQueue.get());
            return this;
        }
    }

@@ -145,9 +195,12 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
    public static class Option extends Suggest.Suggestion.Entry.Option {
        private Map<String, Set<CharSequence>> contexts;
        private Map<String, List<Object>> payload;
        private ScoreDoc doc;
        private InternalSearchHit hit;

        public Option(Text text, float score, Map<String, Set<CharSequence>> contexts, Map<String, List<Object>> payload) {
        public Option(int docID, Text text, float score, Map<String, Set<CharSequence>> contexts, Map<String, List<Object>> payload) {
            super(text, score);
            this.doc = new ScoreDoc(docID, score);
            this.payload = payload;
            this.contexts = contexts;
        }

@@ -171,14 +224,30 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
            return contexts;
        }

        @Override
        public void setScore(float score) {
            super.setScore(score);
        public ScoreDoc getDoc() {
            return doc;
        }

        public InternalSearchHit getHit() {
            return hit;
        }

        public void setShardIndex(int shardIndex) {
            this.doc.shardIndex = shardIndex;
        }

        public void setHit(InternalSearchHit hit) {
            this.hit = hit;
        }

        @Override
        protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
            super.innerToXContent(builder, params);
            builder.field("text", getText());
            if (hit != null) {
                hit.toInnerXContent(builder, params);
            } else {
                builder.field("score", getScore());
            }
            if (payload.size() > 0) {
                builder.startObject("payload");
                for (Map.Entry<String, List<Object>> entry : payload.entrySet()) {

@@ -207,6 +276,11 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            this.doc = Lucene.readScoreDoc(in);
            if (in.readBoolean()) {
                this.hit = InternalSearchHit.readSearchHit(in,
                    InternalSearchHits.streamContext().streamShardTarget(ShardTargetType.STREAM));
            }
            int payloadSize = in.readInt();
            this.payload = new LinkedHashMap<>(payloadSize);
            for (int i = 0; i < payloadSize; i++) {

@@ -234,6 +308,13 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            Lucene.writeScoreDoc(out, doc);
            if (hit != null) {
                out.writeBoolean(true);
                hit.writeTo(out, InternalSearchHits.streamContext().streamShardTarget(ShardTargetType.STREAM));
            } else {
                out.writeBoolean(false);
            }
            out.writeInt(payload.size());
            for (Map.Entry<String, List<Object>> entry : payload.entrySet()) {
                out.writeString(entry.getKey());

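The reduceTo method above implements a classic bounded top-N merge: per-shard option lists arrive pre-sorted by score, so once one option fails to displace the weakest element of the queue, the rest of that shard's options can be skipped. A simplified standalone sketch of the same strategy, using java.util.PriorityQueue in place of Lucene's PriorityQueue and a stand-in Option class:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

class TopNMergeSketch {
    static class Option {
        final String text;
        final float score;
        Option(String text, float score) { this.text = text; this.score = score; }
    }

    // Keeps the globally best `size` options across all shard results.
    // Each shard list is assumed to be sorted by descending score already,
    // which is what makes the early `break` below safe.
    static List<Option> reduce(List<List<Option>> shardResults, int size) {
        // Min-heap on score: the weakest of the current top-N sits on top,
        // playing the role of Lucene's PriorityQueue#insertWithOverflow.
        PriorityQueue<Option> pq = new PriorityQueue<>(size, Comparator.comparingDouble(o -> o.score));
        for (List<Option> shard : shardResults) {
            for (Option option : shard) {
                if (pq.size() < size) {
                    pq.offer(option);
                } else if (pq.peek().score < option.score) {
                    pq.poll();
                    pq.offer(option);
                } else {
                    // This option did not make the cut; since the shard list is
                    // sorted descending, none of its successors will either.
                    break;
                }
            }
        }
        List<Option> result = new ArrayList<>(pq);
        result.sort((a, b) -> Float.compare(b.score, a.score));
        return result;
    }
}
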
@@ -57,4 +57,11 @@ public class PutMappingRequestTests extends ESTestCase {
            "Validation Failed: 1: either concrete index or unresolved indices can be set," +
            " concrete index: [[foo/bar]] and indices: [myindex];");
    }

    public void testBuildFromSimplifiedDef() {
        // test that the method rejects input where the varargs fieldname/properties arguments are not paired correctly
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> PutMappingRequest.buildFromSimplifiedDef("type", "only_field"));
        assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage());
    }
}

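A hedged sketch of the pairing rule the test exercises: helpers in the style of buildFromSimplifiedDef take alternating name/definition varargs, so an odd-length argument list can never be paired up. The helper name below is hypothetical.

class PairsSketch {
    static void requirePairs(Object... fieldsAndProperties) {
        // An odd number of varargs means some field name has no matching definition.
        if (fieldsAndProperties.length % 2 != 0) {
            throw new IllegalArgumentException("mapping source must be pairs of fieldnames and properties definition.");
        }
    }
}
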
@@ -44,19 +44,33 @@ import static org.hamcrest.Matchers.nullValue;
public class MultiSearchRequestTests extends ESTestCase {
    public void testSimpleAdd() throws Exception {
        MultiSearchRequest request = parseMultiSearchRequest("/org/elasticsearch/action/search/simple-msearch1.json");
        assertThat(request.requests().size(), equalTo(8));
        assertThat(request.requests().get(0).indices()[0], equalTo("test"));
        assertThat(request.requests().get(0).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(0).types().length, equalTo(0));
        assertThat(request.requests().get(1).indices()[0], equalTo("test"));
        assertThat(request.requests().get(1).indicesOptions(), equalTo(IndicesOptions.fromOptions(false, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(1).types()[0], equalTo("type1"));
        assertThat(request.requests().get(2).indices()[0], equalTo("test"));
        assertThat(request.requests().get(2).indicesOptions(), equalTo(IndicesOptions.fromOptions(false, true, true, false, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(3).indices()[0], equalTo("test"));
        assertThat(request.requests().get(3).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(4).indices()[0], equalTo("test"));
        assertThat(request.requests().get(4).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, false, false, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().size(),
                equalTo(8));
        assertThat(request.requests().get(0).indices()[0],
                equalTo("test"));
        assertThat(request.requests().get(0).indicesOptions(),
                equalTo(IndicesOptions.fromOptions(true, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(0).types().length,
                equalTo(0));
        assertThat(request.requests().get(1).indices()[0],
                equalTo("test"));
        assertThat(request.requests().get(1).indicesOptions(),
                equalTo(IndicesOptions.fromOptions(false, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(1).types()[0],
                equalTo("type1"));
        assertThat(request.requests().get(2).indices()[0],
                equalTo("test"));
        assertThat(request.requests().get(2).indicesOptions(),
                equalTo(IndicesOptions.fromOptions(false, true, true, false, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(3).indices()[0],
                equalTo("test"));
        assertThat(request.requests().get(3).indicesOptions(),
                equalTo(IndicesOptions.fromOptions(true, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
        assertThat(request.requests().get(4).indices()[0],
                equalTo("test"));
        assertThat(request.requests().get(4).indicesOptions(),
                equalTo(IndicesOptions.fromOptions(true, false, false, true, IndicesOptions.strictExpandOpenAndForbidClosed())));

        assertThat(request.requests().get(5).indices(), is(Strings.EMPTY_ARRAY));
        assertThat(request.requests().get(5).types().length, equalTo(0));
        assertThat(request.requests().get(6).indices(), is(Strings.EMPTY_ARRAY));

@@ -119,10 +133,27 @@ public class MultiSearchRequestTests extends ESTestCase {
    }

    public void testResponseErrorToXContent() throws IOException {
        MultiSearchResponse response = new MultiSearchResponse(new MultiSearchResponse.Item[]{new MultiSearchResponse.Item(null, new IllegalStateException("foobar")), new MultiSearchResponse.Item(null, new IllegalStateException("baaaaaazzzz"))});
        MultiSearchResponse response = new MultiSearchResponse(
                new MultiSearchResponse.Item[]{
                        new MultiSearchResponse.Item(null, new IllegalStateException("foobar")),
                        new MultiSearchResponse.Item(null, new IllegalStateException("baaaaaazzzz"))
                });

        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        response.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("\"responses\"[{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}],\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"},\"status\":500},{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}],\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"},\"status\":500}]",
        builder.endObject();

        assertEquals("{\"responses\":["
            + "{"
            + "\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}],"
            + "\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"},\"status\":500"
            + "},"
            + "{"
            + "\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}],"
            + "\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"},\"status\":500"
            + "}"
            + "]}",
            builder.string());
    }

@@ -42,6 +42,7 @@ import static org.hamcrest.Matchers.startsWith;

@ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 1.0)
public class TransportClientIT extends ESIntegTestCase {

    public void testPickingUpChangesInDiscoveryNode() {
        String nodeName = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false));

@@ -20,10 +20,20 @@
package org.elasticsearch.client.transport;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.MockTransportClient;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;

import static org.hamcrest.CoreMatchers.containsString;

@@ -38,4 +48,44 @@ public class TransportClientTests extends ESTestCase {
        expectThrows(IllegalStateException.class, () -> client.admin().cluster().health(new ClusterHealthRequest()).get());
        assertThat(e, hasToString(containsString("transport client is closed")));
    }

    /**
     * test that when plugins are provided that want to register
     * {@link NamedWriteable}, those are also made known to the
     * {@link NamedWriteableRegistry} of the transport client
     */
    public void testPluginNamedWriteablesRegistered() {
        Settings baseSettings = Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            .build();
        try (TransportClient client = new MockTransportClient(baseSettings, Arrays.asList(MockPlugin.class))) {
            assertNotNull(client.namedWriteableRegistry.getReader(MockPlugin.MockNamedWriteable.class, MockPlugin.MockNamedWriteable.NAME));
        }
    }

    public static class MockPlugin extends Plugin {

        @Override
        public List<Entry> getNamedWriteables() {
            return Arrays.asList(new Entry[]{ new Entry(MockNamedWriteable.class, MockNamedWriteable.NAME, MockNamedWriteable::new)});
        }

        public class MockNamedWriteable implements NamedWriteable {

            static final String NAME = "mockNamedWritable";

            MockNamedWriteable(StreamInput in) {
            }

            @Override
            public void writeTo(StreamOutput out) throws IOException {
            }

            @Override
            public String getWriteableName() {
                return NAME;
            }

        }
    }
}

@@ -356,6 +356,7 @@ public class XContentBuilderTests extends ESTestCase {
    public void testWriteFieldMapWithNullKeys() throws IOException {
        XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
        try {
            builder.startObject();
            builder.field("map", Collections.singletonMap(null, "test"));
            fail("write map should have failed");
        } catch(IllegalArgumentException e) {

@@ -19,9 +19,8 @@

package org.elasticsearch.index.fielddata;

import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.test.ESTestCase;

import java.util.Arrays;

@@ -39,12 +38,12 @@ public class ScriptDocValuesTests extends ESTestCase {
            }
            return points[i];
        }

        @Override
        public void setDocument(int docId) {
            this.docID = docId;
        }

        @Override
        public int count() {
            if (docID != 0) {

@@ -94,18 +93,18 @@ public class ScriptDocValuesTests extends ESTestCase {

        final double otherLat = randomLat();
        final double otherLon = randomLon();

        assertEquals(GeoDistance.ARC.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
            script.arcDistanceInKm(otherLat, otherLon), 0.01);
        assertEquals(GeoDistance.ARC.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
            script.arcDistanceInKmWithDefault(otherLat, otherLon, 42), 0.01);
        assertEquals(42, emptyScript.arcDistanceInKmWithDefault(otherLat, otherLon, 42), 0);

        assertEquals(GeoDistance.PLANE.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
            script.distanceInKm(otherLat, otherLon), 0.01);
        assertEquals(GeoDistance.PLANE.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
            script.distanceInKmWithDefault(otherLat, otherLon, 42), 0.01);
        assertEquals(42, emptyScript.distanceInKmWithDefault(otherLat, otherLon, 42), 0);
        assertEquals(GeoUtils.arcDistance(lat, lon, otherLat, otherLon) / 1000d,
            script.arcDistance(otherLat, otherLon) / 1000d, 0.01);
        assertEquals(GeoUtils.arcDistance(lat, lon, otherLat, otherLon) / 1000d,
            script.arcDistanceWithDefault(otherLat, otherLon, 42) / 1000d, 0.01);
        assertEquals(42, emptyScript.arcDistanceWithDefault(otherLat, otherLon, 42), 0);

        assertEquals(GeoUtils.planeDistance(lat, lon, otherLat, otherLon) / 1000d,
            script.planeDistance(otherLat, otherLon) / 1000d, 0.01);
        assertEquals(GeoUtils.planeDistance(lat, lon, otherLat, otherLon) / 1000d,
            script.planeDistanceWithDefault(otherLat, otherLon, 42) / 1000d, 0.01);
        assertEquals(42, emptyScript.planeDistanceWithDefault(otherLat, otherLon, 42), 0);
    }

}

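For orientation, a hedged standalone sketch of the "plane" distance the tests above compare against: the standard equirectangular approximation. The exact formula and earth-radius constant inside GeoUtils may differ, so treat this purely as an illustration of the idea.

class PlaneDistanceSketch {
    static double planeDistanceMeters(double lat1, double lon1, double lat2, double lon2) {
        final double EARTH_MEAN_RADIUS = 6_371_008.7714; // meters (IUGG mean radius)
        // Project longitude differences onto the local parallel, then apply Pythagoras.
        double x = Math.toRadians(lon2 - lon1) * Math.cos(Math.toRadians((lat1 + lat2) / 2));
        double y = Math.toRadians(lat2 - lat1);
        return Math.sqrt(x * x + y * y) * EARTH_MEAN_RADIUS;
    }

    public static void main(String[] args) {
        // Roughly 111 km per degree of latitude near the equator.
        System.out.println(planeDistanceMeters(0, 0, 1, 0) / 1000); // ~111.2
    }
}
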
@@ -319,9 +319,12 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
            .endObject()
            .endObject().endObject().endObject().string();
        mapper = parser.parse("type", new CompressedXContent(mapping));
        XContentBuilder builder = XContentFactory.jsonBuilder();

        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")));
        builder.endObject();

        String mappingString = builder.string();
        assertTrue(mappingString.contains("analyzer"));
        assertTrue(mappingString.contains("search_analyzer"));

@@ -337,9 +337,12 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
            .endObject()
            .endObject().endObject().endObject().string();
        mapper = parser.parse("type", new CompressedXContent(mapping));
        XContentBuilder builder = XContentFactory.jsonBuilder();

        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")));
        builder.endObject();

        String mappingString = builder.string();
        assertTrue(mappingString.contains("analyzer"));
        assertTrue(mappingString.contains("search_analyzer"));

@@ -30,23 +30,11 @@ public abstract class AbstractTermQueryTestCase<QB extends BaseTermQueryBuilder<
    protected abstract QB createQueryBuilder(String fieldName, Object value);

    public void testIllegalArguments() throws QueryShardException {
        try {
            if (randomBoolean()) {
                createQueryBuilder(null, randomAsciiOfLengthBetween(1, 30));
            } else {
                createQueryBuilder("", randomAsciiOfLengthBetween(1, 30));
            }
            fail("fieldname cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            createQueryBuilder("field", null);
            fail("value cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        String term = randomAsciiOfLengthBetween(1, 30);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createQueryBuilder(null, term));
        assertEquals("field name is null or empty", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> createQueryBuilder("", term));
        assertEquals("field name is null or empty", e.getMessage());
    }

    @Override

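This file and the test files below all apply the same refactor: the try/fail/catch idiom is collapsed into a single expectThrows call, which both asserts that the exception is thrown and hands it back for message assertions. ESTestCase inherits such a helper from Lucene's test framework; a minimal sketch of what an equivalent helper does, assuming only plain JUnit:

import static org.junit.Assert.fail;

final class ExpectThrowsSketch {
    @FunctionalInterface
    interface ThrowingRunnable {
        void run() throws Throwable;
    }

    // Runs `runnable`, asserts it throws an instance of `expected`, and
    // returns the caught exception so callers can assert on its message.
    static <T extends Throwable> T expectThrows(Class<T> expected, ThrowingRunnable runnable) {
        try {
            runnable.run();
        } catch (Throwable actual) {
            if (expected.isInstance(actual)) {
                return expected.cast(actual);
            }
            fail("expected " + expected.getSimpleName() + " but got " + actual.getClass().getSimpleName());
        }
        fail("expected " + expected.getSimpleName() + " but nothing was thrown");
        return null; // unreachable: fail() always throws
    }
}
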
@@ -163,30 +163,10 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde

    public void testIllegalArguments() {
        BoolQueryBuilder booleanQuery = new BoolQueryBuilder();

        try {
            booleanQuery.must(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }

        try {
            booleanQuery.mustNot(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }

        try {
            booleanQuery.filter(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }

        try {
            booleanQuery.should(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.must(null));
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.mustNot(null));
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.filter(null));
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.should(null));
    }

    // https://github.com/elastic/elasticsearch/issues/7240

@@ -54,26 +54,10 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase<BoostingQue
    }

    public void testIllegalArguments() {
        try {
            new BoostingQueryBuilder(null, new MatchAllQueryBuilder());
            fail("must not be null");
        } catch (IllegalArgumentException e) {
            //
        }

        try {
            new BoostingQueryBuilder(new MatchAllQueryBuilder(), null);
            fail("must not be null");
        } catch (IllegalArgumentException e) {
            //
        }

        try {
            new BoostingQueryBuilder(new MatchAllQueryBuilder(), new MatchAllQueryBuilder()).negativeBoost(-1.0f);
            fail("must not be negative");
        } catch (IllegalArgumentException e) {
            //
        }
        expectThrows(IllegalArgumentException.class, () -> new BoostingQueryBuilder(null, new MatchAllQueryBuilder()));
        expectThrows(IllegalArgumentException.class, () -> new BoostingQueryBuilder(new MatchAllQueryBuilder(), null));
        expectThrows(IllegalArgumentException.class,
            () -> new BoostingQueryBuilder(new MatchAllQueryBuilder(), new MatchAllQueryBuilder()).negativeBoost(-1.0f));
    }

    public void testFromJson() throws IOException {

@@ -103,7 +87,6 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase<BoostingQue

        BoostingQueryBuilder queryBuilder = (BoostingQueryBuilder) parseQuery(query);
        checkGeneratedJson(query, queryBuilder);

        assertEquals(query, 42, queryBuilder.boost(), 0.00001);
        assertEquals(query, 23, queryBuilder.negativeBoost(), 0.00001);
        assertEquals(query, 8, queryBuilder.negativeQuery().boost(), 0.00001);

@@ -21,9 +21,12 @@ package org.elasticsearch.index.query;

import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;

@@ -81,6 +84,20 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
        return query;
    }

    @Override
    protected Map<String, CommonTermsQueryBuilder> getAlternateVersions() {
        Map<String, CommonTermsQueryBuilder> alternateVersions = new HashMap<>();
        CommonTermsQueryBuilder commonTermsQuery = new CommonTermsQueryBuilder(randomAsciiOfLengthBetween(1, 10),
            randomAsciiOfLengthBetween(1, 10));
        String contentString = "{\n" +
            "    \"common\" : {\n" +
            "        \"" + commonTermsQuery.fieldName() + "\" : \"" + commonTermsQuery.value() + "\"\n" +
            "    }\n" +
            "}";
        alternateVersions.put(contentString, commonTermsQuery);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, instanceOf(ExtendedCommonTermsQuery.class));

@@ -90,23 +107,12 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
    }

    public void testIllegalArguments() {
        try {
            if (randomBoolean()) {
                new CommonTermsQueryBuilder(null, "text");
            } else {
                new CommonTermsQueryBuilder("", "text");
            }
            fail("must be non null");
        } catch (IllegalArgumentException e) {
            // okay
        }

        try {
            new CommonTermsQueryBuilder("fieldName", null);
            fail("must be non null");
        } catch (IllegalArgumentException e) {
            // okay
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new CommonTermsQueryBuilder(null, "text"));
        assertEquals("field name is null or empty", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new CommonTermsQueryBuilder("", "text"));
        assertEquals("field name is null or empty", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new CommonTermsQueryBuilder("fieldName", null));
        assertEquals("text cannot be null", e.getMessage());
    }

    public void testFromJson() throws IOException {

@@ -173,4 +179,20 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
        ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
        assertThat(ectQuery.isCoordDisabled(), equalTo(disableCoord));
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
            "  \"common\" : {\n" +
            "    \"message1\" : {\n" +
            "      \"query\" : \"nelly the elephant not as a cartoon\"\n" +
            "    },\n" +
            "    \"message2\" : {\n" +
            "      \"query\" : \"nelly the elephant not as a cartoon\"\n" +
            "    }\n" +
            "  }\n" +
            "}";

        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[common] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    }
}

@@ -61,12 +61,8 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
     */
    public void testFilterElement() throws IOException {
        String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {} }";
        try {
            parseQuery(queryString);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("requires a 'filter' element"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryString));
        assertThat(e.getMessage(), containsString("requires a 'filter' element"));
    }

    /**

@@ -77,12 +73,8 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
            "\"filter\" : { \"term\": { \"foo\": \"a\" } },\n" +
            "\"filter\" : { \"term\": { \"foo\": \"x\" } },\n" +
            "} }";
        try {
            parseQuery(queryString);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("accepts only one 'filter' element"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryString));
        assertThat(e.getMessage(), containsString("accepts only one 'filter' element"));
    }

    /**

@@ -93,12 +85,8 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
            "\"filter\" : [ { \"term\": { \"foo\": \"a\" } },\n" +
            "{ \"term\": { \"foo\": \"x\" } } ]\n" +
            "} }";
        try {
            parseQuery(queryString);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("unexpected token [START_ARRAY]"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryString));
        assertThat(e.getMessage(), containsString("unexpected token [START_ARRAY]"));
    }

    public void testIllegalArguments() {

@@ -102,12 +102,7 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu

    public void testIllegalArguments() {
        DisMaxQueryBuilder disMaxQuery = new DisMaxQueryBuilder();
        try {
            disMaxQuery.add(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> disMaxQuery.add(null));
    }

    public void testToQueryInnerPrefixQuery() throws Exception {

@@ -56,24 +56,10 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase<Fie
    }

    public void testIllegalArguments() {
        try {
            new FieldMaskingSpanQueryBuilder(null, "maskedField");
            fail("must be non null");
        } catch (IllegalArgumentException e) {
            // okay
        }

        try {
            SpanQueryBuilder span = new SpanTermQueryBuilder("name", "value");
            if (randomBoolean()) {
                new FieldMaskingSpanQueryBuilder(span, null);
            } else {
                new FieldMaskingSpanQueryBuilder(span, "");
            }
            fail("must be non null");
        } catch (IllegalArgumentException e) {
            // okay
        }
        expectThrows(IllegalArgumentException.class, () -> new FieldMaskingSpanQueryBuilder(null, "maskedField"));
        SpanQueryBuilder span = new SpanTermQueryBuilder("name", "value");
        expectThrows(IllegalArgumentException.class, () -> new FieldMaskingSpanQueryBuilder(span, null));
        expectThrows(IllegalArgumentException.class, () -> new FieldMaskingSpanQueryBuilder(span, ""));
    }

    public void testFromJson() throws IOException {

@@ -93,10 +79,8 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase<Fie
            "    \"_name\" : \"KPI\"\n" +
            "  }\n" +
            "}";

        FieldMaskingSpanQueryBuilder parsed = (FieldMaskingSpanQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, 42.0, parsed.boost(), 0.00001);
        assertEquals(json, 0.23, parsed.innerQuery().boost(), 0.00001);
    }

@@ -23,12 +23,15 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

@@ -55,47 +58,42 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
        return query;
    }

    @Override
    protected Map<String, FuzzyQueryBuilder> getAlternateVersions() {
        Map<String, FuzzyQueryBuilder> alternateVersions = new HashMap<>();
        FuzzyQueryBuilder fuzzyQuery = new FuzzyQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
        String contentString = "{\n" +
            "    \"fuzzy\" : {\n" +
            "        \"" + fuzzyQuery.fieldName() + "\" : \"" + fuzzyQuery.value() + "\"\n" +
            "    }\n" +
            "}";
        alternateVersions.put(contentString, fuzzyQuery);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, instanceOf(FuzzyQuery.class));
    }

    public void testIllegalArguments() {
        try {
            new FuzzyQueryBuilder(null, "text");
            fail("must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new FuzzyQueryBuilder(null, "text"));
        assertEquals("field name cannot be null or empty", e.getMessage());

        try {
            new FuzzyQueryBuilder("", "text");
            fail("must not be empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new FuzzyQueryBuilder("", "text"));
        assertEquals("field name cannot be null or empty", e.getMessage());

        try {
            new FuzzyQueryBuilder("field", null);
            fail("must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new FuzzyQueryBuilder("field", null));
        assertEquals("query value cannot be null", e.getMessage());
    }

    public void testUnsupportedFuzzinessForStringType() throws IOException {
        QueryShardContext context = createShardContext();
        context.setAllowUnmappedFields(true);

        FuzzyQueryBuilder fuzzyQueryBuilder = new FuzzyQueryBuilder(STRING_FIELD_NAME, "text");
        fuzzyQueryBuilder.fuzziness(Fuzziness.build(randomFrom("a string which is not auto", "3h", "200s")));

        try {
            fuzzyQueryBuilder.toQuery(context);
            fail("should have failed with NumberFormatException");
        } catch (NumberFormatException e) {
            assertThat(e.getMessage(), Matchers.containsString("For input string"));
        }
        NumberFormatException e = expectThrows(NumberFormatException.class, () -> fuzzyQueryBuilder.toQuery(context));
        assertThat(e.getMessage(), containsString("For input string"));
    }

    public void testToQueryWithStringField() throws IOException {

@@ -119,7 +117,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
        assertThat(fuzzyQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, "sh")));
        assertThat(fuzzyQuery.getMaxEdits(), equalTo(Fuzziness.AUTO.asDistance("sh")));
        assertThat(fuzzyQuery.getPrefixLength(), equalTo(1));

    }

    public void testToQueryWithNumericField() throws IOException {

@@ -157,4 +154,20 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
        assertEquals(json, 42.0, parsed.boost(), 0.00001);
        assertEquals(json, 2, parsed.fuzziness().asFloat(), 0f);
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
            "  \"fuzzy\" : {\n" +
            "    \"message1\" : {\n" +
            "      \"value\" : \"this is a test\"\n" +
            "    },\n" +
            "    \"message2\" : {\n" +
            "      \"value\" : \"this is a test\"\n" +
            "    }\n" +
            "  }\n" +
            "}";

        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[fuzzy] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    }
}

@@ -44,7 +44,6 @@ import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBoundingBoxQueryBuilder> {
    /** Randomly generate either NaN or one of the two infinity values. */

@@ -104,22 +103,14 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo

    public void testValidationNullType() {
        GeoBoundingBoxQueryBuilder qb = new GeoBoundingBoxQueryBuilder("teststring");
        try {
            qb.type((GeoExecType) null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("Type is not allowed to be null."));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.type((GeoExecType) null));
        assertEquals("Type is not allowed to be null.", e.getMessage());
    }

    public void testValidationNullTypeString() {
        GeoBoundingBoxQueryBuilder qb = new GeoBoundingBoxQueryBuilder("teststring");
        try {
            qb.type((String) null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("cannot parse type from null string"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.type((String) null));
        assertEquals("cannot parse type from null string", e.getMessage());
    }

    @Override

@@ -130,27 +121,17 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo

    public void testExceptionOnMissingTypes() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length == 0);
        try {
            super.testToQuery();
            fail("Expected IllegalArgumentException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), is("failed to find geo_point field [mapped_geo_point]"));
        }
        QueryShardException e = expectThrows(QueryShardException.class, () -> super.testToQuery());
        assertEquals("failed to find geo_point field [mapped_geo_point]", e.getMessage());
    }

    public void testBrokenCoordinateCannotBeSet() {
        PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() };

        GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
        builder.setValidationMethod(GeoValidationMethod.STRICT);

        for (PointTester tester : testers) {
            try {
                tester.invalidateCoordinate(builder, true);
                fail("expected exception for broken " + tester.getClass().getName() + " coordinate");
            } catch (IllegalArgumentException e) {
                // expected
            }
            expectThrows(IllegalArgumentException.class, () -> tester.invalidateCoordinate(builder, true));
        }
    }

@@ -215,12 +196,9 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo

        assumeTrue("top should not be equal to bottom for flip check", top != bottom);
        logger.info("top: {} bottom: {}", top, bottom);
        try {
            builder.setValidationMethod(GeoValidationMethod.STRICT).setCorners(bottom, left, top, right);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("top is below bottom corner:"));
        }
        builder.setValidationMethod(GeoValidationMethod.STRICT);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.setCorners(bottom, left, top, right));
        assertThat(e.getMessage(), containsString("top is below bottom corner:"));
    }

    public void testTopBottomCanBeFlippedOnIgnoreMalformed() {

@@ -482,7 +460,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
        assertEquals(json, 40.01, parsed.bottomRight().getLat(), 0.0001);
        assertEquals(json, 1.0, parsed.boost(), 0.0001);
        assertEquals(json, GeoExecType.MEMORY, parsed.type());
        json =
        String deprecatedJson =
            "{\n" +
            "  \"geo_bbox\" : {\n" +
            "    \"pin.location\" : {\n" +

@@ -498,12 +476,8 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
        QueryBuilder parsedGeoBboxShortcut = parseQuery(json, ParseFieldMatcher.EMPTY);
        assertThat(parsedGeoBboxShortcut, equalTo(parsed));

        try {
            parseQuery(json);
            fail("parse query should have failed in strict mode");
        } catch(IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("Deprecated field [geo_bbox] used, expected [geo_bounding_box] instead"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(deprecatedJson));
        assertEquals("Deprecated field [geo_bbox] used, expected [geo_bounding_box] instead", e.getMessage());
    }

    public void testFromJsonCoerceFails() throws IOException {

@ -23,6 +23,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
|
@ -85,82 +86,41 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
|
|||
}
|
||||
|
||||
public void testIllegalValues() {
|
||||
try {
|
||||
if (randomBoolean()) {
|
||||
new GeoDistanceQueryBuilder("");
|
||||
} else {
|
||||
new GeoDistanceQueryBuilder((String) null);
|
||||
}
|
||||
fail("must not be null or empty");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("fieldName must not be null or empty"));
|
||||
}
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoDistanceQueryBuilder(""));
|
||||
assertEquals("fieldName must not be null or empty", e.getMessage());
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class, () -> new GeoDistanceQueryBuilder((String) null));
|
||||
assertEquals("fieldName must not be null or empty", e.getMessage());
|
||||
|
||||
GeoDistanceQueryBuilder query = new GeoDistanceQueryBuilder("fieldName");
|
||||
try {
|
||||
if (randomBoolean()) {
|
||||
query.distance("");
|
||||
} else {
|
||||
query.distance(null);
|
||||
}
|
||||
fail("must not be null or empty");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("distance must not be null or empty"));
|
||||
}
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance(""));
|
||||
assertEquals("distance must not be null or empty", e.getMessage());
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance(null));
|
||||
assertEquals("distance must not be null or empty", e.getMessage());
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance("", DistanceUnit.DEFAULT));
|
||||
assertEquals("distance must not be null or empty", e.getMessage());
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance(null, DistanceUnit.DEFAULT));
|
||||
assertEquals("distance must not be null or empty", e.getMessage());
|
||||
|
||||
try {
|
||||
if (randomBoolean()) {
|
||||
query.distance("", DistanceUnit.DEFAULT);
|
||||
} else {
|
||||
query.distance(null, DistanceUnit.DEFAULT);
|
||||
}
|
||||
fail("distance must not be null or empty");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("distance must not be null or empty"));
|
||||
}
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance("1", null));
|
||||
assertEquals("distance unit must not be null", e.getMessage());
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance(1, null));
|
||||
assertEquals("distance unit must not be null", e.getMessage());
|
||||
|
||||
try {
|
||||
if (randomBoolean()) {
|
||||
query.distance("1", null);
|
||||
} else {
|
||||
query.distance(1, null);
|
||||
}
|
||||
fail("distance must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("distance unit must not be null"));
|
||||
}
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.distance(
|
||||
randomIntBetween(Integer.MIN_VALUE, 0), DistanceUnit.DEFAULT));
|
||||
assertEquals("distance must be greater than zero", e.getMessage());
|
||||
|
||||
try {
|
||||
query.distance(randomIntBetween(Integer.MIN_VALUE, 0), DistanceUnit.DEFAULT);
|
||||
fail("distance must be greater than zero");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("distance must be greater than zero"));
|
||||
}
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.geohash(null));
|
||||
assertEquals("geohash must not be null or empty", e.getMessage());
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.geohash(""));
|
||||
assertEquals("geohash must not be null or empty", e.getMessage());
|
||||
|
||||
try {
|
||||
if (randomBoolean()) {
|
||||
query.geohash(null);
|
||||
} else {
|
||||
query.geohash("");
|
||||
}
|
||||
fail("geohash must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("geohash must not be null or empty"));
|
||||
}
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.geoDistance(null));
|
||||
assertEquals("geoDistance must not be null", e.getMessage());
|
||||
|
||||
try {
|
||||
query.geoDistance(null);
|
||||
fail("geodistance must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("geoDistance must not be null"));
|
||||
}
|
||||
|
||||
try {
|
||||
query.optimizeBbox(null);
|
||||
fail("optimizeBbox must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertThat(ex.getMessage(), equalTo("optimizeBbox must not be null"));
|
||||
}
|
||||
e = expectThrows(IllegalArgumentException.class, () -> query.optimizeBbox(null));
|
||||
assertEquals("optimizeBbox must not be null", e.getMessage());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -474,4 +434,19 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
|
|||
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(shardContext));
|
||||
assertThat(e.getMessage(), containsString("failed to find geo_point field [unmapped]"));
|
||||
}
|
||||
|
||||
public void testParseFailsWithMultipleFields() throws IOException {
|
||||
String json = "{\n" +
|
||||
" \"geo_distance\" : {\n" +
|
||||
" \"point1\" : {\n" +
|
||||
" \"lat\" : 30, \"lon\" : 12\n" +
|
||||
" },\n" +
|
||||
" \"point2\" : {\n" +
|
||||
" \"lat\" : 30, \"lon\" : 12\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
|
||||
assertEquals("[geo_distance] query doesn't support multiple fields, found [point1] and [point2]", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
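Editor's note: the refactoring applied across these test files replaces the try/fail/catch idiom with ESTestCase's expectThrows, which both asserts that the exception is thrown and returns it for message checks. A minimal before/after sketch of the pattern; the validate() call and the message literal are illustrative, not taken from this commit:

    // before: the test passes silently if no exception is thrown unless fail() is remembered,
    // and the message check lives in the catch block (hypothetical builder.validate())
    try {
        builder.validate(null);
        fail("expected IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage(), equalTo("argument must not be null"));
    }

    // after: expectThrows fails the test itself when nothing is thrown,
    // and hands back the caught exception for the same message assertion
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.validate(null));
    assertEquals("argument must not be null", e.getMessage());
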
@ -41,7 +41,6 @@ import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanceRangeQueryBuilder> {

@ -213,96 +212,57 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
    }

    public void testNullFieldName() {
        try {
            if (randomBoolean()) {
                new GeoDistanceRangeQueryBuilder(null, new GeoPoint());
            } else {
                new GeoDistanceRangeQueryBuilder("", new GeoPoint());
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("fieldName must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoDistanceRangeQueryBuilder(null, new GeoPoint()));
        assertEquals("fieldName must not be null", e.getMessage());
        e = expectThrows(IllegalArgumentException.class,
                () -> new GeoDistanceRangeQueryBuilder("", new GeoPoint()));
        assertEquals("fieldName must not be null", e.getMessage());
    }

    public void testNoPoint() {
        try {
            if (randomBoolean()) {
                new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (GeoPoint) null);
            } else {
                new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (String) null);
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("point must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (GeoPoint) null));
        assertEquals("point must not be null", e.getMessage());
        e = expectThrows(IllegalArgumentException.class,
                () -> new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (String) null));
        assertEquals("point must not be null", e.getMessage());
    }

    public void testInvalidFrom() {
        GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint());
        try {
            if (randomBoolean()) {
                builder.from((String) null);
            } else {
                builder.from((Number) null);
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("[from] must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.from((String) null));
        assertEquals("[from] must not be null", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> builder.from((Number) null));
        assertEquals("[from] must not be null", e.getMessage());
    }

    public void testInvalidTo() {
        GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint());
        try {
            if (randomBoolean()) {
                builder.to((String) null);
            } else {
                builder.to((Number) null);
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("[to] must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.to((String) null));
        assertEquals("[to] must not be null", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> builder.to((Number) null));
        assertEquals("[to] must not be null", e.getMessage());
    }

    public void testInvalidOptimizeBBox() {
        GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint());
        if (randomBoolean()) {
            try {
                builder.optimizeBbox(null);
                fail("Expected IllegalArgumentException");
            } catch (IllegalArgumentException e) {
                assertThat(e.getMessage(), is("optimizeBbox must not be null"));
            }
        } else {
            try {
                builder.optimizeBbox("foo");
                fail("Expected IllegalArgumentException");
            } catch (IllegalArgumentException e) {
                assertThat(e.getMessage(), is("optimizeBbox must be one of [none, memory, indexed]"));
            }
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.optimizeBbox(null));
        assertEquals("optimizeBbox must not be null", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> builder.optimizeBbox("foo"));
        assertEquals("optimizeBbox must be one of [none, memory, indexed]", e.getMessage());
    }

    public void testInvalidGeoDistance() {
        GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint());
        try {
            builder.geoDistance(null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("geoDistance calculation mode must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.geoDistance(null));
        assertEquals("geoDistance calculation mode must not be null", e.getMessage());
    }

    public void testInvalidDistanceUnit() {
        GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint());
        try {
            builder.unit(null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("distance unit must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.unit(null));
        assertEquals("distance unit must not be null", e.getMessage());
    }

    public void testNestedRangeQuery() throws IOException {

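Editor's note: a recurring detail in the hunks above is that the old tests picked one of two illegal inputs at random per run (via randomBoolean()), while the expectThrows form exercises both inputs deterministically on every run. Schematically, using the from() overloads from testInvalidFrom as the example:

    // before: only one of the two null overloads is covered per test run
    if (randomBoolean()) {
        builder.from((String) null);
    } else {
        builder.from((Number) null);
    }

    // after: both overloads are asserted in the same run
    expectThrows(IllegalArgumentException.class, () -> builder.from((String) null));
    expectThrows(IllegalArgumentException.class, () -> builder.from((Number) null));
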
@ -20,7 +20,6 @@
package org.elasticsearch.index.query;

import com.vividsolutions.jts.geom.Coordinate;

import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery;

@ -39,6 +38,7 @@ import org.locationtech.spatial4j.shape.jts.JtsGeometry;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;

@ -47,7 +47,6 @@ import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygonQueryBuilder> {
    @Override

@ -144,25 +143,17 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
    }

    public void testNullFieldName() {
        try {
            new GeoPolygonQueryBuilder(null, randomPolygon(5));
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("fieldName must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoPolygonQueryBuilder(null, randomPolygon(5)));
        assertEquals("fieldName must not be null", e.getMessage());
    }

    public void testEmptyPolygon() {
        try {
            if (randomBoolean()) {
                new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, new ArrayList<GeoPoint>());
            } else {
                new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, null);
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("polygon must not be null or empty"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, Collections.emptyList()));
        assertEquals("polygon must not be null or empty", e.getMessage());

        e = expectThrows(IllegalArgumentException.class, () -> new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, null));
        assertEquals("polygon must not be null or empty", e.getMessage());
    }

    public void testInvalidClosedPolygon() {

@ -170,24 +161,18 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
        points.add(new GeoPoint(0, 90));
        points.add(new GeoPoint(90, 90));
        points.add(new GeoPoint(0, 90));
        try {
            new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, points);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("too few points defined for geo_polygon query"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, points));
        assertEquals("too few points defined for geo_polygon query", e.getMessage());
    }

    public void testInvalidOpenPolygon() {
        List<GeoPoint> points = new ArrayList<>();
        points.add(new GeoPoint(0, 90));
        points.add(new GeoPoint(90, 90));
        try {
            new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, points);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("too few points defined for geo_polygon query"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, points));
        assertEquals("too few points defined for geo_polygon query", e.getMessage());
    }

    public void testDeprecatedXContent() throws IOException {

@ -205,12 +190,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
        builder.field("normalize", true); // deprecated
        builder.endObject();
        builder.endObject();
        try {
            parseQuery(builder.string());
            fail("normalize is deprecated");
        } catch (IllegalArgumentException ex) {
            assertEquals("Deprecated field [normalize] used, replaced by [use validation_method instead]", ex.getMessage());
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(builder.string()));
        assertEquals("Deprecated field [normalize] used, replaced by [use validation_method instead]", e.getMessage());
    }

    public void testParsingAndToQueryParsingExceptions() throws IOException {

@ -223,12 +204,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
        };
        for (String brokenFile : brokenFiles) {
            String query = copyToStringFromClasspath(brokenFile);
            try {
                parseQuery(query);
                fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile);
            } catch (ParsingException e) {
                // success!
            }
            expectThrows(ParsingException.class, () -> parseQuery(query));
        }
    }

@ -20,7 +20,6 @@
package org.elasticsearch.index.query;

import com.vividsolutions.jts.geom.Coordinate;

import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;

@ -50,7 +49,6 @@ import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQueryBuilder> {

@ -156,70 +154,44 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue

    public void testNoFieldName() throws Exception {
        ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
        try {
            new GeoShapeQueryBuilder(null, shape);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("fieldName is required"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(null, shape));
        assertEquals("fieldName is required", e.getMessage());
    }

    public void testNoShape() throws IOException {
        try {
            new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, (ShapeBuilder) null);
            fail("exception expected");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null));
    }

    public void testNoIndexedShape() throws IOException {
        try {
            new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null, "type");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("either shapeBytes or indexedShapeId and indexedShapeType are required"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null, "type"));
        assertEquals("either shapeBytes or indexedShapeId and indexedShapeType are required", e.getMessage());
    }

    public void testNoIndexedShapeType() throws IOException {
        try {
            new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, "id", null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("indexedShapeType is required if indexedShapeId is specified"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, "id", null));
        assertEquals("indexedShapeType is required if indexedShapeId is specified", e.getMessage());
    }

    public void testNoRelation() throws IOException {
        ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        try {
            builder.relation(null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("No Shape Relation defined"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.relation(null));
        assertEquals("No Shape Relation defined", e.getMessage());
    }

    public void testInvalidRelation() throws IOException {
        ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        try {
            builder.strategy(SpatialStrategy.TERM);
            builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
            fail("Illegal combination of strategy and relation setting");
        } catch (IllegalArgumentException e) {
            // okay
        }

        try {
            builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
            builder.strategy(SpatialStrategy.TERM);
            fail("Illegal combination of strategy and relation setting");
        } catch (IllegalArgumentException e) {
            // okay
        }
        builder.strategy(SpatialStrategy.TERM);
        expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
        GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
        expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM));
        GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        builder3.strategy(SpatialStrategy.TERM);
        expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
    }

    // see #3878

@ -256,16 +228,15 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
            sqb = doCreateTestQueryBuilder();
            // do this until we get one without a shape
        } while (sqb.shape() != null);
        try {
            sqb.toQuery(createShardContext());
            fail();
        } catch (UnsupportedOperationException e) {
            assertEquals("query must be rewritten first", e.getMessage());
        }
        QueryBuilder rewrite = sqb.rewrite(createShardContext());

        GeoShapeQueryBuilder query = sqb;

        UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> query.toQuery(createShardContext()));
        assertEquals("query must be rewritten first", e.getMessage());
        QueryBuilder rewrite = query.rewrite(createShardContext());
        GeoShapeQueryBuilder geoShapeQueryBuilder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeToReturn);
        geoShapeQueryBuilder.strategy(sqb.strategy());
        geoShapeQueryBuilder.relation(sqb.relation());
        geoShapeQueryBuilder.strategy(query.strategy());
        geoShapeQueryBuilder.relation(query.relation());
        assertEquals(geoShapeQueryBuilder, rewrite);
    }

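Editor's note: in testInvalidRelation above, the rewrite introduces builder2 and builder3 instead of reusing one builder. Each expectThrows lambda must observe a builder in a known starting state, and the earlier calls may already have mutated strategy or relation before throwing, so a fresh instance per illegal transition keeps the assertions independent. A schematic of that split (the local names first/second are hypothetical):

    // one fresh builder per illegal state transition under test
    GeoShapeQueryBuilder first = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
    first.strategy(SpatialStrategy.TERM);
    // setting a relation incompatible with the TERM strategy must throw
    expectThrows(IllegalArgumentException.class, () -> first.relation(ShapeRelation.DISJOINT));

    GeoShapeQueryBuilder second = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
    second.relation(ShapeRelation.DISJOINT);
    // and the same conflict must throw when the calls arrive in the opposite order
    expectThrows(IllegalArgumentException.class, () -> second.strategy(SpatialStrategy.TERM));
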
@ -39,7 +39,6 @@ import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase<Builder> {

@ -92,39 +91,23 @@ public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase<Builder>
    }

    public void testNullField() {
        try {
            if (randomBoolean()) {
                new Builder(null, new GeoPoint());
            } else {
                new Builder("", new GeoPoint());
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("fieldName must not be null"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Builder(null, new GeoPoint()));
        assertEquals("fieldName must not be null", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new Builder("", new GeoPoint()));
        assertEquals("fieldName must not be null", e.getMessage());
    }

    public void testNullGeoPoint() {
        try {
            if (randomBoolean()) {
                new Builder(GEO_POINT_FIELD_NAME, (GeoPoint) null);
            } else {
                new Builder(GEO_POINT_FIELD_NAME, "");
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("geohash or point must be defined"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Builder(GEO_POINT_FIELD_NAME, (GeoPoint) null));
        assertEquals("geohash or point must be defined", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new Builder(GEO_POINT_FIELD_NAME, ""));
        assertEquals("geohash or point must be defined", e.getMessage());
    }

    public void testInvalidPrecision() {
        GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, new GeoPoint());
        try {
            builder.precision(-1);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("precision must be greater than 0"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.precision(-1));
        assertThat(e.getMessage(), containsString("precision must be greater than 0"));
    }

    public void testLocationParsing() throws IOException {

@ -21,7 +21,6 @@ package org.elasticsearch.index.query;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.fasterxml.jackson.core.JsonParseException;

import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;

@ -63,7 +62,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.is;

public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQueryBuilder> {
    protected static final String PARENT_TYPE = "parent";

@ -367,24 +365,17 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
     * Should throw {@link IllegalArgumentException} instead of NPE.
     */
    public void testThatNullFromStringThrowsException() {
        try {
            HasChildQueryBuilder.parseScoreMode(null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("No score mode for child query [null] found"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> HasChildQueryBuilder.parseScoreMode(null));
        assertEquals("No score mode for child query [null] found", e.getMessage());
    }

    /**
     * Failure should not change (and the value should never match anything...).
     */
    public void testThatUnrecognizedFromStringThrowsException() {
        try {
            HasChildQueryBuilder.parseScoreMode("unrecognized value");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("No score mode for child query [unrecognized value] found"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> HasChildQueryBuilder.parseScoreMode("unrecognized value"));
        assertEquals("No score mode for child query [unrecognized value] found", e.getMessage());
    }

    public void testIgnoreUnmapped() throws IOException {

@ -157,12 +157,8 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
        builder.field("type", "foo"); // deprecated
        builder.endObject();
        builder.endObject();
        try {
            parseQuery(builder.string());
            fail("type is deprecated");
        } catch (IllegalArgumentException ex) {
            assertEquals("Deprecated field [type] used, expected [parent_type] instead", ex.getMessage());
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(builder.string()));
        assertEquals("Deprecated field [type] used, expected [parent_type] instead", e.getMessage());

        HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY);
        assertEquals("foo", queryBuilder.type());

@ -32,7 +32,6 @@ import java.io.IOException;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;

public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder> {
    /**

@ -40,12 +39,8 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
     */
    public void testIdsNotProvided() throws IOException {
        String noIdsFieldQuery = "{\"ids\" : { \"type\" : \"my_type\" }";
        try {
            parseQuery(noIdsFieldQuery);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("no ids values provided"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(noIdsFieldQuery));
        assertThat(e.getMessage(), containsString("no ids values provided"));
    }

    @Override

@ -94,30 +89,19 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
    }

    public void testIllegalArguments() {
        try {
            new IdsQueryBuilder((String[])null);
            fail("must be not null");
        } catch(IllegalArgumentException e) {
            //all good
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IdsQueryBuilder((String[]) null));
        assertEquals("[ids] types cannot be null", e.getMessage());

        try {
            new IdsQueryBuilder().addIds((String[])null);
            fail("must be not null");
        } catch(IllegalArgumentException e) {
            //all good
        }
        IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder();
        e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[])null));
        assertEquals("[ids] ids cannot be null", e.getMessage());
    }

    // see #7686.
    public void testIdsQueryWithInvalidValues() throws Exception {
        String query = "{ \"ids\": { \"values\": [[1]] } }";
        try {
            parseQuery(query);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), is("Illegal value for id, expecting a string or number, got: START_ARRAY"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query));
        assertEquals("Illegal value for id, expecting a string or number, got: START_ARRAY", e.getMessage());
    }

    public void testFromJson() throws IOException {

@ -143,7 +127,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
        IdsQueryBuilder testQuery = new IdsQueryBuilder(type);

        //single value type can also be called _type
        String contentString = "{\n" +
        final String contentString = "{\n" +
                "  \"ids\" : {\n" +
                "    \"_type\" : \"" + type + "\",\n" +
                "    \"values\" : []\n" +

@ -153,15 +137,11 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
        IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(contentString, ParseFieldMatcher.EMPTY);
        assertEquals(testQuery, parsed);

        try {
            parseQuery(contentString);
            fail("parse should have failed");
        } catch(IllegalArgumentException e) {
            assertEquals("Deprecated field [_type] used, expected [type] instead", e.getMessage());
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(contentString));
        assertEquals("Deprecated field [_type] used, expected [type] instead", e.getMessage());

        //array of types can also be called type rather than types
        contentString = "{\n" +
        final String contentString2 = "{\n" +
                "  \"ids\" : {\n" +
                "    \"types\" : [\"" + type + "\"],\n" +
                "    \"values\" : []\n" +

@ -169,11 +149,8 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
                "}";
        parsed = (IdsQueryBuilder) parseQuery(contentString, ParseFieldMatcher.EMPTY);
        assertEquals(testQuery, parsed);
        try {
            parseQuery(contentString);
            fail("parse should have failed");
        } catch(IllegalArgumentException e) {
            assertEquals("Deprecated field [types] used, expected [type] instead", e.getMessage());
        }

        e = expectThrows(IllegalArgumentException.class, () -> parseQuery(contentString2));
        assertEquals("Deprecated field [types] used, expected [type] instead", e.getMessage());
    }
}

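Editor's note: the testFromJson changes above (String contentString becoming final String contentString, plus a separate contentString2 instead of reassigning the first local) are forced by the lambda: a variable captured by () -> parseQuery(contentString) must be final or effectively final, so the old pattern of reusing one mutable local for the second JSON document no longer compiles. Schematically, with shortened JSON literals standing in for the real ones:

    // does not compile once contentString is captured by a lambda:
    //   contentString = "...second document...";   // reassignment breaks effective finality
    //   expectThrows(..., () -> parseQuery(contentString));

    // compiles: one effectively-final local per document
    final String contentString = "{ \"ids\" : { \"_type\" : \"my_type\", \"values\" : [] } }";
    expectThrows(IllegalArgumentException.class, () -> parseQuery(contentString));

    final String contentString2 = "{ \"ids\" : { \"types\" : [\"my_type\"], \"values\" : [] } }";
    expectThrows(IllegalArgumentException.class, () -> parseQuery(contentString2));
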
@ -61,12 +61,7 @@ public class IndicesQueryBuilderTests extends AbstractQueryTestCase<IndicesQuery
    }

    public void testIllegalArguments() {
        try {
            new IndicesQueryBuilder(null, "index");
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> new IndicesQueryBuilder(null, "index"));

        expectThrows(IllegalArgumentException.class, () -> new IndicesQueryBuilder(new MatchAllQueryBuilder(), (String[]) null));
        expectThrows(IllegalArgumentException.class, () -> new IndicesQueryBuilder(new MatchAllQueryBuilder(), new String[0]));

@ -23,11 +23,15 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.containsString;

@ -69,6 +73,20 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
        return matchQuery;
    }

    @Override
    protected Map<String, MatchPhrasePrefixQueryBuilder> getAlternateVersions() {
        Map<String, MatchPhrasePrefixQueryBuilder> alternateVersions = new HashMap<>();
        MatchPhrasePrefixQueryBuilder matchPhrasePrefixQuery = new MatchPhrasePrefixQueryBuilder(randomAsciiOfLengthBetween(1, 10),
                randomAsciiOfLengthBetween(1, 10));
        String contentString = "{\n" +
                "  \"match_phrase_prefix\" : {\n" +
                "    \"" + matchPhrasePrefixQuery.fieldName() + "\" : \"" + matchPhrasePrefixQuery.value() + "\"\n" +
                "  }\n" +
                "}";
        alternateVersions.put(contentString, matchPhrasePrefixQuery);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, QueryShardContext context)
            throws IOException {

@ -79,39 +97,22 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
    }

    public void testIllegalValues() {
        try {
            new MatchPhrasePrefixQueryBuilder(null, "value");
            fail("value must not be non-null");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchPhrasePrefixQueryBuilder(null, "value"));
        assertEquals("[match_phrase_prefix] requires fieldName", e.getMessage());

        try {
            new MatchPhrasePrefixQueryBuilder("fieldName", null);
            fail("value must not be non-null");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new MatchPhrasePrefixQueryBuilder("fieldName", null));
        assertEquals("[match_phrase_prefix] requires query value", e.getMessage());

        MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder("fieldName", "text");

        try {
            matchQuery.maxExpansions(-1);
            fail("must not be positive");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> matchQuery.maxExpansions(-1));
    }

    public void testBadAnalyzer() throws IOException {
        MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder("fieldName", "text");
        matchQuery.analyzer("bogusAnalyzer");
        try {
            matchQuery.toQuery(createShardContext());
            fail("Expected QueryShardException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
        }

        QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
    }

    public void testPhrasePrefixMatchQuery() throws IOException {

@ -155,4 +156,20 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
        qb = (MatchPhrasePrefixQueryBuilder) parseQuery(json3);
        checkGeneratedJson(expected, qb);
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
                "  \"match_phrase_prefix\" : {\n" +
                "    \"message1\" : {\n" +
                "      \"query\" : \"this is a test\"\n" +
                "    },\n" +
                "    \"message2\" : {\n" +
                "      \"query\" : \"this is a test\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[match_phrase_prefix] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    }
}

@ -24,10 +24,13 @@ import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;

@ -66,6 +69,20 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
        return matchQuery;
    }

    @Override
    protected Map<String, MatchPhraseQueryBuilder> getAlternateVersions() {
        Map<String, MatchPhraseQueryBuilder> alternateVersions = new HashMap<>();
        MatchPhraseQueryBuilder matchPhraseQuery = new MatchPhraseQueryBuilder(randomAsciiOfLengthBetween(1, 10),
                randomAsciiOfLengthBetween(1, 10));
        String contentString = "{\n" +
                "  \"match_phrase\" : {\n" +
                "    \"" + matchPhraseQuery.fieldName() + "\" : \"" + matchPhraseQuery.value() + "\"\n" +
                "  }\n" +
                "}";
        alternateVersions.put(contentString, matchPhraseQuery);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, notNullValue());

@ -74,30 +91,18 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
    }

    public void testIllegalValues() {
        try {
            new MatchPhraseQueryBuilder(null, "value");
            fail("value must not be non-null");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchPhraseQueryBuilder(null, "value"));
        assertEquals("[match_phrase] requires fieldName", e.getMessage());

        try {
            new MatchPhraseQueryBuilder("fieldName", null);
            fail("value must not be non-null");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new MatchPhraseQueryBuilder("fieldName", null));
        assertEquals("[match_phrase] requires query value", e.getMessage());
    }

    public void testBadAnalyzer() throws IOException {
        MatchPhraseQueryBuilder matchQuery = new MatchPhraseQueryBuilder("fieldName", "text");
        matchQuery.analyzer("bogusAnalyzer");
        try {
            matchQuery.toQuery(createShardContext());
            fail("Expected QueryShardException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
        }
        QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
    }

    public void testPhraseMatchQuery() throws IOException {

@ -119,4 +124,19 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
        MatchPhraseQueryBuilder qb = (MatchPhraseQueryBuilder) parseQuery(json1);
        checkGeneratedJson(expected, qb);
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
                "  \"match_phrase\" : {\n" +
                "    \"message1\" : {\n" +
                "      \"query\" : \"this is a test\"\n" +
                "    },\n" +
                "    \"message2\" : {\n" +
                "      \"query\" : \"this is a test\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[match_phrase] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    }
}

@ -29,6 +29,7 @@ import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;

@ -40,7 +41,9 @@ import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matcher;

import java.io.IOException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;

@ -118,6 +121,19 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
        return matchQuery;
    }

    @Override
    protected Map<String, MatchQueryBuilder> getAlternateVersions() {
        Map<String, MatchQueryBuilder> alternateVersions = new HashMap<>();
        MatchQueryBuilder matchQuery = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
        String contentString = "{\n" +
                "  \"match\" : {\n" +
                "    \"" + matchQuery.fieldName() + "\" : \"" + matchQuery.value() + "\"\n" +
                "  }\n" +
                "}";
        alternateVersions.put(contentString, matchQuery);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, notNullValue());

@ -297,13 +313,9 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
        assertSerialization(qb);

        // Now check with strict parsing an exception is thrown
        try {
            parseQuery(json, ParseFieldMatcher.STRICT);
            fail("Expected query to fail with strict parsing");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(),
                    containsString("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json, ParseFieldMatcher.STRICT));
        assertThat(e.getMessage(),
                containsString("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]"));
    }

    public void testLegacyMatchPhraseQuery() throws IOException {

@ -334,13 +346,9 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
        assertSerialization(qb);

        // Now check with strict parsing an exception is thrown
        try {
            parseQuery(json, ParseFieldMatcher.STRICT);
            fail("Expected query to fail with strict parsing");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(),
                    containsString("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json, ParseFieldMatcher.STRICT));
        assertThat(e.getMessage(),
                containsString("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]"));
    }

    public void testLegacyFuzzyMatchQuery() throws IOException {

@ -365,13 +373,8 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
        assertThat(qb, equalTo(expectedQB));

        // Now check with strict parsing an exception is thrown
        try {
            parseQuery(json, ParseFieldMatcher.STRICT);
            fail("Expected query to fail with strict parsing");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(),
                    containsString("Deprecated field [" + type + "] used, expected [match] instead"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json, ParseFieldMatcher.STRICT));
        assertThat(e.getMessage(), containsString("Deprecated field [" + type + "] used, expected [match] instead"));
    }

    public void testFuzzinessOnNonStringField() throws Exception {

@ -399,11 +402,24 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        MatchQueryBuilder query = new MatchQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
        QueryShardContext context = createShardContext();
        QueryShardException e = expectThrows(QueryShardException.class,
                () -> query.toQuery(context));
        assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
                e.getMessage());
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context));
        assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]", e.getMessage());
        query.lenient(true);
        query.toQuery(context); // no exception
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
                "  \"match\" : {\n" +
                "    \"message1\" : {\n" +
                "      \"query\" : \"this is a test\"\n" +
                "    },\n" +
                "    \"message2\" : {\n" +
                "      \"query\" : \"this is a test\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[match] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    }
}

@ -245,23 +245,16 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
    }

    public void testValidateEmptyFields() {
        try {
            new MoreLikeThisQueryBuilder(new String[0], new String[]{"likeText"}, null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("requires 'fields' to be specified"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new MoreLikeThisQueryBuilder(new String[0], new String[]{"likeText"}, null));
        assertThat(e.getMessage(), containsString("requires 'fields' to be specified"));
    }

    public void testValidateEmptyLike() {
        String[] likeTexts = randomBoolean() ? null : new String[0];
        Item[] likeItems = randomBoolean() ? null : new Item[0];
        try {
            new MoreLikeThisQueryBuilder(likeTexts, likeItems);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("requires either 'like' texts or items to be specified"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MoreLikeThisQueryBuilder(likeTexts, likeItems));
        assertThat(e.getMessage(), containsString("requires either 'like' texts or items to be specified"));
    }

    public void testUnsupportedFields() throws IOException {

@ -269,12 +262,8 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
        String unsupportedField = randomFrom(INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
        MoreLikeThisQueryBuilder queryBuilder = new MoreLikeThisQueryBuilder(new String[] {unsupportedField}, new String[]{"some text"}, null)
                .failOnUnsupportedField(true);
        try {
            queryBuilder.toQuery(createShardContext());
            fail("should have failed with IllegalArgumentException for field: " + unsupportedField);
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("more_like_this only supports text/keyword fields"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> queryBuilder.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("more_like_this only supports text/keyword fields"));
    }

    public void testMoreLikeThisBuilder() throws Exception {

@ -337,7 +326,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
        assertEquals(json, 2, parsed.fields().length);
        assertEquals(json, "and potentially some more text here as well", parsed.likeTexts()[0]);

        json =
        String deprecatedJson =
                "{\n" +
                "  \"mlt\" : {\n" +
                "    \"fields\" : [ \"title\", \"description\" ],\n" +

@ -364,14 +353,10 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
                "  }\n" +
                "}";

        MoreLikeThisQueryBuilder parsedQueryMltShortcut = (MoreLikeThisQueryBuilder) parseQuery(json, ParseFieldMatcher.EMPTY);
        MoreLikeThisQueryBuilder parsedQueryMltShortcut = (MoreLikeThisQueryBuilder) parseQuery(deprecatedJson, ParseFieldMatcher.EMPTY);
        assertThat(parsedQueryMltShortcut, equalTo(parsed));

        try {
            parseQuery(json);
            fail("parse query should have failed in strict mode");
        } catch(IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("Deprecated field [mlt] used, expected [more_like_this] instead"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(deprecatedJson));
        assertEquals("Deprecated field [mlt] used, expected [more_like_this] instead", e.getMessage());
    }
}

@ -154,33 +154,10 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
    }

    public void testIllegaArguments() {
        try {
            new MultiMatchQueryBuilder(null, "field");
            fail("value must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MultiMatchQueryBuilder("value", (String[]) null);
            fail("initial fields must be supplied at construction time must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MultiMatchQueryBuilder("value", new String[]{""});
            fail("field names cannot be empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MultiMatchQueryBuilder("value", "field").type(null);
            fail("type must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> new MultiMatchQueryBuilder(null, "field"));
        expectThrows(IllegalArgumentException.class, () -> new MultiMatchQueryBuilder("value", (String[]) null));
        expectThrows(IllegalArgumentException.class, () -> new MultiMatchQueryBuilder("value", new String[]{""}));
        expectThrows(IllegalArgumentException.class, () -> new MultiMatchQueryBuilder("value", "field").type(null));
    }

    public void testToQueryBoost() throws IOException {

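Editor's note: as testIllegaArguments above shows, when only the exception type matters the expectThrows return value is simply discarded; it is assigned to a local only when the message is asserted afterwards. Both uses side by side; the message literal in the second call is illustrative, not the actual builder message:

    // type-only check: ignore the returned exception
    expectThrows(IllegalArgumentException.class, () -> new MultiMatchQueryBuilder(null, "field"));

    // type-and-message check: keep the returned exception for the follow-up assertion
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> new MultiMatchQueryBuilder(null, "field"));
    assertThat(e.getMessage(), containsString("value"));  // hypothetical message fragment
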
@ -23,9 +23,12 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.hamcrest.Matchers.equalTo;

@ -35,16 +38,32 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu

    @Override
    protected PrefixQueryBuilder doCreateTestQueryBuilder() {
        String fieldName = randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10);
        String value = randomAsciiOfLengthBetween(1, 10);
        PrefixQueryBuilder query = new PrefixQueryBuilder(fieldName, value);

        PrefixQueryBuilder query = randomPrefixQuery();
        if (randomBoolean()) {
            query.rewrite(getRandomRewriteMethod());
        }
        return query;
    }

    @Override
    protected Map<String, PrefixQueryBuilder> getAlternateVersions() {
        Map<String, PrefixQueryBuilder> alternateVersions = new HashMap<>();
        PrefixQueryBuilder prefixQuery = randomPrefixQuery();
        String contentString = "{\n" +
                "  \"prefix\" : {\n" +
                "    \"" + prefixQuery.fieldName() + "\" : \"" + prefixQuery.value() + "\"\n" +
                "  }\n" +
                "}";
        alternateVersions.put(contentString, prefixQuery);
        return alternateVersions;
    }

    private static PrefixQueryBuilder randomPrefixQuery() {
        String fieldName = randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10);
        String value = randomAsciiOfLengthBetween(1, 10);
        return new PrefixQueryBuilder(fieldName, value);
    }

    @Override
    protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, instanceOf(PrefixQuery.class));

@ -54,23 +73,13 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
    }

    public void testIllegalArguments() {
        try {
            if (randomBoolean()) {
                new PrefixQueryBuilder(null, "text");
            } else {
                new PrefixQueryBuilder("", "text");
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new PrefixQueryBuilder(null, "text"));
        assertEquals("field name is null or empty", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new PrefixQueryBuilder("", "text"));
        assertEquals("field name is null or empty", e.getMessage());

        try {
            new PrefixQueryBuilder("field", null);
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new PrefixQueryBuilder("field", null));
        assertEquals("value cannot be null", e.getMessage());
    }

    public void testBlendedRewriteMethod() throws IOException {

@ -103,4 +112,20 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
        assertEquals("Can only use prefix queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
                e.getMessage());
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json =
                "{\n" +
                "  \"prefix\": {\n" +
                "    \"user1\": {\n" +
                "      \"value\": \"ki\"\n" +
                "    },\n" +
                "    \"user2\": {\n" +
                "      \"value\": \"ki\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[prefix] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage());
    }
}

@ -54,12 +54,8 @@ public class QueryShardContextTests extends ESTestCase {
        MappedFieldType fieldType = new TextFieldMapper.TextFieldType();
        MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType);
        assertThat(result, sameInstance(fieldType));
        try {
            context.failIfFieldMappingNotFound("name", null);
            fail("exception expected");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), equalTo("No field mapping can be found for the field with name [name]"));
        }
        QueryShardException e = expectThrows(QueryShardException.class, () -> context.failIfFieldMappingNotFound("name", null));
        assertEquals("No field mapping can be found for the field with name [name]", e.getMessage());

        context.setAllowUnmappedFields(true);
        result = context.failIfFieldMappingNotFound("name", fieldType);

@ -382,13 +382,12 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr

    public void testToQueryRegExpQueryTooComplex() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        try {
            queryStringQuery("/[ac]*a[ac]{50,200}/").defaultField(STRING_FIELD_NAME).toQuery(createShardContext());
            fail("Expected TooComplexToDeterminizeException");
        } catch (TooComplexToDeterminizeException e) {
            assertThat(e.getMessage(), containsString("Determinizing [ac]*"));
            assertThat(e.getMessage(), containsString("would result in more than 10000 states"));
        }
        QueryStringQueryBuilder queryBuilder = queryStringQuery("/[ac]*a[ac]{50,200}/").defaultField(STRING_FIELD_NAME);

        TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class,
                () -> queryBuilder.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("Determinizing [ac]*"));
        assertThat(e.getMessage(), containsString("would result in more than 10000 states"));
    }

    public void testFuzzyNumeric() throws Exception {

@ -440,18 +439,13 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
        QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder;
        assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris")));

        try {
            queryAsString = "{\n" +
                    "  \"query_string\":{\n" +
                    "    \"time_zone\":\"This timezone does not exist\",\n" +
                    "    \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
                    "  }\n" +
                    "}";
            parseQuery(queryAsString);
            fail("we expect a ParsingException as we are providing an unknown time_zome");
        } catch (IllegalArgumentException e) {
            // We expect this one
        }
        String invalidQueryAsString = "{\n" +
                "  \"query_string\":{\n" +
                "    \"time_zone\":\"This timezone does not exist\",\n" +
                "    \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
                "  }\n" +
                "}";
        expectThrows(IllegalArgumentException.class, () -> parseQuery(invalidQueryAsString));
    }

    public void testToQueryBooleanQueryMultipleBoosts() throws Exception {

@@ -27,6 +27,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Relation;

@@ -171,27 +172,10 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
        expectThrows(IllegalArgumentException.class, () -> new RangeQueryBuilder(""));

        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder("test");
        try {
            if (randomBoolean()) {
                rangeQueryBuilder.timeZone(null);
            } else {
                rangeQueryBuilder.timeZone("badID");
            }
            fail("cannot be null or unknown id");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            if (randomBoolean()) {
                rangeQueryBuilder.format(null);
            } else {
                rangeQueryBuilder.format("badFormat");
            }
            fail("cannot be null or bad format");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.timeZone(null));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.timeZone("badID"));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format(null));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format("badFormat"));
    }

    /**

@@ -200,12 +184,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
    public void testToQueryNonDateWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME);
        query.from(1).to(10).timeZone("UTC");
        try {
            query.toQuery(createShardContext());
            fail("Expected QueryShardException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
        }
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
    }

    /**

@@ -214,12 +194,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
    public void testToQueryUnmappedWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder("bogus_field");
        query.from(1).to(10).timeZone("UTC");
        try {
            query.toQuery(createShardContext());
            fail("Expected QueryShardException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
        }
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
    }

    public void testToQueryNumericField() throws IOException {

@@ -270,7 +246,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
        }

        // Test Invalid format
        query = "{\n" +
        final String invalidQuery = "{\n" +
            " \"range\" : {\n" +
            " \"" + DATE_FIELD_NAME + "\" : {\n" +
            " \"gte\": \"01/01/2012\",\n" +

@@ -279,12 +255,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
            " }\n" +
            " }\n" +
            "}";
        try {
            parseQuery(query).toQuery(createShardContext()).rewrite(null);
            fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException");
        } catch (ElasticsearchParseException e) {
            // We expect it
        }
        Query rewrittenQuery = parseQuery(invalidQuery).toQuery(createShardContext());
        expectThrows(ElasticsearchParseException.class, () -> rewrittenQuery.rewrite(null));
    }

    public void testDateRangeBoundaries() throws IOException {

@@ -382,12 +354,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
            " }\n" +
            " }\n" +
            "}";
        try {
            parseQuery(query).toQuery(createShardContext());
            fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException");
        } catch (QueryShardException e) {
            // We expect it
        }
        QueryBuilder queryBuilder = parseQuery(query);
        expectThrows(QueryShardException.class, () -> queryBuilder.toQuery(createShardContext()));
    }

    public void testFromJson() throws IOException {

@@ -426,7 +394,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
            "}";
        assertNotNull(parseQuery(json));

        json =
        final String deprecatedJson =
            "{\n" +
            " \"range\" : {\n" +
            " \"timestamp\" : {\n" +

@@ -442,12 +410,10 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
        assertNotNull(parseQuery(json, ParseFieldMatcher.EMPTY));

        // with strict parsing, ParseField will throw exception
        try {
            parseQuery(json, ParseFieldMatcher.STRICT);
            fail("Strict parsing should trigger exception for '_name' on top level");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parseQuery(deprecatedJson, ParseFieldMatcher.STRICT));
        assertEquals("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]",
                e.getMessage());
    }

    public void testRewriteDateToMatchAll() throws IOException {

@@ -460,8 +426,6 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime shardMinValue = new DateTime(2015, 3, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime shardMaxValue = new DateTime(2015, 9, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();

@@ -519,4 +483,22 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json =
                "{\n" +
                " \"range\": {\n" +
                " \"age\": {\n" +
                " \"gte\": 30,\n" +
                " \"lte\": 40\n" +
                " },\n" +
                " \"price\": {\n" +
                " \"gte\": 10,\n" +
                " \"lte\": 30\n" +
                " }\n" +
                " }\n" +
                " }";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[range] query doesn't support multiple fields, found [age] and [price]", e.getMessage());
    }
}
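A detail worth noting in the RangeQueryBuilderTests hunks above: locals that were previously reassigned (query, json) are replaced by fresh ones (final String invalidQuery, deprecatedJson) before being captured. That is forced by the lambda, since a local variable referenced from () -> parseQuery(...) must be effectively final. A self-contained sketch of the constraint, with illustrative names:

import java.util.function.Supplier;

class EffectivelyFinalDemo {
    static String parse(String json) { return json; } // stand-in for parseQuery

    public static void main(String[] args) {
        String query = "{}";
        query = query + "\n"; // reassigned, so query is no longer effectively final
        // Supplier<String> bad = () -> parse(query);   // does not compile
        final String invalidQuery = query;              // fresh, effectively final copy
        Supplier<String> ok = () -> parse(invalidQuery);
        System.out.println(ok.get());
    }
}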
@@ -21,11 +21,14 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

@@ -34,11 +37,7 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu

    @Override
    protected RegexpQueryBuilder doCreateTestQueryBuilder() {
        // mapped or unmapped fields
        String fieldName = randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10);
        String value = randomAsciiOfLengthBetween(1, 10);
        RegexpQueryBuilder query = new RegexpQueryBuilder(fieldName, value);

        RegexpQueryBuilder query = randomRegexpQuery();
        if (randomBoolean()) {
            List<RegexpFlag> flags = new ArrayList<>();
            int iter = randomInt(5);

@@ -56,6 +55,26 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
        return query;
    }

    @Override
    protected Map<String, RegexpQueryBuilder> getAlternateVersions() {
        Map<String, RegexpQueryBuilder> alternateVersions = new HashMap<>();
        RegexpQueryBuilder regexpQuery = randomRegexpQuery();
        String contentString = "{\n" +
            " \"regexp\" : {\n" +
            " \"" + regexpQuery.fieldName() + "\" : \"" + regexpQuery.value() + "\"\n" +
            " }\n" +
            "}";
        alternateVersions.put(contentString, regexpQuery);
        return alternateVersions;
    }

    private static RegexpQueryBuilder randomRegexpQuery() {
        // mapped or unmapped fields
        String fieldName = randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10);
        String value = randomAsciiOfLengthBetween(1, 10);
        return new RegexpQueryBuilder(fieldName, value);
    }

    @Override
    protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, instanceOf(RegexpQuery.class));

@@ -64,23 +83,13 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
    }

    public void testIllegalArguments() {
        try {
            if (randomBoolean()) {
                new RegexpQueryBuilder(null, "text");
            } else {
                new RegexpQueryBuilder("", "text");
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new RegexpQueryBuilder(null, "text"));
        assertEquals("field name is null or empty", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new RegexpQueryBuilder("", "text"));
        assertEquals("field name is null or empty", e.getMessage());

        try {
            new RegexpQueryBuilder("field", null);
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new RegexpQueryBuilder("field", null));
        assertEquals("value cannot be null", e.getMessage());
    }

    public void testFromJson() throws IOException {

@@ -107,9 +116,24 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        RegexpQueryBuilder query = new RegexpQueryBuilder(INT_FIELD_NAME, "12");
        QueryShardContext context = createShardContext();
        QueryShardException e = expectThrows(QueryShardException.class,
                () -> query.toQuery(context));
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context));
        assertEquals("Can only use regexp queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
                e.getMessage());
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json =
                "{\n" +
                " \"regexp\": {\n" +
                " \"user1\": {\n" +
                " \"value\": \"k.*y\"\n" +
                " },\n" +
                " \"user2\": {\n" +
                " \"value\": \"k.*y\"\n" +
                " }\n" +
                " }\n" +
                "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[regexp] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage());
    }
}
@@ -179,42 +179,26 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ

    public void testFieldCannotBeNull() {
        SimpleQueryStringBuilder qb = createTestQueryBuilder();
        try {
            qb.field(null);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("supplied field is null or empty."));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.field(null));
        assertEquals("supplied field is null or empty", e.getMessage());
    }

    public void testFieldCannotBeNullAndWeighted() {
        SimpleQueryStringBuilder qb = createTestQueryBuilder();
        try {
            qb.field(null, AbstractQueryBuilder.DEFAULT_BOOST);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("supplied field is null or empty."));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.field(null, AbstractQueryBuilder.DEFAULT_BOOST));
        assertEquals("supplied field is null or empty", e.getMessage());
    }

    public void testFieldCannotBeEmpty() {
        SimpleQueryStringBuilder qb = createTestQueryBuilder();
        try {
            qb.field("");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("supplied field is null or empty."));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.field(""));
        assertEquals("supplied field is null or empty", e.getMessage());
    }

    public void testFieldCannotBeEmptyAndWeighted() {
        SimpleQueryStringBuilder qb = createTestQueryBuilder();
        try {
            qb.field("", AbstractQueryBuilder.DEFAULT_BOOST);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("supplied field is null or empty."));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> qb.field("", AbstractQueryBuilder.DEFAULT_BOOST));
        assertEquals("supplied field is null or empty", e.getMessage());
    }

    /**

@@ -223,12 +207,8 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
     * */
    public void testFieldsCannotBeSetToNull() {
        SimpleQueryStringBuilder qb = createTestQueryBuilder();
        try {
            qb.fields(null);
            fail("Expected NullPointerException");
        } catch (NullPointerException e) {
            assertThat(e.getMessage(), is("fields cannot be null"));
        }
        NullPointerException e = expectThrows(NullPointerException.class, () -> qb.fields(null));
        assertEquals("fields cannot be null", e.getMessage());
    }

    public void testDefaultFieldParsing() throws IOException {
@@ -40,19 +40,9 @@ public class SpanContainingQueryBuilderTests extends AbstractQueryTestCase<SpanC
    }

    public void testIllegalArguments() {
        try {
            new SpanContainingQueryBuilder(null, new SpanTermQueryBuilder("field", "value"));
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new SpanContainingQueryBuilder(new SpanTermQueryBuilder("field", "value"), null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        SpanTermQueryBuilder spanTermQuery = new SpanTermQueryBuilder("field", "value");
        expectThrows(IllegalArgumentException.class, () -> new SpanContainingQueryBuilder(null, spanTermQuery));
        expectThrows(IllegalArgumentException.class, () -> new SpanContainingQueryBuilder(spanTermQuery, null));
    }

    public void testFromJson() throws IOException {
@@ -56,12 +56,8 @@ public class SpanFirstQueryBuilderTests extends AbstractQueryTestCase<SpanFirstQ
            builder.endObject();
            builder.endObject();

            try {
                parseQuery(builder.string());
                fail("missing [end] parameter should raise exception");
            } catch (ParsingException e) {
                assertTrue(e.getMessage().contains("spanFirst must have [end] set"));
            }
            ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string()));
            assertTrue(e.getMessage().contains("spanFirst must have [end] set"));
        }
        {
            XContentBuilder builder = XContentFactory.jsonBuilder();

@@ -71,12 +67,8 @@ public class SpanFirstQueryBuilderTests extends AbstractQueryTestCase<SpanFirstQ
            builder.endObject();
            builder.endObject();

            try {
                parseQuery(builder.string());
                fail("missing [match] parameter should raise exception");
            } catch (ParsingException e) {
                assertTrue(e.getMessage().contains("spanFirst must have [match] span query clause"));
            }
            ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string()));
            assertTrue(e.getMessage().contains("spanFirst must have [match] span query clause"));
        }
    }

@@ -29,6 +29,7 @@ import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;

@@ -72,15 +73,14 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
    public void testUnsupportedInnerQueryType() throws IOException {
        QueryShardContext context = createShardContext();
        // test makes only sense if we have at least one type registered with date field mapping
        if (getCurrentTypes().length > 0 && context.fieldMapper(DATE_FIELD_NAME) != null) {
            try {
                RangeQueryBuilder query = new RangeQueryBuilder(DATE_FIELD_NAME);
                new SpanMultiTermQueryBuilder(query).toQuery(createShardContext());
                fail("Exception expected, range query on date fields should not generate a lucene " + MultiTermQuery.class.getName());
            } catch (UnsupportedOperationException e) {
                assert(e.getMessage().contains("unsupported inner query, should be " + MultiTermQuery.class.getName()));
            }
        }
        assumeTrue("test runs only if there is a registered type",
                getCurrentTypes().length > 0 && context.fieldMapper(DATE_FIELD_NAME) != null);

        RangeQueryBuilder query = new RangeQueryBuilder(DATE_FIELD_NAME);
        SpanMultiTermQueryBuilder spanMultiTermQuery = new SpanMultiTermQueryBuilder(query);
        UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class,
                () -> spanMultiTermQuery.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("unsupported inner query, should be " + MultiTermQuery.class.getName()));
    }

    public void testToQueryInnerSpanMultiTerm() throws IOException {
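The SpanMultiTermQueryBuilderTests hunk above changes more than the exception check: the old if-guard let the test pass vacuously when the precondition failed, while assumeTrue (from org.junit.Assume) aborts and reports the test as skipped instead. A self-contained sketch with an illustrative precondition:

import static org.junit.Assume.assumeTrue;

import org.junit.Test;

public class AssumptionDemo {
    private boolean typeRegistered() { return false; } // illustrative precondition

    @Test
    public void guarded() {
        if (typeRegistered()) {
            // assertions never run here, yet the test is reported as passed
        }
    }

    @Test
    public void assumed() {
        // a false assumption throws AssumptionViolatedException: reported as skipped
        assumeTrue("test runs only if there is a registered type", typeRegistered());
        // assertions run only when the precondition holds
    }
}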
@@ -62,18 +62,9 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase<SpanNotQuery
    }

    public void testIllegalArgument() {
        try {
            new SpanNotQueryBuilder(null, new SpanTermQueryBuilder("field", "value"));
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            new SpanNotQueryBuilder(new SpanTermQueryBuilder("field", "value"), null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        SpanTermQueryBuilder spanTermQuery = new SpanTermQueryBuilder("field", "value");
        expectThrows(IllegalArgumentException.class, () -> new SpanNotQueryBuilder(null, spanTermQuery));
        expectThrows(IllegalArgumentException.class, () -> new SpanNotQueryBuilder(spanTermQuery, null));
    }

    public void testDist() {

@@ -136,12 +127,8 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase<SpanNotQuery
            builder.endObject();
            builder.endObject();

            try {
                parseQuery(builder.string());
                fail("ParsingException should have been caught");
            } catch (ParsingException e) {
                assertThat("ParsingException should have been caught", e.getDetailedMessage(), containsString("spanNot must have [include]"));
            }
            ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string()));
            assertThat(e.getDetailedMessage(), containsString("spanNot must have [include]"));
        }
        {
            XContentBuilder builder = XContentFactory.jsonBuilder();

@@ -154,12 +141,8 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase<SpanNotQuery
            builder.endObject();
            builder.endObject();

            try {
                parseQuery(builder.string());
                fail("ParsingException should have been caught");
            } catch (ParsingException e) {
                assertThat("ParsingException should have been caught", e.getDetailedMessage(), containsString("spanNot must have [exclude]"));
            }
            ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string()));
            assertThat(e.getDetailedMessage(), containsString("spanNot must have [exclude]"));
        }
        {
            XContentBuilder builder = XContentFactory.jsonBuilder();

@@ -175,12 +158,8 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase<SpanNotQuery
            builder.endObject();
            builder.endObject();

            try {
                parseQuery(builder.string());
                fail("ParsingException should have been caught");
            } catch (ParsingException e) {
                assertThat("ParsingException should have been caught", e.getDetailedMessage(), containsString("spanNot can either use [dist] or [pre] & [post] (or none)"));
            }
            ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string()));
            assertThat(e.getDetailedMessage(), containsString("spanNot can either use [dist] or [pre] & [post] (or none)"));
        }
    }

@@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.MappedFieldType;

@@ -99,13 +100,26 @@ public class SpanTermQueryBuilderTests extends AbstractTermQueryTestCase<SpanTer
    }

    public void testFromJson() throws IOException {
        String json =
            "{ \"span_term\" : { \"user\" : { \"value\" : \"kimchy\", \"boost\" : 2.0 } }} ";

        String json = "{ \"span_term\" : { \"user\" : { \"value\" : \"kimchy\", \"boost\" : 2.0 } }}";
        SpanTermQueryBuilder parsed = (SpanTermQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, "kimchy", parsed.value());
        assertEquals(json, 2.0, parsed.boost(), 0.0001);
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
            " \"span_term\" : {\n" +
            " \"message1\" : {\n" +
            " \"term\" : \"this\"\n" +
            " },\n" +
            " \"message2\" : {\n" +
            " \"term\" : \"this\"\n" +
            " }\n" +
            " }\n" +
            "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[span_term] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    }

}
@@ -40,19 +40,9 @@ public class SpanWithinQueryBuilderTests extends AbstractQueryTestCase<SpanWithi
    }

    public void testIllegalArguments() {
        try {
            new SpanWithinQueryBuilder(null, new SpanTermQueryBuilder("field", "value"));
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new SpanWithinQueryBuilder(new SpanTermQueryBuilder("field", "value"), null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        SpanTermQueryBuilder spanTermQuery = new SpanTermQueryBuilder("field", "value");
        expectThrows(IllegalArgumentException.class, () -> new SpanWithinQueryBuilder(null, spanTermQuery));
        expectThrows(IllegalArgumentException.class, () -> new SpanWithinQueryBuilder(spanTermQuery, null));
    }

    public void testFromJson() throws IOException {
@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

import com.fasterxml.jackson.core.io.JsonStringEncoder;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;

@@ -27,14 +28,11 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.MappedFieldType;

import com.fasterxml.jackson.core.io.JsonStringEncoder;

import java.io.IOException;

import static org.hamcrest.Matchers.either;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.either;

public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBuilder> {

@@ -115,12 +113,8 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu
            " \"age\": [34, 35]\n" +
            " }\n" +
            "}";
        try {
            parseQuery(queryAsString);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), is("[term] query does not support array of values"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryAsString));
        assertEquals("[term] query does not support array of values", e.getMessage());
    }

    public void testFromJson() throws IOException {

@@ -136,7 +130,6 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu

        TermQueryBuilder parsed = (TermQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, "Quick Foxes!", parsed.value());
    }

@@ -144,9 +137,23 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        TermQueryBuilder query = new TermQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
        QueryShardContext context = createShardContext();
        QueryShardException e = expectThrows(QueryShardException.class,
                () -> query.toQuery(context));
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context));
        assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
                e.getMessage());
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
            " \"term\" : {\n" +
            " \"message1\" : {\n" +
            " \"value\" : \"this\"\n" +
            " },\n" +
            " \"message2\" : {\n" +
            " \"value\" : \"this\"\n" +
            " }\n" +
            " }\n" +
            "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[term] query does not support different field names, use [bool] query instead", e.getMessage());
    }
}
@@ -37,7 +37,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
import org.junit.Before;

import java.io.IOException;

@@ -49,7 +48,6 @@ import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;

public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
    private List<Object> randomTerms;

@@ -146,56 +144,32 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
    }

    public void testEmtpyFieldName() {
        try {
            if (randomBoolean()) {
                new TermsQueryBuilder(null, "term");
            } else {
                new TermsQueryBuilder("", "term");
            }
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("field name cannot be null."));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder(null, "term"));
        assertEquals("field name cannot be null.", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("", "term"));
        assertEquals("field name cannot be null.", e.getMessage());
    }

    public void testEmtpyTermsLookup() {
        try {
            new TermsQueryBuilder("field", (TermsLookup) null);
            fail("Expected IllegalArgumentException");
        } catch(IllegalArgumentException e) {
            assertThat(e.getMessage(), is("No value or termsLookup specified for terms query"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (TermsLookup) null));
        assertEquals("No value or termsLookup specified for terms query", e.getMessage());
    }

    public void testNullValues() {
        try {
            switch (randomInt(6)) {
                case 0:
                    new TermsQueryBuilder("field", (String[]) null);
                    break;
                case 1:
                    new TermsQueryBuilder("field", (int[]) null);
                    break;
                case 2:
                    new TermsQueryBuilder("field", (long[]) null);
                    break;
                case 3:
                    new TermsQueryBuilder("field", (float[]) null);
                    break;
                case 4:
                    new TermsQueryBuilder("field", (double[]) null);
                    break;
                case 5:
                    new TermsQueryBuilder("field", (Object[]) null);
                    break;
                default:
                    new TermsQueryBuilder("field", (Iterable<?>) null);
                    break;
            }
            fail("should have failed with IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), Matchers.containsString("No value specified for terms query"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (String[]) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (int[]) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (long[]) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (float[]) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (double[]) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (Object[]) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
        e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (Iterable<?>) null));
        assertThat(e.getMessage(), containsString("No value specified for terms query"));
    }

    public void testBothValuesAndLookupSet() throws IOException {

@@ -213,12 +187,9 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
            " }\n" +
            " }\n" +
            "}";
        try {
            parseQuery(query);
            fail("Expected ParsingException");
        } catch(ParsingException e) {
            assertThat(e.getMessage(), containsString("[" + TermsQueryBuilder.NAME + "] query does not support more than one field."));
        }

        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query));
        assertThat(e.getMessage(), containsString("[" + TermsQueryBuilder.NAME + "] query does not support more than one field."));
    }

    @Override

@@ -267,12 +238,8 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
        String query = XContentFactory.jsonBuilder().startObject()
                .startObject("terms").array("foo", 123).array("bar", 456).endObject()
                .endObject().string();
        try {
            parseQuery(query);
            fail("parsing should have failed");
        } catch (ParsingException ex) {
            assertThat(ex.getMessage(), equalTo("[" + TermsQueryBuilder.NAME + "] query does not support multiple fields"));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query));
        assertEquals("[" + TermsQueryBuilder.NAME + "] query does not support multiple fields", e.getMessage());
    }

    public void testFromJson() throws IOException {

@@ -288,7 +255,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
        checkGeneratedJson(json, parsed);
        assertEquals(json, 2, parsed.values().size());

        json =
        String deprecatedJson =
            "{\n" +
            " \"in\" : {\n" +
            " \"user\" : [ \"kimchy\", \"elasticsearch\" ],\n" +

@@ -298,23 +265,16 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
        QueryBuilder inShortcutParsed = parseQuery(json, ParseFieldMatcher.EMPTY);
        assertThat(inShortcutParsed, equalTo(parsed));

        try {
            parseQuery(json);
            fail("parse query should have failed in strict mode");
        } catch(IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("Deprecated field [in] used, expected [terms] instead"));
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(deprecatedJson));
        assertEquals("Deprecated field [in] used, expected [terms] instead", e.getMessage());
    }

    @Override
    public void testMustRewrite() throws IOException {
        TermsQueryBuilder termsQueryBuilder = new TermsQueryBuilder(STRING_FIELD_NAME, randomTermsLookup());
        try {
            termsQueryBuilder.toQuery(createShardContext());
            fail();
        } catch (UnsupportedOperationException ex) {
            assertEquals("query must be rewritten first", ex.getMessage());
        }
        UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class,
                () -> termsQueryBuilder.toQuery(createShardContext()));
        assertEquals("query must be rewritten first", e.getMessage());
        assertEquals(termsQueryBuilder.rewrite(createShardContext()), new TermsQueryBuilder(STRING_FIELD_NAME,
                randomTerms.stream().filter(x -> x != null).collect(Collectors.toList()))); // terms lookup removes null values
    }
@@ -44,12 +44,7 @@ public class TypeQueryBuilderTests extends AbstractQueryTestCase<TypeQueryBuilde
    }

    public void testIllegalArgument() {
        try {
            new TypeQueryBuilder((String) null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> new TypeQueryBuilder((String) null));
    }

    public void testFromJson() throws IOException {
@@ -21,9 +21,12 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.apache.lucene.search.WildcardQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

@@ -32,21 +35,36 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQue

    @Override
    protected WildcardQueryBuilder doCreateTestQueryBuilder() {
        WildcardQueryBuilder query;

        // mapped or unmapped field
        String text = randomAsciiOfLengthBetween(1, 10);
        if (randomBoolean()) {
            query = new WildcardQueryBuilder(STRING_FIELD_NAME, text);
        } else {
            query = new WildcardQueryBuilder(randomAsciiOfLengthBetween(1, 10), text);
        }
        WildcardQueryBuilder query = randomWildcardQuery();
        if (randomBoolean()) {
            query.rewrite(randomFrom(getRandomRewriteMethod()));
        }
        return query;
    }

    @Override
    protected Map<String, WildcardQueryBuilder> getAlternateVersions() {
        Map<String, WildcardQueryBuilder> alternateVersions = new HashMap<>();
        WildcardQueryBuilder wildcardQuery = randomWildcardQuery();
        String contentString = "{\n" +
            " \"wildcard\" : {\n" +
            " \"" + wildcardQuery.fieldName() + "\" : \"" + wildcardQuery.value() + "\"\n" +
            " }\n" +
            "}";
        alternateVersions.put(contentString, wildcardQuery);
        return alternateVersions;
    }

    private static WildcardQueryBuilder randomWildcardQuery() {
        // mapped or unmapped field
        String text = randomAsciiOfLengthBetween(1, 10);
        if (randomBoolean()) {
            return new WildcardQueryBuilder(STRING_FIELD_NAME, text);
        } else {
            return new WildcardQueryBuilder(randomAsciiOfLengthBetween(1, 10), text);
        }
    }

    @Override
    protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, instanceOf(WildcardQuery.class));

@@ -57,41 +75,43 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQue
    }

    public void testIllegalArguments() {
        try {
            if (randomBoolean()) {
                new WildcardQueryBuilder(null, "text");
            } else {
                new WildcardQueryBuilder("", "text");
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new WildcardQueryBuilder(null, "text"));
        assertEquals("field name is null or empty", e.getMessage());
        e = expectThrows(IllegalArgumentException.class, () -> new WildcardQueryBuilder("", "text"));
        assertEquals("field name is null or empty", e.getMessage());

        try {
            new WildcardQueryBuilder("field", null);
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        e = expectThrows(IllegalArgumentException.class, () -> new WildcardQueryBuilder("field", null));
        assertEquals("value cannot be null", e.getMessage());
    }

    public void testEmptyValue() throws IOException {
        QueryShardContext context = createShardContext();
        context.setAllowUnmappedFields(true);

        WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder(getRandomType(), "");
        assertEquals(wildcardQueryBuilder.toQuery(context).getClass(), WildcardQuery.class);
    }

    public void testFromJson() throws IOException {
        String json =
            "{ \"wildcard\" : { \"user\" : { \"wildcard\" : \"ki*y\", \"boost\" : 2.0 } }}";

        String json = "{ \"wildcard\" : { \"user\" : { \"wildcard\" : \"ki*y\", \"boost\" : 2.0 } }}";
        WildcardQueryBuilder parsed = (WildcardQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, "ki*y", parsed.value());
        assertEquals(json, 2.0, parsed.boost(), 0.0001);
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json =
            "{\n" +
            " \"wildcard\": {\n" +
            " \"user1\": {\n" +
            " \"wildcard\": \"ki*y\"\n" +
            " },\n" +
            " \"user2\": {\n" +
            " \"wildcard\": \"ki*y\"\n" +
            " }\n" +
            " }\n" +
            "}";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[wildcard] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage());
    }
}
@@ -61,38 +61,12 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase<WrapperQuery
    }

    public void testIllegalArgument() {
        try {
            if (randomBoolean()) {
                new WrapperQueryBuilder((byte[]) null);
            } else {
                new WrapperQueryBuilder(new byte[0]);
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            if (randomBoolean()) {
                new WrapperQueryBuilder((String) null);
            } else {
                new WrapperQueryBuilder("");
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            if (randomBoolean()) {
                new WrapperQueryBuilder((BytesReference) null);
            } else {
                new WrapperQueryBuilder(new BytesArray(new byte[0]));
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
        expectThrows(IllegalArgumentException.class, () -> new WrapperQueryBuilder((byte[]) null));
        expectThrows(IllegalArgumentException.class, () -> new WrapperQueryBuilder(new byte[0]));
        expectThrows(IllegalArgumentException.class, () -> new WrapperQueryBuilder((String) null));
        expectThrows(IllegalArgumentException.class, () -> new WrapperQueryBuilder(""));
        expectThrows(IllegalArgumentException.class, () -> new WrapperQueryBuilder((BytesReference) null));
        expectThrows(IllegalArgumentException.class, () -> new WrapperQueryBuilder(new BytesArray(new byte[0])));
    }

    /**

@@ -102,12 +76,9 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase<WrapperQuery
     */
    @Override
    public void testUnknownField() throws IOException {
        try {
            parseQuery("{ \"" + WrapperQueryBuilder.NAME + "\" : {\"bogusField\" : \"someValue\"} }");
            fail("ParsingException expected.");
        } catch (ParsingException e) {
            assertTrue(e.getMessage().contains("bogusField"));
        }
        String json = "{ \"" + WrapperQueryBuilder.NAME + "\" : {\"bogusField\" : \"someValue\"} }";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertTrue(e.getMessage().contains("bogusField"));
    }

    public void testFromJson() throws IOException {

@@ -133,12 +104,8 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase<WrapperQuery
    public void testMustRewrite() throws IOException {
        TermQueryBuilder tqb = new TermQueryBuilder("foo", "bar");
        WrapperQueryBuilder qb = new WrapperQueryBuilder(tqb.toString());
        try {
            qb.toQuery(createShardContext());
            fail();
        } catch (UnsupportedOperationException e) {
            assertEquals("this query must be rewritten first", e.getMessage());
        }
        UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> qb.toQuery(createShardContext()));
        assertEquals("this query must be rewritten first", e.getMessage());
        QueryBuilder rewrite = qb.rewrite(createShardContext());
        assertEquals(tqb, rewrite);
    }
@@ -414,12 +414,8 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
            " \"weight\": 2\n" +
            " }\n" +
            "}";
        try {
            parseQuery(functionScoreQuery);
            fail("parsing should have failed");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("use [functions] array if you want to define several functions."));
        }
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(functionScoreQuery));
        assertThat(e.getMessage(), containsString("use [functions] array if you want to define several functions."));
    }

    public void testProperErrorMessageWhenTwoFunctionsDefinedInFunctionsArray() throws IOException {
@@ -42,6 +42,9 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog.Location;

@@ -351,10 +354,14 @@ public class TranslogTests extends ESTestCase {

        assertEquals(6, copy.estimatedNumberOfOperations());
        assertEquals(431, copy.getTranslogSizeInBytes());
        assertEquals("\"translog\"{\n" +
            " \"operations\" : 6,\n" +
            " \"size_in_bytes\" : 431\n" +
            "}", copy.toString().trim());

        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            builder.startObject();
            copy.toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.endObject();

            assertEquals("{\"translog\":{\"operations\":6,\"size_in_bytes\":431}}", builder.string());
        }

        try {
            new TranslogStats(1, -1);