From 92076497e507ab27bbb603f8dbcc6c2e236eaafa Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Tue, 21 Aug 2018 12:43:25 +0200
Subject: [PATCH 01/48] Use a dedicated ConnectionManager for
 RemoteClusterConnection (#32988)

This change introduces a dedicated ConnectionManager for every
RemoteClusterConnection such that no state is shared with the
TransportService's internal ConnectionManager. All connections to a remote
cluster are isolated from the TransportService but still use the
TransportService and its internal properties like the Transport, tracing
and internal listener actions on disconnects etc. This allows a remote
cluster connection to have a different lifecycle than a local cluster
connection. Local discovery code doesn't get notified if there is a
disconnect from a remote cluster, and each connection can use its own
dedicated connection profile, which allows for a reduced set of
connections per cluster without conflicting with the local cluster.

Closes #31835
---
 .../transport/ConnectionManager.java          | 69 ++++++++-------
 .../transport/RemoteClusterConnection.java    | 86 +++++++++----------
 .../transport/RemoteClusterService.java       |  8 +-
 .../transport/TransportService.java           | 25 ++++--
 .../TransportClientNodesServiceTests.java     |  4 +-
 .../cluster/NodeConnectionsServiceTests.java  |  2 +-
 .../transport/ConnectionManagerTests.java     |  6 +-
 .../transport/RemoteClusterClientTests.java   | 18 ++--
 .../RemoteClusterConnectionTests.java         | 58 +++++++------
 .../transport/RemoteClusterServiceTests.java  | 19 ++--
 .../transport/StubbableConnectionManager.java |  4 +-
 11 files changed, 168 insertions(+), 131 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java
index 84c337399d5..0c6be15fd92 100644
--- a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java
+++ b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java
@@ -44,6 +44,7 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
@@ -62,6 +63,7 @@ public class ConnectionManager implements Closeable {
     private final TimeValue pingSchedule;
     private final ConnectionProfile defaultProfile;
     private final Lifecycle lifecycle = new Lifecycle();
+    private final AtomicBoolean closed = new AtomicBoolean(false);
     private final ReadWriteLock closeLock = new ReentrantReadWriteLock();
     private final DelegatingNodeConnectionListener connectionListener = new DelegatingNodeConnectionListener();
@@ -83,7 +85,9 @@ public class ConnectionManager implements Closeable {
     }
 
     public void addListener(TransportConnectionListener listener) {
-        this.connectionListener.listeners.add(listener);
+        if (connectionListener.listeners.contains(listener) == false) {
+            this.connectionListener.listeners.add(listener);
+        }
     }
 
     public void removeListener(TransportConnectionListener listener) {
@@ -186,45 +190,50 @@ public class ConnectionManager implements Closeable {
         }
     }
 
-    public int connectedNodeCount() {
+    /**
+     * Returns the number of nodes this manager is connected to.
+     */
+    public int size() {
         return connectedNodes.size();
     }
 
     @Override
     public void close() {
-        lifecycle.moveToStopped();
-        CountDownLatch latch = new CountDownLatch(1);
+        if (closed.compareAndSet(false, true)) {
+            lifecycle.moveToStopped();
+            CountDownLatch latch = new CountDownLatch(1);
 
-        // TODO: Consider moving all read/write lock (in Transport and this class) to the TransportService
-        threadPool.generic().execute(() -> {
-            closeLock.writeLock().lock();
-            try {
-                // we are holding a write lock so nobody modifies the connectedNodes / openConnections map - it's safe to first close
-                // all instances and then clear them maps
-                Iterator<Map.Entry<DiscoveryNode, Transport.Connection>> iterator = connectedNodes.entrySet().iterator();
-                while (iterator.hasNext()) {
-                    Map.Entry<DiscoveryNode, Transport.Connection> next = iterator.next();
-                    try {
-                        IOUtils.closeWhileHandlingException(next.getValue());
-                    } finally {
-                        iterator.remove();
+            // TODO: Consider moving all read/write lock (in Transport and this class) to the TransportService
+            threadPool.generic().execute(() -> {
+                closeLock.writeLock().lock();
+                try {
+                    // we are holding a write lock so nobody modifies the connectedNodes / openConnections map - it's safe to first close
+                    // all instances and then clear them maps
+                    Iterator<Map.Entry<DiscoveryNode, Transport.Connection>> iterator = connectedNodes.entrySet().iterator();
+                    while (iterator.hasNext()) {
+                        Map.Entry<DiscoveryNode, Transport.Connection> next = iterator.next();
+                        try {
+                            IOUtils.closeWhileHandlingException(next.getValue());
+                        } finally {
+                            iterator.remove();
+                        }
                     }
+                } finally {
+                    closeLock.writeLock().unlock();
+                    latch.countDown();
+                }
+            });
+
+            try {
+                try {
+                    latch.await(30, TimeUnit.SECONDS);
+                } catch (InterruptedException e) {
+                    Thread.currentThread().interrupt();
+                    // ignore
+                }
             } finally {
-                closeLock.writeLock().unlock();
-                latch.countDown();
+                lifecycle.moveToClosed();
             }
-        });
-
-        try {
-            try {
-                latch.await(30, TimeUnit.SECONDS);
-            } catch (InterruptedException e) {
-                Thread.currentThread().interrupt();
-                // ignore
-            }
-        } finally {
-            lifecycle.moveToClosed();
         }
     }
 
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java
index 15cf7899dc0..5621b385578 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java
@@ -35,6 +35,7 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
@@ -80,16 +81,17 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
     private final TransportService transportService;
+    private final ConnectionManager connectionManager;
     private final ConnectionProfile remoteProfile;
     private final ConnectedNodes connectedNodes;
     private final String clusterAlias;
     private final int maxNumRemoteConnections;
     private final Predicate<DiscoveryNode> nodePredicate;
+    private final ThreadPool threadPool;
     private volatile List<Supplier<DiscoveryNode>> seedNodes;
     private volatile boolean skipUnavailable;
     private final ConnectHandler connectHandler;
     private SetOnce<ClusterName> remoteClusterName = new SetOnce<>();
-    private final ClusterName localClusterName;
 
     /**
      * Creates a new {@link
RemoteClusterConnection} @@ -97,13 +99,14 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo * @param clusterAlias the configured alias of the cluster to connect to * @param seedNodes a list of seed nodes to discover eligible nodes from * @param transportService the local nodes transport service + * @param connectionManager the connection manager to use for this remote connection * @param maxNumRemoteConnections the maximum number of connections to the remote cluster * @param nodePredicate a predicate to filter eligible remote nodes to connect to */ RemoteClusterConnection(Settings settings, String clusterAlias, List> seedNodes, - TransportService transportService, int maxNumRemoteConnections, Predicate nodePredicate) { + TransportService transportService, ConnectionManager connectionManager, int maxNumRemoteConnections, + Predicate nodePredicate) { super(settings); - this.localClusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); this.transportService = transportService; this.maxNumRemoteConnections = maxNumRemoteConnections; this.nodePredicate = nodePredicate; @@ -122,7 +125,11 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo this.skipUnavailable = RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE .getConcreteSettingForNamespace(clusterAlias).get(settings); this.connectHandler = new ConnectHandler(); - transportService.addConnectionListener(this); + this.threadPool = transportService.threadPool; + this.connectionManager = connectionManager; + connectionManager.addListener(this); + // we register the transport service here as a listener to make sure we notify handlers on disconnect etc. + connectionManager.addListener(transportService); } /** @@ -183,8 +190,9 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo private void fetchShardsInternal(ClusterSearchShardsRequest searchShardsRequest, final ActionListener listener) { - final DiscoveryNode node = connectedNodes.getAny(); - transportService.sendRequest(node, ClusterSearchShardsAction.NAME, searchShardsRequest, + final DiscoveryNode node = getAnyConnectedNode(); + Transport.Connection connection = connectionManager.getConnection(node); + transportService.sendRequest(connection, ClusterSearchShardsAction.NAME, searchShardsRequest, TransportRequestOptions.EMPTY, new TransportResponseHandler() { @Override @@ -219,12 +227,16 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo request.clear(); request.nodes(true); request.local(true); // run this on the node that gets the request it's as good as any other - final DiscoveryNode node = connectedNodes.getAny(); - transportService.sendRequest(node, ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, + final DiscoveryNode node = getAnyConnectedNode(); + Transport.Connection connection = connectionManager.getConnection(node); + transportService.sendRequest(connection, ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, new TransportResponseHandler() { + @Override - public ClusterStateResponse newInstance() { - return new ClusterStateResponse(); + public ClusterStateResponse read(StreamInput in) throws IOException { + ClusterStateResponse response = new ClusterStateResponse(); + response.readFrom(in); + return response; } @Override @@ -261,11 +273,11 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo * If such node is not connected, the returned connection will be a proxy connection that redirects to 
it. */ Transport.Connection getConnection(DiscoveryNode remoteClusterNode) { - if (transportService.nodeConnected(remoteClusterNode)) { - return transportService.getConnection(remoteClusterNode); + if (connectionManager.nodeConnected(remoteClusterNode)) { + return connectionManager.getConnection(remoteClusterNode); } - DiscoveryNode discoveryNode = connectedNodes.getAny(); - Transport.Connection connection = transportService.getConnection(discoveryNode); + DiscoveryNode discoveryNode = getAnyConnectedNode(); + Transport.Connection connection = connectionManager.getConnection(discoveryNode); return new ProxyConnection(connection, remoteClusterNode); } @@ -317,33 +329,18 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo } Transport.Connection getConnection() { - return transportService.getConnection(getAnyConnectedNode()); + return connectionManager.getConnection(getAnyConnectedNode()); } @Override public void close() throws IOException { - connectHandler.close(); + IOUtils.close(connectHandler, connectionManager); } public boolean isClosed() { return connectHandler.isClosed(); } - private ConnectionProfile getRemoteProfile(ClusterName name) { - // we can only compare the cluster name to make a decision if we should use a remote profile - // we can't use a cluster UUID here since we could be connecting to that remote cluster before - // the remote node has joined its cluster and have a cluster UUID. The fact that we just lose a - // rather smallish optimization on the connection layer under certain situations where remote clusters - // have the same name as the local one is minor here. - // the alternative here is to complicate the remote infrastructure to also wait until we formed a cluster, - // gained a cluster UUID and then start connecting etc. we rather use this simplification in order to maintain simplicity - if (this.localClusterName.equals(name)) { - return null; - } else { - return remoteProfile; - } - } - /** * The connect handler manages node discovery and the actual connect to the remote cluster. * There is at most one connect job running at any time. If such a connect job is triggered @@ -387,7 +384,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo final boolean runConnect; final Collection> toNotify; final ActionListener listener = connectListener == null ? 
null : - ContextPreservingActionListener.wrapPreservingContext(connectListener, transportService.getThreadPool().getThreadContext()); + ContextPreservingActionListener.wrapPreservingContext(connectListener, threadPool.getThreadContext()); synchronized (queue) { if (listener != null && queue.offer(listener) == false) { listener.onFailure(new RejectedExecutionException("connect queue is full")); @@ -415,7 +412,6 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo } private void forkConnect(final Collection> toNotify) { - ThreadPool threadPool = transportService.getThreadPool(); ExecutorService executor = threadPool.executor(ThreadPool.Names.MANAGEMENT); executor.submit(new AbstractRunnable() { @Override @@ -452,13 +448,13 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo maybeConnect(); } }); - collectRemoteNodes(seedNodes.iterator(), transportService, listener); + collectRemoteNodes(seedNodes.iterator(), transportService, connectionManager, listener); } }); } private void collectRemoteNodes(Iterator> seedNodes, - final TransportService transportService, ActionListener listener) { + final TransportService transportService, final ConnectionManager manager, ActionListener listener) { if (Thread.currentThread().isInterrupted()) { listener.onFailure(new InterruptedException("remote connect thread got interrupted")); } @@ -467,7 +463,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo cancellableThreads.executeIO(() -> { final DiscoveryNode seedNode = seedNodes.next().get(); final TransportService.HandshakeResponse handshakeResponse; - Transport.Connection connection = transportService.openConnection(seedNode, + Transport.Connection connection = manager.openConnection(seedNode, ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null)); boolean success = false; try { @@ -482,7 +478,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo final DiscoveryNode handshakeNode = handshakeResponse.getDiscoveryNode(); if (nodePredicate.test(handshakeNode) && connectedNodes.size() < maxNumRemoteConnections) { - transportService.connectToNode(handshakeNode, getRemoteProfile(handshakeResponse.getClusterName())); + manager.connectToNode(handshakeNode, remoteProfile, transportService.connectionValidator(handshakeNode)); if (remoteClusterName.get() == null) { assert handshakeResponse.getClusterName().value() != null; remoteClusterName.set(handshakeResponse.getClusterName()); @@ -524,7 +520,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo // ISE if we fail the handshake with an version incompatible node if (seedNodes.hasNext()) { logger.debug(() -> new ParameterizedMessage("fetching nodes from external cluster {} failed", clusterAlias), ex); - collectRemoteNodes(seedNodes, transportService, listener); + collectRemoteNodes(seedNodes, transportService, manager, listener); } else { listener.onFailure(ex); } @@ -552,7 +548,6 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo /* This class handles the _state response from the remote cluster when sniffing nodes to connect to */ private class SniffClusterStateResponseHandler implements TransportResponseHandler { - private final TransportService transportService; private final Transport.Connection connection; private final ActionListener listener; private final Iterator> seedNodes; @@ -561,7 +556,6 @@ final class RemoteClusterConnection 
extends AbstractComponent implements Transpo SniffClusterStateResponseHandler(TransportService transportService, Transport.Connection connection, ActionListener listener, Iterator> seedNodes, CancellableThreads cancellableThreads) { - this.transportService = transportService; this.connection = connection; this.listener = listener; this.seedNodes = seedNodes; @@ -592,8 +586,8 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo for (DiscoveryNode node : nodesIter) { if (nodePredicate.test(node) && connectedNodes.size() < maxNumRemoteConnections) { try { - transportService.connectToNode(node, getRemoteProfile(remoteClusterName.get())); // noop if node is - // connected + connectionManager.connectToNode(node, remoteProfile, + transportService.connectionValidator(node)); // noop if node is connected connectedNodes.add(node); } catch (ConnectTransportException | IllegalStateException ex) { // ISE if we fail the handshake with an version incompatible node @@ -609,7 +603,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo listener.onFailure(ex); // we got canceled - fail the listener and step out } catch (Exception ex) { logger.warn(() -> new ParameterizedMessage("fetching nodes from external cluster {} failed", clusterAlias), ex); - collectRemoteNodes(seedNodes, transportService, listener); + collectRemoteNodes(seedNodes, transportService, connectionManager, listener); } } @@ -620,7 +614,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo IOUtils.closeWhileHandlingException(connection); } finally { // once the connection is closed lets try the next node - collectRemoteNodes(seedNodes, transportService, listener); + collectRemoteNodes(seedNodes, transportService, connectionManager, listener); } } @@ -715,4 +709,8 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo } } } + + ConnectionManager getConnectionManager() { + return connectionManager; + } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 956a0d94179..34f13b67287 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import java.util.Collection; import java.util.function.Supplier; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -139,7 +140,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } if (remote == null) { // this is a new cluster we have to add a new representation - remote = new RemoteClusterConnection(settings, entry.getKey(), entry.getValue(), transportService, numRemoteConnections, + remote = new RemoteClusterConnection(settings, entry.getKey(), entry.getValue(), transportService, + new ConnectionManager(settings, transportService.transport, transportService.threadPool), numRemoteConnections, getNodePredicate(settings)); remoteClusters.put(entry.getKey(), remote); } @@ -411,4 +413,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } return new RemoteClusterAwareClient(settings, threadPool, transportService, clusterAlias); } + + Collection getConnections() { + return remoteClusters.values(); + } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java 
b/server/src/main/java/org/elasticsearch/transport/TransportService.java index fb14ae96dbf..e37ea81211a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -56,6 +57,7 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.UnknownHostException; import java.util.Arrays; import java.util.Collections; @@ -268,8 +270,9 @@ public class TransportService extends AbstractLifecycleComponent implements Tran @Override protected void doStop() { try { - connectionManager.close(); - transport.stop(); + IOUtils.close(connectionManager, remoteClusterService, transport::stop); + } catch (IOException e) { + throw new UncheckedIOException(e); } finally { // in case the transport is not connected to our local node (thus cleaned on node disconnect) // make sure to clean any leftover on going handles @@ -306,7 +309,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran @Override protected void doClose() throws IOException { - IOUtils.close(remoteClusterService, transport); + transport.close(); } /** @@ -364,14 +367,18 @@ public class TransportService extends AbstractLifecycleComponent implements Tran if (isLocalNode(node)) { return; } + connectionManager.connectToNode(node, connectionProfile, connectionValidator(node)); + } - connectionManager.connectToNode(node, connectionProfile, (newConnection, actualProfile) -> { + public CheckedBiConsumer connectionValidator(DiscoveryNode node) { + return (newConnection, actualProfile) -> { // We don't validate cluster names to allow for CCS connections. final DiscoveryNode remote = handshake(newConnection, actualProfile.getHandshakeTimeout().millis(), cn -> true).discoveryNode; if (validateConnections && node.equals(remote) == false) { throw new ConnectTransportException(node, "handshake failed. 
unexpected remote node " + remote); } - }); + }; + } /** @@ -562,8 +569,12 @@ public class TransportService extends AbstractLifecycleComponent implements Tran final TransportRequest request, final TransportRequestOptions options, TransportResponseHandler handler) { - - asyncSender.sendRequest(connection, action, request, options, handler); + try { + asyncSender.sendRequest(connection, action, request, options, handler); + } catch (NodeNotConnectedException ex) { + // the caller might not handle this so we invoke the handler + handler.handleException(ex); + } } /** diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index afc6b47483e..9baf2e1c956 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -379,10 +379,10 @@ public class TransportClientNodesServiceTests extends ESTestCase { transportClientNodesService.addTransportAddresses(remoteService.getLocalDiscoNode().getAddress()); assertEquals(1, transportClientNodesService.connectedNodes().size()); - assertEquals(1, clientService.connectionManager().connectedNodeCount()); + assertEquals(1, clientService.connectionManager().size()); transportClientNodesService.doSample(); - assertEquals(1, clientService.connectionManager().connectedNodeCount()); + assertEquals(1, clientService.connectionManager().size()); establishedConnections.clear(); handler.blockRequest(); diff --git a/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index 473f5152e8f..b8c39c48f88 100644 --- a/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -134,7 +134,7 @@ public class NodeConnectionsServiceTests extends ESTestCase { private void assertConnectedExactlyToNodes(ClusterState state) { assertConnected(state.nodes()); - assertThat(transportService.getConnectionManager().connectedNodeCount(), equalTo(state.nodes().getSize())); + assertThat(transportService.getConnectionManager().size(), equalTo(state.nodes().getSize())); } private void assertConnected(Iterable nodes) { diff --git a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java index 3c099c32bde..bff5a2b122d 100644 --- a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java @@ -159,7 +159,7 @@ public class ConnectionManagerTests extends ESTestCase { assertFalse(connection.isClosed()); assertTrue(connectionManager.nodeConnected(node)); assertSame(connection, connectionManager.getConnection(node)); - assertEquals(1, connectionManager.connectedNodeCount()); + assertEquals(1, connectionManager.size()); assertEquals(1, nodeConnectedCount.get()); assertEquals(0, nodeDisconnectedCount.get()); @@ -169,7 +169,7 @@ public class ConnectionManagerTests extends ESTestCase { connection.close(); } assertTrue(connection.isClosed()); - assertEquals(0, connectionManager.connectedNodeCount()); + assertEquals(0, connectionManager.size()); assertEquals(1, nodeConnectedCount.get()); assertEquals(1, 
nodeDisconnectedCount.get()); } @@ -205,7 +205,7 @@ public class ConnectionManagerTests extends ESTestCase { assertTrue(connection.isClosed()); assertFalse(connectionManager.nodeConnected(node)); expectThrows(NodeNotConnectedException.class, () -> connectionManager.getConnection(node)); - assertEquals(0, connectionManager.connectedNodeCount()); + assertEquals(0, connectionManager.size()); assertEquals(0, nodeConnectedCount.get()); assertEquals(0, nodeDisconnectedCount.get()); } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 8cfec0a07f9..34e22fd20de 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -81,13 +81,15 @@ public class RemoteClusterClientTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(localSettings, Version.CURRENT, threadPool, null)) { Semaphore semaphore = new Semaphore(1); service.start(); - service.addConnectionListener(new TransportConnectionListener() { - @Override - public void onNodeDisconnected(DiscoveryNode node) { - if (remoteNode.equals(node)) { - semaphore.release(); + service.getRemoteClusterService().getConnections().forEach(con -> { + con.getConnectionManager().addListener(new TransportConnectionListener() { + @Override + public void onNodeDisconnected(DiscoveryNode node) { + if (remoteNode.equals(node)) { + semaphore.release(); + } } - } + }); }); // this test is not perfect since we might reconnect concurrently but it will fail most of the time if we don't have // the right calls in place in the RemoteAwareClient @@ -95,7 +97,9 @@ public class RemoteClusterClientTests extends ESTestCase { for (int i = 0; i < 10; i++) { semaphore.acquire(); try { - service.disconnectFromNode(remoteNode); + service.getRemoteClusterService().getConnections().forEach(con -> { + con.getConnectionManager().disconnectFromNode(remoteNode); + }); semaphore.acquire(); RemoteClusterService remoteClusterService = service.getRemoteClusterService(); Client client = remoteClusterService.getRemoteClusterClient(threadPool, "test"); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 3d0388ccfad..e40486d63dc 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -145,7 +145,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } - public void testLocalProfileIsUsedForLocalCluster() throws Exception { + public void testRemoteProfileIsUsedForLocalCluster() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) { @@ -159,7 +159,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), 
Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -175,9 +175,12 @@ public class RemoteClusterConnectionTests extends ESTestCase { }); TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.BULK) .build(); - service.sendRequest(connection.getConnection(), ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), - options, futureHandler); - futureHandler.txGet(); + IllegalStateException ise = (IllegalStateException) expectThrows(SendRequestTransportException.class, () -> { + service.sendRequest(discoverableNode, + ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), options, futureHandler); + futureHandler.txGet(); + }).getCause(); + assertEquals(ise.getMessage(), "can't select channel size is 0 for types: [RECOVERY, BULK, STATE]"); } } } @@ -199,7 +202,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -255,7 +258,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -284,7 +287,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - seedNodes, service, Integer.MAX_VALUE, n -> true)) { + seedNodes, service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, seedNodes); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -311,7 +314,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -360,7 +363,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false)) { + 
Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, + n -> n.equals(rejectedNode) == false)) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); if (rejectedNode.equals(seedNode)) { assertFalse(service.nodeConnected(seedNode)); @@ -399,7 +403,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(() -> seedNode))); assertFalse(service.nodeConnected(seedNode)); assertTrue(connection.assertNoRunningConnections()); @@ -462,7 +466,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { connection.addConnectedNode(seedNode); for (DiscoveryNode node : knownNodes) { final Transport.Connection transportConnection = connection.getConnection(node); @@ -505,7 +509,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { CountDownLatch listenerCalled = new CountDownLatch(1); AtomicReference exceptionReference = new AtomicReference<>(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { ActionListener listener = ActionListener.wrap(x -> { listenerCalled.countDown(); fail("expected exception"); @@ -542,7 +546,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.acceptIncomingRequests(); List> nodes = Collections.singletonList(() -> seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - nodes, service, Integer.MAX_VALUE, n -> true)) { + nodes, service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { if (randomBoolean()) { updateSeedNodes(connection, nodes); } @@ -582,7 +586,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.acceptIncomingRequests(); List> nodes = Collections.singletonList(() -> seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - nodes, service, Integer.MAX_VALUE, n -> true)) { + nodes, service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { SearchRequest request = new SearchRequest("test-index"); Thread[] threads = new Thread[10]; for (int i = 0; i < threads.length; i++) { @@ -636,7 +640,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Collections.singletonList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { SearchRequest request = new 
SearchRequest("test-index"); ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index") @@ -746,7 +750,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - seedNodes, service, Integer.MAX_VALUE, n -> true)) { + seedNodes, service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { int numThreads = randomIntBetween(4, 10); Thread[] threads = new Thread[numThreads]; CyclicBarrier barrier = new CyclicBarrier(numThreads); @@ -824,7 +828,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - seedNodes, service, Integer.MAX_VALUE, n -> true)) { + seedNodes, service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { int numThreads = randomIntBetween(4, 10); Thread[] threads = new Thread[numThreads]; CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); @@ -913,7 +917,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.acceptIncomingRequests(); int maxNumConnections = randomIntBetween(1, 5); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - seedNodes, service, maxNumConnections, n -> true)) { + seedNodes, service, service.connectionManager(), maxNumConnections, n -> true)) { // test no nodes connected RemoteConnectionInfo remoteConnectionInfo = assertSerialization(connection.getConnectionInfo()); assertNotNull(remoteConnectionInfo); @@ -1060,7 +1064,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { assertFalse(service.nodeConnected(seedNode)); assertFalse(service.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -1109,7 +1113,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(() -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { if (randomBoolean()) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); } @@ -1157,7 +1161,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - seedNodes, service, Integer.MAX_VALUE, n -> true)) { + seedNodes, service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { final int numGetThreads = randomIntBetween(4, 10); final Thread[] getThreads = new Thread[numGetThreads]; final int numModifyingThreads = randomIntBetween(4, 10); @@ -1247,7 +1251,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new 
RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList( () -> seedNode), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList( () -> seedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, Arrays.asList(() -> seedNode)); assertTrue(service.nodeConnected(seedNode)); assertTrue(service.nodeConnected(discoverableNode)); @@ -1327,7 +1331,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(() -> connectedNode), service, Integer.MAX_VALUE, n -> true)) { + Collections.singletonList(() -> connectedNode), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { connection.addConnectedNode(connectedNode); for (int i = 0; i < 10; i++) { //always a direct connection as the remote node is already connected @@ -1335,9 +1339,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { assertSame(seedConnection, remoteConnection); } for (int i = 0; i < 10; i++) { - //always a direct connection as the remote node is already connected + // we don't use the transport service connection manager so we will get a proxy connection for the local node Transport.Connection remoteConnection = connection.getConnection(service.getLocalNode()); - assertThat(remoteConnection, not(instanceOf(RemoteClusterConnection.ProxyConnection.class))); + assertThat(remoteConnection, instanceOf(RemoteClusterConnection.ProxyConnection.class)); assertThat(remoteConnection.getNode(), equalTo(service.getLocalNode())); } for (int i = 0; i < 10; i++) { @@ -1369,7 +1373,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { return seedNode; }; try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true)) { + Arrays.asList(seedSupplier), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true)) { updateSeedNodes(connection, Arrays.asList(seedSupplier)); // Closing connections leads to RemoteClusterConnection.ConnectHandler.collectRemoteNodes // being called again so we try to resolve the same seed node's host twice diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index c94b1cbdef5..f1929e72d8b 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -283,6 +283,7 @@ public class RemoteClusterServiceTests extends ESTestCase { assertTrue(service.isRemoteClusterRegistered("cluster_2")); assertFalse(service.isRemoteNodeConnected("cluster_2", c2N1Node)); assertTrue(service.isRemoteNodeConnected("cluster_2", c2N2Node)); + assertEquals(0, transportService.getConnectionManager().size()); } } } @@ -347,6 +348,7 @@ public class RemoteClusterServiceTests extends ESTestCase { assertTrue(service.isRemoteClusterRegistered("cluster_2")); assertFalse(service.isRemoteNodeConnected("cluster_2", c2N1Node)); assertTrue(service.isRemoteNodeConnected("cluster_2", c2N2Node)); + assertEquals(0, transportService.getConnectionManager().size()); } } } @@ -579,14 +581,16 @@ public class RemoteClusterServiceTests extends ESTestCase { } CountDownLatch disconnectedLatch = new 
CountDownLatch(numDisconnectedClusters); - service.addConnectionListener(new TransportConnectionListener() { - @Override - public void onNodeDisconnected(DiscoveryNode node) { - if (disconnectedNodes.remove(node)) { - disconnectedLatch.countDown(); + for (RemoteClusterConnection connection : remoteClusterService.getConnections()) { + connection.getConnectionManager().addListener(new TransportConnectionListener() { + @Override + public void onNodeDisconnected(DiscoveryNode node) { + if (disconnectedNodes.remove(node)) { + disconnectedLatch.countDown(); + } } - } - }); + }); + } for (DiscoveryNode disconnectedNode : disconnectedNodes) { service.addFailToSendNoConnectRule(disconnectedNode.getAddress()); @@ -664,6 +668,7 @@ public class RemoteClusterServiceTests extends ESTestCase { assertTrue(shardsResponse != ClusterSearchShardsResponse.EMPTY); } } + assertEquals(0, service.getConnectionManager().size()); } } } finally { diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java index 486ccc805d0..012369feb83 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java @@ -120,8 +120,8 @@ public class StubbableConnectionManager extends ConnectionManager { } @Override - public int connectedNodeCount() { - return delegate.connectedNodeCount(); + public int size() { + return delegate.size(); } @Override From 38bdf9ce3238c5399e18b33efba34df3a269e17c Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 21 Aug 2018 13:29:18 +0100 Subject: [PATCH 02/48] HLRC GraphClient and associated tests (#32366) GraphClient for the high level REST client and associated tests. 
Part of #29827 work --- .../org/elasticsearch/client/GraphClient.java | 63 +++++ .../client/RequestConverters.java | 8 + .../client/RestHighLevelClient.java | 11 + .../org/elasticsearch/client/GraphIT.java | 139 +++++++++++ .../client/RequestConvertersTests.java | 32 +++ .../client/RestHighLevelClientTests.java | 1 + .../documentation/GraphDocumentationIT.java | 125 ++++++++++ .../high-level/graph/explore.asciidoc | 53 ++++ .../high-level/supported-apis.asciidoc | 8 + .../xpack/core/graph/action/Connection.java | 141 ----------- .../core/graph/action/GraphExploreAction.java | 1 + .../action/GraphExploreRequestBuilder.java | 3 + .../action/TransportGraphExploreAction.java | 18 +- .../graph/rest/action/RestGraphAction.java | 8 +- .../xpack/graph/test/GraphTests.java | 10 +- .../authz/IndicesAndAliasesResolver.java | 2 +- .../authz/IndicesAndAliasesResolverTests.java | 2 +- .../protocol/xpack/graph/Connection.java | 229 ++++++++++++++++++ .../xpack/graph}/GraphExploreRequest.java | 150 ++++++++---- .../xpack/graph}/GraphExploreResponse.java | 103 ++++++-- .../protocol/xpack/graph}/Hop.java | 41 +++- .../protocol/xpack/graph}/Vertex.java | 99 +++++++- .../protocol/xpack/graph}/VertexRequest.java | 64 ++++- .../protocol/xpack/graph/package-info.java | 24 ++ .../graph/GraphExploreResponseTests.java | 131 ++++++++++ 25 files changed, 1219 insertions(+), 247 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/GraphDocumentationIT.java create mode 100644 docs/java-rest/high-level/graph/explore.asciidoc delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Connection.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action => protocol/src/main/java/org/elasticsearch/protocol/xpack/graph}/GraphExploreRequest.java (67%) rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action => protocol/src/main/java/org/elasticsearch/protocol/xpack/graph}/GraphExploreResponse.java (55%) rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action => protocol/src/main/java/org/elasticsearch/protocol/xpack/graph}/Hop.java (73%) rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action => protocol/src/main/java/org/elasticsearch/protocol/xpack/graph}/Vertex.java (59%) rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action => protocol/src/main/java/org/elasticsearch/protocol/xpack/graph}/VertexRequest.java (70%) create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java new file mode 100644 index 00000000000..293105f5abe --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; + +import java.io.IOException; + +import static java.util.Collections.emptySet; + + +public class GraphClient { + private final RestHighLevelClient restHighLevelClient; + + GraphClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Executes an exploration request using the Graph API. + * + * See Graph API + * on elastic.co. + */ + public final GraphExploreResponse explore(GraphExploreRequest graphExploreRequest, + RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore, + options, GraphExploreResponse::fromXContext, emptySet()); + } + + /** + * Asynchronously executes an exploration request using the Graph API. + * + * See Graph API + * on elastic.co. + */ + public final void exploreAsync(GraphExploreRequest graphExploreRequest, + RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore, + options, GraphExploreResponse::fromXContext, listener, emptySet()); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 0e5fce5b227..9dd316a0fb0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -114,6 +114,7 @@ import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -1124,6 +1125,13 @@ final class RequestConverters { return request; } + static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException { + String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore"); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request xPackWatcherPutWatch(PutWatchRequest 
putWatchRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index e705ca12806..2b71b5be59d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -209,6 +209,7 @@ public class RestHighLevelClient implements Closeable { private final TasksClient tasksClient = new TasksClient(this); private final XPackClient xPackClient = new XPackClient(this); private final WatcherClient watcherClient = new WatcherClient(this); + private final GraphClient graphClient = new GraphClient(this); private final LicenseClient licenseClient = new LicenseClient(this); private final MigrationClient migrationClient = new MigrationClient(this); private final MachineLearningClient machineLearningClient = new MachineLearningClient(this); @@ -324,6 +325,16 @@ public class RestHighLevelClient implements Closeable { * Watcher APIs on elastic.co for more information. */ public WatcherClient watcher() { return watcherClient; } + + /** + * Provides methods for accessing the Elastic Licensed Graph explore API that + * is shipped with the default distribution of Elasticsearch. All of + * these APIs will 404 if run against the OSS distribution of Elasticsearch. + *
+ * See the + * Graph API on elastic.co for more information. + */ + public GraphClient graph() { return graphClient; } /** * Provides methods for accessing the Elastic Licensed Licensing APIs that diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java new file mode 100644 index 00000000000..4376b47d737 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java @@ -0,0 +1,139 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; +import org.elasticsearch.protocol.xpack.graph.Hop; +import org.elasticsearch.protocol.xpack.graph.Vertex; +import org.elasticsearch.protocol.xpack.graph.VertexRequest; +import org.hamcrest.Matchers; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +public class GraphIT extends ESRestHighLevelClientTestCase { + + @Before + public void indexDocuments() throws IOException { + // Create chain of doc IDs across indices 1->2->3 + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1"); + doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}"); + client().performRequest(doc1); + + Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/1"); + doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}"); + client().performRequest(doc2); + + Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/type/2"); + doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}"); + client().performRequest(doc3); + + Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2"); + doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}"); + client().performRequest(doc4); + + Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2"); + doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}"); + client().performRequest(doc5); + + + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); + } + + public void testCleanExplore() throws Exception { + GraphExploreRequest graphExploreRequest = new GraphExploreRequest(); + graphExploreRequest.indices("index1", "index2"); + graphExploreRequest.useSignificance(false); + int numHops = 3; + for (int i = 0; i < numHops; i++) { + QueryBuilder guidingQuery = null; + if (i == 0) { 
+ guidingQuery = new TermQueryBuilder("const.keyword", "start"); + } else if (randomBoolean()){ + guidingQuery = new TermQueryBuilder("const.keyword", "foo"); + } + Hop hop = graphExploreRequest.createNextHop(guidingQuery); + VertexRequest vr = hop.addVertexRequest("num"); + vr.minDocCount(1); + } + Map expectedTermsAndDepths = new HashMap<>(); + expectedTermsAndDepths.put("1", 0); + expectedTermsAndDepths.put("2", 1); + expectedTermsAndDepths.put("3", 2); + + GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT); + Map actualTermsAndDepths = new HashMap<>(); + Collection v = exploreResponse.getVertices(); + for (Vertex vertex : v) { + actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth()); + } + assertEquals(expectedTermsAndDepths, actualTermsAndDepths); + assertThat(exploreResponse.isTimedOut(), Matchers.is(false)); + ShardOperationFailedException[] failures = exploreResponse.getShardFailures(); + assertThat(failures.length, Matchers.equalTo(0)); + + } + + public void testBadExplore() throws Exception { + //Explore indices where lack of fielddata=true on one index leads to partial failures + GraphExploreRequest graphExploreRequest = new GraphExploreRequest(); + graphExploreRequest.indices("index1", "index2", "index_no_field_data"); + graphExploreRequest.useSignificance(false); + int numHops = 3; + for (int i = 0; i < numHops; i++) { + QueryBuilder guidingQuery = null; + if (i == 0) { + guidingQuery = new TermQueryBuilder("const.keyword", "start"); + } else if (randomBoolean()){ + guidingQuery = new TermQueryBuilder("const.keyword", "foo"); + } + Hop hop = graphExploreRequest.createNextHop(guidingQuery); + VertexRequest vr = hop.addVertexRequest("num"); + vr.minDocCount(1); + } + Map expectedTermsAndDepths = new HashMap<>(); + expectedTermsAndDepths.put("1", 0); + expectedTermsAndDepths.put("2", 1); + expectedTermsAndDepths.put("3", 2); + + GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT); + Map actualTermsAndDepths = new HashMap<>(); + Collection v = exploreResponse.getVertices(); + for (Vertex vertex : v) { + actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth()); + } + assertEquals(expectedTermsAndDepths, actualTermsAndDepths); + assertThat(exploreResponse.isTimedOut(), Matchers.is(false)); + ShardOperationFailedException[] failures = exploreResponse.getShardFailures(); + assertThat(failures.length, Matchers.equalTo(1)); + assertTrue(failures[0].reason().contains("Fielddata is disabled")); + + } + + +} \ No newline at end of file diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 47195f0bb2a..ebabb8f95b5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -118,6 +118,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.rankeval.PrecisionAtK; @@ -128,6 +129,8 @@ import 
org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.Hop; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -2598,6 +2601,35 @@ public class RequestConvertersTests extends ESTestCase { request.getEntity().writeTo(bos); assertThat(bos.toString("UTF-8"), is(body)); } + + public void testGraphExplore() throws Exception { + Map expectedParams = new HashMap<>(); + + GraphExploreRequest graphExploreRequest = new GraphExploreRequest(); + graphExploreRequest.sampleDiversityField("diversity"); + graphExploreRequest.indices("index1", "index2"); + graphExploreRequest.types("type1", "type2"); + int timeout = randomIntBetween(10000, 20000); + graphExploreRequest.timeout(TimeValue.timeValueMillis(timeout)); + graphExploreRequest.useSignificance(randomBoolean()); + int numHops = randomIntBetween(1, 5); + for (int i = 0; i < numHops; i++) { + int hopNumber = i + 1; + QueryBuilder guidingQuery = null; + if (randomBoolean()) { + guidingQuery = new TermQueryBuilder("field" + hopNumber, "value" + hopNumber); + } + Hop hop = graphExploreRequest.createNextHop(guidingQuery); + hop.addVertexRequest("field" + hopNumber); + hop.getVertexRequest(0).addInclude("value" + hopNumber, hopNumber); + } + Request request = RequestConverters.xPackGraphExplore(graphExploreRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/index1,index2/type1,type2/_xpack/graph/_explore", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); + assertToXContentBody(graphExploreRequest, request.getEntity()); + } public void testXPackDeleteWatch() { DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index b5d8dbb628e..1036b79a4a5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -758,6 +758,7 @@ public class RestHighLevelClientTests extends ESTestCase { apiName.startsWith("license.") == false && apiName.startsWith("machine_learning.") == false && apiName.startsWith("watcher.") == false && + apiName.startsWith("graph.") == false && apiName.startsWith("migration.") == false) { apiNotFound.add(apiName); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/GraphDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/GraphDocumentationIT.java new file mode 100644 index 00000000000..8631e18b873 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/GraphDocumentationIT.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.documentation; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.protocol.xpack.graph.Connection; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; +import org.elasticsearch.protocol.xpack.graph.Hop; +import org.elasticsearch.protocol.xpack.graph.Vertex; +import org.elasticsearch.protocol.xpack.graph.VertexRequest; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; + +public class GraphDocumentationIT extends ESRestHighLevelClientTestCase { + + + @Before + public void indexDocuments() throws IOException { + // Create chain of doc IDs across indices 1->2->3 + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1"); + doc1.setJsonEntity("{ \"participants\":[1,2], \"text\":\"let's start projectx\", \"attachment_md5\":\"324FHDGHFDG4564\"}"); + client().performRequest(doc1); + + Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/2"); + doc2.setJsonEntity("{\"participants\":[2,3,4], \"text\":\"got something you both may be interested in\"}"); + client().performRequest(doc2); + + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); + } + + @SuppressForbidden(reason = "system out is ok for a documentation example") + public void testExplore() throws Exception { + RestHighLevelClient client = highLevelClient(); + + + + // tag::x-pack-graph-explore-request + GraphExploreRequest request = new GraphExploreRequest(); + request.indices("index1", "index2"); + request.useSignificance(false); + TermQueryBuilder startingQuery = new TermQueryBuilder("text", "projectx"); + + Hop hop1 = request.createNextHop(startingQuery); // <1> + VertexRequest people = hop1.addVertexRequest("participants"); // <2> + people.minDocCount(1); + VertexRequest files = hop1.addVertexRequest("attachment_md5"); + files.minDocCount(1); + + Hop hop2 = request.createNextHop(null); // <3> + VertexRequest vr2 = hop2.addVertexRequest("participants"); + vr2.minDocCount(5); + + GraphExploreResponse exploreResponse = client.graph().explore(request, RequestOptions.DEFAULT); // <4> + // end::x-pack-graph-explore-request + + + // tag::x-pack-graph-explore-response + Collection v = exploreResponse.getVertices(); + Collection c = exploreResponse.getConnections(); + for (Vertex vertex : v) { + System.out.println(vertex.getField() + ":" + vertex.getTerm() + // <1> + " 
discovered at hop depth " + vertex.getHopDepth()); + } + for (Connection link : c) { + System.out.println(link.getFrom() + " -> " + link.getTo() // <2> + + " evidenced by " + link.getDocCount() + " docs"); + } + // end::x-pack-graph-explore-response + + + Collection initialVertices = exploreResponse.getVertices(); + + // tag::x-pack-graph-explore-expand + GraphExploreRequest expandRequest = new GraphExploreRequest(); + expandRequest.indices("index1", "index2"); + + + Hop expandHop1 = expandRequest.createNextHop(null); // <1> + VertexRequest fromPeople = expandHop1.addVertexRequest("participants"); // <2> + for (Vertex vertex : initialVertices) { + if (vertex.getField().equals("participants")) { + fromPeople.addInclude(vertex.getTerm(), 1f); + } + } + + Hop expandHop2 = expandRequest.createNextHop(null); + VertexRequest newPeople = expandHop2.addVertexRequest("participants"); // <3> + for (Vertex vertex : initialVertices) { + if (vertex.getField().equals("participants")) { + newPeople.addExclude(vertex.getTerm()); + } + } + + GraphExploreResponse expandResponse = client.graph().explore(expandRequest, RequestOptions.DEFAULT); + // end::x-pack-graph-explore-expand + + } + +} diff --git a/docs/java-rest/high-level/graph/explore.asciidoc b/docs/java-rest/high-level/graph/explore.asciidoc new file mode 100644 index 00000000000..f2718209f4b --- /dev/null +++ b/docs/java-rest/high-level/graph/explore.asciidoc @@ -0,0 +1,53 @@ +[[java-rest-high-x-pack-graph-explore]] +=== X-Pack Graph explore API + +[[java-rest-high-x-pack-graph-explore-execution]] +==== Initial request + +Graph queries are executed using the `explore()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/GraphDocumentationIT.java[x-pack-graph-explore-request] +-------------------------------------------------- +<1> In this example we seed the exploration with a query to find messages mentioning the mysterious `projectx` +<2> What we want to discover in these messages are the ids of `participants` in the communications and the md5 hashes +of any attached files. In each case, we want to find people or files that have had at least one document connecting them +to projectx. +<3> The next "hop" in the graph exploration is to find the people who have shared several messages with the people or files +discovered in the previous hop (the projectx conspirators). The `minDocCount` control is used here to ensure the people +discovered have had at least 5 communications with projectx entities. Note we could also supply a "guiding query" here e.g. a +date range to consider only recent communications but we pass null to consider all connections. +<4> Finally we call the graph explore API with the GraphExploreRequest object. + + +==== Response + +Graph responses consist of Vertex and Connection objects (aka "nodes" and "edges" respectively): + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/GraphDocumentationIT.java[x-pack-graph-explore-response] +-------------------------------------------------- +<1> Each Vertex is a unique term (a combination of fieldname and term value). The "hopDepth" property tells us at which point in the +requested exploration this term was first discovered. 
+<2> Each Connection is a pair of Vertex objects and includes a docCount property telling us how many times these two +Vertex terms have been sighted together + + +[[java-rest-high-x-pack-graph-expand-execution]] +==== Expanding a client-side Graph + +Typically once an application has rendered an initial GraphExploreResponse as a collection of vertices and connecting lines (graph visualization toolkits such as D3, sigma.js or Keylines help here) the next step a user may want to do is "expand". This involves finding new vertices that might be connected to the existing ones currently shown. + +To do this we use the same `explore` method but our request contains details about which vertices to expand from and which vertices to avoid re-discovering. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/GraphDocumentationIT.java[x-pack-graph-explore-expand] +-------------------------------------------------- +<1> Unlike the initial request we do not need to pass a starting query +<2> In the first hop which represents our "from" vertices we explicitly list the terms that we already have on-screen and want to expand by using the `addInclude` filter. +We can supply a boost for those terms that are considered more important to follow than others but here we select a common value of 1 for all. +<3> When defining the second hop which represents the "to" vertices we hope to discover we explicitly list the terms that we already know about using the `addExclude` filter + diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 96e93ba204c..b3de26e56bd 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -231,3 +231,11 @@ The Java High Level REST Client supports the following Watcher APIs: include::watcher/put-watch.asciidoc[] include::watcher/delete-watch.asciidoc[] + +== Graph APIs + +The Java High Level REST Client supports the following Graph APIs: + +* <> + +include::graph/explore.asciidoc[] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Connection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Connection.java deleted file mode 100644 index f3d92896449..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Connection.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.graph.action; - -import com.carrotsearch.hppc.ObjectIntHashMap; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.ToXContent.Params; -import org.elasticsearch.xpack.core.graph.action.Vertex.VertexId; - -import java.io.IOException; -import java.util.Map; - -/** - * A Connection links exactly two {@link Vertex} objects. The basis of a - * connection is one or more documents have been found that contain - * this pair of terms and the strength of the connection is recorded - * as a weight. 
- */ -public class Connection { - Vertex from; - Vertex to; - double weight; - long docCount; - - public Connection(Vertex from, Vertex to, double weight, long docCount) { - this.from = from; - this.to = to; - this.weight = weight; - this.docCount = docCount; - } - - void readFrom(StreamInput in, Map vertices) throws IOException { - from = vertices.get(new VertexId(in.readString(), in.readString())); - to = vertices.get(new VertexId(in.readString(), in.readString())); - weight = in.readDouble(); - docCount = in.readVLong(); - } - - Connection() { - } - - void writeTo(StreamOutput out) throws IOException { - out.writeString(from.getField()); - out.writeString(from.getTerm()); - out.writeString(to.getField()); - out.writeString(to.getTerm()); - out.writeDouble(weight); - out.writeVLong(docCount); - } - - public ConnectionId getId() { - return new ConnectionId(from.getId(), to.getId()); - } - - public Vertex getFrom() { - return from; - } - - public Vertex getTo() { - return to; - } - - /** - * @return a measure of the relative connectedness between a pair of {@link Vertex} objects - */ - public double getWeight() { - return weight; - } - - /** - * @return the number of documents in the sampled set that contained this - * pair of {@link Vertex} objects. - */ - public long getDocCount() { - return docCount; - } - - void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap vertexNumbers) throws IOException { - builder.field("source", vertexNumbers.get(from)); - builder.field("target", vertexNumbers.get(to)); - builder.field("weight", weight); - builder.field("doc_count", docCount); - } - - /** - * An identifier (implements hashcode and equals) that represents a - * unique key for a {@link Connection} - */ - public static class ConnectionId { - private final VertexId source; - private final VertexId target; - - public ConnectionId(VertexId source, VertexId target) { - this.source = source; - this.target = target; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - ConnectionId vertexId = (ConnectionId) o; - - if (source != null ? !source.equals(vertexId.source) : vertexId.source != null) - return false; - if (target != null ? !target.equals(vertexId.target) : vertexId.target != null) - return false; - - return true; - } - - @Override - public int hashCode() { - int result = source != null ? source.hashCode() : 0; - result = 31 * result + (target != null ? 
target.hashCode() : 0); - return result; - } - - public VertexId getSource() { - return source; - } - - public VertexId getTarget() { - return target; - } - - @Override - public String toString() { - return getSource() + "->" + getTarget(); - } - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java index 5503eb69255..e4fd8d04351 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.graph.action; import org.elasticsearch.action.Action; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; public class GraphExploreAction extends Action { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java index d5e756f78a2..37456f23464 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java @@ -11,6 +11,9 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; +import org.elasticsearch.protocol.xpack.graph.Hop; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 4eb136040e9..25f2511fbc0 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -24,6 +24,15 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.graph.Connection; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; +import org.elasticsearch.protocol.xpack.graph.Hop; +import org.elasticsearch.protocol.xpack.graph.Vertex; +import org.elasticsearch.protocol.xpack.graph.VertexRequest; +import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import 
org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; @@ -39,16 +48,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.core.graph.action.Connection; -import org.elasticsearch.xpack.core.graph.action.Connection.ConnectionId; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest.TermBoost; -import org.elasticsearch.xpack.core.graph.action.GraphExploreResponse; -import org.elasticsearch.xpack.core.graph.action.Hop; -import org.elasticsearch.xpack.core.graph.action.Vertex; -import org.elasticsearch.xpack.core.graph.action.Vertex.VertexId; -import org.elasticsearch.xpack.core.graph.action.VertexRequest; import java.util.ArrayList; import java.util.HashMap; diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 3f11d0c72bd..778eb261a07 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -12,14 +12,14 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.Hop; +import org.elasticsearch.protocol.xpack.graph.VertexRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.XPackClient; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest.TermBoost; -import org.elasticsearch.xpack.core.graph.action.Hop; -import org.elasticsearch.xpack.core.graph.action.VertexRequest; import org.elasticsearch.xpack.core.rest.XPackRestHandler; import java.io.IOException; diff --git a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 5bebef3d2d4..a58d8e8a8b0 100644 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -17,6 +17,11 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.ScriptQueryBuilder; import org.elasticsearch.license.LicenseService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; +import org.elasticsearch.protocol.xpack.graph.Hop; +import org.elasticsearch.protocol.xpack.graph.Vertex; +import org.elasticsearch.protocol.xpack.graph.VertexRequest; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import 
org.elasticsearch.script.ScriptType; @@ -24,12 +29,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreRequestBuilder; -import org.elasticsearch.xpack.core.graph.action.GraphExploreResponse; -import org.elasticsearch.xpack.core.graph.action.Hop; -import org.elasticsearch.xpack.core.graph.action.Vertex; -import org.elasticsearch.xpack.core.graph.action.VertexRequest; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index c388fd5627c..3cf2034cc74 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -25,9 +25,9 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; import java.util.ArrayList; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index cf9c09759ea..c0867875b01 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -46,12 +46,12 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java new file mode 100644 index 00000000000..455434f7ac4 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java @@ -0,0 
+1,229 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import com.carrotsearch.hppc.ObjectIntHashMap; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * A Connection links exactly two {@link Vertex} objects. The basis of a + * connection is one or more documents have been found that contain + * this pair of terms and the strength of the connection is recorded + * as a weight. + */ +public class Connection { + private Vertex from; + private Vertex to; + private double weight; + private long docCount; + + public Connection(Vertex from, Vertex to, double weight, long docCount) { + this.from = from; + this.to = to; + this.weight = weight; + this.docCount = docCount; + } + + public Connection(StreamInput in, Map vertices) throws IOException { + from = vertices.get(new VertexId(in.readString(), in.readString())); + to = vertices.get(new VertexId(in.readString(), in.readString())); + weight = in.readDouble(); + docCount = in.readVLong(); + } + + Connection() { + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(from.getField()); + out.writeString(from.getTerm()); + out.writeString(to.getField()); + out.writeString(to.getTerm()); + out.writeDouble(weight); + out.writeVLong(docCount); + } + + public ConnectionId getId() { + return new ConnectionId(from.getId(), to.getId()); + } + + public Vertex getFrom() { + return from; + } + + public Vertex getTo() { + return to; + } + + /** + * @return a measure of the relative connectedness between a pair of {@link Vertex} objects + */ + public double getWeight() { + return weight; + } + + /** + * @return the number of documents in the sampled set that contained this + * pair of {@link Vertex} objects. 
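+     * (This is the value rendered as {@code doc_count} in the JSON response
+     * and surfaced in the documentation example as "evidenced by N docs".)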
+ */ + public long getDocCount() { + return docCount; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Connection other = (Connection) obj; + return docCount == other.docCount && + weight == other.weight && + Objects.equals(to, other.to) && + Objects.equals(from, other.from); + } + + @Override + public int hashCode() { + return Objects.hash(docCount, weight, from, to); + } + + + private static final ParseField SOURCE = new ParseField("source"); + private static final ParseField TARGET = new ParseField("target"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DOC_COUNT = new ParseField("doc_count"); + + + void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap vertexNumbers) throws IOException { + builder.field(SOURCE.getPreferredName(), vertexNumbers.get(from)); + builder.field(TARGET.getPreferredName(), vertexNumbers.get(to)); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DOC_COUNT.getPreferredName(), docCount); + } + + //When deserializing from XContent we need to wait for all vertices to be loaded before + // Connection objects can be created that reference them. This class provides the interim + // state for connections. + static class UnresolvedConnection { + int fromIndex; + int toIndex; + double weight; + long docCount; + UnresolvedConnection(int fromIndex, int toIndex, double weight, long docCount) { + super(); + this.fromIndex = fromIndex; + this.toIndex = toIndex; + this.weight = weight; + this.docCount = docCount; + } + public Connection resolve(List vertices) { + return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "ConnectionParser", true, + args -> { + int source = (Integer) args[0]; + int target = (Integer) args[1]; + double weight = (Double) args[2]; + long docCount = (Long) args[3]; + return new UnresolvedConnection(source, target, weight, docCount); + }); + + static { + PARSER.declareInt(constructorArg(), SOURCE); + PARSER.declareInt(constructorArg(), TARGET); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareLong(constructorArg(), DOC_COUNT); + } + static UnresolvedConnection fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + } + + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Connection} + */ + public static class ConnectionId { + private final VertexId source; + private final VertexId target; + + public ConnectionId(VertexId source, VertexId target) { + this.source = source; + this.target = target; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ConnectionId vertexId = (ConnectionId) o; + + if (source != null ? !source.equals(vertexId.source) : vertexId.source != null) + return false; + if (target != null ? !target.equals(vertexId.target) : vertexId.target != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = source != null ? source.hashCode() : 0; + result = 31 * result + (target != null ? 
target.hashCode() : 0); + return result; + } + + public VertexId getSource() { + return source; + } + + public VertexId getTarget() { + return target; + } + + @Override + public String toString() { + return getSource() + "->" + getTarget(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java similarity index 67% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequest.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java index e44f9f76037..495ea5fd28a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequest.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java @@ -1,9 +1,22 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.graph.action; +package org.elasticsearch.protocol.xpack.graph; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -14,6 +27,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; @@ -29,7 +44,7 @@ import java.util.List; * Holds the criteria required to guide the exploration of connected terms which * can be returned as a graph. */ -public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable { +public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject { public static final String NO_HOPS_ERROR_MESSAGE = "Graph explore request must have at least one hop"; public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest"; @@ -51,8 +66,8 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest } /** - * Constructs a new graph request to run against the provided - * indices. 
No indices means it will run against all indices. + * Constructs a new graph request to run against the provided indices. No + * indices means it will run against all indices. */ public GraphExploreRequest(String... indices) { this.indices = indices; @@ -75,7 +90,6 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest return this.indices; } - @Override public GraphExploreRequest indices(String... indices) { this.indices = indices; @@ -123,10 +137,14 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest } /** - * Graph exploration can be set to timeout after the given period. Search operations involved in - * each hop are limited to the remaining time available but can still overrun due to the nature - * of their "best efforts" timeout support. When a timeout occurs partial results are returned. - * @param timeout a {@link TimeValue} object which determines the maximum length of time to spend exploring + * Graph exploration can be set to timeout after the given period. Search + * operations involved in each hop are limited to the remaining time + * available but can still overrun due to the nature of their "best efforts" + * timeout support. When a timeout occurs partial results are returned. + * + * @param timeout + * a {@link TimeValue} object which determines the maximum length + * of time to spend exploring */ public GraphExploreRequest timeout(TimeValue timeout) { if (timeout == null) { @@ -153,10 +171,10 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest sampleSize = in.readInt(); sampleDiversityField = in.readOptionalString(); maxDocsPerDiversityValue = in.readInt(); - + useSignificance = in.readBoolean(); returnDetailedInfo = in.readBoolean(); - + int numHops = in.readInt(); Hop parentHop = null; for (int i = 0; i < numHops; i++) { @@ -180,7 +198,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest out.writeInt(sampleSize); out.writeOptionalString(sampleDiversityField); out.writeInt(maxDocsPerDiversityValue); - + out.writeBoolean(useSignificance); out.writeBoolean(returnDetailedInfo); out.writeInt(hops.size()); @@ -196,18 +214,21 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest } /** - * The number of top-matching documents that are considered during each hop (default is - * {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE} - * Very small values (less than 50) may not provide sufficient weight-of-evidence to identify - * significant connections between terms. - *

<p> Very large values (many thousands) are not recommended with loosely defined queries (fuzzy queries or those
-     * with many OR clauses).
-     * This is because any useful signals in the best documents are diluted with irrelevant noise from low-quality matches.
-     * Performance is also typically better with smaller samples as there are less look-ups required for background frequencies
-     * of terms found in the documents
+     * The number of top-matching documents that are considered during each hop
+     * (default is {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE}
+     * Very small values (less than 50) may not provide sufficient
+     * weight-of-evidence to identify significant connections between terms.
+     * <p>
+     * Very large values (many thousands) are not recommended with loosely
+     * defined queries (fuzzy queries or those with many OR clauses). This is
+     * because any useful signals in the best documents are diluted with
+     * irrelevant noise from low-quality matches. Performance is also typically
+     * better with smaller samples as there are fewer look-ups required for
+     * background frequencies of terms found in the documents
+     * <p>
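+     * (For example, a hypothetical {@code request.sampleSize(500)} call would
+     * consider only the 500 best-matching documents on each shard in every hop.)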

* - * @param maxNumberOfDocsPerHop shard-level sample size in documents + * @param maxNumberOfDocsPerHop + * shard-level sample size in documents */ public void sampleSize(int maxNumberOfDocsPerHop) { sampleSize = maxNumberOfDocsPerHop; @@ -242,10 +263,13 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest } /** - * Controls the choice of algorithm used to select interesting terms. The default - * value is true which means terms are selected based on significance (see the {@link SignificantTerms} - * aggregation) rather than popularity (using the {@link TermsAggregator}). - * @param value true if the significant_terms algorithm should be used. + * Controls the choice of algorithm used to select interesting terms. The + * default value is true which means terms are selected based on + * significance (see the {@link SignificantTerms} aggregation) rather than + * popularity (using the {@link TermsAggregator}). + * + * @param value + * true if the significant_terms algorithm should be used. */ public void useSignificance(boolean value) { this.useSignificance = value; @@ -254,32 +278,37 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest public boolean useSignificance() { return useSignificance; } - + /** - * Return detailed information about vertex frequencies as part of JSON results - defaults to false - * @param value true if detailed information is required in JSON responses + * Return detailed information about vertex frequencies as part of JSON + * results - defaults to false + * + * @param value + * true if detailed information is required in JSON responses */ public void returnDetailedInfo(boolean value) { this.returnDetailedInfo = value; - } + } public boolean returnDetailedInfo() { return returnDetailedInfo; } - /** - * Add a stage in the graph exploration. Each hop represents a stage of - * querying elasticsearch to identify terms which can then be connnected - * to other terms in a subsequent hop. - * @param guidingQuery optional choice of query which influences which documents - * are considered in this stage - * @return a {@link Hop} object that holds settings for a stage in the graph exploration + * Add a stage in the graph exploration. Each hop represents a stage of + * querying elasticsearch to identify terms which can then be connnected to + * other terms in a subsequent hop. 
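+     * <p>
+     * A minimal sketch of building a two-hop exploration (index and field
+     * names here are hypothetical, for illustration only):
+     * <pre>
+     * GraphExploreRequest request = new GraphExploreRequest("emails");
+     * Hop hop1 = request.createNextHop(new TermQueryBuilder("body", "fraud"));
+     * hop1.addVertexRequest("sender");
+     * Hop hop2 = request.createNextHop(null); // null = consider all documents
+     * hop2.addVertexRequest("recipient");
+     * </pre>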
+ * + * @param guidingQuery + * optional choice of query which influences which documents are + * considered in this stage + * @return a {@link Hop} object that holds settings for a stage in the graph + * exploration */ public Hop createNextHop(QueryBuilder guidingQuery) { Hop parent = null; if (hops.size() > 0) { - parent = hops.get(hops.size() - 1); + parent = hops.get(hops.size() - 1); } Hop newHop = new Hop(parent); newHop.guidingQuery = guidingQuery; @@ -330,6 +359,43 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest } } - + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startObject("controls"); + { + if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) { + builder.field("sample_size", sampleSize); + } + if (sampleDiversityField != null) { + builder.startObject("sample_diversity"); + builder.field("field", sampleDiversityField); + builder.field("max_docs_per_value", maxDocsPerDiversityValue); + builder.endObject(); + } + builder.field("use_significance", useSignificance); + if (returnDetailedInfo) { + builder.field("return_detailed_stats", returnDetailedInfo); + } + } + builder.endObject(); + + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.startObject("connections"); + } + hop.toXContent(builder, params); + } + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.endObject(); + } + } + builder.endObject(); + + return builder; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java similarity index 55% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreResponse.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java index 3d6c5f5aaca..baaaedf0163 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreResponse.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -1,28 +1,49 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
*/ -package org.elasticsearch.xpack.core.graph.action; +package org.elasticsearch.protocol.xpack.graph; import com.carrotsearch.hppc.ObjectIntHashMap; + import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.graph.action.Connection.ConnectionId; -import org.elasticsearch.xpack.core.graph.action.Vertex.VertexId; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; +import org.elasticsearch.protocol.xpack.graph.Connection.UnresolvedConnection; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; import java.io.IOException; import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects @@ -100,8 +121,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb connections = new HashMap<>(); for (int i = 0; i < size; i++) { - Connection e = new Connection(); - e.readFrom(in, vertices); + Connection e = new Connection(in, vertices); connections.put(e.getId(), e); } @@ -146,23 +166,19 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb } - static final class Fields { - static final String TOOK = "took"; - static final String TIMED_OUT = "timed_out"; - static final String INDICES = "_indices"; - static final String FAILURES = "failures"; - static final String VERTICES = "vertices"; - static final String CONNECTIONS = "connections"; - - } + private static final ParseField TOOK = new ParseField("took"); + private static final ParseField TIMED_OUT = new ParseField("timed_out"); + private static final ParseField VERTICES = new ParseField("vertices"); + private static final ParseField CONNECTIONS = new ParseField("connections"); + private static final ParseField FAILURES = new ParseField("failures"); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Fields.TOOK, tookInMillis); - builder.field(Fields.TIMED_OUT, timedOut); + builder.field(TOOK.getPreferredName(), tookInMillis); + builder.field(TIMED_OUT.getPreferredName(), timedOut); - builder.startArray(Fields.FAILURES); + builder.startArray(FAILURES.getPreferredName()); if (shardFailures != null) { for (ShardOperationFailedException shardFailure : shardFailures) { builder.startObject(); @@ -178,7 +194,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb extraParams.put(RETURN_DETAILED_INFO_PARAM, Boolean.toString(returnDetailedInfo)); Params extendedParams = new DelegatingMapParams(extraParams, params); - builder.startArray(Fields.VERTICES); + 
builder.startArray(VERTICES.getPreferredName()); for (Vertex vertex : vertices.values()) { builder.startObject(); vertexNumbers.put(vertex, vertexNumbers.size()); @@ -187,7 +203,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb } builder.endArray(); - builder.startArray(Fields.CONNECTIONS); + builder.startArray(CONNECTIONS.getPreferredName()); for (Connection connection : connections.values()) { builder.startObject(); connection.toXContent(builder, extendedParams, vertexNumbers); @@ -198,5 +214,48 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb return builder; } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "GraphExploreResponsenParser", true, + args -> { + GraphExploreResponse result = new GraphExploreResponse(); + result.vertices = new HashMap<>(); + result.connections = new HashMap<>(); + + result.tookInMillis = (Long) args[0]; + result.timedOut = (Boolean) args[1]; + + @SuppressWarnings("unchecked") + List vertices = (List) args[2]; + @SuppressWarnings("unchecked") + List unresolvedConnections = (List) args[3]; + @SuppressWarnings("unchecked") + List failures = (List) args[4]; + for (Vertex vertex : vertices) { + // reverse-engineer if detailed stats were requested - + // mainly here for testing framework's equality tests + result.returnDetailedInfo = result.returnDetailedInfo || vertex.getFg() > 0; + result.vertices.put(vertex.getId(), vertex); + } + for (UnresolvedConnection unresolvedConnection : unresolvedConnections) { + Connection resolvedConnection = unresolvedConnection.resolve(vertices); + result.connections.put(resolvedConnection.getId(), resolvedConnection); + } + if (failures.size() > 0) { + result.shardFailures = failures.toArray(new ShardSearchFailure[failures.size()]); + } + return result; + }); + + static { + PARSER.declareLong(constructorArg(), TOOK); + PARSER.declareBoolean(constructorArg(), TIMED_OUT); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Vertex.fromXContent(p), VERTICES); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> UnresolvedConnection.fromXContent(p), CONNECTIONS); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ShardSearchFailure.fromXContent(p), FAILURES); + } + + public static GraphExploreResponse fromXContext(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java similarity index 73% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java index 8ba7005f15f..70ec61067f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java @@ -1,14 +1,29 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.graph.action; +package org.elasticsearch.protocol.xpack.graph; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -41,7 +56,7 @@ import java.util.List; *

* */ -public class Hop { +public class Hop implements ToXContentFragment{ final Hop parentHop; List vertices = null; QueryBuilder guidingQuery = null; @@ -139,4 +154,20 @@ public class Hop { public VertexRequest getVertexRequest(int requestNumber) { return getEffectiveVertexRequests().get(requestNumber); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (guidingQuery != null) { + builder.field("query"); + guidingQuery.toXContent(builder, params); + } + if(vertices != null && vertices.size()>0) { + builder.startArray("vertices"); + for (VertexRequest vertexRequest : vertices) { + vertexRequest.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Vertex.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java similarity index 59% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Vertex.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java index c85d6d7dfd6..cfc26f44fac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Vertex.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java @@ -1,16 +1,36 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
*/ -package org.elasticsearch.xpack.core.graph.action; +package org.elasticsearch.protocol.xpack.graph; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * A vertex in a graph response represents a single term (a field and value pair) @@ -27,6 +47,13 @@ public class Vertex implements ToXContentFragment { private final int depth; private final long bg; private long fg; + private static final ParseField FIELD = new ParseField("field"); + private static final ParseField TERM = new ParseField("term"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DEPTH = new ParseField("depth"); + private static final ParseField FG = new ParseField("fg"); + private static final ParseField BG = new ParseField("bg"); + public Vertex(String field, String term, double weight, int depth, long bg, long fg) { super(); @@ -50,20 +77,72 @@ public class Vertex implements ToXContentFragment { out.writeVLong(bg); out.writeVLong(fg); } + + @Override + public int hashCode() { + return Objects.hash(field, term, weight, depth, bg, fg); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Vertex other = (Vertex) obj; + return depth == other.depth && + weight == other.weight && + bg == other.bg && + fg == other.fg && + Objects.equals(field, other.field) && + Objects.equals(term, other.term); + + } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { boolean returnDetailedInfo = params.paramAsBoolean(GraphExploreResponse.RETURN_DETAILED_INFO_PARAM, false); - builder.field("field", field); - builder.field("term", term); - builder.field("weight", weight); - builder.field("depth", depth); + builder.field(FIELD.getPreferredName(), field); + builder.field(TERM.getPreferredName(), term); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DEPTH.getPreferredName(), depth); if (returnDetailedInfo) { - builder.field("fg", fg); - builder.field("bg", bg); + builder.field(FG.getPreferredName(), fg); + builder.field(BG.getPreferredName(), bg); } return builder; } + + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "VertexParser", true, + args -> { + String field = (String) args[0]; + String term = (String) args[1]; + double weight = (Double) args[2]; + int depth = (Integer) args[3]; + Long optionalBg = (Long) args[4]; + Long optionalFg = (Long) args[5]; + long bg = optionalBg == null ? 0 : optionalBg; + long fg = optionalFg == null ? 
0 : optionalFg; + return new Vertex(field, term, weight, depth, bg, fg); + }); + + static { + PARSER.declareString(constructorArg(), FIELD); + PARSER.declareString(constructorArg(), TERM); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareInt(constructorArg(), DEPTH); + PARSER.declareLong(optionalConstructorArg(), BG); + PARSER.declareLong(optionalConstructorArg(), FG); + } + + static Vertex fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + /** * @return a {@link VertexId} object that uniquely identifies this Vertex diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/VertexRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java similarity index 70% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/VertexRequest.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java index f7f7dec4b17..116497fe230 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/VertexRequest.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java @@ -1,13 +1,28 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.graph.action; +package org.elasticsearch.protocol.xpack.graph; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest.TermBoost; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; import java.io.IOException; import java.util.HashMap; @@ -21,9 +36,10 @@ import java.util.Set; * inclusion list to filter which terms are considered. 
* */ -public class VertexRequest { +public class VertexRequest implements ToXContentObject { private String fieldName; - private int size = 5; + private int size = DEFAULT_SIZE; + public static final int DEFAULT_SIZE = 5; private Map includes; private Set excludes; public static final int DEFAULT_MIN_DOC_COUNT = 3; @@ -195,4 +211,38 @@ public class VertexRequest { return this; } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("field", fieldName); + if (size != DEFAULT_SIZE) { + builder.field("size", size); + } + if (minDocCount != DEFAULT_MIN_DOC_COUNT) { + builder.field("min_doc_count", minDocCount); + } + if (shardMinDocCount != DEFAULT_SHARD_MIN_DOC_COUNT) { + builder.field("shard_min_doc_count", shardMinDocCount); + } + if(includes!=null) { + builder.startArray("include"); + for (TermBoost tb : includes.values()) { + builder.startObject(); + builder.field("term", tb.term); + builder.field("boost", tb.boost); + builder.endObject(); + } + builder.endArray(); + } + if(excludes!=null) { + builder.startArray("exclude"); + for (String value : excludes) { + builder.value(value); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + } diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java new file mode 100644 index 00000000000..f4f666074a1 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Request and Response objects for the default distribution's Graph + * APIs. + */ +package org.elasticsearch.protocol.xpack.graph; diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java new file mode 100644 index 00000000000..74b43458178 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class GraphExploreResponseTests extends AbstractXContentTestCase< GraphExploreResponse> { + + @Override + protected GraphExploreResponse createTestInstance() { + return createInstance(0); + } + private static GraphExploreResponse createInstance(int numFailures) { + int numItems = randomIntBetween(4, 128); + boolean timedOut = randomBoolean(); + boolean showDetails = randomBoolean(); + long overallTookInMillis = randomNonNegativeLong(); + Map vertices = new HashMap<>(); + Map connections = new HashMap<>(); + ShardOperationFailedException [] failures = new ShardOperationFailedException [numFailures]; + for (int i = 0; i < failures.length; i++) { + failures[i] = new ShardSearchFailure(new ElasticsearchException("an error")); + } + + //Create random set of vertices + for (int i = 0; i < numItems; i++) { + Vertex v = new Vertex("field1", randomAlphaOfLength(5), randomDouble(), 0, + showDetails?randomIntBetween(100, 200):0, + showDetails?randomIntBetween(1, 100):0); + vertices.put(v.getId(), v); + } + + //Wire up half the vertices randomly + Vertex[] vs = vertices.values().toArray(new Vertex[vertices.size()]); + for (int i = 0; i < numItems/2; i++) { + Vertex v1 = vs[randomIntBetween(0, vs.length-1)]; + Vertex v2 = vs[randomIntBetween(0, vs.length-1)]; + if(v1 != v2) { + Connection conn = new Connection(v1, v2, randomDouble(), randomLongBetween(1, 10)); + connections.put(conn.getId(), conn); + } + } + return new GraphExploreResponse(overallTookInMillis, timedOut, failures, vertices, connections, showDetails); + } + + + private static GraphExploreResponse createTestInstanceWithFailures() { + return createInstance(randomIntBetween(1, 128)); + } + + @Override + protected GraphExploreResponse doParseInstance(XContentParser parser) throws IOException { + return GraphExploreResponse.fromXContext(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + protected Predicate getRandomFieldsExcludeFilterWhenResultHasErrors() { + return field -> field.startsWith("responses"); + } + + @Override + protected void assertEqualInstances( GraphExploreResponse expectedInstance, GraphExploreResponse newInstance) { + assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook())); + assertThat(newInstance.isTimedOut(), equalTo(expectedInstance.isTimedOut())); + + Connection[] newConns = newInstance.getConnections().toArray(new Connection[0]); + Connection[] expectedConns = 
expectedInstance.getConnections().toArray(new Connection[0]); + assertArrayEquals(expectedConns, newConns); + + Vertex[] newVertices = newInstance.getVertices().toArray(new Vertex[0]); + Vertex[] expectedVertices = expectedInstance.getVertices().toArray(new Vertex[0]); + assertArrayEquals(expectedVertices, newVertices); + + ShardOperationFailedException[] newFailures = newInstance.getShardFailures(); + ShardOperationFailedException[] expectedFailures = expectedInstance.getShardFailures(); + assertEquals(expectedFailures.length, newFailures.length); + + } + + /** + * Test parsing {@link GraphExploreResponse} with inner failures as they don't support asserting on xcontent equivalence, given + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier< GraphExploreResponse> instanceSupplier = GraphExploreResponseTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. + boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, + getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); + } + +} From 3f91bbfa6b408c2e72471c39cdeef4cbfe30eef2 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 21 Aug 2018 08:08:26 -0500 Subject: [PATCH 03/48] [ML] Allowing _close to accept body payloads for options (#32989) (#33000) --- .../xpack/ml/rest/job/RestCloseJobAction.java | 25 ++++---- .../xpack/ml/integration/CloseJobsIT.java | 57 +++++++++++++++++++ 2 files changed, 72 insertions(+), 10 deletions(-) create mode 100644 x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CloseJobsIT.java diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java index fc0638048ce..ae6257d5385 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java @@ -12,10 +12,10 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.CloseJobAction.Request; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -34,16 +34,21 @@ public class RestCloseJobAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - Request request = new 
Request(restRequest.param(Job.ID.getPreferredName())); - if (restRequest.hasParam(Request.TIMEOUT.getPreferredName())) { - request.setCloseTimeout(TimeValue.parseTimeValue( + Request request; + if (restRequest.hasContentOrSourceParam()) { + request = Request.parseRequest(restRequest.param(Job.ID.getPreferredName()), restRequest.contentParser()); + } else { + request = new Request(restRequest.param(Job.ID.getPreferredName())); + if (restRequest.hasParam(Request.TIMEOUT.getPreferredName())) { + request.setCloseTimeout(TimeValue.parseTimeValue( restRequest.param(Request.TIMEOUT.getPreferredName()), Request.TIMEOUT.getPreferredName())); - } - if (restRequest.hasParam(Request.FORCE.getPreferredName())) { - request.setForce(restRequest.paramAsBoolean(Request.FORCE.getPreferredName(), request.isForce())); - } - if (restRequest.hasParam(Request.ALLOW_NO_JOBS.getPreferredName())) { - request.setAllowNoJobs(restRequest.paramAsBoolean(Request.ALLOW_NO_JOBS.getPreferredName(), request.allowNoJobs())); + } + if (restRequest.hasParam(Request.FORCE.getPreferredName())) { + request.setForce(restRequest.paramAsBoolean(Request.FORCE.getPreferredName(), request.isForce())); + } + if (restRequest.hasParam(Request.ALLOW_NO_JOBS.getPreferredName())) { + request.setAllowNoJobs(restRequest.paramAsBoolean(Request.ALLOW_NO_JOBS.getPreferredName(), request.allowNoJobs())); + } } return channel -> client.execute(CloseJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CloseJobsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CloseJobsIT.java new file mode 100644 index 00000000000..95ec9728842 --- /dev/null +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CloseJobsIT.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.equalTo; + +public class CloseJobsIT extends ESRestTestCase { + + private static final String BASIC_AUTH_VALUE = basicAuthHeaderValue("x_pack_rest_user", + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING); + + @Override + protected Settings restClientSettings() { + return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE).build(); + } + + public void testCloseJobsAcceptsOptionsFromPayload() throws Exception { + + Request request = new Request("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + "job-that-doesnot-exist*" + "/_close"); + request.setJsonEntity("{\"allow_no_jobs\":false}"); + request.setOptions(RequestOptions.DEFAULT); + ResponseException exception = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + + request.setJsonEntity("{\"allow_no_jobs\":true}"); + Response response = client().performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + String responseAsString = responseEntityToString(response); + assertEquals(responseAsString, "{\"closed\":true}"); + } + + private static String responseEntityToString(Response response) throws IOException { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { + return reader.lines().collect(Collectors.joining("\n")); + } + } +} From 1b583978e9659de9ba6506ee6c4bbadff174ef65 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 21 Aug 2018 16:20:00 +0300 Subject: [PATCH 04/48] [DOCS] Add FIPS 140-2 documentation (#32928) * Add relevant documentation for FIPS 140-2 compliance. * Introduce `fips_mode` setting. * Discuss necessary configuration for FIPS 140-2 * Discuss introduced limitations by FIPS 140-2 --- .../settings/security-settings.asciidoc | 16 ++- .../docs/en/security/configuring-es.asciidoc | 4 + .../en/security/fips-140-compliance.asciidoc | 128 ++++++++++++++++++ 3 files changed, 147 insertions(+), 1 deletion(-) create mode 100644 x-pack/docs/en/security/fips-140-compliance.asciidoc diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index 6a7742c4c00..74deee3473f 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -46,6 +46,9 @@ settings for the ad1 realm: `xpack.security.authc.realms.ad1.*`. The API already omits all `ssl` settings, `bind_dn`, and `bind_password` due to the sensitive nature of the information. 
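[Editor's illustration; not part of the patch. The hunk just below documents the new `xpack.security.fips_mode.enabled` setting, which is enabled with a single line in `elasticsearch.yml`; only this one key is introduced by the commit, and any surrounding node configuration is up to the deployment:]

    # elasticsearch.yml -- opt this node into FIPS 140-2 mode
    xpack.security.fips_mode.enabled: true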
+`xpack.security.fips_mode.enabled`::
+Enables FIPS mode of operation. Set this to `true` if you run this {es} instance
+in a FIPS 140-2 enabled JVM. For more information, see <>. Defaults to `false`.
+
 [float]
 [[password-security-settings]]
 ==== Default password security settings
@@ -1124,7 +1127,12 @@ settings such as those for HTTP or Transport.
 
 `xpack.ssl.supported_protocols`::
 Supported protocols with versions. Valid protocols: `SSLv2Hello`,
 `SSLv3`, `TLSv1`, `TLSv1.1`, `TLSv1.2`. Defaults to `TLSv1.2`, `TLSv1.1`,
-`TLSv1`.
+`TLSv1`.
++
+--
+NOTE: If `xpack.security.fips_mode.enabled` is `true`, you cannot use `SSLv2Hello`
+or `SSLv3`. See <>.
+--
 
 `xpack.ssl.client_authentication`::
 Controls the server's behavior in regard to requesting a certificate
@@ -1223,6 +1231,9 @@ Password to the truststore.
 `xpack.ssl.truststore.secure_password` (<>)::
 Password to the truststore.
 
+WARNING: If `xpack.security.fips_mode.enabled` is `true`, you cannot use Java
+keystore files. See <>.
+
 [float]
 ===== PKCS#12 files
 
@@ -1261,6 +1272,9 @@ Password to the truststore.
 `xpack.ssl.truststore.secure_password` (<>)::
 Password to the truststore.
 
+WARNING: If `xpack.security.fips_mode.enabled` is `true`, you cannot use PKCS#12
+keystore files. See <>.
+
 [[pkcs12-truststore-note]]
 [NOTE]
 Storing trusted certificates in a PKCS#12 file, although supported, is
diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc
index a13547263a5..47d580491c1 100644
--- a/x-pack/docs/en/security/configuring-es.asciidoc
+++ b/x-pack/docs/en/security/configuring-es.asciidoc
@@ -27,6 +27,9 @@ https://www.elastic.co/subscriptions and
 your cluster.
 If you are using a trial license, the default value is `false`.
 For more information, see {ref}/security-settings.html[Security Settings in {es}].
 
+. If you plan to run {es} in a Federal Information Processing Standard (FIPS)
+140-2 enabled JVM, see <>.
+
 . Configure Transport Layer Security (TLS/SSL) for internode-communication.
 +
 --
@@ -145,5 +148,6 @@ include::authentication/configuring-pki-realm.asciidoc[]
 include::authentication/configuring-saml-realm.asciidoc[]
 :edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authentication/configuring-kerberos-realm.asciidoc
 include::authentication/configuring-kerberos-realm.asciidoc[]
+include::fips-140-compliance.asciidoc[]
 include::{es-repo-dir}/settings/security-settings.asciidoc[]
 include::{es-repo-dir}/settings/audit-settings.asciidoc[]
diff --git a/x-pack/docs/en/security/fips-140-compliance.asciidoc b/x-pack/docs/en/security/fips-140-compliance.asciidoc
new file mode 100644
index 00000000000..ceb605c2e2d
--- /dev/null
+++ b/x-pack/docs/en/security/fips-140-compliance.asciidoc
@@ -0,0 +1,128 @@
+[role="xpack"]
+[[fips-140-compliance]]
+=== FIPS 140-2
+
+The Federal Information Processing Standard (FIPS) Publication 140-2 (FIPS PUB
+140-2), titled "Security Requirements for Cryptographic Modules", is a U.S.
+government computer security standard used to approve cryptographic modules.
+{es} offers a FIPS 140-2 compliant mode and as such can run in a FIPS 140-2
+enabled JVM. To run {es} in FIPS mode, you must set
+`xpack.security.fips_mode.enabled` to `true` in `elasticsearch.yml`.
+
+For {es}, FIPS 140-2 compliance is ensured by:
+
+- Using FIPS approved / NIST recommended cryptographic algorithms.
+- Delegating the implementation of these cryptographic algorithms to a NIST
+  validated cryptographic module (available via the Java Security Provider
+  in use in the JVM).
+- Allowing the configuration of {es} in a FIPS 140-2 compliant manner, as
+  documented below.
+
+[float]
+=== Upgrade considerations
+
+If you plan to upgrade your existing cluster to a version that can be run in
+a FIPS 140-2 enabled JVM, the suggested approach is to first perform a rolling
+upgrade to the new version in your existing JVM and perform all necessary
+configuration changes in preparation for running in FIPS mode. You can then
+perform a rolling restart of the nodes, this time starting each node in the FIPS
+140-2 JVM. This will allow {es} to take care of a couple of things automatically for you:
+
+- <> will be upgraded to the latest format version as
+  previous format versions cannot be loaded in a FIPS 140-2 JVM.
+- Self-generated trial licenses will be upgraded to the latest format that
+  is compliant with FIPS 140-2.
+
+If you are on an appropriate license level (Platinum), you can elect to perform
+a rolling upgrade while at the same time running each upgraded node in a
+FIPS 140-2 JVM. In this case, you would also need to regenerate your
+`elasticsearch.keystore` and migrate all secure settings to it, in addition to the
+necessary configuration changes outlined below, before starting each node.
+
+[float]
+=== Configuring {es} for FIPS 140-2
+
+Apart from setting `xpack.security.fips_mode.enabled`, a number of security-related
+settings need to be configured accordingly in order to be compliant
+and able to run {es} successfully in a FIPS 140-2 enabled JVM.
+
+[float]
+==== TLS
+
+SSLv2 and SSLv3 are not allowed by FIPS 140-2, so `SSLv2Hello` and `SSLv3` cannot
+be used for <>.
+
+NOTE: The use of TLS ciphers is mainly governed by the relevant crypto module
+(the FIPS Approved Security Provider that your JVM uses). All the ciphers that
+are configured by default in {es} are FIPS 140-2 compliant and as such can be
+used in a FIPS 140-2 JVM (see <>).
+
+[float]
+==== TLS Keystores and keys
+
+Keystores can be used in a number of <> in order to
+conveniently store key and trust material. Neither `JKS` nor `PKCS#12` keystores
+can be used in a FIPS 140-2 enabled JVM, however, so you must refrain from using
+these keystores. Your FIPS 140-2 provider may provide a compliant keystore that
+can be used, or you can use PEM encoded files. To use PEM encoded key material,
+you can use the relevant `\*.key` and `*.certificate` configuration
+options, and for trust material you can use `*.certificate_authorities`.
+
+
+FIPS 140-2 compliance dictates that the length of the public keys used for TLS
+must correspond to the strength of the symmetric key algorithm in use in TLS.
+Depending on the value of <> that
+you select to use, the TLS keys must have a corresponding length according to
+the following table:
+
+[[comparable-key-strength]]
+.Comparable key strengths
+|=======================
+| Symmetric Key Algorithm | RSA key length | ECC key length
+| `3DES` | 2048 | 224-255
+| `AES-128` | 3072 | 256-383
+| `AES-256` | 15360 | 512+
+|=======================
+
+[float]
+==== Password Hashing
+
+{es} offers a number of algorithms for securely hashing credentials in memory and
+on disk. However, only the `PBKDF2` family of algorithms is compliant with FIPS
+140-2 for password hashing. You must set the `cache.hash_algo` realm settings
+and the `xpack.security.authc.password_hashing.algorithm` setting to one of the
+available `PBKDF2` values.
+See <>.
+
+Password hashing configuration changes are not retroactive, so the stored hashed
+credentials of existing users of the file and native realms will not be updated
+on disk.
+Authentication will still work, but in order to ensure FIPS 140-2 compliance,
+you would need to recreate users or change their password using the
+<> CLI tool for the file realm and the
+<> for the native realm.
+
+The user cache will be emptied upon node restart, so any existing hashes using
+non-compliant algorithms will be discarded and the new ones will be created
+using the compliant `PBKDF2` algorithm you have selected.
+
+[float]
+=== Limitations
+
+Due to the limitations that FIPS 140-2 compliance enforces, a small number of
+features are not available while running in FIPS mode. The list is as follows:
+
+* Azure Classic Discovery Plugin
+* Ingest Attachment Plugin
+* The {ref}/certutil.html[`elasticsearch-certutil`] tool. However,
+  `elasticsearch-certutil` can still be used in a non-FIPS 140-2
+  enabled JVM (pointing the `JAVA_HOME` environment variable to a different Java
+  installation) in order to generate the keys and certificates that
+  can later be used in the FIPS 140-2 enabled JVM.
+* The `elasticsearch-plugin` tool. Accordingly, `elasticsearch-plugin` can be
+  used with a different (non-FIPS 140-2 enabled) Java installation if
+  available.
+* The SQL CLI client cannot run in a FIPS 140-2 enabled JVM while using
+  TLS for transport security or PKI for client authentication.
+* The SAML Realm cannot decrypt and consume encrypted Assertions or encrypted
+  attributes in Attribute Statements from the SAML IdP.
\ No newline at end of file

From 5b446b81efd500fcfef424d871a972c12e853ece Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Tue, 21 Aug 2018 16:53:59 +0200
Subject: [PATCH 05/48] Reenable MonitoringIT#testMonitoringService.

When discussing this test, it made little sense that testMonitoringService
would fail but not testMonitoringBulk given their similarity, so we agreed
to enable it again.

Relates #29880
---
 .../elasticsearch/xpack/monitoring/integration/MonitoringIT.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java
index efc32fccb3d..0ee5f85bfad 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java
@@ -186,7 +186,6 @@ public class MonitoringIT extends ESSingleNodeTestCase {
      * This test waits for the monitoring service to collect monitoring documents and then checks that all expected documents
      * have been indexed with the expected information.
      */
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29880")
     public void testMonitoringService() throws Exception {
         final boolean createAPMIndex = randomBoolean();
         final String indexName = createAPMIndex ? "apm-2017.11.06" : "books";

From bdfcc326d7ee351390fb26f928261926c39ae6ff Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Tue, 21 Aug 2018 11:02:25 -0400
Subject: [PATCH 06/48] Enable avoiding mmap bootstrap check (#32421)

The maximum map count bootstrap check can be a hindrance to users that
do not own the underlying platform on which they are executing
Elasticsearch. This is because addressing it requires tuning the kernel
and a platform provider might not allow this, especially on shared
infrastructure. However, this bootstrap check is not needed if mmapfs is
not in use. Today we do not have a way for the user to communicate that
they are not going to use mmapfs. This commit therefore adds a setting
that enables the user to disallow mmapfs. When mmapfs is disallowed, the
maximum map count bootstrap check is not enforced. Additionally, we fall
back to a different default index store and prevent the explicit use of
mmapfs for an index.
---
 docs/reference/index-modules/store.asciidoc   |   7 ++
 .../reference/setup/bootstrap-checks.asciidoc |   5 +
 .../bootstrap/BootstrapChecks.java            |  24 ++--
 .../common/settings/ClusterSettings.java      |   2 +
 .../org/elasticsearch/index/IndexModule.java  | 105 ++++++++++++++----
 .../index/store/FsDirectoryService.java       |  18 ++-
 .../elasticsearch/indices/IndicesService.java |   8 ++
 .../bootstrap/BootstrapChecksTests.java       |  27 +----
 .../bootstrap/MaxMapCountCheckTests.java      |  67 ++++++++++-
 .../elasticsearch/index/IndexModuleTests.java |  17 +++
 .../plugins/IndexStorePluginTests.java        |  27 ++++-
 11 files changed, 243 insertions(+), 64 deletions(-)

diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc
index a1e00bac616..c2b3d700e9b 100644
--- a/docs/reference/index-modules/store.asciidoc
+++ b/docs/reference/index-modules/store.asciidoc
@@ -67,6 +67,13 @@ process equal to the size of the file being mapped. Before using this
 class, be sure you have allowed plenty of
 <>.
 
+[[allow-mmapfs]]
+You can restrict the use of the `mmapfs` store type via the setting
+`node.store.allow_mmapfs`. This is a boolean setting indicating whether or not
+`mmapfs` is allowed. The default is to allow `mmapfs`. This setting is useful,
+for example, if you are in an environment where you cannot control the ability
+to create a lot of memory maps, so you need to disable the ability to use `mmapfs`.
+
 === Pre-loading data into the file system cache
 
 NOTE: This is an expert setting, the details of which may change in the future.
diff --git a/docs/reference/setup/bootstrap-checks.asciidoc b/docs/reference/setup/bootstrap-checks.asciidoc
index a8b8dd82d61..f0e5cfc71c9 100644
--- a/docs/reference/setup/bootstrap-checks.asciidoc
+++ b/docs/reference/setup/bootstrap-checks.asciidoc
@@ -155,6 +155,11 @@ the kernel allows a process to have at least 262,144 memory-mapped areas
 and is enforced on Linux only. To pass the maximum map count check, you
 must configure `vm.max_map_count` via `sysctl` to be at least `262144`.
 
+Alternatively, the maximum map count check is only needed if you are using
+`mmapfs` as the <> for your indices. If you
+<> the use of `mmapfs`, then this bootstrap check will
+not be enforced.
+ === Client JVM check There are two different JVMs provided by OpenJDK-derived JVMs: the diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 1a028042db2..c5a8e806f41 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.process.ProcessProbe; import org.elasticsearch.node.NodeValidationException; @@ -393,17 +394,22 @@ final class BootstrapChecks { static class MaxMapCountCheck implements BootstrapCheck { - private static final long LIMIT = 1 << 18; + static final long LIMIT = 1 << 18; @Override - public BootstrapCheckResult check(BootstrapContext context) { - if (getMaxMapCount() != -1 && getMaxMapCount() < LIMIT) { - final String message = String.format( - Locale.ROOT, - "max virtual memory areas vm.max_map_count [%d] is too low, increase to at least [%d]", - getMaxMapCount(), - LIMIT); - return BootstrapCheckResult.failure(message); + public BootstrapCheckResult check(final BootstrapContext context) { + // we only enforce the check if mmapfs is an allowed store type + if (IndexModule.NODE_STORE_ALLOW_MMAPFS.get(context.settings)) { + if (getMaxMapCount() != -1 && getMaxMapCount() < LIMIT) { + final String message = String.format( + Locale.ROOT, + "max virtual memory areas vm.max_map_count [%d] is too low, increase to at least [%d]", + getMaxMapCount(), + LIMIT); + return BootstrapCheckResult.failure(message); + } else { + return BootstrapCheckResult.success(); + } } else { return BootstrapCheckResult.success(); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index de8691b3b68..bf53a3dc01a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -63,6 +63,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.http.HttpTransportSettings; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.IndicesQueryCache; @@ -264,6 +265,7 @@ public final class ClusterSettings extends AbstractScopedSettings { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, HierarchyCircuitBreakerService.ACCOUNTING_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.ACCOUNTING_CIRCUIT_BREAKER_OVERHEAD_SETTING, + IndexModule.NODE_STORE_ALLOW_MMAPFS, ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, SearchService.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS, diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 715b78b14ff..7f2eae492fd 100644 --- 
a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -21,10 +21,11 @@ package org.elasticsearch.index; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; +import org.apache.lucene.store.MMapDirectory; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; @@ -59,7 +60,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -84,8 +84,10 @@ import java.util.function.Function; */ public final class IndexModule { + public static final Setting NODE_STORE_ALLOW_MMAPFS = Setting.boolSetting("node.store.allow_mmapfs", true, Property.NodeScope); + public static final Setting INDEX_STORE_TYPE_SETTING = - new Setting<>("index.store.type", "", Function.identity(), Property.IndexScope, Property.NodeScope); + new Setting<>("index.store.type", "", Function.identity(), Property.IndexScope, Property.NodeScope); /** On which extensions to load data into the file-system cache upon opening of files. * This only works with the mmap directory, and even in that case is still @@ -289,7 +291,7 @@ public final class IndexModule { } } - private static boolean isBuiltinType(String storeType) { + public static boolean isBuiltinType(String storeType) { for (Type type : Type.values()) { if (type.match(storeType)) { return true; @@ -298,21 +300,48 @@ public final class IndexModule { return false; } + public enum Type { - NIOFS, - MMAPFS, - SIMPLEFS, - FS; + NIOFS("niofs"), + MMAPFS("mmapfs"), + SIMPLEFS("simplefs"), + FS("fs"); + + private final String settingsKey; + + Type(final String settingsKey) { + this.settingsKey = settingsKey; + } + + private static final Map TYPES; + + static { + final Map types = new HashMap<>(4); + for (final Type type : values()) { + types.put(type.settingsKey, type); + } + TYPES = Collections.unmodifiableMap(types); + } public String getSettingsKey() { - return this.name().toLowerCase(Locale.ROOT); + return this.settingsKey; } + + public static Type fromSettingsKey(final String key) { + final Type type = TYPES.get(key); + if (type == null) { + throw new IllegalArgumentException("no matching type for [" + key + "]"); + } + return type; + } + /** * Returns true iff this settings matches the type. */ public boolean match(String setting) { return getSettingsKey().equals(setting); } + } /** @@ -325,6 +354,16 @@ public final class IndexModule { IndexSearcherWrapper newWrapper(IndexService indexService); } + public static Type defaultStoreType(final boolean allowMmapfs) { + if (allowMmapfs && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + return Type.MMAPFS; + } else if (Constants.WINDOWS) { + return Type.SIMPLEFS; + } else { + return Type.NIOFS; + } + } + public IndexService newIndexService( NodeEnvironment environment, NamedXContentRegistry xContentRegistry, @@ -343,20 +382,7 @@ public final class IndexModule { IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null ? 
(shard) -> null : indexSearcherWrapper.get(); eventListener.beforeIndexCreated(indexSettings.getIndex(), indexSettings.getSettings()); - final String storeType = indexSettings.getValue(INDEX_STORE_TYPE_SETTING); - final IndexStore store; - if (Strings.isEmpty(storeType) || isBuiltinType(storeType)) { - store = new IndexStore(indexSettings); - } else { - Function factory = indexStoreFactories.get(storeType); - if (factory == null) { - throw new IllegalArgumentException("Unknown store type [" + storeType + "]"); - } - store = factory.apply(indexSettings); - if (store == null) { - throw new IllegalStateException("store must not be null"); - } - } + final IndexStore store = getIndexStore(indexSettings, indexStoreFactories); final QueryCache queryCache; if (indexSettings.getValue(INDEX_QUERY_CACHE_ENABLED_SETTING)) { BiFunction queryCacheProvider = forceQueryCacheProvider.get(); @@ -375,6 +401,39 @@ public final class IndexModule { indicesFieldDataCache, searchOperationListeners, indexOperationListeners, namedWriteableRegistry); } + private static IndexStore getIndexStore( + final IndexSettings indexSettings, final Map> indexStoreFactories) { + final String storeType = indexSettings.getValue(INDEX_STORE_TYPE_SETTING); + final Type type; + final Boolean allowMmapfs = NODE_STORE_ALLOW_MMAPFS.get(indexSettings.getNodeSettings()); + if (storeType.isEmpty() || Type.FS.getSettingsKey().equals(storeType)) { + type = defaultStoreType(allowMmapfs); + } else { + if (isBuiltinType(storeType)) { + type = Type.fromSettingsKey(storeType); + } else { + type = null; + } + } + if (type != null && type == Type.MMAPFS && allowMmapfs == false) { + throw new IllegalArgumentException("store type [mmapfs] is not allowed"); + } + final IndexStore store; + if (storeType.isEmpty() || isBuiltinType(storeType)) { + store = new IndexStore(indexSettings); + } else { + Function factory = indexStoreFactories.get(storeType); + if (factory == null) { + throw new IllegalArgumentException("Unknown store type [" + storeType + "]"); + } + store = factory.apply(indexSettings); + if (store == null) { + throw new IllegalStateException("store must not be null"); + } + } + return store; + } + /** * creates a new mapper service to do administrative work like mapping updates. This *should not* be used for document parsing. * doing so will result in an exception. 
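[Editor's note; not part of the patch. Two small sketches of the behavior wired up in `IndexModule` above. First, an operational view of how the node-level setting interacts with the index-level store type; the setting keys come from this commit, while the index name is a hypothetical example:]

    # elasticsearch.yml -- forbid memory-mapped index files on this node
    node.store.allow_mmapfs: false

    # Creating an index with an explicit non-mmap store type still works;
    # an explicit "mmapfs" here would now be rejected with
    # "store type [mmapfs] is not allowed".
    PUT /my-index
    {
      "settings": {
        "index.store.type": "niofs"
      }
    }

[Second, a minimal test-style sketch of the settings-key round trip introduced in `IndexModule.Type`. It assumes the patched class is on the classpath and an `ESTestCase` context; the test name is hypothetical:]

    public void testStoreTypeSettingsKeyRoundTrip() {
        // every built-in type resolves back from its own settings key
        for (final IndexModule.Type type : IndexModule.Type.values()) {
            assertSame(type, IndexModule.Type.fromSettingsKey(type.getSettingsKey()));
        }
        // unknown keys are rejected, matching the fromSettingsKey logic above
        final IllegalArgumentException e =
                expectThrows(IllegalArgumentException.class, () -> IndexModule.Type.fromSettingsKey("bogus"));
        assertEquals("no matching type for [bogus]", e.getMessage());
    }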
diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index fc605430066..f95cdb3a9f6 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.FileSwitchDirectory; import org.apache.lucene.store.LockFactory; import org.apache.lucene.store.MMapDirectory; @@ -77,10 +76,21 @@ public class FsDirectoryService extends DirectoryService { } protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { - final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), - IndexModule.Type.FS.getSettingsKey()); + final String storeType = + indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()); if (IndexModule.Type.FS.match(storeType)) { - return FSDirectory.open(location, lockFactory); // use lucene defaults + final IndexModule.Type type = + IndexModule.defaultStoreType(IndexModule.NODE_STORE_ALLOW_MMAPFS.get(indexSettings.getNodeSettings())); + switch (type) { + case MMAPFS: + return new MMapDirectory(location, lockFactory); + case SIMPLEFS: + return new SimpleFSDirectory(location, lockFactory); + case NIOFS: + return new NIOFSDirectory(location, lockFactory); + default: + throw new AssertionError("unexpected built-in store type [" + type + "]"); + } } else if (IndexModule.Type.SIMPLEFS.match(storeType)) { return new SimpleFSDirectory(location, lockFactory); } else if (IndexModule.Type.NIOFS.match(storeType)) { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 5c097ba774f..1c83a880511 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -228,6 +228,14 @@ public class IndicesService extends AbstractLifecycleComponent this.cacheCleaner = new CacheCleaner(indicesFieldDataCache, indicesRequestCache, logger, threadPool, this.cleanInterval); this.metaStateService = metaStateService; this.engineFactoryProviders = engineFactoryProviders; + + // do not allow any plugin-provided index store type to conflict with a built-in type + for (final String indexStoreType : indexStoreFactories.keySet()) { + if (IndexModule.isBuiltinType(indexStoreType)) { + throw new IllegalStateException("registered index store type [" + indexStoreType + "] conflicts with a built-in type"); + } + } + this.indexStoreFactories = indexStoreFactories; } diff --git a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java index 8180dd96e8e..1e18135f4eb 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java @@ -52,7 +52,7 @@ import static org.mockito.Mockito.when; public class BootstrapChecksTests extends ESTestCase { - private static final BootstrapContext defaultContext = new BootstrapContext(Settings.EMPTY, MetaData.EMPTY_META_DATA); + static final BootstrapContext defaultContext = new 
BootstrapContext(Settings.EMPTY, MetaData.EMPTY_META_DATA); public void testNonProductionMode() throws NodeValidationException { // nothing should happen since we are in non-production mode @@ -356,31 +356,6 @@ public class BootstrapChecksTests extends ESTestCase { BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } - public void testMaxMapCountCheck() throws NodeValidationException { - final int limit = 1 << 18; - final AtomicLong maxMapCount = new AtomicLong(randomIntBetween(1, limit - 1)); - final BootstrapChecks.MaxMapCountCheck check = new BootstrapChecks.MaxMapCountCheck() { - @Override - long getMaxMapCount() { - return maxMapCount.get(); - } - }; - - final NodeValidationException e = expectThrows( - NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); - assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count")); - - maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); - - // nothing should happen if current vm.max_map_count is not - // available - maxMapCount.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); - } - public void testClientJvmCheck() throws NodeValidationException { final AtomicReference vmName = new AtomicReference<>("Java HotSpot(TM) 32-Bit Client VM"); final BootstrapCheck check = new BootstrapChecks.ClientJvmCheck() { diff --git a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java index c5b99a91ffa..9a964a97bd7 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java @@ -24,16 +24,21 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Constants; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.Predicate; import static org.hamcrest.CoreMatchers.equalTo; @@ -45,6 +50,66 @@ import static org.mockito.Mockito.when; public class MaxMapCountCheckTests extends ESTestCase { + // initialize as if the max map count is under the limit, tests can override by setting maxMapCount before executing the check + private final AtomicLong maxMapCount = new AtomicLong(randomIntBetween(1, Math.toIntExact(BootstrapChecks.MaxMapCountCheck.LIMIT) - 1)); + private final BootstrapChecks.MaxMapCountCheck check = new BootstrapChecks.MaxMapCountCheck() { + @Override + long getMaxMapCount() { + return maxMapCount.get(); + } + }; + + private void assertFailure(final BootstrapCheck.BootstrapCheckResult result) { + assertTrue(result.isFailure()); + assertThat( + result.getMessage(), + equalTo( + "max virtual memory areas vm.max_map_count [" + maxMapCount.get() + "] is too low, " + 
+ "increase to at least [" + BootstrapChecks.MaxMapCountCheck.LIMIT + "]")); + } + + public void testMaxMapCountCheckBelowLimit() { + assertFailure(check.check(BootstrapChecksTests.defaultContext)); + } + + public void testMaxMapCountCheckBelowLimitAndMemoryMapAllowed() { + /* + * There are two ways that memory maps are allowed: + * - by default + * - mmapfs is explicitly allowed + * We want to test that if mmapfs is allowed then the max map count check is enforced. + */ + final List settingsThatAllowMemoryMap = new ArrayList<>(); + settingsThatAllowMemoryMap.add(Settings.EMPTY); + settingsThatAllowMemoryMap.add(Settings.builder().put("node.store.allow_mmapfs", true).build()); + + for (final Settings settingThatAllowsMemoryMap : settingsThatAllowMemoryMap) { + assertFailure(check.check(new BootstrapContext(settingThatAllowsMemoryMap, MetaData.EMPTY_META_DATA))); + } + } + + public void testMaxMapCountCheckNotEnforcedIfMemoryMapNotAllowed() { + // nothing should happen if current vm.max_map_count is under the limit but mmapfs is not allowed + final Settings settings = Settings.builder().put("node.store.allow_mmapfs", false).build(); + final BootstrapContext context = new BootstrapContext(settings, MetaData.EMPTY_META_DATA); + final BootstrapCheck.BootstrapCheckResult result = check.check(context); + assertTrue(result.isSuccess()); + } + + public void testMaxMapCountCheckAboveLimit() { + // nothing should happen if current vm.max_map_count exceeds the limit + maxMapCount.set(randomIntBetween(Math.toIntExact(BootstrapChecks.MaxMapCountCheck.LIMIT) + 1, Integer.MAX_VALUE)); + final BootstrapCheck.BootstrapCheckResult result = check.check(BootstrapChecksTests.defaultContext); + assertTrue(result.isSuccess()); + } + + public void testMaxMapCountCheckMaxMapCountNotAvailable() { + // nothing should happen if current vm.max_map_count is not available + maxMapCount.set(-1); + final BootstrapCheck.BootstrapCheckResult result = check.check(BootstrapChecksTests.defaultContext); + assertTrue(result.isSuccess()); + } + public void testGetMaxMapCountOnLinux() { if (Constants.LINUX) { final BootstrapChecks.MaxMapCountCheck check = new BootstrapChecks.MaxMapCountCheck(); @@ -142,7 +207,7 @@ public class MaxMapCountCheckTests extends ESTestCase { } @Override - public void match(LogEvent event) { + public void match(final LogEvent event) { if (event.getLevel().equals(level) && event.getLoggerName().equals(loggerName) && event.getMessage() instanceof ParameterizedMessage) { diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index a82b932e2b5..00072286388 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -87,6 +87,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; public class IndexModuleTests extends ESTestCase { @@ -376,6 +378,21 @@ public class IndexModuleTests extends ESTestCase { indexService.close("simon says", false); } + public void testMmapfsStoreTypeNotAllowed() { + final Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put("index.store.type", "mmapfs") + .build(); + final Settings nodeSettings = 
Settings.builder() + .put(IndexModule.NODE_STORE_ALLOW_MMAPFS.getKey(), false) + .build(); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo", "_na_"), settings, nodeSettings); + final IndexModule module = + new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); + assertThat(e, hasToString(containsString("store type [mmapfs] is not allowed"))); + } + class CustomQueryCache implements QueryCache { @Override diff --git a/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java b/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java index c53d798f7b4..d413c0f0be2 100644 --- a/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.plugins; import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.node.MockNode; @@ -32,6 +33,7 @@ import java.util.Map; import java.util.function.Function; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasToString; public class IndexStorePluginTests extends ESTestCase { @@ -54,7 +56,30 @@ public class IndexStorePluginTests extends ESTestCase { } - public void testDuplicateIndexStoreProviders() { + public static class ConflictingStorePlugin extends Plugin implements IndexStorePlugin { + + public static final String TYPE; + + static { + TYPE = randomFrom(Arrays.asList(IndexModule.Type.values())).getSettingsKey(); + } + + @Override + public Map<String, Function<IndexSettings, IndexStore>> getIndexStoreFactories() { + return Collections.singletonMap(TYPE, IndexStore::new); + } + + } + + public void testIndexStoreFactoryConflictsWithBuiltInIndexStoreType() { + final Settings settings = Settings.builder().put("path.home", createTempDir()).build(); + final IllegalStateException e = expectThrows( + IllegalStateException.class, () -> new MockNode(settings, Collections.singletonList(ConflictingStorePlugin.class))); + assertThat(e, hasToString(containsString( + "registered index store type [" + ConflictingStorePlugin.TYPE + "] conflicts with a built-in type"))); + } + + public void testDuplicateIndexStoreFactories() { final Settings settings = Settings.builder().put("path.home", createTempDir()).build(); final IllegalStateException e = expectThrows( IllegalStateException.class, () -> new MockNode(settings, Arrays.asList(BarStorePlugin.class, FooStorePlugin.class))); From 28d12b05b77472d0596df319d7129cea3ba9154a Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 21 Aug 2018 12:23:21 -0400 Subject: [PATCH 07/48] Move ML tests to be sub-projects of ML (#33026) This commit moves the ML QA tests to be a sub-project of ML. The purpose of this refactoring is to enable ML developers to run :x-pack:plugin:ml:check and run the vast majority of the ML tests with a single command (this still does not include the ML REST tests, nor the upgrade tests). This simplifies local development for faster iteration.
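As a usage sketch (assuming the standard Gradle wrapper at the repository root), the single entry point this layout enables is:

```
./gradlew :x-pack:plugin:ml:check
```

The `gradle.projectsEvaluated` hook added below wires the `check` task of every sub-project under `:x-pack:plugin:ml:qa` into the plugin's `check`, so new qa sub-projects are picked up automatically.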
--- x-pack/plugin/ml/build.gradle | 10 ++++++ .../ml/qa/basic-multi-node}/build.gradle | 0 .../ml/integration/MlBasicMultiNodeIT.java | 0 x-pack/plugin/ml/qa/build.gradle | 31 +++++++++++++++++++ .../ml/qa/disabled}/build.gradle | 0 .../ml/integration/MlPluginDisabledIT.java | 0 .../ml/qa/ml-with-security}/build.gradle | 0 .../ml/qa/ml-with-security}/roles.yml | 0 .../smoketest/MlWithSecurityIT.java | 0 .../MlWithSecurityInsufficientRoleIT.java | 0 .../smoketest/MlWithSecurityUserRoleIT.java | 0 .../qa/native-multi-node-tests}/build.gradle | 0 .../integration/AutodetectMemoryLimitIT.java | 0 .../integration/BasicRenormalizationIT.java | 0 .../ml/integration/CategorizationIT.java | 0 .../xpack/ml/integration/DatafeedJobsIT.java | 0 .../ml/integration/DatafeedJobsRestIT.java | 0 .../ml/integration/DeleteExpiredDataIT.java | 0 .../ml/integration/DetectionRulesIT.java | 0 .../xpack/ml/integration/ForecastIT.java | 0 ...erimResultsDeletedAfterReopeningJobIT.java | 0 .../xpack/ml/integration/MlJobIT.java | 0 .../MlNativeAutodetectIntegTestCase.java | 0 .../xpack/ml/integration/ModelPlotsIT.java | 0 .../ml/integration/OverallBucketsIT.java | 0 .../xpack/ml/integration/PersistJobIT.java | 0 .../ReopenJobResetsFinishedTimeIT.java | 0 .../ml/integration/ReopenJobWithGapIT.java | 0 .../integration/RestoreModelSnapshotIT.java | 0 .../ml/integration/RevertModelSnapshotIT.java | 0 .../ml/integration/ScheduledEventsIT.java | 0 .../integration/UpdateInterimResultsIT.java | 0 .../ml/qa/no-bootstrap-tests}/build.gradle | 0 .../NamedPipeHelperNoBootstrapTests.java | 0 .../ml/qa/single-node-tests}/build.gradle | 0 .../ml/transforms/PainlessDomainSplitIT.java | 0 36 files changed, 41 insertions(+) rename x-pack/{qa/ml-basic-multi-node => plugin/ml/qa/basic-multi-node}/build.gradle (100%) rename x-pack/{qa/ml-basic-multi-node => plugin/ml/qa/basic-multi-node}/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java (100%) create mode 100644 x-pack/plugin/ml/qa/build.gradle rename x-pack/{qa/ml-disabled => plugin/ml/qa/disabled}/build.gradle (100%) rename x-pack/{qa/ml-disabled => plugin/ml/qa/disabled}/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java (100%) rename x-pack/{qa/smoke-test-ml-with-security => plugin/ml/qa/ml-with-security}/build.gradle (100%) rename x-pack/{qa/smoke-test-ml-with-security => plugin/ml/qa/ml-with-security}/roles.yml (100%) rename x-pack/{qa/smoke-test-ml-with-security => plugin/ml/qa/ml-with-security}/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java (100%) rename x-pack/{qa/smoke-test-ml-with-security => plugin/ml/qa/ml-with-security}/src/test/java/org/elasticsearch/smoketest/MlWithSecurityInsufficientRoleIT.java (100%) rename x-pack/{qa/smoke-test-ml-with-security => plugin/ml/qa/ml-with-security}/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/build.gradle (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java (100%) rename 
x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java (100%) rename x-pack/{qa/ml-native-multi-node-tests => plugin/ml/qa/native-multi-node-tests}/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java (100%) rename x-pack/{qa/ml-no-bootstrap-tests => plugin/ml/qa/no-bootstrap-tests}/build.gradle (100%) rename x-pack/{qa/ml-no-bootstrap-tests => plugin/ml/qa/no-bootstrap-tests}/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperNoBootstrapTests.java (100%) rename x-pack/{qa/ml-single-node-tests => plugin/ml/qa/single-node-tests}/build.gradle (100%) rename x-pack/{qa/ml-single-node-tests => plugin/ml/qa/single-node-tests}/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java (100%) diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 3602e1b359e..282ce96fa93 100644 --- 
a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -103,9 +103,19 @@ task internalClusterTest(type: RandomizedTestingTask, include '**/*IT.class' systemProperty 'es.set.netty.runtime.available.processors', 'false' } + check.dependsOn internalClusterTest internalClusterTest.mustRunAfter test +// add all sub-projects of the qa sub-project +gradle.projectsEvaluated { + project.subprojects + .find { it.path == project.path + ":qa" } + .subprojects + .findAll { it.path.startsWith(project.path + ":qa") } + .each { check.dependsOn it.check } +} + // also add an "alias" task to make typing on the command line easier task icTest { dependsOn internalClusterTest diff --git a/x-pack/qa/ml-basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle similarity index 100% rename from x-pack/qa/ml-basic-multi-node/build.gradle rename to x-pack/plugin/ml/qa/basic-multi-node/build.gradle diff --git a/x-pack/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java similarity index 100% rename from x-pack/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java rename to x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java diff --git a/x-pack/plugin/ml/qa/build.gradle b/x-pack/plugin/ml/qa/build.gradle new file mode 100644 index 00000000000..517c93cc178 --- /dev/null +++ b/x-pack/plugin/ml/qa/build.gradle @@ -0,0 +1,31 @@ +import org.elasticsearch.gradle.test.RestIntegTestTask + +subprojects { + // HACK: please fix this + // we want to add the rest api specs for xpack to qa tests, but we + // need to wait until after the project is evaluated to only apply + // to those that rest tests. this used to be done automatically + // when xpack was a plugin, but now there is no place with xpack as a module. + // instead, we should package these and make them easy to use for rest tests, + // but currently, they must be copied into the resources of the test runner. 
+ project.tasks.withType(RestIntegTestTask) { + File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') + project.copyRestSpec.from(xpackResources) { + include 'rest-api-spec/api/**' + } + } +} + +gradle.projectsEvaluated { + subprojects { + Task assemble = project.tasks.findByName('assemble') + if (assemble) { + project.tasks.remove(assemble) + project.build.dependsOn.remove('assemble') + } + Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') + if (dependenciesInfo) { + project.precommit.dependsOn.remove('dependenciesInfo') + } + } +} diff --git a/x-pack/qa/ml-disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle similarity index 100% rename from x-pack/qa/ml-disabled/build.gradle rename to x-pack/plugin/ml/qa/disabled/build.gradle diff --git a/x-pack/qa/ml-disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java b/x-pack/plugin/ml/qa/disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java similarity index 100% rename from x-pack/qa/ml-disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java rename to x-pack/plugin/ml/qa/disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java diff --git a/x-pack/qa/smoke-test-ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle similarity index 100% rename from x-pack/qa/smoke-test-ml-with-security/build.gradle rename to x-pack/plugin/ml/qa/ml-with-security/build.gradle diff --git a/x-pack/qa/smoke-test-ml-with-security/roles.yml b/x-pack/plugin/ml/qa/ml-with-security/roles.yml similarity index 100% rename from x-pack/qa/smoke-test-ml-with-security/roles.yml rename to x-pack/plugin/ml/qa/ml-with-security/roles.yml diff --git a/x-pack/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java b/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java similarity index 100% rename from x-pack/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java rename to x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java diff --git a/x-pack/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityInsufficientRoleIT.java b/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityInsufficientRoleIT.java similarity index 100% rename from x-pack/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityInsufficientRoleIT.java rename to x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityInsufficientRoleIT.java diff --git a/x-pack/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java b/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java similarity index 100% rename from x-pack/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java rename to x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/build.gradle rename to x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle diff 
--git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java similarity index 100% rename from 
x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java diff --git 
a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java similarity index 100% rename from x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java similarity index 100% rename from 
x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java rename to x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java diff --git a/x-pack/qa/ml-no-bootstrap-tests/build.gradle b/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle similarity index 100% rename from x-pack/qa/ml-no-bootstrap-tests/build.gradle rename to x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle diff --git a/x-pack/qa/ml-no-bootstrap-tests/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperNoBootstrapTests.java b/x-pack/plugin/ml/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperNoBootstrapTests.java similarity index 100% rename from x-pack/qa/ml-no-bootstrap-tests/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperNoBootstrapTests.java rename to x-pack/plugin/ml/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperNoBootstrapTests.java diff --git a/x-pack/qa/ml-single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle similarity index 100% rename from x-pack/qa/ml-single-node-tests/build.gradle rename to x-pack/plugin/ml/qa/single-node-tests/build.gradle diff --git a/x-pack/qa/ml-single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java similarity index 100% rename from x-pack/qa/ml-single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java rename to x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java From 28a0df2c7fbaa46fa3dd5df671bdc6fa16fd3711 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Tue, 21 Aug 2018 18:12:28 +0100 Subject: [PATCH 08/48] HLRC: Clear ML data after client tests (#33023) This commit duplicates the `MlRestTestStateCleaner` to make sure all ML data is removed after each test. After implementing the job and datafeed APIs in the HLRC, we shall replace this implementation with one using the HLRC itself. 
Closes #32993 --- .../client/MachineLearningIT.java | 9 +- .../client/MlRestTestStateCleaner.java | 109 ++++++++++++++++++ .../MlClientDocumentationIT.java | 8 ++ 3 files changed, 124 insertions(+), 2 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 6c4fa6e4514..2c0fc70b848 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.client; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; @@ -33,15 +32,21 @@ import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription; import org.elasticsearch.protocol.xpack.ml.job.config.Detector; import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.junit.After; +import java.io.IOException; import java.util.Arrays; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.is; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32993") public class MachineLearningIT extends ESRestHighLevelClientTestCase { + @After + public void cleanUp() throws IOException { + new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + } + public void testPutJob() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java new file mode 100644 index 00000000000..7ad86576245 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * This is temporarily duplicated from the server side. + * @TODO Replace with an implementation using the HLRC once + * the APIs for managing datafeeds are implemented. 
+ */ +public class MlRestTestStateCleaner { + + private final Logger logger; + private final RestClient adminClient; + + public MlRestTestStateCleaner(Logger logger, RestClient adminClient) { + this.logger = logger; + this.adminClient = adminClient; + } + + public void clearMlMetadata() throws IOException { + deleteAllDatafeeds(); + deleteAllJobs(); + // indices will be deleted by the ESRestTestCase class + } + + @SuppressWarnings("unchecked") + private void deleteAllDatafeeds() throws IOException { + final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds"); + datafeedsRequest.addParameter("filter_path", "datafeeds"); + final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); + final List> datafeeds = + (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); + if (datafeeds == null) { + return; + } + + try { + adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop")); + } catch (Exception e1) { + logger.warn("failed to stop all datafeeds. Forcing stop", e1); + try { + adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")); + } catch (Exception e2) { + logger.warn("Force-closing all data feeds failed", e2); + } + throw new RuntimeException( + "Had to resort to force-stopping datafeeds, something went wrong?", e1); + } + + for (Map datafeed : datafeeds) { + String datafeedId = (String) datafeed.get("datafeed_id"); + adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId)); + } + } + + private void deleteAllJobs() throws IOException { + final Request jobsRequest = new Request("GET", "/_xpack/ml/anomaly_detectors"); + jobsRequest.addParameter("filter_path", "jobs"); + final Response response = adminClient.performRequest(jobsRequest); + @SuppressWarnings("unchecked") + final List> jobConfigs = + (List>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response)); + if (jobConfigs == null) { + return; + } + + try { + adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close")); + } catch (Exception e1) { + logger.warn("failed to close all jobs. 
Forcing closed", e1); + try { + adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true")); + } catch (Exception e2) { + logger.warn("Force-closing all jobs failed", e2); + } + throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", + e1); + } + + for (Map jobConfig : jobConfigs) { + String jobId = (String) jobConfig.get("job_id"); + adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId)); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 05aad96fff6..6e48036419b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningIT; +import org.elasticsearch.client.MlRestTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.unit.TimeValue; @@ -37,7 +38,9 @@ import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription; import org.elasticsearch.protocol.xpack.ml.job.config.Detector; import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.junit.After; +import java.io.IOException; import java.util.Collections; import java.util.Date; import java.util.List; @@ -48,6 +51,11 @@ import static org.hamcrest.Matchers.greaterThan; public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { + @After + public void cleanUp() throws IOException { + new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + } + public void testCreateJob() throws Exception { RestHighLevelClient client = highLevelClient(); From fcf8cadd9a5114da8b8536338d058289daa88d99 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 21 Aug 2018 14:48:53 -0400 Subject: [PATCH 09/48] Switch some x-pack tests to new style Requests (#32500) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `x-pack/qa/audit-tests`, `x-pack/qa/ml-disabled`, and `x-pack/qa/multi-node` projects to use the new versions. 
--- .../ml/integration/MlPluginDisabledIT.java | 57 +++++++++++-------- .../xpack/security/audit/IndexAuditIT.java | 11 ++-- .../GlobalCheckpointSyncActionIT.java | 29 +++++----- 3 files changed, 52 insertions(+), 45 deletions(-) diff --git a/x-pack/plugin/ml/qa/disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java b/x-pack/plugin/ml/qa/disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java index 3bb9566e5bf..170b4f14486 100644 --- a/x-pack/plugin/ml/qa/disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java +++ b/x-pack/plugin/ml/qa/disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java @@ -5,16 +5,13 @@ */ package org.elasticsearch.xpack.ml.integration; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.ml.MachineLearning; -import java.util.Collections; - import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -27,30 +24,40 @@ public class MlPluginDisabledIT extends ESRestTestCase { public void testActionsFail() throws Exception { XContentBuilder xContentBuilder = jsonBuilder(); xContentBuilder.startObject(); - xContentBuilder.field("actions-fail-job", "foo"); - xContentBuilder.field("description", "Analysis of response time by airline"); + { + xContentBuilder.field("actions-fail-job", "foo"); + xContentBuilder.field("description", "Analysis of response time by airline"); - xContentBuilder.startObject("analysis_config"); - xContentBuilder.field("bucket_span", "3600s"); - xContentBuilder.startArray("detectors"); - xContentBuilder.startObject(); - xContentBuilder.field("function", "metric"); - xContentBuilder.field("field_name", "responsetime"); - xContentBuilder.field("by_field_name", "airline"); - xContentBuilder.endObject(); - xContentBuilder.endArray(); + xContentBuilder.startObject("analysis_config"); + { + xContentBuilder.field("bucket_span", "3600s"); + xContentBuilder.startArray("detectors"); + { + xContentBuilder.startObject(); + { + xContentBuilder.field("function", "metric"); + xContentBuilder.field("field_name", "responsetime"); + xContentBuilder.field("by_field_name", "airline"); + } + xContentBuilder.endObject(); + } + xContentBuilder.endArray(); + } + xContentBuilder.endObject(); + + xContentBuilder.startObject("data_description"); + { + xContentBuilder.field("format", "xcontent"); + xContentBuilder.field("time_field", "time"); + xContentBuilder.field("time_format", "epoch"); + } + xContentBuilder.endObject(); + } xContentBuilder.endObject(); - xContentBuilder.startObject("data_description"); - xContentBuilder.field("format", "xcontent"); - xContentBuilder.field("time_field", "time"); - xContentBuilder.field("time_format", "epoch"); - xContentBuilder.endObject(); - xContentBuilder.endObject(); - - ResponseException exception = expectThrows(ResponseException.class, () -> client().performRequest("put", - MachineLearning.BASE_PATH + "anomaly_detectors/foo", Collections.emptyMap(), - new StringEntity(Strings.toString(xContentBuilder), ContentType.APPLICATION_JSON))); + Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/foo"); + 
request.setJsonEntity(Strings.toString(xContentBuilder)); + ResponseException exception = expectThrows(ResponseException.class, () -> client().performRequest(request)); assertThat(exception.getMessage(), containsString("no handler found for uri [/_xpack/ml/anomaly_detectors/foo] and method [PUT]")); } } diff --git a/x-pack/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java b/x-pack/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java index c0111e57c74..d1ee4f2d9e1 100644 --- a/x-pack/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java +++ b/x-pack/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.audit; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.http.message.BasicHeader; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -111,10 +110,12 @@ public class IndexAuditIT extends ESIntegTestCase { } public void testIndexAuditTrailWorking() throws Exception { - Response response = getRestClient().performRequest("GET", "/", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(USER, new SecureString(PASS.toCharArray())))); - assertThat(response.getStatusLine().getStatusCode(), is(200)); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + UsernamePasswordToken.basicAuthHeaderValue(USER, new SecureString(PASS.toCharArray()))); + request.setOptions(options); + Response response = getRestClient().performRequest(request); final AtomicReference lastClusterState = new AtomicReference<>(); final boolean found = awaitSecurityAuditIndex(lastClusterState, QueryBuilders.matchQuery("principal", USER)); diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/GlobalCheckpointSyncActionIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/GlobalCheckpointSyncActionIT.java index abc784b4cb2..18cd67ff271 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/GlobalCheckpointSyncActionIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/GlobalCheckpointSyncActionIT.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.multi_node; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -16,10 +15,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ObjectPath; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; @@ -59,12 +54,15 @@ public class GlobalCheckpointSyncActionIT extends ESRestTestCase { builder.endObject(); } builder.endObject(); - final StringEntity entity = new StringEntity(Strings.toString(builder), 
ContentType.APPLICATION_JSON); - client().performRequest("PUT", "test-index", Collections.emptyMap(), entity); + Request createIndexRequest = new Request("PUT", "/test-index"); + createIndexRequest.setJsonEntity(Strings.toString(builder)); + client().performRequest(createIndexRequest); } // wait for the replica to recover - client().performRequest("GET", "/_cluster/health", Collections.singletonMap("wait_for_status", "green")); + Request healthRequest = new Request("GET", "/_cluster/health"); + healthRequest.addParameter("wait_for_status", "green"); + client().performRequest(healthRequest); // index some documents final int numberOfDocuments = randomIntBetween(0, 128); @@ -75,17 +73,18 @@ public class GlobalCheckpointSyncActionIT extends ESRestTestCase { builder.field("foo", i); } builder.endObject(); - final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/test-index/test-type/" + i, Collections.emptyMap(), entity); + Request indexRequest = new Request("PUT", "/test-index/test-type/" + i); + indexRequest.setJsonEntity(Strings.toString(builder)); + client().performRequest(indexRequest); } } // we have to wait for the post-operation global checkpoint sync to propagate to the replica assertBusy(() -> { - final Map params = new HashMap<>(2); - params.put("level", "shards"); - params.put("filter_path", "**.seq_no"); - final Response response = client().performRequest("GET", "/test-index/_stats", params); + final Request request = new Request("GET", "/test-index/_stats"); + request.addParameter("level", "shards"); + request.addParameter("filter_path", "**.seq_no"); + final Response response = client().performRequest(request); final ObjectPath path = ObjectPath.createFromResponse(response); // int looks funny here since global checkpoints are longs but the response parser does not know enough to treat them as long final int shard0GlobalCheckpoint = path.evaluate("indices.test-index.shards.0.0.seq_no.global_checkpoint"); From 3973bb4028cd0536eef0f178fb66f984d3a74246 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Tue, 21 Aug 2018 14:59:37 -0400 Subject: [PATCH 10/48] Fix north pole overflow error in GeoHashUtils.bbox() (#32891) Fixes an overflow error in GeoHashUtils.bbox() calculation of a bounding box for geohashes with maximum precision located next to the north pole. 
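A sketch of the fixed behavior, based on the regression test added below: for a maximum-precision geohash touching the north pole, incrementing the latitude bits used to overflow, so the top of the bounding box is now pinned to 90 degrees instead.

```java
// Bounding box for a maximum-precision geohash at the north pole;
// maxLat must be 90 degrees rather than an overflowed value.
Rectangle bbox = GeoHashUtils.bbox("zzbxfpgzupbx");
assertEquals(90.0, bbox.maxLat, 0.0000001);
```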
--- .../common/geo/GeoHashUtils.java | 23 ++++++++++++++----- .../common/geo/GeoHashTests.java | 5 ++++ 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java b/server/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java index 0ee8d095f49..bf65162d215 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java @@ -25,6 +25,8 @@ import org.apache.lucene.util.BitUtil; import java.util.ArrayList; import java.util.Collection; +import static org.apache.lucene.geo.GeoUtils.MAX_LAT_INCL; + /** * Utilities for converting to/from the GeoHash standard * @@ -48,6 +50,8 @@ public class GeoHashUtils { private static final double LAT_SCALE = (0x1L<>>= 4; - // deinterleave and add 1 to lat and lon to get topRight - long lat = BitUtil.deinterleave(ghLong >>> 1) + 1; - long lon = BitUtil.deinterleave(ghLong) + 1; - GeoPoint topRight = GeoPoint.fromGeohash(BitUtil.interleave((int)lon, (int)lat) << 4 | len); - - return new Rectangle(bottomLeft.lat(), topRight.lat(), bottomLeft.lon(), topRight.lon()); + // deinterleave + long lon = BitUtil.deinterleave(ghLong >>> 1); + long lat = BitUtil.deinterleave(ghLong); + if (lat < MAX_LAT_BITS) { + // add 1 to lat and lon to get topRight + GeoPoint topRight = GeoPoint.fromGeohash(BitUtil.interleave((int)(lat + 1), (int)(lon + 1)) << 4 | len); + return new Rectangle(bottomLeft.lat(), topRight.lat(), bottomLeft.lon(), topRight.lon()); + } else { + // We cannot go north of north pole, so just using 90 degrees instead of calculating it using + // add 1 to lon to get lon of topRight, we are going to use 90 for lat + GeoPoint topRight = GeoPoint.fromGeohash(BitUtil.interleave((int)lat, (int)(lon + 1)) << 4 | len); + return new Rectangle(bottomLeft.lat(), MAX_LAT_INCL, bottomLeft.lon(), topRight.lon()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java index 87f98389231..b4a24cfc4fc 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java @@ -98,6 +98,11 @@ public class GeoHashTests extends ESTestCase { } } + public void testNorthPoleBoundingBox() { + Rectangle bbox = GeoHashUtils.bbox("zzbxfpgzupbx"); // Bounding box with maximum precision touching north pole + assertEquals(90.0, bbox.maxLat, 0.0000001); // Should be 90 degrees + } + public void testInvalidGeohashes() { IllegalArgumentException ex; From f311680176bd68d74308bd5173d9703a8c45f263 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 21 Aug 2018 16:04:16 -0400 Subject: [PATCH 11/48] Monitoring: Clean up MonitoringIT We recently reenabled MonitoringIT to hunt down #29880 but some of its assertions were out of date. This updates the assertions. 
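One pattern worth noting in the cleanup below: exhaustive-field checks now finish with `assertThat(map.keySet(), empty())` instead of `assertThat(map.isEmpty(), is(true))`, so a failure reports the leftover keys rather than a bare `false`. A sketch:

```java
// Remove every expected key, then assert nothing unexpected remains;
// on failure Hamcrest prints the remaining keys.
assertThat(clusterState.remove("status"), notNullValue());
assertThat(clusterState.remove("nodes"), notNullValue());
assertThat(clusterState.keySet(), empty());
```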
--- .../xpack/monitoring/integration/MonitoringIT.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index 0ee5f85bfad..e44d6da073e 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -67,6 +67,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.threadpool.ThreadPool.Names.WRITE; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.TEMPLATE_VERSION; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -336,7 +337,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { final Map clusterStats = (Map) source.get("cluster_stats"); assertThat(clusterStats, notNullValue()); - assertThat(clusterStats.size(), equalTo(4)); + assertThat(clusterStats.size(), equalTo(5)); final Map stackStats = (Map) source.get("stack_stats"); assertThat(stackStats, notNullValue()); @@ -346,7 +347,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { assertThat(apm, notNullValue()); assertThat(apm.size(), equalTo(1)); assertThat(apm.remove("found"), is(apmIndicesExist)); - assertThat(apm.isEmpty(), is(true)); + assertThat(apm.keySet(), empty()); final Map xpackStats = (Map) stackStats.get("xpack"); assertThat(xpackStats, notNullValue()); @@ -358,14 +359,14 @@ public class MonitoringIT extends ESSingleNodeTestCase { final Map clusterState = (Map) source.get("cluster_state"); assertThat(clusterState, notNullValue()); - assertThat(clusterState.size(), equalTo(6)); assertThat(clusterState.remove("nodes_hash"), notNullValue()); assertThat(clusterState.remove("status"), notNullValue()); assertThat(clusterState.remove("version"), notNullValue()); assertThat(clusterState.remove("state_uuid"), notNullValue()); + assertThat(clusterState.remove("cluster_uuid"), notNullValue()); assertThat(clusterState.remove("master_node"), notNullValue()); assertThat(clusterState.remove("nodes"), notNullValue()); - assertThat(clusterState.isEmpty(), is(true)); + assertThat(clusterState.keySet(), empty()); } /** @@ -451,6 +452,11 @@ public class MonitoringIT extends ESSingleNodeTestCase { return; } + // bulk is not a thread pool in the current version but we allow it to support mixed version clusters + if (filter.startsWith("node_stats.thread_pool.bulk")) { + return; + } + assertThat(filter + " must not be null in the monitoring document", extractValue(filter, source), notNullValue()); }); } From 8b43e21521b4d0f3409439d428eda156d8744cf1 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 21 Aug 2018 22:12:53 +0200 Subject: [PATCH 12/48] Fix multi fields empty query (#33017) This change fixes empty query removal when all fields remove the search term in `simple_query_string`, `multi_match` and `query_string`. 
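A sketch of the affected case (hypothetical field names; assumes an analyzer, such as a stop filter, that removes every token of the input):

```java
// If both fields analyze "the" away entirely, the parsed query now falls
// back to zeroTermsQuery() -- MatchNoDocsQuery for the default NONE --
// instead of leaving an empty query group behind.
QueryBuilder qb = QueryBuilders.multiMatchQuery("the", "title", "body")
        .zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE); // the default
```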
Closes #33009 --- .../index/query/QueryStringQueryBuilder.java | 9 +-- .../index/search/MultiMatchQuery.java | 2 +- .../index/search/QueryStringQueryParser.java | 3 + .../query/MultiMatchQueryBuilderTests.java | 64 +++++++++++++++++++ .../query/QueryStringQueryBuilderTests.java | 53 ++++++++++++++- .../query/SimpleQueryStringBuilderTests.java | 50 ++++++++++++++- 6 files changed, 172 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index e9d53d8e829..19687464edc 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -328,8 +328,9 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder groupQuery) { if (groupQuery == null || groupQuery.isEmpty()) { - return new MatchNoDocsQuery("[multi_match] list of group queries was empty"); + return zeroTermsQuery(); } if (groupQuery.size() == 1) { return groupQuery.get(0); diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 50406ed5834..04a1bf122e6 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -347,6 +347,9 @@ public class QueryStringQueryParser extends XQueryParser { } queryBuilder.setPhraseSlop(slop); Query query = queryBuilder.parse(MultiMatchQueryBuilder.Type.PHRASE, fields, queryText, null); + if (query == null) { + return null; + } return applySlop(query, slop); } catch (IOException e) { throw new ParseException(e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index e30cdaca402..2f69ef7674d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; @@ -381,6 +382,69 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase Date: Tue, 21 Aug 2018 13:13:07 -0700 Subject: [PATCH 13/48] [DOCS] Fixed formatting of Example headings. (#33038) --- docs/painless/painless-execute-script.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index 2aca9597786..30320def79b 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -26,7 +26,7 @@ The only variable that is available is `params`, which can be used to access use The result of the script is always converted to a string. If no context is specified then this context is used by default. 
-====== Example +*Example* Request: @@ -67,7 +67,7 @@ The following parameters may be specified in `context_setup` for a filter contex document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. index:: The name of an index containing a mapping that is compatable with the document being indexed. -====== Example +*Example* [source,js] ---------------------------------------------------------------- @@ -125,7 +125,7 @@ document:: Contains the document that will be temporarily indexed in-memory and index:: The name of an index containing a mapping that is compatable with the document being indexed. query:: If `_score` is used in the script then a query can specified that will be used to compute a score. -====== Example +*Example* [source,js] ---------------------------------------------------------------- From 767c69593c67befb843686de8ea51b7bc87728c9 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 21 Aug 2018 22:15:09 +0200 Subject: [PATCH 14/48] Fix quoted _exists_ query (#33019) This change in the `query_string` query fixes the detection of the special `_exists_` field when it is used with a quoted term. Closes #28922 --- .../index/search/QueryStringQueryParser.java | 12 ++++++++---- .../index/query/QueryStringQueryBuilderTests.java | 12 ++++++++++++ 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 04a1bf122e6..fa2fd033bee 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -280,14 +280,14 @@ public class QueryStringQueryParser extends XQueryParser { @Override public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException { - if (quoted) { - return getFieldQuery(field, queryText, getPhraseSlop()); - } - if (field != null && EXISTS_FIELD.equals(field)) { return existsQuery(queryText); } + if (quoted) { + return getFieldQuery(field, queryText, getPhraseSlop()); + } + // Detects additional operators '<', '<=', '>', '>=' to handle range query with one side unbounded. // It is required to use a prefix field operator to enable the detection since they are not treated // as logical operator by the query parser (e.g. age:>=10). @@ -333,6 +333,10 @@ public class QueryStringQueryParser extends XQueryParser { @Override protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException { + if (field != null && EXISTS_FIELD.equals(field)) { + return existsQuery(queryText); + } + Map fields = extractMultiFields(field, true); if (fields.isEmpty()) { return newUnmappedFieldQuery(field); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 81895f4c9b8..b0ee3254873 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -998,6 +998,18 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase Date: Tue, 21 Aug 2018 16:33:42 -0700 Subject: [PATCH 15/48] Ensure that _exists queries on keyword fields use norms when they're available. 
(#33006) --- .../index/mapper/KeywordFieldMapper.java | 17 +++++++++----- .../index/mapper/TypeParsers.java | 5 ++--- .../index/mapper/KeywordFieldMapperTests.java | 16 ++++++++++---- .../index/mapper/KeywordFieldTypeTests.java | 22 +++++++++++++++++-- 4 files changed, 45 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 9c334f79551..20b4bb37cc7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; @@ -166,7 +167,7 @@ public final class KeywordFieldMapper extends FieldMapper { builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1)); iterator.remove(); } else if (propName.equals("norms")) { - builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode, "norms") == false); + TypeParsers.parseNorms(builder, name, propNode); iterator.remove(); } else if (propName.equals("eager_global_ordinals")) { builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode, "eager_global_ordinals")); @@ -256,8 +257,10 @@ public final class KeywordFieldMapper extends FieldMapper { public Query existsQuery(QueryShardContext context) { if (hasDocValues()) { return new DocValuesFieldExistsQuery(name()); - } else { + } else if (omitNorms()) { return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); + } else { + return new NormsFieldExistsQuery(name()); } } @@ -366,17 +369,19 @@ public final class KeywordFieldMapper extends FieldMapper { // convert to utf8 only once before feeding postings/dv/stored fields final BytesRef binaryValue = new BytesRef(value); - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { Field field = new Field(fieldType().name(), binaryValue, fieldType()); fields.add(field); + + if (fieldType().hasDocValues() == false && fieldType().omitNorms()) { + createFieldNamesField(context, fields); + } } + if (fieldType().hasDocValues()) { fields.add(new SortedSetDocValuesField(fieldType().name(), binaryValue)); - } else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) { - createFieldNamesField(context, fields); } } - @Override protected String contentType() { return CONTENT_TYPE; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index a6a5fab0d04..667f4a73617 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -122,8 +122,7 @@ public class TypeParsers { } } - public static void parseNorms(FieldMapper.Builder builder, String fieldName, Object propNode, - Mapper.TypeParser.ParserContext parserContext) { + public static void parseNorms(FieldMapper.Builder builder, String fieldName, Object propNode) { builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode, fieldName + ".norms") 
== false); } @@ -140,7 +139,7 @@ public class TypeParsers { final String propName = entry.getKey(); final Object propNode = entry.getValue(); if ("norms".equals(propName)) { - parseNorms(builder, name, propNode, parserContext); + parseNorms(builder, name, propNode); iterator.remove(); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 56e587dc995..8e5c81e58f1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -321,11 +321,16 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { public void testEnableNorms() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject() - .endObject().endObject()); + .startObject("properties") + .startObject("field") + .field("type", "keyword") + .field("doc_values", false) + .field("norms", true) + .endObject() + .endObject() + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference @@ -336,8 +341,11 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); - assertEquals(2, fields.length); + assertEquals(1, fields.length); assertFalse(fields[0].fieldType().omitNorms()); + + IndexableField[] fieldNamesFields = doc.rootDoc().getFields(FieldNamesFieldMapper.NAME); + assertEquals(0, fieldNamesFields.length); } public void testNormalizer() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index a291062c7a5..eae5b4ac7d2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.TokenFilter; @@ -28,9 +27,11 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; -import org.apache.lucene.search.TermInSetQuery; +import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; @@ -132,6 +133,23 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase { assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } + public void testExistsQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + 
ft.setHasDocValues(true);
+ ft.setOmitNorms(true);
+ assertEquals(new DocValuesFieldExistsQuery("field"), ft.existsQuery(null));
+
+ ft.setHasDocValues(false);
+ ft.setOmitNorms(false);
+ assertEquals(new NormsFieldExistsQuery("field"), ft.existsQuery(null));
+
+ ft.setHasDocValues(false);
+ ft.setOmitNorms(true);
+ assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, "field")), ft.existsQuery(null));
+ }
+
public void testRegexpQuery() {
MappedFieldType ft = createDefaultFieldType();
ft.setName("field");

From 2c81d7f77ec90c94446be3a8fe15d461a6f2d362 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Tue, 21 Aug 2018 20:03:28 -0400
Subject: [PATCH 16/48] Build: Rework shadow plugin configuration (#32409)

This reworks how we configure the `shadow` plugin in the build. The major change is that we no longer bundle dependencies in the `compile` configuration; instead we bundle dependencies in the new `bundle` configuration. This feels more right because it is a little more "opt in" rather than "opt out" and the name of the `bundle` configuration is a little more obvious.

As a neat side effect of this, the `runtimeElements` configuration used when one project depends on another now contains exactly the dependencies needed to run the project, so you no longer need to reference projects that use the shadow plugin like this:

```
testCompile project(path: ':client:rest-high-level', configuration: 'shadow')
```

You can instead use the much more normal:

```
testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}"
```

---
CONTRIBUTING.md | 14 ++-
build.gradle | 39 +++------
.../elasticsearch/gradle/BuildPlugin.groovy | 85 +++++++-------
.../gradle/plugin/PluginBuildPlugin.groovy | 5 +-
.../gradle/precommit/JarHellTask.groovy | 4 +
.../precommit/ThirdPartyAuditTask.groovy | 6 ++
client/rest-high-level/build.gradle | 14 +--
qa/ccs-unavailable-clusters/build.gradle | 2 +-
x-pack/docs/build.gradle | 5 +-
x-pack/license-tools/build.gradle | 2 +-
x-pack/plugin/core/build.gradle | 22 ++---
x-pack/plugin/deprecation/build.gradle | 2 +-
x-pack/plugin/graph/build.gradle | 3 +-
x-pack/plugin/logstash/build.gradle | 4 +-
x-pack/plugin/ml/build.gradle | 3 +-
.../ml/qa/basic-multi-node/build.gradle | 2 +-
x-pack/plugin/ml/qa/disabled/build.gradle | 2 +-
.../ml/qa/ml-with-security/build.gradle | 3 +-
.../qa/native-multi-node-tests/build.gradle | 3 +-
.../ml/qa/no-bootstrap-tests/build.gradle | 2 +-
.../ml/qa/single-node-tests/build.gradle | 2 +-
x-pack/plugin/monitoring/build.gradle | 3 +-
x-pack/plugin/rollup/build.gradle | 3 +-
x-pack/plugin/security/build.gradle | 3 +-
x-pack/plugin/security/cli/build.gradle | 5 +-
x-pack/plugin/sql/build.gradle | 3 +-
x-pack/plugin/upgrade/build.gradle | 3 +-
x-pack/plugin/watcher/build.gradle | 3 +-
x-pack/qa/evil-tests/build.gradle | 2 +-
x-pack/qa/full-cluster-restart/build.gradle | 6 +-
x-pack/qa/kerberos-tests/build.gradle | 2 +-
.../build.gradle | 3 +-
x-pack/qa/multi-node/build.gradle | 2 +-
x-pack/qa/openldap-tests/build.gradle | 3 +-
.../reindex-tests-with-security/build.gradle | 3 +-
x-pack/qa/rolling-upgrade-basic/build.gradle | 3 +-
x-pack/qa/rolling-upgrade/build.gradle | 6 +-
x-pack/qa/saml-idp-tests/build.gradle | 3 +-
x-pack/qa/security-client-tests/build.gradle | 2 +-
.../build.gradle | 2 +-
x-pack/qa/security-migrate-tests/build.gradle | 2 +-
.../build.gradle | 3 +-
.../build.gradle | 2 +-
.../build.gradle | 2 +-
x-pack/qa/smoke-test-plugins-ssl/build.gradle | 4 +-
x-pack/qa/smoke-test-plugins/build.gradle | 2 +-
.../build.gradle | 3 +-
.../build.gradle | 2 +-
x-pack/qa/smoke-test-watcher/build.gradle | 2 +-
x-pack/qa/sql/security/build.gradle | 4 +-
x-pack/qa/third-party/hipchat/build.gradle | 2 +-
x-pack/qa/third-party/jira/build.gradle | 2 +-
x-pack/qa/third-party/pagerduty/build.gradle | 2 +-
x-pack/qa/third-party/slack/build.gradle | 2 +-
x-pack/qa/transport-client-tests/build.gradle | 2 +-
x-pack/test/feature-aware/build.gradle | 2 +-
x-pack/transport-client/build.gradle | 2 +-
57 files changed, 158 insertions(+), 166 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c591459f01b..4285c8fd20c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -325,21 +325,19 @@ common configurations in our build and how we use them:
`compile` Code that is on the classpath at both compile and
-runtime. If the [`shadow`][shadow-plugin] plugin is applied to the project then
-this code is bundled into the jar produced by the project.
+runtime.
`runtime` Code that is not on the classpath at compile time but is on the classpath at runtime. We mostly use this configuration to make sure that we do not accidentally compile against dependencies of our dependencies, also known as "transitive" dependencies.
-`compileOnly` Code that is on the classpath at comile time but that
+`compileOnly` Code that is on the classpath at compile time but that
should not be shipped with the project because it is "provided" by the runtime somehow. Elasticsearch plugins use this configuration to include dependencies that are bundled with Elasticsearch's server.
-`shadow` Only available in projects with the shadow plugin. Code
-that is on the classpath at both compile and runtime but is *not* bundled into
-the jar produced by the project. If you depend on a project with the `shadow`
-plugin then you need to depend on this configuration because it will bring
-along all of the dependencies you need at runtime.
+`bundle` Only available in projects with the shadow plugin,
+dependencies with this configuration are bundled into the jar produced by the
+build. Since IDEs do not understand this configuration we rig them to treat
+dependencies in this configuration as `compile` dependencies.
`testCompile` Code that is on the classpath for compiling tests that are part of this project but not production code. The canonical example of this is `junit`.
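To make the new `bundle` configuration concrete, here is a minimal sketch of a build script for a project that shades one of its dependencies under this scheme. It is an illustration rather than a file from this patch: the project path `:example:shaded-lib` is hypothetical, while `elasticsearch.build` and the shadow plugin id are the standard ones.

```
// Hypothetical build.gradle for a project that applies the shadow plugin.
apply plugin: 'elasticsearch.build'
apply plugin: 'com.github.johnrengelman.shadow'

dependencies {
  // Regular dependency: on the compile and runtime classpaths and listed in
  // the published pom, but *not* copied into the jar that shadowJar builds.
  compile "org.elasticsearch:elasticsearch:${version}"

  // Bundled dependency: shaded into the jar that shadowJar builds. IDEs are
  // rigged (see the build.gradle change below) to treat it as `compile`.
  bundle project(':example:shaded-lib')
}
```

Consumers of such a project no longer name a configuration at all; they depend on the published coordinates, exactly as the commit message above shows.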
diff --git a/build.gradle b/build.gradle index 0df5b97ae4a..36d3a543d89 100644 --- a/build.gradle +++ b/build.gradle @@ -22,6 +22,7 @@ import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionCollection import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.gradle.plugins.ide.eclipse.model.SourceFolder import org.gradle.util.GradleVersion import org.gradle.util.DistributionLocator @@ -304,7 +305,7 @@ subprojects { // org.elasticsearch:elasticsearch must be the last one or all the links for the // other packages (e.g org.elasticsearch.client) will point to server rather than // their own artifacts. - if (project.plugins.hasPlugin(BuildPlugin)) { + if (project.plugins.hasPlugin(BuildPlugin) || project.plugins.hasPlugin(PluginBuildPlugin)) { String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co" Closure sortClosure = { a, b -> b.group <=> a.group } Closure depJavadocClosure = { shadowed, dep -> @@ -322,13 +323,6 @@ subprojects { */ project.evaluationDependsOn(upstreamProject.path) project.javadoc.source += upstreamProject.javadoc.source - /* - * Do not add those projects to the javadoc classpath because - * we are going to resolve them with their source instead. - */ - project.javadoc.classpath = project.javadoc.classpath.filter { f -> - false == upstreamProject.configurations.archives.artifacts.files.files.contains(f) - } /* * Instead we need the upstream project's javadoc classpath so * we don't barf on the classes that it references. @@ -345,16 +339,16 @@ subprojects { project.configurations.compile.dependencies .findAll() .toSorted(sortClosure) - .each({ c -> depJavadocClosure(hasShadow, c) }) + .each({ c -> depJavadocClosure(false, c) }) project.configurations.compileOnly.dependencies .findAll() .toSorted(sortClosure) - .each({ c -> depJavadocClosure(hasShadow, c) }) + .each({ c -> depJavadocClosure(false, c) }) if (hasShadow) { - project.configurations.shadow.dependencies + project.configurations.bundle.dependencies .findAll() .toSorted(sortClosure) - .each({ c -> depJavadocClosure(false, c) }) + .each({ c -> depJavadocClosure(true, c) }) } } } @@ -523,25 +517,18 @@ allprojects { allprojects { /* * IntelliJ and Eclipse don't know about the shadow plugin so when we're - * in "IntelliJ mode" or "Eclipse mode" add "runtime" dependencies - * eveywhere where we see a "shadow" dependency which will cause them to - * reference shadowed projects directly rather than rely on the shadowing - * to include them. This is the correct thing for it to do because it - * doesn't run the jar shadowing at all. This isn't needed for the project + * in "IntelliJ mode" or "Eclipse mode" switch "bundle" dependencies into + * regular "compile" dependencies. This isn't needed for the project * itself because the IDE configuration is done by SourceSets but it is * *is* needed for projects that depends on the project doing the shadowing. * Without this they won't properly depend on the shadowed project. 
*/ if (isEclipse || isIdea) { - configurations.all { Configuration configuration -> - dependencies.all { Dependency dep -> - if (dep instanceof ProjectDependency) { - if (dep.getTargetConfiguration() == 'shadow') { - configuration.dependencies.add(project.dependencies.project(path: dep.dependencyProject.path, configuration: 'runtime')) - } - } - } - } + project.plugins.withType(ShadowPlugin).whenPluginAdded { + project.afterEvaluate { + project.configurations.compile.extendsFrom project.configurations.bundle + } + } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index e45ba7ce9dc..7713d5c64df 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -79,8 +79,9 @@ class BuildPlugin implements Plugin { } project.pluginManager.apply('java') project.pluginManager.apply('carrotsearch.randomized-testing') - // these plugins add lots of info to our jars + configureConfigurations(project) configureJars(project) // jar config must be added before info broker + // these plugins add lots of info to our jars project.pluginManager.apply('nebula.info-broker') project.pluginManager.apply('nebula.info-basic') project.pluginManager.apply('nebula.info-java') @@ -91,8 +92,8 @@ class BuildPlugin implements Plugin { globalBuildInfo(project) configureRepositories(project) - configureConfigurations(project) project.ext.versions = VersionProperties.versions + configureSourceSets(project) configureCompile(project) configureJavadoc(project) configureSourcesJar(project) @@ -421,8 +422,10 @@ class BuildPlugin implements Plugin { project.configurations.compile.dependencies.all(disableTransitiveDeps) project.configurations.testCompile.dependencies.all(disableTransitiveDeps) project.configurations.compileOnly.dependencies.all(disableTransitiveDeps) + project.plugins.withType(ShadowPlugin).whenPluginAdded { - project.configurations.shadow.dependencies.all(disableTransitiveDeps) + Configuration bundle = project.configurations.create('bundle') + bundle.dependencies.all(disableTransitiveDeps) } } @@ -556,30 +559,6 @@ class BuildPlugin implements Plugin { publications { nebula(MavenPublication) { artifacts = [ project.tasks.shadowJar ] - artifactId = project.archivesBaseName - /* - * Configure the pom to include the "shadow" as compile dependencies - * because that is how we're using them but remove all other dependencies - * because they've been shaded into the jar. - */ - pom.withXml { XmlProvider xml -> - Node root = xml.asNode() - root.remove(root.dependencies) - Node dependenciesNode = root.appendNode('dependencies') - project.configurations.shadow.allDependencies.each { - if (false == it instanceof SelfResolvingDependency) { - Node dependencyNode = dependenciesNode.appendNode('dependency') - dependencyNode.appendNode('groupId', it.group) - dependencyNode.appendNode('artifactId', it.name) - dependencyNode.appendNode('version', it.version) - dependencyNode.appendNode('scope', 'compile') - } - } - // Be tidy and remove the element if it is empty - if (dependenciesNode.children.empty) { - root.remove(dependenciesNode) - } - } } } } @@ -587,6 +566,20 @@ class BuildPlugin implements Plugin { } } + /** + * Add dependencies that we are going to bundle to the compile classpath. 
+ */ + static void configureSourceSets(Project project) { + project.plugins.withType(ShadowPlugin).whenPluginAdded { + ['main', 'test'].each {name -> + SourceSet sourceSet = project.sourceSets.findByName(name) + if (sourceSet != null) { + sourceSet.compileClasspath += project.configurations.bundle + } + } + } + } + /** Adds compiler settings to the project */ static void configureCompile(Project project) { if (project.compilerJavaVersion < JavaVersion.VERSION_1_10) { @@ -764,9 +757,16 @@ class BuildPlugin implements Plugin { * better to be safe */ mergeServiceFiles() + /* + * Bundle dependencies of the "bundled" configuration. + */ + configurations = [project.configurations.bundle] } // Make sure we assemble the shadow jar project.tasks.assemble.dependsOn project.tasks.shadowJar + project.artifacts { + apiElements project.tasks.shadowJar + } } } @@ -873,13 +873,8 @@ class BuildPlugin implements Plugin { exclude '**/*$*.class' project.plugins.withType(ShadowPlugin).whenPluginAdded { - /* - * If we make a shaded jar we test against it. - */ + // Test against a shadow jar if we made one classpath -= project.tasks.compileJava.outputs.files - classpath -= project.configurations.compile - classpath -= project.configurations.runtime - classpath += project.configurations.shadow classpath += project.tasks.shadowJar.outputs.files dependsOn project.tasks.shadowJar } @@ -905,26 +900,6 @@ class BuildPlugin implements Plugin { additionalTest.dependsOn(project.tasks.testClasses) project.check.dependsOn(additionalTest) }); - - project.plugins.withType(ShadowPlugin).whenPluginAdded { - /* - * We need somewhere to configure dependencies that we don't wish - * to shade into the jar. The shadow plugin creates a "shadow" - * configuration which is *almost* exactly that. It is never - * bundled into the shaded jar but is used for main source - * compilation. Unfortunately, by default it is not used for - * *test* source compilation and isn't used in tests at all. This - * change makes it available for test compilation. - * - * Note that this isn't going to work properly with qa projects - * but they have no business applying the shadow plugin in the - * firstplace. 
- */ - SourceSet testSourceSet = project.sourceSets.findByName('test') - if (testSourceSet != null) { - testSourceSet.compileClasspath += project.configurations.shadow - } - } } private static configurePrecommit(Project project) { @@ -936,7 +911,7 @@ class BuildPlugin implements Plugin { it.group.startsWith('org.elasticsearch') == false } - project.configurations.compileOnly project.plugins.withType(ShadowPlugin).whenPluginAdded { - project.dependencyLicenses.dependencies += project.configurations.shadow.fileCollection { + project.dependencyLicenses.dependencies += project.configurations.bundle.fileCollection { it.group.startsWith('org.elasticsearch') == false } } @@ -947,7 +922,7 @@ class BuildPlugin implements Plugin { deps.runtimeConfiguration = project.configurations.runtime project.plugins.withType(ShadowPlugin).whenPluginAdded { deps.runtimeConfiguration = project.configurations.create('infoDeps') - deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.shadow) + deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.bundle) } deps.compileOnlyConfiguration = project.configurations.compileOnly project.afterEvaluate { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 5216f084274..a14a3a680da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -157,11 +157,10 @@ public class PluginBuildPlugin extends BuildPlugin { from pluginMetadata // metadata (eg custom security policy) /* * If the plugin is using the shadow plugin then we need to bundle - * "shadow" things rather than the default jar and dependencies so - * we don't hit jar hell. + * that shadow jar. */ from { project.plugins.hasPlugin(ShadowPlugin) ? project.shadowJar : project.jar } - from { project.plugins.hasPlugin(ShadowPlugin) ? 
project.configurations.shadow : project.configurations.runtime - project.configurations.compileOnly } + from project.configurations.runtime - project.configurations.compileOnly // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy index 656d5e0d35a..4299efd95a3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle.precommit +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import org.elasticsearch.gradle.LoggedExec import org.gradle.api.file.FileCollection import org.gradle.api.tasks.OutputFile @@ -39,6 +40,9 @@ public class JarHellTask extends LoggedExec { public JarHellTask() { project.afterEvaluate { FileCollection classpath = project.sourceSets.test.runtimeClasspath + if (project.plugins.hasPlugin(ShadowPlugin)) { + classpath += project.configurations.bundle + } inputs.files(classpath) dependsOn(classpath) description = "Runs CheckJarHell on ${classpath}" diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy index d6babbbfbb8..52b13a56644 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy @@ -18,6 +18,7 @@ */ package org.elasticsearch.gradle.precommit; +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import org.apache.tools.ant.BuildEvent; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.BuildListener; @@ -82,6 +83,11 @@ public class ThirdPartyAuditTask extends AntTask { configuration = project.configurations.findByName('testCompile') } assert configuration != null + if (project.plugins.hasPlugin(ShadowPlugin)) { + Configuration original = configuration + configuration = project.configurations.create('thirdPartyAudit') + configuration.extendsFrom(original, project.configurations.bundle) + } if (compileOnly == null) { classpath = configuration } else { diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 6f5eab6e1db..48169faac2f 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -47,13 +47,13 @@ dependencies { * Everything in the "shadow" configuration is *not* copied into the * shadowJar. 
*/ - shadow "org.elasticsearch:elasticsearch:${version}" - shadow "org.elasticsearch.client:elasticsearch-rest-client:${version}" - shadow "org.elasticsearch.plugin:parent-join-client:${version}" - shadow "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" - shadow "org.elasticsearch.plugin:rank-eval-client:${version}" - shadow "org.elasticsearch.plugin:lang-mustache-client:${version}" - compile project(':x-pack:protocol') + compile "org.elasticsearch:elasticsearch:${version}" + compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" + compile "org.elasticsearch.plugin:parent-join-client:${version}" + compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" + compile "org.elasticsearch.plugin:rank-eval-client:${version}" + compile "org.elasticsearch.plugin:lang-mustache-client:${version}" + bundle project(':x-pack:protocol') testCompile "org.elasticsearch.client:test:${version}" testCompile "org.elasticsearch.test:framework:${version}" diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index d9de422bb43..c1f2bc96271 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -21,5 +21,5 @@ apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test-with-dependencies' dependencies { - testCompile project(path: ':client:rest-high-level', configuration: 'shadow') + testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}" } diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index aab84355581..7ef17715e06 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -30,7 +30,8 @@ buildRestTests.expectedUnconvertedCandidates = [ ] dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } @@ -309,7 +310,7 @@ setups['farequote_datafeed'] = setups['farequote_job'] + ''' "job_id":"farequote", "indexes":"farequote" } -''' +''' setups['ml_filter_safe_domains'] = ''' - do: xpack.ml.put_filter: diff --git a/x-pack/license-tools/build.gradle b/x-pack/license-tools/build.gradle index 183b9ab50e0..4bd17713a2f 100644 --- a/x-pack/license-tools/build.gradle +++ b/x-pack/license-tools/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'elasticsearch.build' dependencies { - compile project(path: xpackModule('core'), configuration: 'shadow') + compile "org.elasticsearch.plugin:x-pack-core:${version}" compile "org.elasticsearch:elasticsearch:${version}" testCompile "org.elasticsearch.test:framework:${version}" } diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index a3b4bea9702..ef428bdd73d 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -27,19 +27,19 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compile project(':x-pack:protocol') - shadow "org.apache.httpcomponents:httpclient:${versions.httpclient}" - shadow "org.apache.httpcomponents:httpcore:${versions.httpcore}" - shadow "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" - shadow "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" + 
bundle project(':x-pack:protocol') + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" + compile "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" - shadow "commons-logging:commons-logging:${versions.commonslogging}" - shadow "commons-codec:commons-codec:${versions.commonscodec}" + compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "commons-codec:commons-codec:${versions.commonscodec}" // security deps - shadow 'com.unboundid:unboundid-ldapsdk:3.2.0' - shadow project(path: ':modules:transport-netty4', configuration: 'runtime') - shadow(project(path: ':plugins:transport-nio', configuration: 'runtime')) { + compile 'com.unboundid:unboundid-ldapsdk:3.2.0' + compile project(path: ':modules:transport-netty4', configuration: 'runtime') + compile(project(path: ':plugins:transport-nio', configuration: 'runtime')) { // TODO: core exclusion should not be necessary, since it is a transitive dep of all plugins exclude group: "org.elasticsearch", module: "elasticsearch-core" } @@ -122,7 +122,7 @@ task testJar(type: Jar) { artifacts { // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions - archives jar + archives shadowJar testArtifacts testJar } diff --git a/x-pack/plugin/deprecation/build.gradle b/x-pack/plugin/deprecation/build.gradle index 3746287d615..d89eb62e884 100644 --- a/x-pack/plugin/deprecation/build.gradle +++ b/x-pack/plugin/deprecation/build.gradle @@ -10,7 +10,7 @@ esplugin { archivesBaseName = 'x-pack-deprecation' dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" } run { diff --git a/x-pack/plugin/graph/build.gradle b/x-pack/plugin/graph/build.gradle index 2b0f592b720..069bfa5fbbe 100644 --- a/x-pack/plugin/graph/build.gradle +++ b/x-pack/plugin/graph/build.gradle @@ -10,7 +10,8 @@ esplugin { archivesBaseName = 'x-pack-graph' dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/logstash/build.gradle b/x-pack/plugin/logstash/build.gradle index 2e158a90ac7..1057a1c8526 100644 --- a/x-pack/plugin/logstash/build.gradle +++ b/x-pack/plugin/logstash/build.gradle @@ -10,9 +10,9 @@ esplugin { archivesBaseName = 'x-pack-logstash' dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - } run { diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 282ce96fa93..7c3594a06cf 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -40,7 +40,8 @@ compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try, compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" dependencies { - compileOnly 
project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // This should not be here testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index 3df77aadccb..cc5a2cd68dd 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle index e914def3507..a24036651d5 100644 --- a/x-pack/plugin/ml/qa/disabled/build.gradle +++ b/x-pack/plugin/ml/qa/disabled/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 84c23add254..a702973fcb0 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -2,7 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index b1893b20c46..0c4304b123e 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -4,7 +4,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('ml'), configuration: 'runtime') testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle b/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle index 7e252afa302..9eac3fdd37a 100644 --- a/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle 
+++ b/x-pack/plugin/ml/qa/no-bootstrap-tests/build.gradle @@ -1,6 +1,6 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle index b62e37894b3..88ca4dd118e 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle index a452ef09a20..e551d577b7b 100644 --- a/x-pack/plugin/monitoring/build.gradle +++ b/x-pack/plugin/monitoring/build.gradle @@ -13,7 +13,8 @@ esplugin { archivesBaseName = 'x-pack-monitoring' dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // monitoring deps diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index 649a89bc2cd..75fd22abacc 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -16,7 +16,8 @@ compileTestJava.options.compilerArgs << "-Xlint:-rawtypes" dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 6db533bbecf..003263669d5 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -12,7 +12,8 @@ esplugin { archivesBaseName = 'x-pack-security' dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly project(path: ':modules:transport-netty4', configuration: 'runtime') compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 1a00b2a0340..426c48aac80 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -4,7 +4,8 @@ archivesBaseName = 'elasticsearch-security-cli' dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the 
testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') compile 'org.bouncycastle:bcprov-jdk15on:1.59' compile 'org.bouncycastle:bcpkix-jdk15on:1.59' testImplementation 'com.google.jimfs:jimfs:1.1' @@ -21,4 +22,4 @@ dependencyLicenses { if (inFipsJvm) { test.enabled = false -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index 039e78c1495..62097e76b97 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -19,7 +19,8 @@ archivesBaseName = 'x-pack-sql' integTest.enabled = false dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly(project(':modules:lang-painless')) { // exclude ASM to not affect featureAware task on Java 10+ exclude group: "org.ow2.asm" diff --git a/x-pack/plugin/upgrade/build.gradle b/x-pack/plugin/upgrade/build.gradle index f95cde7134c..56ce274dd11 100644 --- a/x-pack/plugin/upgrade/build.gradle +++ b/x-pack/plugin/upgrade/build.gradle @@ -14,7 +14,8 @@ esplugin { archivesBaseName = 'x-pack-upgrade' dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index a0feab67463..3a9d759c46d 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -25,7 +25,8 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly project(path: ':modules:transport-netty4', configuration: 'runtime') compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') diff --git a/x-pack/qa/evil-tests/build.gradle b/x-pack/qa/evil-tests/build.gradle index 9b6055ffad7..03f2a569873 100644 --- a/x-pack/qa/evil-tests/build.gradle +++ b/x-pack/qa/evil-tests/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } test { diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 3cf29701206..ab8f9172b69 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -11,7 +11,8 @@ apply plugin: 'elasticsearch.build' test.enabled = false dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile (project(path: xpackModule('security'), configuration: 'runtime')) { // Need to drop the guava 
dependency here or we get a conflict with watcher's guava dependency. // This is total #$%, but the solution is to get the SAML realm (which uses guava) out of security proper @@ -249,7 +250,8 @@ subprojects { check.dependsOn(integTest) dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 59667d9ee78..f680a45bd7f 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/multi-cluster-search-security/build.gradle b/x-pack/qa/multi-cluster-search-security/build.gradle index 5d90f974762..c06ad68d803 100644 --- a/x-pack/qa/multi-cluster-search-security/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/build.gradle @@ -3,7 +3,8 @@ import org.elasticsearch.gradle.test.RestIntegTestTask apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/multi-node/build.gradle b/x-pack/qa/multi-node/build.gradle index 19729cf367e..4369287caba 100644 --- a/x-pack/qa/multi-node/build.gradle +++ b/x-pack/qa/multi-node/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } integTestCluster { diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 24cd6184afa..bb9a9799289 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -5,7 +5,8 @@ apply plugin: 'elasticsearch.standalone-test' apply plugin: 'elasticsearch.vagrantsupport' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle index 097d343b279..97c0e8e17fe 
100644 --- a/x-pack/qa/reindex-tests-with-security/build.gradle +++ b/x-pack/qa/reindex-tests-with-security/build.gradle @@ -2,7 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: ':modules:reindex') diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 21ac4414d86..5774e5d7856 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -7,7 +7,8 @@ import java.nio.charset.StandardCharsets apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // to be moved in a later commit } diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index b983caa8669..548081a8938 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -10,7 +10,8 @@ apply plugin: 'elasticsearch.build' test.enabled = false dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // to be moved in a later commit } @@ -284,7 +285,8 @@ subprojects { check.dependsOn(integTest) dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('watcher')) } diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index 752ec6fb307..9dd5d6d848f 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -6,7 +6,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile 'com.google.jimfs:jimfs:1.1' diff --git 
a/x-pack/qa/security-client-tests/build.gradle b/x-pack/qa/security-client-tests/build.gradle index 97945fb00ef..e676e55a152 100644 --- a/x-pack/qa/security-client-tests/build.gradle +++ b/x-pack/qa/security-client-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-example-spi-extension/build.gradle b/x-pack/qa/security-example-spi-extension/build.gradle index 7aeed3ad62d..aef4fc33f6a 100644 --- a/x-pack/qa/security-example-spi-extension/build.gradle +++ b/x-pack/qa/security-example-spi-extension/build.gradle @@ -8,7 +8,7 @@ esplugin { } dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-migrate-tests/build.gradle b/x-pack/qa/security-migrate-tests/build.gradle index 3a8a0cf1005..abc3564ca13 100644 --- a/x-pack/qa/security-migrate-tests/build.gradle +++ b/x-pack/qa/security-migrate-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-setup-password-tests/build.gradle b/x-pack/qa/security-setup-password-tests/build.gradle index adb159acf6f..c0801a38b57 100644 --- a/x-pack/qa/security-setup-password-tests/build.gradle +++ b/x-pack/qa/security-setup-password-tests/build.gradle @@ -2,7 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/smoke-test-graph-with-security/build.gradle b/x-pack/qa/smoke-test-graph-with-security/build.gradle index 9cdfaffccfb..f0f819b46d4 100644 --- a/x-pack/qa/smoke-test-graph-with-security/build.gradle +++ b/x-pack/qa/smoke-test-graph-with-security/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } // bring in graph rest test suite diff --git a/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle b/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle index 8ce0cde7657..7813ff3d3d5 100644 --- a/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle +++ b/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle @@ -2,7 +2,7 @@ apply plugin: 
'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('watcher')) testCompile project(path: xpackModule('monitoring')) } diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index 53533bd9b87..4f338d07fb5 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -15,7 +15,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } String outputDir = "${buildDir}/generated-resources/${project.name}" @@ -138,4 +138,4 @@ processTestResources { inputs.properties(expansions) MavenFilteringHack.filter(it, expansions) } -} \ No newline at end of file +} diff --git a/x-pack/qa/smoke-test-plugins/build.gradle b/x-pack/qa/smoke-test-plugins/build.gradle index b66903af18b..3b7661eeeb0 100644 --- a/x-pack/qa/smoke-test-plugins/build.gradle +++ b/x-pack/qa/smoke-test-plugins/build.gradle @@ -4,7 +4,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } ext.pluginsCount = 0 diff --git a/x-pack/qa/smoke-test-security-with-mustache/build.gradle b/x-pack/qa/smoke-test-security-with-mustache/build.gradle index d921c5f5b66..48b525ba3da 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/build.gradle +++ b/x-pack/qa/smoke-test-security-with-mustache/build.gradle @@ -2,7 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') } diff --git a/x-pack/qa/smoke-test-watcher-with-security/build.gradle b/x-pack/qa/smoke-test-watcher-with-security/build.gradle index a843641be80..50e217b28b2 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/build.gradle +++ b/x-pack/qa/smoke-test-watcher-with-security/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } // bring in watcher rest test suite diff --git a/x-pack/qa/smoke-test-watcher/build.gradle b/x-pack/qa/smoke-test-watcher/build.gradle index dc87248df61..5923afcacad 100644 --- a/x-pack/qa/smoke-test-watcher/build.gradle +++ b/x-pack/qa/smoke-test-watcher/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('watcher'), 
configuration: 'runtime') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') testCompile project(path: ':modules:lang-painless', configuration: 'runtime') diff --git a/x-pack/qa/sql/security/build.gradle b/x-pack/qa/sql/security/build.gradle index f02886f80a1..15f7734f942 100644 --- a/x-pack/qa/sql/security/build.gradle +++ b/x-pack/qa/sql/security/build.gradle @@ -1,5 +1,5 @@ dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } Project mainProject = project @@ -20,7 +20,7 @@ subprojects { } dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" } integTestCluster { diff --git a/x-pack/qa/third-party/hipchat/build.gradle b/x-pack/qa/third-party/hipchat/build.gradle index 03b6c319698..2b2ee7fcbbf 100644 --- a/x-pack/qa/third-party/hipchat/build.gradle +++ b/x-pack/qa/third-party/hipchat/build.gradle @@ -4,7 +4,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle index 3814c8e9a53..283f9688699 100644 --- a/x-pack/qa/third-party/jira/build.gradle +++ b/x-pack/qa/third-party/jira/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle index c0f337e160e..12758989d0f 100644 --- a/x-pack/qa/third-party/pagerduty/build.gradle +++ b/x-pack/qa/third-party/pagerduty/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle index 431752765f3..f1bcd98cff6 100644 --- a/x-pack/qa/third-party/slack/build.gradle +++ b/x-pack/qa/third-party/slack/build.gradle @@ -5,7 +5,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/transport-client-tests/build.gradle b/x-pack/qa/transport-client-tests/build.gradle index a94ad8fd592..3ece6dd1147 100644 --- a/x-pack/qa/transport-client-tests/build.gradle +++ b/x-pack/qa/transport-client-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'shadow') + testCompile 
"org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/test/feature-aware/build.gradle b/x-pack/test/feature-aware/build.gradle index f6a1f6cb16f..11b0e67183c 100644 --- a/x-pack/test/feature-aware/build.gradle +++ b/x-pack/test/feature-aware/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'elasticsearch.build' dependencies { compile 'org.ow2.asm:asm:6.2' compile "org.elasticsearch:elasticsearch:${version}" - compile project(path: xpackModule('core'), configuration: 'shadow') + compile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile "org.elasticsearch.test:framework:${version}" } diff --git a/x-pack/transport-client/build.gradle b/x-pack/transport-client/build.gradle index 7155dad5ee6..2e350ef98ff 100644 --- a/x-pack/transport-client/build.gradle +++ b/x-pack/transport-client/build.gradle @@ -10,7 +10,7 @@ archivesBaseName = 'x-pack-transport' dependencies { // this "api" dependency looks weird, but it is correct, as it contains // all of x-pack for now, and transport client will be going away in the future. - compile project(path: xpackModule('core'), configuration: 'shadow') + compile "org.elasticsearch.plugin:x-pack-core:${version}" compile "org.elasticsearch.client:transport:${version}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" From 07b3ff9fe711c076e12ac066472f6a624ec78bd4 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Wed, 22 Aug 2018 11:26:53 +1000 Subject: [PATCH 17/48] Add beta label to MSI on install Elasticsearch page (#28126) The main installation instructions page for the Windows MSI installer includes a header at the top to indicate that the installer is in beta, but the Installing Elasticsearch page does not. This commit adds the beta label to the MSI entry within the installation options. --- docs/reference/setup/install.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index c0ebfb60fa7..26a207824af 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -41,6 +41,8 @@ Elasticsearch website or from our RPM repository. `msi`:: +beta[] ++ The `msi` package is suitable for installation on Windows 64-bit systems with at least .NET 4.5 framework installed, and is the easiest choice for getting started with Elasticsearch on Windows. MSIs may be downloaded from the Elasticsearch website. 
From e2ea83d2174ad43027bf8756e493e36d4c8a738c Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 21 Aug 2018 21:02:28 -0500 Subject: [PATCH 18/48] HLRC: Add ML Get Job (#32960) * HLRC: Adding GET ML Job info API * HLRC: Adding GET Job ML API * Fixing QueryPage license header * Adding serialization tests, addressing minor issues * Renaming querypage, changing the dependency on it * Making things immutable * Fixing build failure due to method rename --- .../client/MLRequestConverters.java | 19 +++ .../client/MachineLearningClient.java | 43 +++++ .../client/MLRequestConvertersTests.java | 19 +++ .../client/MachineLearningIT.java | 41 +++++ .../MlClientDocumentationIT.java | 62 ++++++++ docs/java-rest/high-level/ml/get-job.asciidoc | 57 +++++++ .../high-level/supported-apis.asciidoc | 2 + .../xpack/ml/AbstractResultResponse.java | 62 ++++++++ .../protocol/xpack/ml/GetJobRequest.java | 148 ++++++++++++++++++ .../protocol/xpack/ml/GetJobResponse.java | 89 +++++++++++ .../protocol/xpack/ml/GetJobRequestTests.java | 69 ++++++++ .../xpack/ml/GetJobResponseTests.java | 53 +++++++ .../xpack/ml/job/config/JobTests.java | 8 +- 13 files changed, 670 insertions(+), 2 deletions(-) create mode 100644 docs/java-rest/high-level/ml/get-job.asciidoc create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/AbstractResultResponse.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobRequest.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobResponse.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobRequestTests.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 7178a9c7fc3..9a4e825be7c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -20,12 +20,14 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.RequestConverters.EndpointBuilder; import org.elasticsearch.common.Strings; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -50,6 +52,23 @@ final class MLRequestConverters { return request; } + static Request getJob(GetJobRequest getJobRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds())) + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + if (getJobRequest.isAllowNoJobs() != null) { + params.putParam("allow_no_jobs", Boolean.toString(getJobRequest.isAllowNoJobs())); + } + + return request; + } + static Request 
openJob(OpenJobRequest openJobRequest) throws IOException { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 2073d613ac6..90acabfbdd8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -23,6 +23,8 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -84,6 +86,47 @@ public final class MachineLearningClient { Collections.emptySet()); } + /** + * Gets one or more Machine Learning job configuration info. + * + *
<p>
+ * For additional info + * see + *

+ * @param request {@link GetJobRequest} request containing a list of jobId(s) and additional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return {@link GetJobResponse} response object containing + * the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} objects and the number of jobs found + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::getJob, + options, + GetJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Gets one or more Machine Learning job configuration info, asynchronously. + * + *
<p>
+ * For additional info + * see + *

+ * @param request {@link GetJobRequest} request containing a list of jobId(s) and additional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified with {@link GetJobResponse} upon request completion + */ + public void getJobAsync(GetJobRequest request, RequestOptions options, ActionListener<GetJobResponse> listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::getJob, + options, + GetJobResponse::fromXContent, + listener, + Collections.emptySet()); + } + + /** + * Deletes the given Machine Learning Job + *
<p>
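Pulling the pieces of this patch together, the new request, converter, client method, and response classes compose along these lines; a minimal sketch assuming an already configured `RestHighLevelClient` (the job id wildcard and method wrapper are illustrative, mirroring the calls exercised in the tests below):

----
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.protocol.xpack.ml.GetJobRequest;
import org.elasticsearch.protocol.xpack.ml.GetJobResponse;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;

public class GetJobExample {
    // Prints the configurations of all jobs matching "my-jobs-*"
    static void printMatchingJobs(RestHighLevelClient client) throws Exception {
        GetJobRequest request = new GetJobRequest("my-jobs-*"); // ids may contain wildcards or _all
        request.setAllowNoJobs(true); // do not fail when the wildcard matches no jobs

        GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
        long numberOfJobs = response.count(); // number of jobs found
        List<Job> jobs = response.jobs();     // the matching Job configurations
        System.out.println(numberOfJobs + " job(s) found: " + jobs);
    }
}
----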
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index a313b99a54f..9ed09d06b72 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -20,12 +20,14 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; @@ -54,6 +56,23 @@ public class MLRequestConvertersTests extends ESTestCase { } } + public void testGetJob() { + GetJobRequest getJobRequest = new GetJobRequest(); + + Request request = MLRequestConverters.getJob(getJobRequest); + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors", request.getEndpoint()); + assertFalse(request.getParameters().containsKey("allow_no_jobs")); + + getJobRequest = new GetJobRequest("job1", "jobs*"); + getJobRequest.setAllowNoJobs(true); + request = MLRequestConverters.getJob(getJobRequest); + + assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*", request.getEndpoint()); + assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs")); + } + public void testOpenJob() throws Exception { String jobId = "some-job-id"; OpenJobRequest openJobRequest = new OpenJobRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 2c0fc70b848..cec5dd7ccf8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -24,6 +24,8 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -37,7 +39,11 @@ import org.junit.After; import java.io.IOException; import java.util.Arrays; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import static org.hamcrest.CoreMatchers.hasItems; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; public class MachineLearningIT extends ESRestHighLevelClientTestCase { @@ -59,6 +65,41 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { 
assertThat(createdJob.getJobType(), is(Job.ANOMALY_DETECTOR_JOB_TYPE)); } + public void testGetJob() throws Exception { + String jobId1 = randomValidJobId(); + String jobId2 = randomValidJobId(); + + Job job1 = buildJob(jobId1); + Job job2 = buildJob(jobId2); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT); + machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT); + + GetJobRequest request = new GetJobRequest(jobId1, jobId2); + + // Test getting specific jobs + GetJobResponse response = execute(request, machineLearningClient::getJob, machineLearningClient::getJobAsync); + + assertEquals(2, response.count()); + assertThat(response.jobs(), hasSize(2)); + assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2)); + + // Test getting all jobs explicitly + request = GetJobRequest.getAllJobsRequest(); + response = execute(request, machineLearningClient::getJob, machineLearningClient::getJobAsync); + + assertTrue(response.count() >= 2L); + assertTrue(response.jobs().size() >= 2L); + assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2)); + + // Test getting all jobs implicitly + response = execute(new GetJobRequest(), machineLearningClient::getJob, machineLearningClient::getJobAsync); + + assertTrue(response.count() >= 2L); + assertTrue(response.jobs().size() >= 2L); + assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2)); + } + public void testDeleteJob() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 6e48036419b..73531bae553 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -30,6 +30,8 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -46,8 +48,11 @@ import java.util.Date; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { @@ -134,6 +139,63 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + public void testGetJob() throws Exception { + RestHighLevelClient client = highLevelClient(); + + String jobId = "get-machine-learning-job1"; + + Job job = MachineLearningIT.buildJob("get-machine-learning-job1"); + 
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job2"); + client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT); + + { + //tag::x-pack-ml-get-job-request + GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); //<1> + request.setAllowNoJobs(true); //<2> + //end::x-pack-ml-get-job-request + + //tag::x-pack-ml-get-job-execute + GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT); + long numberOfJobs = response.count(); //<1> + List<Job> jobs = response.jobs(); //<2> + //end::x-pack-ml-get-job-execute + + assertEquals(2, response.count()); + assertThat(response.jobs(), hasSize(2)); + assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), + containsInAnyOrder(job.getId(), secondJob.getId())); + } + { + GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); + + // tag::x-pack-ml-get-job-listener + ActionListener<GetJobResponse> listener = new ActionListener<GetJobResponse>() { + @Override + public void onResponse(GetJobResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-ml-get-job-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-get-job-execute-async + client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-ml-get-job-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testDeleteJob() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/ml/get-job.asciidoc b/docs/java-rest/high-level/ml/get-job.asciidoc new file mode 100644 index 00000000000..4ecf70e8e65 --- /dev/null +++ b/docs/java-rest/high-level/ml/get-job.asciidoc @@ -0,0 +1,57 @@ +[[java-rest-high-x-pack-ml-get-job]] +=== Get Job API + +The Get Job API provides the ability to get {ml} jobs in the cluster. +It accepts a `GetJobRequest` object and responds +with a `GetJobResponse` object. + +[[java-rest-high-x-pack-ml-get-job-request]] +==== Get Job Request + +A `GetJobRequest` object can have any number of `jobId` or `groupName` +entries. However, they all must be non-null. An empty list is the same as +requesting all jobs. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-request] +-------------------------------------------------- +<1> Constructing a new request referencing existing `jobIds`; it can contain wildcards +<2> Whether to ignore if a wildcard expression matches no jobs. + (This includes the `_all` string or when no jobs have been specified) + +[[java-rest-high-x-pack-ml-get-job-execution]] +==== Execution + +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-execute] +-------------------------------------------------- +<1> `getCount()` from the `GetJobResponse` indicates the number of jobs found +<2> `getJobs()` is the collection of {ml} `Job` objects found + +[[java-rest-high-x-pack-ml-get-job-execution-async]] +==== Asynchronous Execution + +The request can also be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-execute-async] +-------------------------------------------------- +<1> The `GetJobRequest` to execute and the `ActionListener` to use when +the execution completes + +The method does not block and returns immediately. The passed `ActionListener` is used +to notify the caller of completion. A typical `ActionListener` for `GetJobResponse` may +look like + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index b3de26e56bd..c7b46b39962 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -205,11 +205,13 @@ include::licensing/delete-license.asciidoc[] The Java High Level REST Client supports the following Machine Learning APIs: * <> +* <> * <> * <> * <> include::ml/put-job.asciidoc[] +include::ml/get-job.asciidoc[] include::ml/delete-job.asciidoc[] include::ml/open-job.asciidoc[] include::ml/close-job.asciidoc[] diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/AbstractResultResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/AbstractResultResponse.java new file mode 100644 index 00000000000..64f350933c9 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/AbstractResultResponse.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Abstract class that provides a list of results and their count. + */ +public abstract class AbstractResultResponse<T extends ToXContent> extends ActionResponse implements ToXContentObject { + + public static final ParseField COUNT = new ParseField("count"); + + private final ParseField resultsField; + protected final List<T> results; + protected final long count; + + AbstractResultResponse(ParseField resultsField, List<T> results, long count) { + this.resultsField = Objects.requireNonNull(resultsField, + "[results_field] must not be null"); + this.results = Collections.unmodifiableList(results); + this.count = count; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(COUNT.getPreferredName(), count); + builder.field(resultsField.getPreferredName(), results); + builder.endObject(); + return builder; + } + + public long count() { + return count; + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobRequest.java new file mode 100644 index 00000000000..b0377c86fdc --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobRequest.java @@ -0,0 +1,148 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * Request object to get {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} objects with the matching `jobId`s or + * `groupName`s.
+ * + * `_all` explicitly gets all the jobs in the cluster + * An empty request (no `jobId`s) implicitly gets all the jobs in the cluster + */ +public class GetJobRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField JOB_IDS = new ParseField("job_ids"); + public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); + + private static final String ALL_JOBS = "_all"; + private final List<String> jobIds; + private Boolean allowNoJobs; + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<GetJobRequest, Void> PARSER = new ConstructingObjectParser<>( + "get_job_request", + true, a -> new GetJobRequest(a[0] == null ? new ArrayList<>() : (List<String>) a[0])); + + static { + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), JOB_IDS); + PARSER.declareBoolean(GetJobRequest::setAllowNoJobs, ALLOW_NO_JOBS); + } + + /** + * Helper method to create a query that will get ALL jobs + * @return new {@link GetJobRequest} object searching for the jobId "_all" + */ + public static GetJobRequest getAllJobsRequest() { + return new GetJobRequest(ALL_JOBS); + } + + /** + * Get the specified {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} configurations via their unique jobIds + * @param jobIds must not contain any null values + */ + public GetJobRequest(String... jobIds) { + this(Arrays.asList(jobIds)); + } + + GetJobRequest(List<String> jobIds) { + if (jobIds.stream().anyMatch(Objects::isNull)) { + throw new NullPointerException("jobIds must not contain null values"); + } + this.jobIds = new ArrayList<>(jobIds); + } + + /** + * All the jobIds for which to get configuration information + */ + public List<String> getJobIds() { + return jobIds; + } + + /** + * See {@link GetJobRequest#isAllowNoJobs()} + * @param allowNoJobs value of "allow_no_jobs" + */ + public void setAllowNoJobs(boolean allowNoJobs) { + this.allowNoJobs = allowNoJobs; + } + + /** + * Whether to ignore if a wildcard expression matches no jobs. + * + * If this is `false`, then an error is returned when a wildcard (or `_all`) does not match any jobs + */ + public Boolean isAllowNoJobs() { + return allowNoJobs; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(jobIds, allowNoJobs); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || other.getClass() != getClass()) { + return false; + } + + GetJobRequest that = (GetJobRequest) other; + return Objects.equals(jobIds, that.jobIds) && + Objects.equals(allowNoJobs, that.allowNoJobs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (jobIds.isEmpty() == false) { + builder.field(JOB_IDS.getPreferredName(), jobIds); + } + + if (allowNoJobs != null) { + builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs); + } + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobResponse.java new file mode 100644 index 00000000000..4db542dc152 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetJobResponse.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Contains a {@link List} of the found {@link Job} objects and the total count found + */ +public class GetJobResponse extends AbstractResultResponse<Job> { + + public static final ParseField RESULTS_FIELD = new ParseField("jobs"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<GetJobResponse, Void> PARSER = + new ConstructingObjectParser<>("jobs_response", true, + a -> new GetJobResponse((List<Job.Builder>) a[0], (long) a[1])); + + static { + PARSER.declareObjectArray(constructorArg(), Job.PARSER, RESULTS_FIELD); + PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); + } + + GetJobResponse(List<Job.Builder> jobBuilders, long count) { + super(RESULTS_FIELD, jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), count); + } + + /** + * The collection of {@link Job} objects found in the query + */ + public List<Job> jobs() { + return results; + } + + public static GetJobResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public int hashCode() { + return Objects.hash(results, count); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + GetJobResponse other = (GetJobResponse) obj; + return Objects.equals(results, other.results) && count == other.count; + } + + @Override + public final String toString() { + return Strings.toString(this); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobRequestTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobRequestTests.java new file mode 100644 index 00000000000..b94b704fbf6 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobRequestTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetJobRequestTests extends AbstractXContentTestCase<GetJobRequest> { + + public void testAllJobsRequest() { + GetJobRequest request = GetJobRequest.getAllJobsRequest(); + + assertEquals(request.getJobIds().size(), 1); + assertEquals(request.getJobIds().get(0), "_all"); + } + + public void testNewWithJobId() { + Exception exception = expectThrows(NullPointerException.class, () -> new GetJobRequest("job", null)); + assertEquals(exception.getMessage(), "jobIds must not contain null values"); + } + + @Override + protected GetJobRequest createTestInstance() { + int jobCount = randomIntBetween(0, 10); + List<String> jobIds = new ArrayList<>(jobCount); + + for (int i = 0; i < jobCount; i++) { + jobIds.add(randomAlphaOfLength(10)); + } + + GetJobRequest request = new GetJobRequest(jobIds); + + if (randomBoolean()) { + request.setAllowNoJobs(randomBoolean()); + } + + return request; + } + + @Override + protected GetJobRequest doParseInstance(XContentParser parser) throws IOException { + return GetJobRequest.PARSER.parse(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobResponseTests.java new file mode 100644 index 00000000000..79d4d678b92 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetJobResponseTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.config.JobTests; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetJobResponseTests extends AbstractXContentTestCase<GetJobResponse> { + + @Override + protected GetJobResponse createTestInstance() { + + int count = randomIntBetween(1, 5); + List<Job.Builder> results = new ArrayList<>(count); + for (int i = 0; i < count; i++) { + results.add(JobTests.createRandomizedJobBuilder()); + } + + return new GetJobResponse(results, count); + } + + @Override + protected GetJobResponse doParseInstance(XContentParser parser) throws IOException { + return GetJobResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java index 7ba4946efa7..61931743403 100644 --- a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java @@ -210,7 +210,7 @@ public class JobTests extends AbstractXContentTestCase<Job> { return new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build())); } - public static Job createRandomizedJob() { + public static Job.Builder createRandomizedJobBuilder() { String jobId = randomValidJobId(); Job.Builder builder = new Job.Builder(jobId); if (randomBoolean()) { @@ -265,7 +265,11 @@ public class JobTests extends AbstractXContentTestCase<Job> { if (randomBoolean()) { builder.setResultsIndexName(randomValidJobId()); } - return builder.build(); + return builder; + } + + public static Job createRandomizedJob() { + return createRandomizedJobBuilder().build(); } @Override From 9f588c953f514a3fbdef0e4d675614203ef1e8ee Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Wed, 22 Aug 2018 15:23:39 +1000 Subject: [PATCH 19/48] [TEST] Split tests and skip file permission test on Windows (#32781) This change splits the keytab file test cases into separate tests instead of covering the different branches through randomized testing in a single test. On the Windows platform, the keytab file permission test required additional security permissions for the test framework. As this was the only test that required those permissions, it is skipped on Windows; the same scenario is still covered in *nix environments.
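Condensed from the test code below, the POSIX branch of the permission setup boils down to the following standalone sketch (the path and file contents are illustrative); the file is created atomically with an empty permission set, so even its owner cannot read it back afterwards:

----
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.EnumSet;
import java.util.Set;

public class UnreadableKeytabSketch {
    public static void main(String[] args) throws Exception {
        Path keytab = Paths.get("test.keytab"); // illustrative path
        // Create the file with no permission bits ("---------"); the write still
        // succeeds because permissions are only enforced on subsequent opens.
        Set<PosixFilePermission> perms = PosixFilePermissions.fromString("---------");
        FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms);
        try (SeekableByteChannel channel = Files.newByteChannel(keytab,
                EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), attr)) {
            channel.write(ByteBuffer.wrap(new byte[] { 0x05, 0x02 })); // dummy keytab bytes
        }
        System.out.println("readable: " + Files.isReadable(keytab)); // false for a regular user on POSIX
    }
}
----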
Closes #32768 --- .../authc/kerberos/KerberosRealmTests.java | 101 +++++++++--------- 1 file changed, 48 insertions(+), 53 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 80e61a5545f..9e6fafc481d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc.kerberos; +import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -13,28 +14,27 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.protocol.xpack.security.User; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.protocol.xpack.security.User; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData; import org.ietf.jgss.GSSException; import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.attribute.AclEntry; -import java.nio.file.attribute.AclEntryPermission; -import java.nio.file.attribute.AclEntryType; -import java.nio.file.attribute.AclFileAttributeView; -import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; -import java.nio.file.attribute.UserPrincipal; import java.util.Arrays; -import java.util.List; +import java.util.EnumSet; import java.util.Locale; import java.util.Set; @@ -110,52 +110,47 @@ public class KerberosRealmTests extends KerberosRealmTestCase { assertThat(future.actionGet(), is(nullValue())); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32768") - public void testKerberosRealmWithInvalidKeytabPathConfigurations() throws IOException { - final String keytabPathCase = randomFrom("keytabPathAsDirectory", "keytabFileDoesNotExist", "keytabPathWithNoReadPermissions"); - final String expectedErrorMessage; - final String keytabPath; - switch (keytabPathCase) { - case "keytabPathAsDirectory": - final String dirName = randomAlphaOfLength(5); - Files.createDirectory(dir.resolve(dirName)); - keytabPath = dir.resolve(dirName).toString(); - expectedErrorMessage = "configured service key tab file [" + keytabPath + "] is a directory"; - break; - case "keytabFileDoesNotExist": - keytabPath = dir.resolve(randomAlphaOfLength(5) + ".keytab").toString(); - expectedErrorMessage = "configured service key tab file [" + keytabPath + "] does not exist"; - break;
- case "keytabPathWithNoReadPermissions": - final String fileName = randomAlphaOfLength(5); - final Path keytabFilePath = Files.createTempFile(dir, fileName, ".keytab"); - Files.write(keytabFilePath, randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8)); - final Set supportedAttributes = keytabFilePath.getFileSystem().supportedFileAttributeViews(); - if (supportedAttributes.contains("posix")) { - final PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(keytabFilePath, PosixFileAttributeView.class); - fileAttributeView.setPermissions(PosixFilePermissions.fromString("---------")); - } else if (supportedAttributes.contains("acl")) { - final UserPrincipal principal = Files.getOwner(keytabFilePath); - final AclFileAttributeView view = Files.getFileAttributeView(keytabFilePath, AclFileAttributeView.class); - final AclEntry entry = AclEntry.newBuilder() - .setType(AclEntryType.DENY) - .setPrincipal(principal) - .setPermissions(AclEntryPermission.READ_DATA, AclEntryPermission.READ_ATTRIBUTES).build(); - final List acl = view.getAcl(); - acl.add(0, entry); - view.setAcl(acl); - } else { - throw new UnsupportedOperationException( - String.format(Locale.ROOT, "Don't know how to make file [%s] non-readable on a file system with attributes [%s]", - keytabFilePath, supportedAttributes)); - } - keytabPath = keytabFilePath.toString(); - expectedErrorMessage = "configured service key tab file [" + keytabPath + "] must have read permission"; - break; - default: - throw new IllegalArgumentException("Unknown test case :" + keytabPathCase); - } + public void testKerberosRealmThrowsErrorWhenKeytabPathIsConfiguredAsDirectory() throws IOException { + final String dirName = randomAlphaOfLength(5); + Files.createDirectory(dir.resolve(dirName)); + final String keytabPath = dir.resolve(dirName).toString(); + final String expectedErrorMessage = "configured service key tab file [" + keytabPath + "] is a directory"; + assertKerberosRealmConstructorFails(keytabPath, expectedErrorMessage); + } + + public void testKerberosRealmThrowsErrorWhenKeytabFileDoesNotExist() throws IOException { + final String keytabPath = dir.resolve(randomAlphaOfLength(5) + ".keytab").toString(); + final String expectedErrorMessage = "configured service key tab file [" + keytabPath + "] does not exist"; + + assertKerberosRealmConstructorFails(keytabPath, expectedErrorMessage); + } + + public void testKerberosRealmThrowsErrorWhenKeytabFileHasNoReadPermissions() throws IOException { + assumeFalse("Not running this test on Windows, as it requires additional access permissions for test framework.", + Constants.WINDOWS); + final Set supportedAttributes = dir.getFileSystem().supportedFileAttributeViews(); + final String keytabFileName = randomAlphaOfLength(5) + ".keytab"; + final Path keytabPath; + if (supportedAttributes.contains("posix")) { + final Set filePerms = PosixFilePermissions.fromString("---------"); + final FileAttribute> fileAttributes = PosixFilePermissions.asFileAttribute(filePerms); + try (SeekableByteChannel byteChannel = Files.newByteChannel(dir.resolve(keytabFileName), + EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), fileAttributes)) { + byteChannel.write(ByteBuffer.wrap(randomByteArrayOfLength(10))); + } + keytabPath = dir.resolve(keytabFileName); + } else { + throw new UnsupportedOperationException( + String.format(Locale.ROOT, "Don't know how to make file [%s] non-readable on a file system with attributes [%s]", + dir.resolve(keytabFileName), supportedAttributes)); + } + final String 
expectedErrorMessage = "configured service key tab file [" + keytabPath + "] must have read permission"; + + assertKerberosRealmConstructorFails(keytabPath.toString(), expectedErrorMessage); + } + + private void assertKerberosRealmConstructorFails(final String keytabPath, final String expectedErrorMessage) { settings = KerberosTestCase.buildKerberosRealmSettings(keytabPath, 100, "10m", true, randomBoolean()); config = new RealmConfig("test-kerb-realm", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); From 0a4b55c9c0f1818d2199cc8468b70b9cc98e5ae8 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 22 Aug 2018 08:37:50 +0300 Subject: [PATCH 20/48] [DOCS] Add RequestedAuthnContext Documentation (#32946) Add documentation for #31238 - Add documentation for the req_authn_context_class_ref setting - Add a section in SAML Guide regarding the use of SAML Authentication Context. --- .../settings/security-settings.asciidoc | 9 +++++ .../authentication/saml-guide.asciidoc | 40 +++++++++++++++++-- 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index 74deee3473f..f1d8b555d56 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -861,6 +861,15 @@ The maximum amount of skew that can be tolerated between the IdP's clock and the {es} node's clock. Defaults to `3m` (3 minutes). +`req_authn_context_class_ref`:: +A comma separated list of Authentication Context Class Reference values to be +included in the Requested Authentication Context when requesting the IdP to +authenticate the current user. The Authentication Context of the corresponding +authentication response should contain at least one of the requested values. ++ +For more information, see +{stack-ov}/saml-guide-authentication.html#req-authn-context[Requesting specific authentication methods]. + [float] [[ref-saml-signing-settings]] ===== SAML realm signing settings diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/x-pack/docs/en/security/authentication/saml-guide.asciidoc index 633140f1238..4facceff81c 100644 --- a/x-pack/docs/en/security/authentication/saml-guide.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-guide.asciidoc @@ -76,12 +76,13 @@ binding. There are five configuration steps to enable SAML authentication in {es}: -. Enable SSL/TLS for HTTP -. Enable the Token Service -. Create one or more SAML realms -. Configure role mappings +. <> +. <> +. <> +. <> . Generate a SAML Metadata file for use by your Identity Provider _(optional)_ +[[saml-enable-http]] ==== Enable TLS for HTTP If your {es} cluster is operating in production mode, then you must @@ -91,6 +92,7 @@ authentication. For more information, see {ref}/configuring-tls.html#tls-http[Encrypting HTTP Client Communications]. +[[saml-enable-token]] ==== Enable the token service The {es} SAML implementation makes use of the {es} Token Service. This service @@ -356,6 +358,35 @@ address such as `admin@staff.example.com.attacker.net`. It is important that you make sure your regular expressions are as precise as possible so that you do not inadvertently open an avenue for user impersonation attacks. 
+[[req-authn-context]] +==== Requesting specific authentication methods + +It is sometimes necessary for a SAML SP to be able to impose specific +restrictions regarding the authentication that will take place at an IdP, +in order to assess the level of confidence that it can place in +the corresponding authentication response. The restrictions might have to do +with the authentication method (password, client certificates, etc.), the +user identification method during registration, and other details. {es} implements +https://docs.oasis-open.org/security/saml/v2.0/saml-authn-context-2.0-os.pdf[SAML 2.0 Authentication Context], which can be used for this purpose as defined in the SAML 2.0 Core +Specification. + +In short, the SAML SP defines a set of Authentication Context Class Reference +values, which describe the restrictions to be imposed on the IdP, and sends these +in the Authentication Request. The IdP attempts to grant these restrictions. +If it cannot grant them, the authentication attempt fails. If the user is +successfully authenticated, the Authentication Statement of the SAML Response +contains an indication of the restrictions that were satisfied. + +You can define the Authentication Context Class Reference values by using the `req_authn_context_class_ref` option in the SAML realm configuration. See +{ref}/security-settings.html#ref-saml-settings[SAML realm settings]. + +{es} supports only the `exact` comparison method for the Authentication Context. +When it receives the Authentication Response from the IdP, {es} examines the +value of the Authentication Context Class Reference that is part of the +Authentication Statement of the SAML Assertion. If it matches one of the +requested values, the authentication is considered successful. Otherwise, the +authentication attempt fails. + [[saml-logout]] ==== SAML logout @@ -573,6 +604,7 @@ The passphrase for the keystore, if the file is encrypted. This is a {ref}/secure-settings.html[secure setting] that must be set with the `elasticsearch-keystore` tool.
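To make the new `req_authn_context_class_ref` setting documented above concrete, a minimal sketch of how it sits alongside the other realm settings, expressed with the `Settings` builder (the realm name `saml1`, the `order` value, and the chosen class reference are illustrative; in a deployment these keys live in `elasticsearch.yml`):

----
import org.elasticsearch.common.settings.Settings;

public class SamlRealmSettingsSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            .put("xpack.security.authc.realms.saml1.type", "saml")
            .put("xpack.security.authc.realms.saml1.order", 2)
            // comma separated list of class references; matched with the "exact" method
            .put("xpack.security.authc.realms.saml1.req_authn_context_class_ref",
                    "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport")
            .build();
        System.out.println(settings.get("xpack.security.authc.realms.saml1.req_authn_context_class_ref"));
    }
}
----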
+[[saml-sp-metadata]] === Generating SP metadata Some Identity Providers support importing a metadata file from the Service From 82d10b484a20514e70f6d773544ffea62e57f711 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Wed, 22 Aug 2018 09:05:22 +0300 Subject: [PATCH 21/48] Run forbidden api checks with runtimeJavaVersion (#32947) Run forbidden APIs checks with the runtime Java version --- .../gradle/precommit/PrecommitTasks.groovy | 108 +++++------- ...ExportElasticsearchBuildResourcesTask.java | 3 +- .../precommit/ForbiddenApisCliTask.java | 154 ++++++++++++++++++ client/rest-high-level/build.gradle | 6 +- client/rest/build.gradle | 13 +- client/sniffer/build.gradle | 7 +- client/test/build.gradle | 8 +- client/transport/build.gradle | 6 +- .../tools/java-version-checker/build.gradle | 6 +- distribution/tools/launchers/build.gradle | 12 +- libs/cli/build.gradle | 5 +- libs/core/build.gradle | 4 +- libs/dissect/build.gradle | 4 +- libs/grok/build.gradle | 4 +- libs/nio/build.gradle | 5 +- libs/secure-sm/build.gradle | 5 +- libs/x-content/build.gradle | 4 +- plugins/analysis-icu/build.gradle | 4 +- qa/vagrant/build.gradle | 6 +- test/framework/build.gradle | 7 +- test/logger-usage/build.gradle | 4 +- .../ml/log-structure-finder/build.gradle | 4 +- x-pack/plugin/security/build.gradle | 3 +- x-pack/plugin/sql/jdbc/build.gradle | 2 +- x-pack/plugin/sql/sql-action/build.gradle | 5 +- x-pack/plugin/sql/sql-cli/build.gradle | 8 +- x-pack/plugin/sql/sql-client/build.gradle | 2 +- x-pack/plugin/sql/sql-proto/build.gradle | 5 +- x-pack/qa/sql/build.gradle | 4 +- x-pack/transport-client/build.gradle | 5 +- 30 files changed, 241 insertions(+), 172 deletions(-) create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 42dc29df058..b63b1f40d80 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -18,18 +18,12 @@ */ package org.elasticsearch.gradle.precommit -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis -import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask -import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task -import org.gradle.api.file.FileCollection +import org.gradle.api.artifacts.Configuration import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.plugins.quality.Checkstyle -import org.gradle.api.tasks.JavaExec -import org.gradle.api.tasks.StopExecutionException - /** * Validation tasks which should be run before committing. These run before tests.
*/ public static Task create(Project project, boolean includeDependencyLicenses) { List precommitTasks = [ - configureForbiddenApis(project), configureCheckstyle(project), + configureForbiddenApisCli(project), configureNamingConventions(project), project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), project.tasks.create('licenseHeaders', LicenseHeadersTask.class), @@ -48,9 +42,6 @@ class PrecommitTasks { project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) ] - // Configure it but don't add it as a dependency yet - configureForbiddenApisCli(project) - // tasks with just tests don't need dependency licenses, so this flag makes adding // the task optional if (includeDependencyLicenses) { @@ -84,77 +75,60 @@ class PrecommitTasks { return project.tasks.create(precommitOptions) } - private static Task configureForbiddenApis(Project project) { - project.pluginManager.apply(ForbiddenApisPlugin.class) - project.forbiddenApis { - failOnUnsupportedJava = false - bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-non-portable', 'jdk-system-out'] - signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'), - getClass().getResource('/forbidden/es-all-signatures.txt')] - suppressAnnotations = ['**.SuppressForbidden'] - } - project.tasks.withType(CheckForbiddenApis) { - // we do not use the += operator to add signatures, as conventionMappings of Gradle do not work when it's configured using withType: - if (name.endsWith('Test')) { - signaturesURLs = project.forbiddenApis.signaturesURLs + - [ getClass().getResource('/forbidden/es-test-signatures.txt'), getClass().getResource('/forbidden/http-signatures.txt') ] - } else { - signaturesURLs = project.forbiddenApis.signaturesURLs + - [ getClass().getResource('/forbidden/es-server-signatures.txt') ] - } - // forbidden apis doesn't support Java 11, so stop at 10 - String targetMajorVersion = (project.compilerJavaVersion.compareTo(JavaVersion.VERSION_1_10) > 0 ? 
- JavaVersion.VERSION_1_10 : - project.compilerJavaVersion).getMajorVersion() - targetCompatibility = Integer.parseInt(targetMajorVersion) >= 9 ?targetMajorVersion : "1.${targetMajorVersion}" - } - Task forbiddenApis = project.tasks.findByName('forbiddenApis') - forbiddenApis.group = "" // clear group, so this does not show up under verification tasks - - return forbiddenApis - } - private static Task configureForbiddenApisCli(Project project) { - project.configurations.create("forbiddenApisCliJar") + Configuration forbiddenApisConfiguration = project.configurations.create("forbiddenApisCliJar") project.dependencies { - forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.5' + forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5') } - Task forbiddenApisCli = project.tasks.create('forbiddenApisCli') + Task forbiddenApisCli = project.tasks.create('forbiddenApis') project.sourceSets.forEach { sourceSet -> forbiddenApisCli.dependsOn( - project.tasks.create(sourceSet.getTaskName('forbiddenApisCli', null), JavaExec) { + project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) { ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') dependsOn(buildResources) - classpath = project.files( - project.configurations.forbiddenApisCliJar, + execAction = { spec -> + spec.classpath = project.files( + project.configurations.forbiddenApisCliJar, + sourceSet.compileClasspath, + sourceSet.runtimeClasspath + ) + spec.executable = "${project.runtimeJavaHome}/bin/java" + } + inputs.files( + forbiddenApisConfiguration, sourceSet.compileClasspath, sourceSet.runtimeClasspath ) - main = 'de.thetaphi.forbiddenapis.cli.CliMain' - executable = "${project.runtimeJavaHome}/bin/java" - args "-b", 'jdk-unsafe-1.8' - args "-b", 'jdk-deprecated-1.8' - args "-b", 'jdk-non-portable' - args "-b", 'jdk-system-out' - args "-f", buildResources.copy("forbidden/jdk-signatures.txt") - args "-f", buildResources.copy("forbidden/es-all-signatures.txt") - args "--suppressannotation", '**.SuppressForbidden' + + targetCompatibility = project.compilerJavaVersion + bundledSignatures = [ + "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out" + ] + signaturesFiles = project.files( + buildResources.copy("forbidden/jdk-signatures.txt"), + buildResources.copy("forbidden/es-all-signatures.txt") + ) + suppressAnnotations = ['**.SuppressForbidden'] if (sourceSet.name == 'test') { - args "-f", buildResources.copy("forbidden/es-test-signatures.txt") - args "-f", buildResources.copy("forbidden/http-signatures.txt") + signaturesFiles += project.files( + buildResources.copy("forbidden/es-test-signatures.txt"), + buildResources.copy("forbidden/http-signatures.txt") + ) } else { - args "-f", buildResources.copy("forbidden/es-server-signatures.txt") + signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt")) } dependsOn sourceSet.classesTaskName - doFirst { - // Forbidden APIs expects only existing dirs, and requires at least one - FileCollection existingOutputs = sourceSet.output.classesDirs - .filter { it.exists() } - if (existingOutputs.isEmpty()) { - throw new StopExecutionException("${sourceSet.name} has no outputs") - } - existingOutputs.forEach { args "-d", it } + classesDirs = sourceSet.output.classesDirs + ext.replaceSignatureFiles = { String... names -> + signaturesFiles = project.files( + names.collect { buildResources.copy("forbidden/${it}.txt") } + ) + } + ext.addSignatureFiles = { String... 
names -> + signaturesFiles += project.files( + names.collect { buildResources.copy("forbidden/${it}.txt") } + ) } } ) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java index 03c18f54e67..4af104093a5 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java @@ -35,6 +35,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.Collections; import java.util.HashSet; import java.util.Set; @@ -105,7 +106,7 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask { if (is == null) { throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found"); } - Files.copy(is, destination); + Files.copy(is, destination, StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java new file mode 100644 index 00000000000..e33f1670964 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.precommit; + +import de.thetaphi.forbiddenapis.cli.CliMain; +import org.gradle.api.Action; +import org.gradle.api.DefaultTask; +import org.gradle.api.JavaVersion; +import org.gradle.api.file.FileCollection; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.TaskAction; +import org.gradle.process.JavaExecSpec; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +public class ForbiddenApisCliTask extends DefaultTask { + + private FileCollection signaturesFiles; + private List signatures = new ArrayList<>(); + private Set bundledSignatures = new LinkedHashSet<>(); + private Set suppressAnnotations = new LinkedHashSet<>(); + private JavaVersion targetCompatibility; + private FileCollection classesDirs; + private Action execAction; + + public JavaVersion getTargetCompatibility() { + return targetCompatibility; + } + + public void setTargetCompatibility(JavaVersion targetCompatibility) { + this.targetCompatibility = targetCompatibility; + } + + public Action getExecAction() { + return execAction; + } + + public void setExecAction(Action execAction) { + this.execAction = execAction; + } + + @OutputFile + public File getMarkerFile() { + return new File( + new File(getProject().getBuildDir(), "precommit"), + getName() + ); + } + + @InputFiles + @SkipWhenEmpty + public FileCollection getClassesDirs() { + return classesDirs.filter(File::exists); + } + + public void setClassesDirs(FileCollection classesDirs) { + this.classesDirs = classesDirs; + } + + @InputFiles + public FileCollection getSignaturesFiles() { + return signaturesFiles; + } + + public void setSignaturesFiles(FileCollection signaturesFiles) { + this.signaturesFiles = signaturesFiles; + } + + @Input + public List getSignatures() { + return signatures; + } + + public void setSignatures(List signatures) { + this.signatures = signatures; + } + + @Input + public Set getBundledSignatures() { + return bundledSignatures; + } + + public void setBundledSignatures(Set bundledSignatures) { + this.bundledSignatures = bundledSignatures; + } + + @Input + public Set getSuppressAnnotations() { + return suppressAnnotations; + } + + public void setSuppressAnnotations(Set suppressAnnotations) { + this.suppressAnnotations = suppressAnnotations; + } + + @TaskAction + public void runForbiddenApisAndWriteMarker() throws IOException { + getProject().javaexec((JavaExecSpec spec) -> { + execAction.execute(spec); + spec.setMain(CliMain.class.getName()); + // build the command line + getSignaturesFiles().forEach(file -> spec.args("-f", file.getAbsolutePath())); + getSuppressAnnotations().forEach(annotation -> spec.args("--suppressannotation", annotation)); + getBundledSignatures().forEach(bundled -> { + // there's no option for target compatibility so we have to interpret it + final String prefix; + if (bundled.equals("jdk-system-out") || + bundled.equals("jdk-reflection") || + bundled.equals("jdk-non-portable")) { + prefix = ""; + } else { + prefix = "-" + ( + getTargetCompatibility().compareTo(JavaVersion.VERSION_1_9) >= 0 ? + getTargetCompatibility().getMajorVersion() : + "1." 
+ getTargetCompatibility().getMajorVersion()) + ; + } + spec.args("-b", bundled + prefix); + } + ); + getClassesDirs().forEach(dir -> + spec.args("-d", dir) + ); + }); + Files.write(getMarkerFile().toPath(), Collections.emptyList()); + } + +} diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 48169faac2f..9acfc630f94 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -16,8 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.RestIntegTestTask import org.gradle.api.internal.provider.Providers @@ -75,8 +73,8 @@ dependencyLicenses { forbiddenApisMain { // core does not depend on the httpclient for compile so we add the signatures here. We don't add them for test as they are already // specified - signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] - signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] + addSignatureFiles 'http-signatures' + signaturesFiles += files('src/main/resources/forbidden/rest-high-level-signatures.txt') } integTestCluster { diff --git a/client/rest/build.gradle b/client/rest/build.gradle index fc2ab0bc4c0..273836a31f0 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -16,9 +18,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -53,10 +52,9 @@ dependencies { testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" } -forbiddenApisMain { +tasks.withType(ForbiddenApisCliTask) { //client does not depend on server, so only jdk and http signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/http-signatures.txt')] + replaceSignatureFiles ('jdk-signatures', 'http-signatures') } forbiddenPatterns { @@ -67,9 +65,6 @@ forbiddenApisTest { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage bundledSignatures -= 'jdk-non-portable' bundledSignatures += 'jdk-internal' - //client does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/http-signatures.txt')] } // JarHell is part of es server, which we don't want to pull in diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 41146e0b7ec..6ba69c5713c 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -55,7 +52,7 @@ dependencies { forbiddenApisMain { //client does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } forbiddenApisTest { @@ -63,7 +60,7 @@ forbiddenApisTest { bundledSignatures -= 'jdk-non-portable' bundledSignatures += 'jdk-internal' //client does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencyLicenses { diff --git a/client/test/build.gradle b/client/test/build.gradle index cc69a1828dc..e66d2be57f1 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -16,10 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.JavaVersion - apply plugin: 'elasticsearch.build' targetCompatibility = JavaVersion.VERSION_1_7 @@ -36,7 +32,7 @@ dependencies { forbiddenApisMain { //client does not depend on core, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } forbiddenApisTest { @@ -44,7 +40,7 @@ forbiddenApisTest { bundledSignatures -= 'jdk-non-portable' bundledSignatures += 'jdk-internal' //client does not depend on core, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } // JarHell is part of es server, which we don't want to pull in diff --git a/client/transport/build.gradle b/client/transport/build.gradle index 944a038edd9..269a37105fb 100644 --- a/client/transport/build.gradle +++ b/client/transport/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -47,8 +44,7 @@ dependencyLicenses { forbiddenApisTest { // we don't use the core test-framework, no lucene classes present so we don't want the es-test-signatures to // be pulled in - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-all-signatures.txt')] + replaceSignatureFiles 'jdk-signatures', 'es-all-signatures' } namingConventions { diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/java-version-checker/build.gradle index ad9b56fec05..6d18b79d4bd 100644 --- a/distribution/tools/java-version-checker/build.gradle +++ b/distribution/tools/java-version-checker/build.gradle @@ -1,11 +1,11 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' targetCompatibility = JavaVersion.VERSION_1_7 // java_version_checker do not depend on core so only JDK signatures should be checked -forbiddenApisMain.signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] +forbiddenApisMain { + replaceSignatureFiles 'jdk-signatures' +} test.enabled = false namingConventions.enabled = false diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle index a774691b2eb..ca1aa6bcac9 100644 --- a/distribution/tools/launchers/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -17,8 +17,9 @@ * under the License. */ -import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.JavaVersion + + +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask apply plugin: 'elasticsearch.build' @@ -31,10 +32,9 @@ dependencies { archivesBaseName = 'elasticsearch-launchers' -// java_version_checker do not depend on core so only JDK signatures should be checked -List jdkSignatures = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] -forbiddenApisMain.signaturesURLs = jdkSignatures -forbiddenApisTest.signaturesURLs = jdkSignatures +tasks.withType(ForbiddenApisCliTask) { + replaceSignatureFiles 'jdk-signatures' +} namingConventions { testClass = 'org.elasticsearch.tools.launchers.LaunchersTestCase' diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index 00d6d96ef0d..b1f3b338255 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.optional-base' apply plugin: 'nebula.maven-base-publish' @@ -34,5 +31,5 @@ test.enabled = false jarHell.enabled = false forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 2017c2a418a..cc5c1e20fc1 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -91,7 +89,7 @@ dependencies { forbiddenApisMain { // :libs:core does not depend on server // TODO: Need to decide how we want to handle for forbidden signatures with the changes to server - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/dissect/build.gradle b/libs/dissect/build.gradle index c09a2a4ebd1..853c78646c2 100644 --- a/libs/dissect/build.gradle +++ b/libs/dissect/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -33,7 +31,7 @@ dependencies { } forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index 61437be6aff..37b494624ed 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -34,7 +32,7 @@ dependencies { } forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/nio/build.gradle b/libs/nio/build.gradle index 43c9a133a3f..f6a6ff65245 100644 --- a/libs/nio/build.gradle +++ b/libs/nio/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -62,5 +59,5 @@ if (isEclipse) { forbiddenApisMain { // nio does not depend on core, so only jdk signatures should be checked // es-all is not checked as we connect and accept sockets - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/secure-sm/build.gradle b/libs/secure-sm/build.gradle index 93fdfd01c8f..3baf3513b12 100644 --- a/libs/secure-sm/build.gradle +++ b/libs/secure-sm/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -47,7 +44,7 @@ dependencies { } forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/x-content/build.gradle b/libs/x-content/build.gradle index c8b37108ff9..0ec4e0d6ad3 100644 --- a/libs/x-content/build.gradle +++ b/libs/x-content/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -57,7 +55,7 @@ dependencies { forbiddenApisMain { // x-content does not depend on server // TODO: Need to decide how we want to handle for forbidden signatures with the changes to core - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 1883e3bf1b9..676fd448131 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -22,7 +24,7 @@ esplugin { classname 'org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin' } -forbiddenApis { +tasks.withType(ForbiddenApisCliTask) { signatures += [ "com.ibm.icu.text.Collator#getInstance() @ Don't use default locale, use getInstance(ULocale) instead" ] diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 4a0c9146962..4c3b48cbac9 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -69,9 +67,7 @@ esvagrant { } forbiddenApisMain { - signaturesURLs = [ - PrecommitTasks.getResource('/forbidden/jdk-signatures.txt') - ] + replaceSignatureFiles 'jdk-signatures' } // we don't have additional tests for the tests themselves diff --git a/test/framework/build.gradle b/test/framework/build.gradle index ab513a1b0bb..8179e3d096a 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks; - dependencies { compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" compile "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}" @@ -41,9 +38,7 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')] + replaceSignatureFiles 'jdk-signatures', 'es-all-signatures', 'es-test-signatures' } // TODO: should we have licenses for our test deps? diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index c16dab6a625..0f02283e537 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -29,7 +27,7 @@ loggerUsageCheck.enabled = false forbiddenApisMain.enabled = true // disabled by parent project forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] // does not depend on core, only jdk signatures + replaceSignatureFiles 'jdk-signatures' // does not depend on core, only jdk signatures } jarHell.enabled = true // disabled by parent project diff --git a/x-pack/plugin/ml/log-structure-finder/build.gradle b/x-pack/plugin/ml/log-structure-finder/build.gradle index 9048a1c4686..f5dff6dc846 100644 --- a/x-pack/plugin/ml/log-structure-finder/build.gradle +++ b/x-pack/plugin/ml/log-structure-finder/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' archivesBaseName = 'x-pack-log-structure-finder' @@ -31,6 +29,6 @@ artifacts { forbiddenApisMain { // log-structure-finder does not depend on server, so cannot forbid server methods - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 003263669d5..5198c3da669 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -158,8 +158,7 @@ forbiddenPatterns { } forbiddenApisMain { - signaturesURLs += file('forbidden/ldap-signatures.txt').toURI().toURL() - signaturesURLs += file('forbidden/xml-signatures.txt').toURI().toURL() + signaturesFiles += files('forbidden/ldap-signatures.txt', 'forbidden/xml-signatures.txt') } // classes are missing, e.g. com.ibm.icu.lang.UCharacter diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index a0d9b24c507..1a7d6115e15 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -8,7 +8,7 @@ archivesBaseName = "x-pack-sql-jdbc" forbiddenApisMain { // does not depend on core, so only jdk and http signatures should be checked - signaturesURLs = [this.class.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencies { diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index bf79fd824ef..345318d20b8 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -2,9 +2,6 @@ /* * This project contains transport-level requests and responses that are shared between x-pack plugin and qa tests */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' description = 'Request and response objects shared by the cli, jdbc ' + @@ -34,7 +31,7 @@ dependencies { forbiddenApisMain { //sql does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencyLicenses { diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index b90b07abad3..0b2559c6a84 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask /* * This project is named sql-cli because it is in the "org.elasticsearch.plugin" @@ -74,11 +75,8 @@ artifacts { } -forbiddenApisMain { - signaturesURLs += file('src/forbidden/cli-signatures.txt').toURI().toURL() -} -forbiddenApisTest { - 
signaturesURLs += file('src/forbidden/cli-signatures.txt').toURI().toURL() +tasks.withType(ForbiddenApisCliTask) { + signaturesFiles += files('src/forbidden/cli-signatures.txt') } thirdPartyAudit.excludes = [ diff --git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index fbc411e4459..c4ee030d456 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -26,7 +26,7 @@ dependencyLicenses { forbiddenApisMain { // does not depend on core, so only jdk and http signatures should be checked - signaturesURLs = [this.class.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } forbiddenApisTest { diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index 7f26176e3c7..7d28336bfc5 100644 --- a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -2,9 +2,6 @@ /* * This project contains XContent protocol classes shared between server and http client */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' description = 'Request and response objects shared by the cli, jdbc ' + @@ -25,7 +22,7 @@ dependencies { forbiddenApisMain { //sql does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencyLicenses { diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle index 17a1d5acdc9..baaf0451e51 100644 --- a/x-pack/qa/sql/build.gradle +++ b/x-pack/qa/sql/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.RunTask description = 'Integration tests for SQL' @@ -29,8 +28,7 @@ dependenciesInfo.enabled = false // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')] + replaceSignatureFiles 'es-all-signatures', 'es-test-signatures' } thirdPartyAudit.excludes = [ diff --git a/x-pack/transport-client/build.gradle b/x-pack/transport-client/build.gradle index 2e350ef98ff..a96f4146fbf 100644 --- a/x-pack/transport-client/build.gradle +++ b/x-pack/transport-client/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -22,8 +20,7 @@ dependencyLicenses.enabled = false forbiddenApisTest { // we don't use the core test-framework, no lucene classes present so we don't want the es-test-signatures to // be pulled in - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-all-signatures.txt')] + replaceSignatureFiles 'jdk-signatures', 'es-all-signatures' } namingConventions { From ffb1a5d5b795e40fb7b9afff2a04fb0fc3166d5c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 22 Aug 2018 08:45:08 +0200 Subject: [PATCH 22/48] Expose `max_concurrent_shard_requests` in `_msearch` (#33016) Today `_msearch` doesn't allow modifying the `max_concurrent_shard_requests` per sub search request. This change adds support for setting this parameter on all sub-search requests in an `_msearch`. 
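As an aside, a sketch of the same control at the Java request level (the
setter is the one this patch wires up in RestMultiSearchAction; the index
name and the value 8 are illustrative):

    import org.elasticsearch.action.search.MultiSearchRequest;
    import org.elasticsearch.action.search.SearchRequest;

    // Hypothetical usage: cap each sub-search of an msearch at 8 concurrent
    // shard requests, mirroring what the REST layer does below when the
    // max_concurrent_shard_requests parameter is supplied.
    MultiSearchRequest multiRequest = new MultiSearchRequest();
    SearchRequest subSearch = new SearchRequest("logs-*");
    subSearch.setMaxConcurrentShardRequests(8);
    multiRequest.add(subSearch);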
Relates to #31877
---
 docs/reference/search/multi-search.asciidoc   | 10 ++++++
 .../resources/rest-api-spec/api/msearch.json  |  5 +++
 .../rest-api-spec/test/msearch/10_basic.yml   | 32 +++++++++++++++++++
 .../action/search/RestMultiSearchAction.java  | 11 +++++++
 4 files changed, 58 insertions(+)

diff --git a/docs/reference/search/multi-search.asciidoc b/docs/reference/search/multi-search.asciidoc
index c68cf0daaf5..8771915dee6 100644
--- a/docs/reference/search/multi-search.asciidoc
+++ b/docs/reference/search/multi-search.asciidoc
@@ -86,6 +86,16 @@ The msearch's `max_concurrent_searches` request parameter can be used to control
 the maximum number of concurrent searches the multi search api will execute.
 This default is based on the number of data nodes and the default search thread pool size.
 
+The request parameter `max_concurrent_shard_requests` can be used to control the
+maximum number of concurrent shard requests that each sub search request will execute.
+This parameter should be used to protect a single request from overloading a cluster
+(e.g., a default request will hit all indices in a cluster, which could cause shard
+request rejections if the number of shards per node is high). This default is based
+on the number of data nodes in the cluster but is capped at `256`. In certain
+scenarios parallelism isn't achieved through concurrent requests, and this
+protection can result in poor performance. For instance, in an environment where
+only a very low number of concurrent search requests is expected, it might help to
+increase this value to a higher number.
+
 [float]
 [[msearch-security]]
 === Security
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json
index 090c429fd82..13281a2a232 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json
@@ -33,6 +33,11 @@
       "type" : "number",
       "description" : "A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard can not match any documents based on its rewrite method, i.e. if date filters are mandatory to match but the shard bounds and the query are disjoint.",
       "default" : 128
+    },
+    "max_concurrent_shard_requests" : {
+      "type" : "number",
+      "description" : "The number of shard requests each sub search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests",
+      "default" : "The default grows with the number of nodes in the cluster but is at most 256."
    }
   }
 },

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
index 536e2bfaf94..fb884ddfca2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
@@ -61,3 +61,35 @@ setup:
   - match: { responses.3.error.root_cause.0.reason: "/no.such.index/" }
   - match: { responses.3.error.root_cause.0.index: index_3 }
   - match: { responses.4.hits.total: 4 }
+
+---
+"Least impact smoke test":
+# only passing these parameters to make sure they are consumed
+  - do:
+      max_concurrent_shard_requests: 1
+      max_concurrent_searches: 1
+      msearch:
+        body:
+          - index: index_*
+          - query:
+              match: {foo: foo}
+          - index: index_2
+          - query:
+              match_all: {}
+          - index: index_1
+          - query:
+              match: {foo: foo}
+          - index: index_3
+          - query:
+              match_all: {}
+          - type: test
+          - query:
+              match_all: {}
+
+  - match: { responses.0.hits.total: 2 }
+  - match: { responses.1.hits.total: 1 }
+  - match: { responses.2.hits.total: 1 }
+  - match: { responses.3.error.root_cause.0.type: index_not_found_exception }
+  - match: { responses.3.error.root_cause.0.reason: "/no.such.index/" }
+  - match: { responses.3.error.root_cause.0.index: index_3 }
+  - match: { responses.4.hits.total: 4 }
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
index 2a60262b32f..6239015dae4 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
@@ -86,6 +86,14 @@ public class RestMultiSearchAction extends BaseRestHandler {
 
         int preFilterShardSize = restRequest.paramAsInt("pre_filter_shard_size", SearchRequest.DEFAULT_PRE_FILTER_SHARD_SIZE);
 
+        final Integer maxConcurrentShardRequests;
+        if (restRequest.hasParam("max_concurrent_shard_requests")) {
+            // only set if we have the parameter since we auto adjust the max concurrency on the coordinator
+            // based on the number of nodes in the cluster
+            maxConcurrentShardRequests = restRequest.paramAsInt("max_concurrent_shard_requests", Integer.MIN_VALUE);
+        } else {
+            maxConcurrentShardRequests = null;
+        }
 
         parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> {
             searchRequest.source(SearchSourceBuilder.fromXContent(parser, false));
@@ -96,6 +104,9 @@
         for (SearchRequest request : requests) {
             // preserve if it's set on the request
             request.setPreFilterShardSize(Math.min(preFilterShardSize, request.getPreFilterShardSize()));
+            if (maxConcurrentShardRequests != null) {
+                request.setMaxConcurrentShardRequests(maxConcurrentShardRequests);
+            }
         }
         return multiRequest;
     }

From ab000323fa7be57858c5470516877c69d00f058f Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 22 Aug 2018 09:09:08 +0100
Subject: [PATCH 23/48] Allow extension of CapturingTransport by subclasses
 (#33012)

Today, CapturingTransport#createCapturingTransportService creates a
transport service with a connection manager with reasonable default
behaviours, but overriding this behaviour in a consumer is a little
tricky. Additionally, the default behaviour for opening a connection
duplicates the content of the CapturingTransport#openConnection()
method.
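As a sketch of the kind of customisation this change is meant to enable (a
hypothetical test subclass, relying on the overridable hooks described in the
next paragraph):

    import org.elasticsearch.cluster.node.DiscoveryNode;
    import org.elasticsearch.test.transport.CapturingTransport;
    import org.elasticsearch.transport.ConnectTransportException;
    import org.elasticsearch.transport.TransportRequest;

    // Hypothetical subclass: report every node as disconnected and fail every
    // outbound request, without re-implementing openConnection().
    class FailingCapturingTransport extends CapturingTransport {
        @Override
        protected boolean nodeConnected(DiscoveryNode discoveryNode) {
            return false; // simulate an unreachable node
        }

        @Override
        protected void onSendRequest(long requestId, String action,
                                     TransportRequest request, DiscoveryNode node) {
            handleError(requestId, new ConnectTransportException(node, "simulated failure"));
        }
    }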
This change removes this duplication by delegating to openConnection() and introduces overridable nodeConnected() and onSendRequest() methods so that consumers can alter this behaviour more easily. Relates #32246 in which we test the mechanisms for opening connections to unknown (and possibly unreachable) nodes. --- .../test/transport/CapturingTransport.java | 87 ++++++++----------- 1 file changed, 37 insertions(+), 50 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index 60133a16a10..132a07d5b7f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -51,7 +51,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportStats; import java.io.IOException; -import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -66,11 +65,13 @@ import java.util.function.Function; import static org.apache.lucene.util.LuceneTestCase.rarely; -/** A transport class that doesn't send anything but rather captures all requests for inspection from tests */ +/** + * A transport class that doesn't send anything but rather captures all requests for inspection from tests + */ public class CapturingTransport implements Transport { private volatile Map requestHandlers = Collections.emptyMap(); - final Object requestHandlerMutex = new Object(); + private final Object requestHandlerMutex = new Object(); private final ResponseHandlers responseHandlers = new ResponseHandlers(); private TransportMessageListener listener; @@ -80,7 +81,7 @@ public class CapturingTransport implements Transport { public final String action; public final TransportRequest request; - public CapturedRequest(DiscoveryNode node, long requestId, String action, TransportRequest request) { + CapturedRequest(DiscoveryNode node, long requestId, String action, TransportRequest request) { this.node = node; this.requestId = requestId; this.action = action; @@ -96,41 +97,15 @@ public class CapturingTransport implements Transport { @Nullable ClusterSettings clusterSettings, Set taskHeaders) { StubbableConnectionManager connectionManager = new StubbableConnectionManager(new ConnectionManager(settings, this, threadPool), settings, this, threadPool); - connectionManager.setDefaultNodeConnectedBehavior((cm, discoveryNode) -> true); - connectionManager.setDefaultConnectBehavior((cm, discoveryNode) -> new Connection() { - @Override - public DiscoveryNode getNode() { - return discoveryNode; - } - - @Override - public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) - throws TransportException { - requests.put(requestId, Tuple.tuple(discoveryNode, action)); - capturedRequests.add(new CapturedRequest(discoveryNode, requestId, action, request)); - } - - @Override - public void addCloseListener(ActionListener listener) { - - } - - @Override - public boolean isClosed() { - return false; - } - - @Override - public void close() { - - } - }); + connectionManager.setDefaultNodeConnectedBehavior((cm, discoveryNode) -> nodeConnected(discoveryNode)); + connectionManager.setDefaultConnectBehavior((cm, discoveryNode) -> openConnection(discoveryNode, null)); return new TransportService(settings, this, threadPool, interceptor, 
localNodeFactory, clusterSettings, taskHeaders, connectionManager); - } - /** returns all requests captured so far. Doesn't clear the captured request list. See {@link #clear()} */ + /** + * returns all requests captured so far. Doesn't clear the captured request list. See {@link #clear()} + */ public CapturedRequest[] capturedRequests() { return capturedRequests.toArray(new CapturedRequest[0]); } @@ -178,12 +153,16 @@ public class CapturingTransport implements Transport { return groupRequestsByTargetNode(requests); } - /** clears captured requests */ + /** + * clears captured requests + */ public void clear() { capturedRequests.clear(); } - /** simulate a response for the given requestId */ + /** + * simulate a response for the given requestId + */ public void handleResponse(final long requestId, final TransportResponse response) { responseHandlers.onResponseReceived(requestId, listener).handleResponse(response); } @@ -194,7 +173,7 @@ public class CapturingTransport implements Transport { * * @param requestId the id corresponding to the captured send * request - * @param t the failure to wrap + * @param t the failure to wrap */ public void handleLocalError(final long requestId, final Throwable t) { Tuple request = requests.get(requestId); @@ -208,7 +187,7 @@ public class CapturingTransport implements Transport { * * @param requestId the id corresponding to the captured send * request - * @param t the failure to wrap + * @param t the failure to wrap */ public void handleRemoteError(final long requestId, final Throwable t) { final RemoteTransportException remoteException; @@ -234,7 +213,7 @@ public class CapturingTransport implements Transport { * * @param requestId the id corresponding to the captured send * request - * @param e the failure + * @param e the failure */ public void handleError(final long requestId, final TransportException e) { responseHandlers.onResponseReceived(requestId, listener).handleException(e); @@ -251,13 +230,11 @@ public class CapturingTransport implements Transport { @Override public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) throws TransportException { - requests.put(requestId, Tuple.tuple(node, action)); - capturedRequests.add(new CapturedRequest(node, requestId, action, request)); + onSendRequest(requestId, action, request, node); } @Override public void addCloseListener(ActionListener listener) { - } @Override @@ -267,11 +244,19 @@ public class CapturingTransport implements Transport { @Override public void close() { - } }; } + protected void onSendRequest(long requestId, String action, TransportRequest request, DiscoveryNode node) { + requests.put(requestId, Tuple.tuple(node, action)); + capturedRequests.add(new CapturedRequest(node, requestId, action, request)); + } + + protected boolean nodeConnected(DiscoveryNode discoveryNode) { + return true; + } + @Override public TransportStats getStats() { throw new UnsupportedOperationException(); @@ -288,7 +273,7 @@ public class CapturingTransport implements Transport { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { + public TransportAddress[] addressesFromString(String address, int perAddressLimit) { return new TransportAddress[0]; } @@ -299,22 +284,23 @@ public class CapturingTransport implements Transport { @Override public void addLifecycleListener(LifecycleListener listener) { - } @Override public void removeLifecycleListener(LifecycleListener listener) { - } @Override 
- public void start() {} + public void start() { + } @Override - public void stop() {} + public void stop() { + } @Override - public void close() {} + public void close() { + } @Override public List getLocalAddresses() { @@ -330,6 +316,7 @@ public class CapturingTransport implements Transport { requestHandlers = MapBuilder.newMapBuilder(requestHandlers).put(reg.getAction(), reg).immutableMap(); } } + @Override public ResponseHandlers getResponseHandlers() { return responseHandlers; From 43f6f435f5b28e7a65c24d85fec0690c1d2e58b5 Mon Sep 17 00:00:00 2001 From: Sergey Date: Wed, 22 Aug 2018 12:27:37 +0300 Subject: [PATCH 24/48] [DOCS] Update remote-info.asciidoc (#32978) --- docs/reference/cluster/remote-info.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/cluster/remote-info.asciidoc b/docs/reference/cluster/remote-info.asciidoc index 3dfcc201e7a..2866d798b28 100644 --- a/docs/reference/cluster/remote-info.asciidoc +++ b/docs/reference/cluster/remote-info.asciidoc @@ -25,7 +25,7 @@ the configured remote cluster alias. `num_nodes_connected`:: The number of connected nodes in the remote cluster. -`max_connection_per_cluster`:: +`max_connections_per_cluster`:: The maximum number of connections maintained for the remote cluster. `initial_connect_timeout`:: From b02150a5ed8bd4f2dc393bfd3b6de0518149c468 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 22 Aug 2018 06:54:08 -0500 Subject: [PATCH 25/48] HLRC: close job refactor (#33031) * HLRC: close job refactor * Changing refactor to make job_id a string * Changing set entity methodology --- .../client/MLRequestConverters.java | 17 +++--------- .../client/MLRequestConvertersTests.java | 21 ++++++++------- .../protocol/xpack/ml/CloseJobRequest.java | 26 ++++++++++++------- 3 files changed, 30 insertions(+), 34 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 9a4e825be7c..1c2b6f79eaf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -78,11 +78,11 @@ final class MLRequestConverters { .addPathPartAsIs("_open") .build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setJsonEntity(openJobRequest.toString()); + request.setEntity(createEntity(openJobRequest, REQUEST_BODY_CONTENT_TYPE)); return request; } - static Request closeJob(CloseJobRequest closeJobRequest) { + static Request closeJob(CloseJobRequest closeJobRequest) throws IOException { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") .addPathPartAsIs("ml") @@ -91,18 +91,7 @@ final class MLRequestConverters { .addPathPartAsIs("_close") .build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(request); - if (closeJobRequest.isForce() != null) { - params.putParam("force", Boolean.toString(closeJobRequest.isForce())); - } - if (closeJobRequest.isAllowNoJobs() != null) { - params.putParam("allow_no_jobs", Boolean.toString(closeJobRequest.isAllowNoJobs())); - } - if (closeJobRequest.getTimeout() != null) { - params.putParam("timeout", closeJobRequest.getTimeout().getStringRep()); - } - + request.setEntity(createEntity(closeJobRequest, REQUEST_BODY_CONTENT_TYPE)); return request; } diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 9ed09d06b72..0d95c359658 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -81,21 +81,17 @@ public class MLRequestConvertersTests extends ESTestCase { Request request = MLRequestConverters.openJob(openJobRequest); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint()); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - request.getEntity().writeTo(bos); - assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}"); + assertEquals(requestEntityToString(request), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}"); } - public void testCloseJob() { + public void testCloseJob() throws Exception { String jobId = "somejobid"; CloseJobRequest closeJobRequest = new CloseJobRequest(jobId); Request request = MLRequestConverters.closeJob(closeJobRequest); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_close", request.getEndpoint()); - assertFalse(request.getParameters().containsKey("force")); - assertFalse(request.getParameters().containsKey("allow_no_jobs")); - assertFalse(request.getParameters().containsKey("timeout")); + assertEquals("{\"job_id\":\"somejobid\"}", requestEntityToString(request)); closeJobRequest = new CloseJobRequest(jobId, "otherjobs*"); closeJobRequest.setForce(true); @@ -104,9 +100,8 @@ public class MLRequestConvertersTests extends ESTestCase { request = MLRequestConverters.closeJob(closeJobRequest); assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + ",otherjobs*/_close", request.getEndpoint()); - assertEquals(Boolean.toString(true), request.getParameters().get("force")); - assertEquals(Boolean.toString(false), request.getParameters().get("allow_no_jobs")); - assertEquals("10m", request.getParameters().get("timeout")); + assertEquals("{\"job_id\":\"somejobid,otherjobs*\",\"timeout\":\"10m\",\"force\":true,\"allow_no_jobs\":false}", + requestEntityToString(request)); } public void testDeleteJob() { @@ -130,4 +125,10 @@ public class MLRequestConvertersTests extends ESTestCase { jobBuilder.setAnalysisConfig(analysisConfig); return jobBuilder.build(); } + + private static String requestEntityToString(Request request) throws Exception { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + request.getEntity().writeTo(bos); + return bos.toString("UTF-8"); + } } diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/CloseJobRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/CloseJobRequest.java index 1df5a02889e..3d54bfb9488 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/CloseJobRequest.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/CloseJobRequest.java @@ -21,8 +21,10 @@ package org.elasticsearch.protocol.xpack.ml; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; 
+import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,7 +37,7 @@ import java.util.Objects; public class CloseJobRequest extends ActionRequest implements ToXContentObject { - public static final ParseField JOB_IDS = new ParseField("job_ids"); + public static final ParseField JOB_ID = new ParseField("job_id"); public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField FORCE = new ParseField("force"); public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); @@ -46,7 +48,9 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject { true, a -> new CloseJobRequest((List) a[0])); static { - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), JOB_IDS); + PARSER.declareField(ConstructingObjectParser.constructorArg(), + p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), + JOB_ID, ObjectParser.ValueType.STRING_ARRAY); PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); PARSER.declareBoolean(CloseJobRequest::setForce, FORCE); PARSER.declareBoolean(CloseJobRequest::setAllowNoJobs, ALLOW_NO_JOBS); @@ -132,7 +136,7 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject { * This includes `_all` string or when no jobs have been specified */ public Boolean isAllowNoJobs() { - return allowNoJobs; + return this.allowNoJobs; } /** @@ -149,7 +153,7 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject { @Override public int hashCode() { - return Objects.hash(jobIds, timeout, allowNoJobs, force); + return Objects.hash(jobIds, timeout, force, allowNoJobs); } @Override @@ -165,16 +169,14 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject { CloseJobRequest that = (CloseJobRequest) other; return Objects.equals(jobIds, that.jobIds) && Objects.equals(timeout, that.timeout) && - Objects.equals(allowNoJobs, that.allowNoJobs) && - Objects.equals(force, that.force); + Objects.equals(force, that.force) && + Objects.equals(allowNoJobs, that.allowNoJobs); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - - builder.field(JOB_IDS.getPreferredName(), jobIds); - + builder.field(JOB_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); if (timeout != null) { builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); } @@ -184,8 +186,12 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject { if (allowNoJobs != null) { builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs); } - builder.endObject(); return builder; } + + @Override + public String toString() { + return Strings.toString(this); + } }

From 262d3c0783d771ea6b196ae2d9e38f44ec33d89c Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 22 Aug 2018 07:57:44 -0400 Subject: [PATCH 26/48] Allow engine to recover from translog up to a seqno (#33032)

This change allows an engine to recover from its local translog up to the given seqno. The extended API can be used in these use cases:

When a replica starts following a new primary, it resets its index to the safe commit, then replays its local translog up to the current global checkpoint (see #32867).

When a replica starts a peer-recovery, it can initialize the start_sequence_number to the persisted global checkpoint instead of the local checkpoint of the safe commit. A replica will then replay its local translog up to that global checkpoint before accepting remote translog operations from the primary. This change will increase the chance of operation-based recovery. I will make this change in a follow-up.

Relates #32867 --- .../elasticsearch/index/engine/Engine.java | 6 +- .../index/engine/InternalEngine.java | 11 +-- .../elasticsearch/index/shard/IndexShard.java | 2 +- .../index/translog/Translog.java | 70 +++++++++++++-- .../index/engine/InternalEngineTests.java | 79 ++++++++++++----- .../index/shard/RefreshListenersTests.java | 2 +- .../index/translog/TranslogTests.java | 86 ++++++++++++++++++- .../index/engine/EngineTestCase.java | 2 +- 8 files changed, 217 insertions(+), 41 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 31da7afc51a..4d95cf89ef0 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1623,10 +1623,12 @@ public abstract class Engine implements Closeable { public abstract int fillSeqNoGaps(long primaryTerm) throws IOException; /** - * Performs recovery from the transaction log. + * Performs recovery from the transaction log up to {@code recoverUpToSeqNo} (inclusive). * This operation will close the engine if the recovery fails. + * + * @param recoverUpToSeqNo the upper bound, inclusive, of the sequence numbers to be recovered */ - public abstract Engine recoverFromTranslog() throws IOException; + public abstract Engine recoverFromTranslog(long recoverUpToSeqNo) throws IOException; /** * Do not replay translog operations, but make the engine be ready.
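Usage sketch (editorial, not part of the patch): the call sites changed in this diff pass Long.MAX_VALUE to replay the whole translog, while the replica use cases described above would pass the global checkpoint as the bound. The wrapper class below is hypothetical glue for illustration; only Engine#recoverFromTranslog(long) comes from this change.

import java.io.IOException;
import org.elasticsearch.index.engine.Engine;

final class TranslogRecoverySketch {
    // Replays the local translog, either fully (plain local store recovery) or
    // only up to the given global checkpoint (the replica reset / peer recovery
    // use cases from the commit message).
    static void recover(Engine engine, long globalCheckpoint, boolean bounded) throws IOException {
        if (bounded) {
            engine.recoverFromTranslog(globalCheckpoint); // replay ops with seqNo <= globalCheckpoint
        } else {
            engine.recoverFromTranslog(Long.MAX_VALUE);   // replay everything in the translog
        }
    }
}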
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 982b220f25b..c4c6792bf46 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -364,7 +364,7 @@ public class InternalEngine extends Engine { } @Override - public InternalEngine recoverFromTranslog() throws IOException { + public InternalEngine recoverFromTranslog(long recoverUpToSeqNo) throws IOException { flushLock.lock(); try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); @@ -372,7 +372,7 @@ public class InternalEngine extends Engine { throw new IllegalStateException("Engine has already been recovered"); } try { - recoverFromTranslogInternal(); + recoverFromTranslogInternal(recoverUpToSeqNo); } catch (Exception e) { try { pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush @@ -394,11 +394,12 @@ public class InternalEngine extends Engine { pendingTranslogRecovery.set(false); // we are good - now we can commit } - private void recoverFromTranslogInternal() throws IOException { + private void recoverFromTranslogInternal(long recoverUpToSeqNo) throws IOException { Translog.TranslogGeneration translogGeneration = translog.getGeneration(); final int opsRecovered; - final long translogGen = Long.parseLong(lastCommittedSegmentInfos.getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); - try (Translog.Snapshot snapshot = translog.newSnapshotFromGen(translogGen)) { + final long translogFileGen = Long.parseLong(lastCommittedSegmentInfos.getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); + try (Translog.Snapshot snapshot = translog.newSnapshotFromGen( + new Translog.TranslogGeneration(translog.getTranslogUUID(), translogFileGen), recoverUpToSeqNo)) { opsRecovered = config().getTranslogRecoveryRunner().run(this, snapshot); } catch (Exception e) { throw new EngineException(shardId, "failed to recover from translog", e); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ffce0e6ea8b..e030c95b56e 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1305,7 +1305,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl **/ public void openEngineAndRecoverFromTranslog() throws IOException { innerOpenEngineAndTranslog(); - getEngine().recoverFromTranslog(); + getEngine().recoverFromTranslog(Long.MAX_VALUE); } /** diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 72c6210535f..618aa546e42 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -577,21 +577,27 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC */ public Snapshot newSnapshot() throws IOException { try (ReleasableLock ignored = readLock.acquire()) { - return newSnapshotFromGen(getMinFileGeneration()); + return newSnapshotFromGen(new TranslogGeneration(translogUUID, getMinFileGeneration()), Long.MAX_VALUE); } } - public Snapshot newSnapshotFromGen(long minGeneration) throws IOException { + public Snapshot 
newSnapshotFromGen(TranslogGeneration fromGeneration, long upToSeqNo) throws IOException { try (ReleasableLock ignored = readLock.acquire()) { ensureOpen(); - if (minGeneration < getMinFileGeneration()) { - throw new IllegalArgumentException("requested snapshot generation [" + minGeneration + "] is not available. " + + final long fromFileGen = fromGeneration.translogFileGeneration; + if (fromFileGen < getMinFileGeneration()) { + throw new IllegalArgumentException("requested snapshot generation [" + fromFileGen + "] is not available. " + "Min referenced generation is [" + getMinFileGeneration() + "]"); } TranslogSnapshot[] snapshots = Stream.concat(readers.stream(), Stream.of(current)) - .filter(reader -> reader.getGeneration() >= minGeneration) + .filter(reader -> reader.getGeneration() >= fromFileGen && reader.getCheckpoint().minSeqNo <= upToSeqNo) .map(BaseTranslogReader::newSnapshot).toArray(TranslogSnapshot[]::new); - return newMultiSnapshot(snapshots); + final Snapshot snapshot = newMultiSnapshot(snapshots); + if (upToSeqNo == Long.MAX_VALUE) { + return snapshot; + } else { + return new SeqNoFilterSnapshot(snapshot, Long.MIN_VALUE, upToSeqNo); + } } } @@ -926,7 +932,59 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * Returns the next operation in the snapshot or null if we reached the end. */ Translog.Operation next() throws IOException; + } + /** + * A filtered snapshot consisting of only operations whose sequence numbers are in the given range + * between {@code fromSeqNo} (inclusive) and {@code toSeqNo} (inclusive). This filtered snapshot + * shares the same underlying resources with the {@code delegate} snapshot, therefore we should not + * use the {@code delegate} after passing it to this filtered snapshot. 
+ */ + static final class SeqNoFilterSnapshot implements Snapshot { + private final Snapshot delegate; + private int filteredOpsCount; + private final long fromSeqNo; // inclusive + private final long toSeqNo; // inclusive + + SeqNoFilterSnapshot(Snapshot delegate, long fromSeqNo, long toSeqNo) { + assert fromSeqNo <= toSeqNo : "from_seq_no[" + fromSeqNo + "] > to_seq_no[" + toSeqNo + "]"; + this.delegate = delegate; + this.fromSeqNo = fromSeqNo; + this.toSeqNo = toSeqNo; + } + + @Override + public int totalOperations() { + return delegate.totalOperations(); + } + + @Override + public int skippedOperations() { + return filteredOpsCount + delegate.skippedOperations(); + } + + @Override + public int overriddenOperations() { + return delegate.overriddenOperations(); + } + + @Override + public Operation next() throws IOException { + Translog.Operation op; + while ((op = delegate.next()) != null) { + if (fromSeqNo <= op.seqNo() && op.seqNo() <= toSeqNo) { + return op; + } else { + filteredOpsCount++; + } + } + return null; + } + + @Override + public void close() throws IOException { + delegate.close(); + } } /** diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index f6df2224288..76e05ba1e0b 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -649,7 +649,7 @@ public class InternalEngineTests extends EngineTestCase { trimUnsafeCommits(engine.config()); engine = new InternalEngine(engine.config()); assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); Engine.Searcher searcher = wrapper.wrap(engine.acquireSearcher("test")); assertThat(counter.get(), equalTo(2)); searcher.close(); @@ -666,7 +666,7 @@ public class InternalEngineTests extends EngineTestCase { engine = new InternalEngine(engine.config()); expectThrows(IllegalStateException.class, () -> engine.flush(true, true)); assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertFalse(engine.isRecovering()); doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); @@ -696,7 +696,7 @@ public class InternalEngineTests extends EngineTestCase { } trimUnsafeCommits(engine.config()); try (Engine recoveringEngine = new InternalEngine(engine.config())){ - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new MatchAllDocsQuery(), collector); @@ -732,7 +732,7 @@ public class InternalEngineTests extends EngineTestCase { } }; assertThat(getTranslog(recoveringEngine).stats().getUncommittedOperations(), equalTo(docs)); - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); assertTrue(committed.get()); } finally { IOUtils.close(recoveringEngine); @@ -766,7 +766,7 @@ public class InternalEngineTests extends EngineTestCase { initialEngine.close(); trimUnsafeCommits(initialEngine.config()); recoveringEngine = new InternalEngine(initialEngine.config()); - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); try (Engine.Searcher searcher = 
recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), docs); assertEquals(docs, topDocs.totalHits); @@ -776,6 +776,43 @@ public class InternalEngineTests extends EngineTestCase { } } + public void testRecoveryFromTranslogUpToSeqNo() throws IOException { + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + try (Store store = createStore()) { + EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null, globalCheckpoint::get); + final long maxSeqNo; + try (InternalEngine engine = createEngine(config)) { + final int docs = randomIntBetween(1, 100); + for (int i = 0; i < docs; i++) { + final String id = Integer.toString(i); + final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); + engine.index(indexForDoc(doc)); + if (rarely()) { + engine.rollTranslogGeneration(); + } else if (rarely()) { + engine.flush(randomBoolean(), true); + } + } + maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo(); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint())); + engine.syncTranslog(); + } + trimUnsafeCommits(config); + try (InternalEngine engine = new InternalEngine(config)) { + engine.recoverFromTranslog(Long.MAX_VALUE); + assertThat(engine.getLocalCheckpoint(), equalTo(maxSeqNo)); + assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(maxSeqNo)); + } + trimUnsafeCommits(config); + try (InternalEngine engine = new InternalEngine(config)) { + long upToSeqNo = randomLongBetween(globalCheckpoint.get(), maxSeqNo); + engine.recoverFromTranslog(upToSeqNo); + assertThat(engine.getLocalCheckpoint(), equalTo(upToSeqNo)); + assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(upToSeqNo)); + } + } + } + public void testConcurrentGetAndFlush() throws Exception { ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null); engine.index(indexForDoc(doc)); @@ -1153,7 +1190,7 @@ public class InternalEngineTests extends EngineTestCase { } trimUnsafeCommits(config); engine = new InternalEngine(config); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); } @@ -1172,7 +1209,7 @@ public class InternalEngineTests extends EngineTestCase { engine.close(); trimUnsafeCommits(config); engine = new InternalEngine(config); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertNull("Sync ID must be gone since we have a document to replay", engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID)); } @@ -2126,7 +2163,7 @@ public class InternalEngineTests extends EngineTestCase { trimUnsafeCommits(initialEngine.engineConfig); try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())){ - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertThat( @@ -2447,7 +2484,7 @@ public class InternalEngineTests extends EngineTestCase { try (InternalEngine engine = createEngine(config)) { engine.index(firstIndexRequest); globalCheckpoint.set(engine.getLocalCheckpoint()); - expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog()); + expectThrows(IllegalStateException.class, () -> 
engine.recoverFromTranslog(Long.MAX_VALUE)); Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); @@ -2469,7 +2506,7 @@ public class InternalEngineTests extends EngineTestCase { assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY)); } assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); @@ -2486,7 +2523,7 @@ public class InternalEngineTests extends EngineTestCase { Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertEquals(2, engine.getTranslog().currentFileGeneration()); assertEquals(0L, engine.getTranslog().stats().getUncommittedOperations()); } @@ -2500,7 +2537,7 @@ public class InternalEngineTests extends EngineTestCase { Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("no changes - nothing to commit", "1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); @@ -2606,7 +2643,7 @@ public class InternalEngineTests extends EngineTestCase { } } }) { - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc1)); globalCheckpoint.set(engine.getLocalCheckpoint()); @@ -2617,7 +2654,7 @@ public class InternalEngineTests extends EngineTestCase { try (InternalEngine engine = new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier))) { - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertVisibleCount(engine, 1); final long committedGen = Long.valueOf( engine.getLastCommittedSegmentInfos().getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); @@ -2683,7 +2720,7 @@ public class InternalEngineTests extends EngineTestCase { engine.close(); trimUnsafeCommits(copy(engine.config(), inSyncGlobalCheckpointSupplier)); engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier)); // we need to reuse the engine config unless the parser.mappingModified won't work - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertVisibleCount(engine, numDocs, false); parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); @@ -3384,7 +3421,7 @@ public class InternalEngineTests extends EngineTestCase { } try (Store store = createStore(newFSDirectory(storeDir)); Engine engine = new 
InternalEngine(configSupplier.apply(store))) { assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); @@ -3667,7 +3704,7 @@ public class InternalEngineTests extends EngineTestCase { } trimUnsafeCommits(initialEngine.config()); try (Engine recoveringEngine = new InternalEngine(initialEngine.config())) { - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); recoveringEngine.fillSeqNoGaps(2); assertThat(recoveringEngine.getLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); } @@ -3774,7 +3811,7 @@ public class InternalEngineTests extends EngineTestCase { throw new UnsupportedOperationException(); } }; - noOpEngine.recoverFromTranslog(); + noOpEngine.recoverFromTranslog(Long.MAX_VALUE); final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get()); final String reason = randomAlphaOfLength(16); noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason)); @@ -3986,7 +4023,7 @@ public class InternalEngineTests extends EngineTestCase { trimUnsafeCommits(copy(replicaEngine.config(), globalCheckpoint::get)); recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get)); assertEquals(numDocsOnReplica, getTranslog(recoveringEngine).stats().getUncommittedOperations()); - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2)); @@ -4022,7 +4059,7 @@ public class InternalEngineTests extends EngineTestCase { if (flushed) { assertThat(recoveringEngine.getTranslogStats().getUncommittedOperations(), equalTo(0)); } - recoveringEngine.recoverFromTranslog(); + recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals(0, recoveringEngine.fillSeqNoGaps(3)); @@ -4215,7 +4252,7 @@ public class InternalEngineTests extends EngineTestCase { super.commitIndexWriter(writer, translog, syncId); } }) { - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); int numDocs = scaledRandomIntBetween(10, 100); for (int docId = 0; docId < numDocs; docId++) { ParseContext.Document document = testDocumentWithTextField(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 2d1c1d4e15a..774b272121a 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -132,7 +132,7 @@ public class RefreshListenersTests extends ESTestCase { TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), Collections.emptyList(), null, (e, s) -> 0, new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, () -> 
primaryTerm); engine = new InternalEngine(config); - engine.recoverFromTranslog(); + engine.recoverFromTranslog(Long.MAX_VALUE); listeners.setCurrentRefreshLocationSupplier(engine::getTranslogLastWriteLocation); } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 4ec479334ba..a0e0c481e5f 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -360,7 +360,8 @@ public class TranslogTests extends ESTestCase { } markCurrentGenAsCommitted(translog); - try (Translog.Snapshot snapshot = translog.newSnapshotFromGen(firstId + 1)) { + try (Translog.Snapshot snapshot = translog.newSnapshotFromGen( + new Translog.TranslogGeneration(translog.getTranslogUUID(), firstId + 1), randomNonNegativeLong())) { assertThat(snapshot, SnapshotMatchers.size(0)); assertThat(snapshot.totalOperations(), equalTo(0)); } @@ -645,6 +646,82 @@ public class TranslogTests extends ESTestCase { } } + public void testSnapshotFromMinGen() throws Exception { + Map> operationsByGen = new HashMap<>(); + try (Translog.Snapshot snapshot = translog.newSnapshotFromGen( + new Translog.TranslogGeneration(translog.getTranslogUUID(), 1), randomNonNegativeLong())) { + assertThat(snapshot, SnapshotMatchers.size(0)); + } + int iters = between(1, 10); + for (int i = 0; i < iters; i++) { + long currentGeneration = translog.currentFileGeneration(); + operationsByGen.putIfAbsent(currentGeneration, new ArrayList<>()); + int numOps = between(0, 20); + for (int op = 0; op < numOps; op++) { + long seqNo = randomLongBetween(0, 1000); + addToTranslogAndList(translog, operationsByGen.get(currentGeneration), new Translog.Index("test", + Long.toString(seqNo), seqNo, primaryTerm.get(), new byte[]{1})); + } + long minGen = randomLongBetween(translog.getMinFileGeneration(), translog.currentFileGeneration()); + try (Translog.Snapshot snapshot = translog.newSnapshotFromGen( + new Translog.TranslogGeneration(translog.getTranslogUUID(), minGen), Long.MAX_VALUE)) { + List expectedOps = operationsByGen.entrySet().stream() + .filter(e -> e.getKey() >= minGen) + .flatMap(e -> e.getValue().stream()) + .collect(Collectors.toList()); + assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedOps)); + } + long upToSeqNo = randomLongBetween(0, 2000); + try (Translog.Snapshot snapshot = translog.newSnapshotFromGen( + new Translog.TranslogGeneration(translog.getTranslogUUID(), minGen), upToSeqNo)) { + List expectedOps = operationsByGen.entrySet().stream() + .filter(e -> e.getKey() >= minGen) + .flatMap(e -> e.getValue().stream().filter(op -> op.seqNo() <= upToSeqNo)) + .collect(Collectors.toList()); + assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedOps)); + } + translog.rollGeneration(); + } + } + + public void testSeqNoFilterSnapshot() throws Exception { + final int generations = between(2, 20); + for (int gen = 0; gen < generations; gen++) { + List batch = LongStream.rangeClosed(0, between(0, 100)).boxed().collect(Collectors.toList()); + Randomness.shuffle(batch); + for (long seqNo : batch) { + Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[]{1}); + translog.add(op); + } + translog.rollGeneration(); + } + List operations = new ArrayList<>(); + try (Translog.Snapshot snapshot = translog.newSnapshot()) { + Translog.Operation op; + while ((op = 
snapshot.next()) != null) { + operations.add(op); + } + } + try (Translog.Snapshot snapshot = translog.newSnapshot()) { + Translog.Snapshot filter = new Translog.SeqNoFilterSnapshot(snapshot, between(200, 300), between(300, 400)); // out range + assertThat(filter, SnapshotMatchers.size(0)); + assertThat(filter.totalOperations(), equalTo(snapshot.totalOperations())); + assertThat(filter.overriddenOperations(), equalTo(snapshot.overriddenOperations())); + assertThat(filter.skippedOperations(), equalTo(snapshot.totalOperations())); + } + try (Translog.Snapshot snapshot = translog.newSnapshot()) { + int fromSeqNo = between(-2, 500); + int toSeqNo = between(fromSeqNo, 500); + List selectedOps = operations.stream() + .filter(op -> fromSeqNo <= op.seqNo() && op.seqNo() <= toSeqNo).collect(Collectors.toList()); + Translog.Snapshot filter = new Translog.SeqNoFilterSnapshot(snapshot, fromSeqNo, toSeqNo); + assertThat(filter, SnapshotMatchers.containsOperationsInAnyOrder(selectedOps)); + assertThat(filter.totalOperations(), equalTo(snapshot.totalOperations())); + assertThat(filter.overriddenOperations(), equalTo(snapshot.overriddenOperations())); + assertThat(filter.skippedOperations(), equalTo(snapshot.skippedOperations() + operations.size() - selectedOps.size())); + } + } + public void assertFileIsPresent(Translog translog, long id) { if (Files.exists(translog.location().resolve(Translog.getFilename(id)))) { return; @@ -1304,7 +1381,7 @@ public class TranslogTests extends ESTestCase { translog = new Translog(config, translogGeneration.translogUUID, translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get); assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshotFromGen(translogGeneration.translogFileGeneration)) { + try (Translog.Snapshot snapshot = translog.newSnapshotFromGen(translogGeneration, Long.MAX_VALUE)) { for (int i = minUncommittedOp; i < translogOperations; i++) { assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); @@ -1735,7 +1812,7 @@ public class TranslogTests extends ESTestCase { } this.translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get); - try (Translog.Snapshot snapshot = this.translog.newSnapshotFromGen(translogGeneration.translogFileGeneration)) { + try (Translog.Snapshot snapshot = this.translog.newSnapshotFromGen(translogGeneration, Long.MAX_VALUE)) { for (int i = firstUncommitted; i < translogOperations; i++) { Translog.Operation next = snapshot.next(); assertNotNull("" + i, next); @@ -2557,7 +2634,8 @@ public class TranslogTests extends ESTestCase { generationUUID = Translog.createEmptyTranslog(config.getTranslogPath(), SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get()); } try (Translog translog = new Translog(config, generationUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get); - Translog.Snapshot snapshot = translog.newSnapshotFromGen(minGenForRecovery)) { + Translog.Snapshot snapshot = translog.newSnapshotFromGen( + new Translog.TranslogGeneration(generationUUID, minGenForRecovery), Long.MAX_VALUE)) { assertEquals(syncedDocs.size(), snapshot.totalOperations()); for (int i = 0; i < syncedDocs.size(); i++) { Translog.Operation next = snapshot.next(); diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 2a84a8f4246..b5ba5f18b39 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -375,7 +375,7 @@ public abstract class EngineTestCase extends ESTestCase { } InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config); - internalEngine.recoverFromTranslog(); + internalEngine.recoverFromTranslog(Long.MAX_VALUE); return internalEngine; }

From ead198bf2e76d907ed218b5aaddc60d53b7add72 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 22 Aug 2018 14:13:27 +0200 Subject: [PATCH 27/48] Add settings updater for 2 affix settings (#33050)

Today we can only have non-affix settings updated and consumed _together_. Yet, there are use cases where two affix settings depend on each other, which makes using them hard without consuming their updates together. Unfortunately, there is no straightforward way to have N settings updated together in a type-safe way, but supporting 2 still serves a large portion of use cases.

--- .../settings/AbstractScopedSettings.java | 71 +++++++++++++- .../common/settings/Setting.java | 2 +- .../common/settings/ScopedSettingsTests.java | 96 ++++++++++++++++++- .../common/settings/SettingTests.java | 6 +- 4 files changed, 168 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 8847c8138a7..9f1d7d8a395 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -199,7 +199,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * Also automatically adds empty consumers for all settings in order to activate logging */ public synchronized void addSettingsUpdateConsumer(Consumer consumer, List> settings) { - addSettingsUpdater(Setting.groupedSettingsUpdater(consumer, logger, settings)); + addSettingsUpdater(Setting.groupedSettingsUpdater(consumer, settings)); } /** @@ -208,11 +208,78 @@ public abstract class AbstractScopedSettings extends AbstractComponent { */ public synchronized void addAffixUpdateConsumer(Setting.AffixSetting setting, BiConsumer consumer, BiConsumer validator) { + ensureSettingIsRegistered(setting); + addSettingsUpdater(setting.newAffixUpdater(consumer, logger, validator)); + } + + /** + * Adds an affix settings consumer that accepts the values for two settings. The consumer is only notified if one or both settings change + * and if the provided validator succeeds. + *
+ * Note: Only settings registered in {@link SettingsModule} can be changed dynamically. + *
+ * This method registers a compound updater that is useful if two settings are depending on each other. + * The consumer is always provided with both values even if only one of the two changes. + */ + public synchronized void addAffixUpdateConsumer(Setting.AffixSetting settingA, Setting.AffixSetting settingB, + BiConsumer> consumer, + BiConsumer> validator) { + // it would be awesome to have a generic way to do that ie. a set of settings that map to an object with a builder + // down the road this would be nice to have! + ensureSettingIsRegistered(settingA); + ensureSettingIsRegistered(settingB); + SettingUpdater, A>> affixUpdaterA = settingA.newAffixUpdater((a,b)-> {}, logger, (a,b)-> {}); + SettingUpdater, B>> affixUpdaterB = settingB.newAffixUpdater((a,b)-> {}, logger, (a,b)-> {}); + + addSettingsUpdater(new SettingUpdater>>() { + + @Override + public boolean hasChanged(Settings current, Settings previous) { + return affixUpdaterA.hasChanged(current, previous) || affixUpdaterB.hasChanged(current, previous); + } + + @Override + public Map> getValue(Settings current, Settings previous) { + Map> map = new HashMap<>(); + BiConsumer aConsumer = (key, value) -> { + assert map.containsKey(key) == false : "duplicate key: " + key; + map.put(key, new Tuple<>(value, settingB.getConcreteSettingForNamespace(key).get(current))); + }; + BiConsumer bConsumer = (key, value) -> { + Tuple abTuple = map.get(key); + if (abTuple != null) { + map.put(key, new Tuple<>(abTuple.v1(), value)); + } else { + assert settingA.getConcreteSettingForNamespace(key).get(current).equals(settingA.getConcreteSettingForNamespace + (key).get(previous)) : "expected: " + settingA.getConcreteSettingForNamespace(key).get(current) + + " but was " + settingA.getConcreteSettingForNamespace(key).get(previous); + map.put(key, new Tuple<>(settingA.getConcreteSettingForNamespace(key).get(current), value)); + } + }; + SettingUpdater, A>> affixUpdaterA = settingA.newAffixUpdater(aConsumer, logger, (a,b) ->{}); + SettingUpdater, B>> affixUpdaterB = settingB.newAffixUpdater(bConsumer, logger, (a,b) ->{}); + affixUpdaterA.apply(current, previous); + affixUpdaterB.apply(current, previous); + for (Map.Entry> entry : map.entrySet()) { + validator.accept(entry.getKey(), entry.getValue()); + } + return Collections.unmodifiableMap(map); + } + + @Override + public void apply(Map> values, Settings current, Settings previous) { + for (Map.Entry> entry : values.entrySet()) { + consumer.accept(entry.getKey(), entry.getValue()); + } + } + }); + } + + private void ensureSettingIsRegistered(Setting.AffixSetting setting) { final Setting registeredSetting = this.complexMatchers.get(setting.getKey()); if (setting != registeredSetting) { throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]"); } - addSettingsUpdater(setting.newAffixUpdater(consumer, logger, validator)); } /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 94edb5a297a..b98c2753d70 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -547,7 +547,7 @@ public class Setting implements ToXContentObject { }; } - static AbstractScopedSettings.SettingUpdater groupedSettingsUpdater(Consumer consumer, Logger logger, + static AbstractScopedSettings.SettingUpdater groupedSettingsUpdater(Consumer consumer, final List> configuredSettings) { return new 
AbstractScopedSettings.SettingUpdater() { diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 2376d566340..1580c1a3797 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting.Property; @@ -180,6 +181,99 @@ public class ScopedSettingsTests extends ESTestCase { service.validate(Settings.builder().put("foo.test.bar", 7).build(), false); } + public void testTupleAffixUpdateConsumer() { + String prefix = randomAlphaOfLength(3) + "foo."; + String intSuffix = randomAlphaOfLength(3); + String listSuffix = randomAlphaOfLength(4); + Setting.AffixSetting intSetting = Setting.affixKeySetting(prefix, intSuffix, + (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope)); + Setting.AffixSetting> listSetting = Setting.affixKeySetting(prefix, listSuffix, + (k) -> Setting.listSetting(k, Arrays.asList("1"), Integer::parseInt, Property.Dynamic, Property.NodeScope)); + AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY,new HashSet<>(Arrays.asList(intSetting, listSetting))); + Map, Integer>> results = new HashMap<>(); + Function listBuilder = g -> (prefix + g + "." + listSuffix); + Function intBuilder = g -> (prefix + g + "." 
+ intSuffix); + String group1 = randomAlphaOfLength(3); + String group2 = randomAlphaOfLength(4); + String group3 = randomAlphaOfLength(5); + BiConsumer, Integer>> listConsumer = results::put; + + service.addAffixUpdateConsumer(listSetting, intSetting, listConsumer, (s, k) -> { + if (k.v1().isEmpty() && k.v2() == 2) { + throw new IllegalArgumentException("boom"); + } + }); + assertEquals(0, results.size()); + service.applySettings(Settings.builder() + .put(intBuilder.apply(group1), 2) + .put(intBuilder.apply(group2), 7) + .putList(listBuilder.apply(group1), "16", "17") + .putList(listBuilder.apply(group2), "18", "19", "20") + .build()); + assertEquals(2, results.get(group1).v2().intValue()); + assertEquals(7, results.get(group2).v2().intValue()); + assertEquals(Arrays.asList(16, 17), results.get(group1).v1()); + assertEquals(Arrays.asList(18, 19, 20), results.get(group2).v1()); + assertEquals(2, results.size()); + + results.clear(); + + service.applySettings(Settings.builder() + .put(intBuilder.apply(group1), 2) + .put(intBuilder.apply(group2), 7) + .putList(listBuilder.apply(group1), "16", "17") + .putNull(listBuilder.apply(group2)) // removed + .build()); + + assertNull(group1 + " wasn't changed", results.get(group1)); + assertEquals(1, results.get(group2).v1().size()); + assertEquals(Arrays.asList(1), results.get(group2).v1()); + assertEquals(7, results.get(group2).v2().intValue()); + assertEquals(1, results.size()); + results.clear(); + + service.applySettings(Settings.builder() + .put(intBuilder.apply(group1), 2) + .put(intBuilder.apply(group2), 7) + .putList(listBuilder.apply(group1), "16", "17") + .putList(listBuilder.apply(group3), "5", "6") // added + .build()); + assertNull(group1 + " wasn't changed", results.get(group1)); + assertNull(group2 + " wasn't changed", results.get(group2)); + + assertEquals(2, results.get(group3).v1().size()); + assertEquals(Arrays.asList(5, 6), results.get(group3).v1()); + assertEquals(1, results.get(group3).v2().intValue()); + assertEquals(1, results.size()); + results.clear(); + + service.applySettings(Settings.builder() + .put(intBuilder.apply(group1), 4) // modified + .put(intBuilder.apply(group2), 7) + .putList(listBuilder.apply(group1), "16", "17") + .putList(listBuilder.apply(group3), "5", "6") + .build()); + assertNull(group2 + " wasn't changed", results.get(group2)); + assertNull(group3 + " wasn't changed", results.get(group3)); + + assertEquals(2, results.get(group1).v1().size()); + assertEquals(Arrays.asList(16, 17), results.get(group1).v1()); + assertEquals(4, results.get(group1).v2().intValue()); + assertEquals(1, results.size()); + results.clear(); + + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + service.applySettings(Settings.builder() + .put(intBuilder.apply(group1), 2) // modified to trip validator + .put(intBuilder.apply(group2), 7) + .putList(listBuilder.apply(group1)) // modified to trip validator + .putList(listBuilder.apply(group3), "5", "6") + .build()) + ); + assertEquals("boom", iae.getMessage()); + assertEquals(0, results.size()); + } + public void testAddConsumerAffix() { Setting.AffixSetting intSetting = Setting.affixKeySetting("foo.", "bar", (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope)); @@ -893,7 +987,7 @@ public class ScopedSettingsTests extends ESTestCase { public void testInternalIndexSettingsSkipValidation() { final Setting internalIndexSetting = Setting.simpleString("index.internal", Property.InternalIndex, Property.IndexScope); - final 
IndexScopedSettings indexScopedSettings = + final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(Settings.EMPTY, Collections.singleton(internalIndexSetting)); // nothing should happen, validation should not throw an exception final Settings settings = Settings.builder().put("index.internal", "internal").build(); diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index c32037f4452..7063e53f789 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -757,7 +757,7 @@ public class SettingTests extends ESTestCase { public void testSettingsGroupUpdater() { Setting intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic); Setting intSetting2 = Setting.intSetting("prefix.same", 1, Property.NodeScope, Property.Dynamic); - AbstractScopedSettings.SettingUpdater updater = Setting.groupedSettingsUpdater(s -> {}, logger, + AbstractScopedSettings.SettingUpdater updater = Setting.groupedSettingsUpdater(s -> {}, Arrays.asList(intSetting, intSetting2)); Settings current = Settings.builder().put("prefix.foo", 123).put("prefix.same", 5555).build(); @@ -768,7 +768,7 @@ public void testSettingsGroupUpdaterRemoval() { Setting intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic); Setting intSetting2 = Setting.intSetting("prefix.same", 1, Property.NodeScope, Property.Dynamic); - AbstractScopedSettings.SettingUpdater updater = Setting.groupedSettingsUpdater(s -> {}, logger, + AbstractScopedSettings.SettingUpdater updater = Setting.groupedSettingsUpdater(s -> {}, Arrays.asList(intSetting, intSetting2)); Settings current = Settings.builder().put("prefix.same", 5555).build(); @@ -783,7 +783,7 @@ Setting.AffixSetting affixSetting = Setting.affixKeySetting("prefix.foo.", "suffix", key -> Setting.simpleString(key,Property.NodeScope, Property.Dynamic)); - AbstractScopedSettings.SettingUpdater updater = Setting.groupedSettingsUpdater(s -> {}, logger, + AbstractScopedSettings.SettingUpdater updater = Setting.groupedSettingsUpdater(s -> {}, Arrays.asList(intSetting, prefixKeySetting, affixSetting)); Settings.Builder currentSettingsBuilder = Settings.builder()

From 67bfb765ee8b7958a4d7a26b17f653ae8955878d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 22 Aug 2018 10:18:07 -0400 Subject: [PATCH 28/48] Refactor Netty4Utils#maybeDie (#33021)

In our Netty layer we have had to take extra precautions against Netty catching throwables, which prevents them from reaching the uncaught exception handler. This code has taken on additional uses in the NIO layer and now in the scheduler engine, because there are other components in the stack that could catch throwables and suppress them from reaching the uncaught exception handler. This commit is a simple cleanup of the iterative evolution of this code, refactoring all uses into a single method in ExceptionsHelper.
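Pattern sketch (editorial, not part of the patch): the idiom being consolidated rethrows a fatal Error from a fresh thread so that no intermediate try/catch, for example inside Netty, can swallow it before it reaches the uncaught exception handler. The class below is a simplified illustration; the real entry point is ExceptionsHelper#maybeDieOnAnotherThread in the diff that follows, which additionally unwraps suppressed and nested causes via maybeError.

import org.apache.logging.log4j.Logger;

final class DieOnAnotherThreadSketch {
    // If the throwable is an Error, log it and rethrow it from a new thread;
    // throwing on the current thread could be caught and suppressed by
    // framework code between here and the uncaught exception handler.
    static void maybeDie(Throwable cause, Logger logger) {
        if (cause instanceof Error) {
            final Error error = (Error) cause;
            logger.error("fatal error", error); // best-effort logging first
            new Thread(() -> {
                throw error; // escapes to the uncaught exception handler
            }).start();
        }
    }
}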
--- .../http/netty4/Netty4HttpChannel.java | 6 +- .../http/netty4/Netty4HttpRequestHandler.java | 5 +- .../http/netty4/Netty4HttpServerChannel.java | 4 +- .../netty4/Netty4HttpServerTransport.java | 5 +- .../netty4/Netty4MessageChannelHandler.java | 2 +- .../transport/netty4/Netty4TcpChannel.java | 5 +- .../netty4/Netty4TcpServerChannel.java | 3 +- .../transport/netty4/Netty4Transport.java | 11 +-- .../transport/netty4/Netty4Utils.java | 34 -------- .../http/nio/HttpReadWriteHandler.java | 2 +- .../elasticsearch/http/nio/NettyAdaptor.java | 4 +- .../elasticsearch/http/nio/NettyListener.java | 2 +- .../qa/die_with_dignity/DieWithDignityIT.java | 12 +-- .../org/elasticsearch/ExceptionsHelper.java | 79 ++++++++++--------- .../xpack/core/scheduler/SchedulerEngine.java | 8 +- 15 files changed, 76 insertions(+), 106 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index 981a417449f..73135c2a145 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -21,11 +21,11 @@ package org.elasticsearch.http.netty4; import io.netty.channel.Channel; import io.netty.channel.ChannelPromise; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.concurrent.CompletableContext; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpResponse; -import org.elasticsearch.transport.netty4.Netty4Utils; import java.net.InetSocketAddress; @@ -42,7 +42,7 @@ public class Netty4HttpChannel implements HttpChannel { } else { Throwable cause = f.cause(); if (cause instanceof Error) { - Netty4Utils.maybeDie(cause); + ExceptionsHelper.maybeDieOnAnotherThread(cause); closeContext.completeExceptionally(new Exception(cause)); } else { closeContext.completeExceptionally((Exception) cause); @@ -59,7 +59,7 @@ public class Netty4HttpChannel implements HttpChannel { listener.onResponse(null); } else { final Throwable cause = f.cause(); - Netty4Utils.maybeDie(cause); + ExceptionsHelper.maybeDieOnAnotherThread(cause); if (cause instanceof Error) { listener.onFailure(new Exception(cause)); } else { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index ab078ad10d3..472e34d09fc 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -27,7 +27,6 @@ import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.FullHttpRequest; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.transport.netty4.Netty4Utils; @ChannelHandler.Sharable class Netty4HttpRequestHandler extends SimpleChannelInboundHandler> { @@ -58,7 +57,7 @@ class Netty4HttpRequestHandler extends SimpleChannelInboundHandler maybeError = ExceptionsHelper.maybeError(cause, logger); - if (maybeError.isPresent()) { - /* - * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. 
Yet, Netty wraps too many - * invocations of user-code in try/catch blocks that swallow all throwables. This means that a rethrow here will not bubble up - * to where we want it to. So, we fork a thread and throw the exception from there where Netty can not get to it. We do not wrap - * the exception so as to not lose the original cause during exit. - */ - try { - // try to log the current stack trace - final String formatted = ExceptionsHelper.formatStackTrace(Thread.currentThread().getStackTrace()); - logger.error("fatal error on the network layer\n{}", formatted); - } finally { - new Thread( - () -> { - throw maybeError.get(); - }) - .start(); - } - } - } - } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java index 3dcd59cf8e2..17a5c1fb97e 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java @@ -139,7 +139,7 @@ public class HttpReadWriteHandler implements ReadWriteHandler { if (request.decoderResult().isFailure()) { Throwable cause = request.decoderResult().cause(); if (cause instanceof Error) { - ExceptionsHelper.dieOnError(cause); + ExceptionsHelper.maybeDieOnAnotherThread(cause); transport.incomingRequestError(httpRequest, nioHttpChannel, new Exception(cause)); } else { transport.incomingRequestError(httpRequest, nioHttpChannel, (Exception) cause); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java index 41cb72aa322..133206e1322 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java @@ -73,7 +73,7 @@ public class NettyAdaptor implements AutoCloseable { closeFuture.await(); if (closeFuture.isSuccess() == false) { Throwable cause = closeFuture.cause(); - ExceptionsHelper.dieOnError(cause); + ExceptionsHelper.maybeDieOnAnotherThread(cause); throw (Exception) cause; } } @@ -84,7 +84,7 @@ public class NettyAdaptor implements AutoCloseable { listener.accept(null, null); } else { final Throwable cause = f.cause(); - ExceptionsHelper.dieOnError(cause); + ExceptionsHelper.maybeDieOnAnotherThread(cause); assert cause instanceof Exception; listener.accept(null, (Exception) cause); } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java index 2cdaa4708d1..637bbafff8e 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java @@ -223,7 +223,7 @@ public class NettyListener implements BiConsumer, ChannelPromis biConsumer.accept(null, null); } else { if (cause instanceof Error) { - ExceptionsHelper.dieOnError(cause); + ExceptionsHelper.maybeDieOnAnotherThread(cause); biConsumer.accept(null, new Exception(cause)); } else { biConsumer.accept(null, (Exception) cause); diff --git a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index 992d3ce71f6..9250122025c 100644 --- 
a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -90,14 +90,14 @@ public class DieWithDignityIT extends ESRestTestCase { final Iterator it = lines.iterator(); - boolean fatalErrorOnTheNetworkLayer = false; + boolean fatalError = false; boolean fatalErrorInThreadExiting = false; - while (it.hasNext() && (fatalErrorOnTheNetworkLayer == false || fatalErrorInThreadExiting == false)) { + while (it.hasNext() && (fatalError == false || fatalErrorInThreadExiting == false)) { final String line = it.next(); - if (line.contains("fatal error on the network layer")) { - fatalErrorOnTheNetworkLayer = true; - } else if (line.matches(".*\\[ERROR\\]\\[o.e.b.ElasticsearchUncaughtExceptionHandler\\] \\[node-0\\]" + if (line.matches(".*\\[ERROR\\]\\[o\\.e\\.ExceptionsHelper\\s*\\] \\[node-0\\] fatal error")) { + fatalError = true; + } else if (line.matches(".*\\[ERROR\\]\\[o\\.e\\.b\\.ElasticsearchUncaughtExceptionHandler\\] \\[node-0\\]" + " fatal error in thread \\[Thread-\\d+\\], exiting$")) { fatalErrorInThreadExiting = true; assertTrue(it.hasNext()); @@ -105,7 +105,7 @@ public class DieWithDignityIT extends ESRestTestCase { } } - assertTrue(fatalErrorOnTheNetworkLayer); + assertTrue(fatalError); assertTrue(fatalErrorInThreadExiting); } diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 09347f519fb..9f756666217 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -136,42 +136,6 @@ public final class ExceptionsHelper { return Arrays.stream(stackTrace).skip(1).map(e -> "\tat " + e).collect(Collectors.joining("\n")); } - static final int MAX_ITERATIONS = 1024; - - /** - * Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable. - * - * @param cause the root throwable - * - * @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable - */ - public static Optional maybeError(final Throwable cause, final Logger logger) { - // early terminate if the cause is already an error - if (cause instanceof Error) { - return Optional.of((Error) cause); - } - - final Queue queue = new LinkedList<>(); - queue.add(cause); - int iterations = 0; - while (!queue.isEmpty()) { - iterations++; - if (iterations > MAX_ITERATIONS) { - logger.warn("giving up looking for fatal errors", cause); - break; - } - final Throwable current = queue.remove(); - if (current instanceof Error) { - return Optional.of((Error) current); - } - Collections.addAll(queue, current.getSuppressed()); - if (current.getCause() != null) { - queue.add(current.getCause()); - } - } - return Optional.empty(); - } - /** * Rethrows the first exception in the list and adds all remaining to the suppressed list. * If the given list is empty no exception is thrown @@ -243,13 +207,50 @@ public final class ExceptionsHelper { return true; } + static final int MAX_ITERATIONS = 1024; + + /** + * Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable. 
+ *
+ * @param cause the root throwable + * @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable + */ + public static Optional maybeError(final Throwable cause, final Logger logger) { + // early terminate if the cause is already an error + if (cause instanceof Error) { + return Optional.of((Error) cause); + } + + final Queue queue = new LinkedList<>(); + queue.add(cause); + int iterations = 0; + while (queue.isEmpty() == false) { + iterations++; + // this is a guard against deeply nested or circular chains of exceptions + if (iterations > MAX_ITERATIONS) { + logger.warn("giving up looking for fatal errors", cause); + break; + } + final Throwable current = queue.remove(); + if (current instanceof Error) { + return Optional.of((Error) current); + } + Collections.addAll(queue, current.getSuppressed()); + if (current.getCause() != null) { + queue.add(current.getCause()); + } + } + return Optional.empty(); + } + /** * If the specified cause is an unrecoverable error, this method will rethrow the cause on a separate thread so that it can not be - * caught and bubbles up to the uncaught exception handler. + * caught and bubbles up to the uncaught exception handler. Note that the cause tree is examined for any {@link Error}. See + * {@link #maybeError(Throwable, Logger)} for the semantics. * - * @param throwable the throwable to test + * @param throwable the throwable to possibly throw on another thread */ - public static void dieOnError(Throwable throwable) { + public static void maybeDieOnAnotherThread(final Throwable throwable) { ExceptionsHelper.maybeError(throwable, logger).ifPresent(error -> { /* * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. Yet, sometimes the stack diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java index 3f99818f31a..30e41734906 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java @@ -194,11 +194,11 @@ public class SchedulerEngine { /* * Allowing the throwable to escape here will lead to be it being caught in FutureTask#run and set as the outcome of this * task; however, we never inspect the the outcomes of these scheduled tasks and so allowing the throwable to escape - * unhandled here could lead to us losing fatal errors. Instead, we rely on ExceptionsHelper#dieOnError to appropriately - * dispatch any error to the uncaught exception handler. We should never see an exception here as these do not escape from - * SchedulerEngine#notifyListeners. + * unhandled here could lead to us losing fatal errors. Instead, we rely on ExceptionsHelper#maybeDieOnAnotherThread + * to appropriately dispatch any error to the uncaught exception handler. We should never see an exception here as these do + * not escape from SchedulerEngine#notifyListeners.
*/ - ExceptionsHelper.dieOnError(t); + ExceptionsHelper.maybeDieOnAnotherThread(t); throw t; } scheduleNextRun(triggeredTime); From e9912081dd3507bf3ab1ceabd949ca692a5a92eb Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Wed, 22 Aug 2018 09:19:58 -0500 Subject: [PATCH 29/48] HLRC: Create server agnostic request and response (#32912) The HLRC has historically reused the same Request and Response classes that the server module uses. This commit deprecates the use of any server module Request and Response classes, and adds a small bit of validation logic that differs slightly from the server's, in that it does not assume a check for a null ValidationException alone is enough to determine whether validation failed. --- .../client/RestHighLevelClient.java | 127 ++++++++++++++++-- .../org/elasticsearch/client/Validatable.java | 41 ++++++ .../client/ValidationException.java | 55 ++++++++ 3 files changed, 209 insertions(+), 14 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 2b71b5be59d..7376f74839c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -960,6 +960,11 @@ public class RestHighLevelClient implements Closeable { FieldCapabilitiesResponse::fromXContent, listener, emptySet()); } + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + */ + @Deprecated protected final Resp performRequestAndParseEntity(Req request, CheckedFunction requestConverter, RequestOptions options, @@ -969,15 +974,58 @@ public class RestHighLevelClient implements Closeable { response -> parseEntity(response.getEntity(), entityParser), ignores); } + /** + * Defines a helper method for performing a request and then parsing the returned entity using the provided entityParser. + */ + protected final Resp performRequestAndParseEntity(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction entityParser, + Set ignores) throws IOException { + return performRequest(request, requestConverter, options, + response -> parseEntity(response.getEntity(), entityParser), ignores); + } + + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */ + @Deprecated protected final Resp performRequest(Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction responseConverter, - Set ignores) throws IOException { + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + Set ignores) throws IOException { ActionRequestValidationException validationException = request.validate(); - if (validationException != null) { + if (validationException != null && validationException.validationErrors().isEmpty() == false) { throw validationException; } + return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); + } + + /** + * Defines a helper method for performing a request. + */ + protected final Resp performRequest(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + Set ignores) throws IOException { + ValidationException validationException = request.validate(); + if (validationException != null && validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); + } + + /** + * Provides common functionality for performing a request. + */ + private Resp internalPerformRequest(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + Set ignores) throws IOException { Request req = requestConverter.apply(request); req.setOptions(options); Response response; @@ -1005,25 +1053,75 @@ public class RestHighLevelClient implements Closeable { } } + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + */ + @Deprecated protected final void performRequestAsyncAndParseEntity(Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction entityParser, - ActionListener listener, Set ignores) { + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction entityParser, + ActionListener listener, Set ignores) { performRequestAsync(request, requestConverter, options, response -> parseEntity(response.getEntity(), entityParser), listener, ignores); } + /** + * Defines a helper method for asynchronously performing a request. + */ + protected final void performRequestAsyncAndParseEntity(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction entityParser, + ActionListener listener, Set ignores) { + performRequestAsync(request, requestConverter, options, + response -> parseEntity(response.getEntity(), entityParser), listener, ignores); + } + + + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. 
+ */ + @Deprecated protected final void performRequestAsync(Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction responseConverter, - ActionListener listener, Set ignores) { + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + ActionListener listener, Set ignores) { ActionRequestValidationException validationException = request.validate(); - if (validationException != null) { + if (validationException != null && validationException.validationErrors().isEmpty() == false) { listener.onFailure(validationException); return; } + internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores); + } + + /** + * Defines a helper method for asynchronously performing a request. + */ + protected final void performRequestAsync(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + ActionListener listener, Set ignores) { + ValidationException validationException = request.validate(); + if (validationException != null && validationException.validationErrors().isEmpty() == false) { + listener.onFailure(validationException); + return; + } + internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores); + } + + /** + * Provides common functionality for asynchronously performing a request. + */ + private void internalPerformRequestAsync(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + ActionListener listener, Set ignores) { Request req; try { req = requestConverter.apply(request); @@ -1037,6 +1135,7 @@ public class RestHighLevelClient implements Closeable { client.performRequestAsync(req, responseListener); } + final ResponseListener wrapResponseListener(CheckedFunction responseConverter, ActionListener actionListener, Set ignores) { return new ResponseListener() { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java new file mode 100644 index 00000000000..2efff4d3663 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +/** + * Defines a validation layer for Requests. + */ +public interface Validatable { + ValidationException EMPTY_VALIDATION = new ValidationException() { + @Override + public void addValidationError(String error) { + throw new UnsupportedOperationException("Validation messages should not be added to the empty validation"); + } + }; + + /** + * Perform validation. 
This method does not have to be overridden in the event that no validation needs to be done. + * + * @return potentially null for older actions, an empty {@link ValidationException} for newer actions, or a + * {@link ValidationException} that contains a list of all failed validations. + */ + default ValidationException validate() { + return EMPTY_VALIDATION; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java new file mode 100644 index 00000000000..6b5d738d675 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import java.util.ArrayList; +import java.util.List; + +/** + * Encapsulates an accumulation of validation errors + */ +public class ValidationException extends IllegalArgumentException { + private final List validationErrors = new ArrayList<>(); + + /** + * Add a new validation error to the accumulating validation errors + * @param error the error to add + */ + public void addValidationError(String error) { + validationErrors.add(error); + } + + /** + * Returns the validation errors accumulated + */ + public final List validationErrors() { + return validationErrors; + } + + @Override + public final String getMessage() { + StringBuilder sb = new StringBuilder(); + sb.append("Validation Failed: "); + int index = 0; + for (String error : validationErrors) { + sb.append(++index).append(": ").append(error).append(";"); + } + return sb.toString(); + } +} From 528e727999bc5bf851019e0848d1c2bbb8c17f34 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 22 Aug 2018 10:19:30 -0400 Subject: [PATCH 30/48] Fix method reference in comment in SchedulerEngine This commit fixes the name of a method reference in a comment in SchedulerEngine.
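To make the HLRC validation layer from PATCH 29 above concrete, here is a minimal sketch of a request class plugging into it. GetFooRequest and its fields are hypothetical and exist only for illustration; Validatable and ValidationException are the classes added by that patch.

    package org.elasticsearch.client;

    /**
     * Hypothetical HLRC request: implementing Validatable lets the new
     * performRequest variants reject it before any HTTP call is made.
     */
    public class GetFooRequest implements Validatable {
        private final String index;
        private final String id;

        public GetFooRequest(String index, String id) {
            this.index = index;
            this.id = id;
        }

        @Override
        public ValidationException validate() {
            ValidationException validationException = new ValidationException();
            if (index == null || index.isEmpty()) {
                validationException.addValidationError("index is missing");
            }
            if (id == null || id.isEmpty()) {
                validationException.addValidationError("id is missing");
            }
            // returning an empty exception counts as a pass: performRequest
            // only throws when validationErrors() is non-empty
            return validationException;
        }
    }

Note that returning an empty ValidationException (or the shared EMPTY_VALIDATION instance) signals success; the validationErrors().isEmpty() check is exactly what distinguishes the new layer from the server's null-check convention.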
--- .../elasticsearch/xpack/core/scheduler/SchedulerEngine.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java index 30e41734906..66a2eb35898 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngine.java @@ -194,8 +194,8 @@ public class SchedulerEngine { /* * Allowing the throwable to escape here will lead to be it being caught in FutureTask#run and set as the outcome of this * task; however, we never inspect the the outcomes of these scheduled tasks and so allowing the throwable to escape - * unhandled here could lead to us losing fatal errors. Instead, we rely on ExceptionsHelper#maybeThrowErrorOnAnotherThread - * to appropriately dispatch any error to the uncaught exception handler. We should never see an exception here as these do + * unhandled here could lead to us losing fatal errors. Instead, we rely on ExceptionsHelper#maybeDieOnAnotherThread to + * appropriately dispatch any error to the uncaught exception handler. We should never see an exception here as these do * not escape from SchedulerEngine#notifyListeners. */ ExceptionsHelper.maybeDieOnAnotherThread(t); From abb4c183f1e9ed8835f4c4bfede6eebc9c85c3ef Mon Sep 17 00:00:00 2001 From: Dimitrios Liappis Date: Wed, 22 Aug 2018 18:18:30 +0300 Subject: [PATCH 31/48] Clarify ignore_above behavior with arrays of strings Currently docs don't explain how `ignore_above` behaves with arrays of strings. Clarify how `ignore_above` applies to arrays of strings and also note that all strings will still be visible in the `_source` field. Relates #33057 --- docs/reference/mapping/params/ignore-above.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/mapping/params/ignore-above.asciidoc b/docs/reference/mapping/params/ignore-above.asciidoc index 95704c6c8bb..fe7c6881a06 100644 --- a/docs/reference/mapping/params/ignore-above.asciidoc +++ b/docs/reference/mapping/params/ignore-above.asciidoc @@ -2,6 +2,9 @@ === `ignore_above` Strings longer than the `ignore_above` setting will not be indexed or stored. +For arrays of strings, `ignore_above` will be applied to each array element separately, and string elements longer than `ignore_above` will not be indexed or stored. + +NOTE: All strings/array elements will still be present in the `_source` field, if the latter is enabled, which is the default in Elasticsearch. [source,js] -------------------------------------------------- From 393eec148217a721281b111ea8ab95c436ec5684 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 22 Aug 2018 17:23:54 +0200 Subject: [PATCH 32/48] Set maxScore for empty TopDocs to NaN rather than 0 (#32938) We used to set `maxScore` to `0` within `TopDocs` in situations where there is really no score, as the size was set to `0` and scores were not even tracked. In such scenarios, `Float.NaN` is more appropriate, which gets converted to `max_score: null` on the REST layer. That's also more consistent with Lucene, which sets `maxScore` to `Float.NaN` when merging empty `TopDocs` (see `TopDocs#merge`).
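Before the diff, a minimal sketch of the convention this patch establishes (the constructor call mirrors the changes below; the comparison caveat is why the tests assert via Float.isNaN or hamcrest's equalTo(Float.NaN) rather than ==):

    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;

    class MaxScoreConventionSketch {
        public static void main(String[] args) {
            // empty TopDocs now carry Float.NaN as maxScore instead of 0
            TopDocs empty = new TopDocs(0, new ScoreDoc[0], Float.NaN);

            // NaN never compares equal with ==, so callers must use Float.isNaN;
            // hamcrest's equalTo(Float.NaN) also works because Float#equals
            // treats NaN as equal to NaN
            System.out.println(Float.isNaN(empty.getMaxScore())); // true
            System.out.println(empty.getMaxScore() == Float.NaN); // false

            // on the REST layer, Float.NaN is rendered as "max_score": null
        }
    }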
--- .../org/elasticsearch/client/SearchIT.java | 12 ++--- .../bucket/children-aggregation.asciidoc | 2 +- docs/reference/getting-started.asciidoc | 2 +- .../mapping/params/normalizer.asciidoc | 2 +- .../migration/migrate_7_0/search.asciidoc | 5 ++ docs/reference/search/request-body.asciidoc | 2 +- .../suggesters/completion-suggest.asciidoc | 2 +- .../ParentChildInnerHitContextBuilder.java | 2 +- .../rest-api-spec/test/scroll/10_basic.yml | 48 +++++++++++++++++++ .../test/search/110_field_collapsing.yml | 17 +++++++ .../search/140_pre_filter_search_shards.yml | 1 - .../test/search/190_index_prefix_search.yml | 3 ++ .../test/search/210_rescore_explain.yml | 1 + .../elasticsearch/common/lucene/Lucene.java | 2 +- .../index/query/NestedQueryBuilder.java | 2 +- .../search/query/QueryPhase.java | 2 +- .../search/query/TopDocsCollectorContext.java | 2 +- .../aggregations/metrics/TopHitsIT.java | 4 +- .../search/query/QueryPhaseTests.java | 27 +++++++++-- .../rest/yaml/section/MatchAssertion.java | 8 +++- .../yaml/section/MatchAssertionTests.java | 42 ++++++++++++++++ 21 files changed, 163 insertions(+), 25 deletions(-) create mode 100644 test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 9c9c5425f00..739a590ba5f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -256,7 +256,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertNull(searchResponse.getSuggest()); assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); assertEquals(0, searchResponse.getHits().getHits().length); - assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); Terms termsAgg = searchResponse.getAggregations().get("agg1"); assertEquals("agg1", termsAgg.getName()); assertEquals(2, termsAgg.getBuckets().size()); @@ -293,7 +293,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); assertEquals(5, searchResponse.getHits().totalHits); assertEquals(0, searchResponse.getHits().getHits().length); - assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); Range rangeAgg = searchResponse.getAggregations().get("agg1"); assertEquals("agg1", rangeAgg.getName()); assertEquals(2, rangeAgg.getBuckets().size()); @@ -323,7 +323,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertNull(searchResponse.getSuggest()); assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); assertEquals(0, searchResponse.getHits().getHits().length); - assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); Terms termsAgg = searchResponse.getAggregations().get("agg1"); assertEquals("agg1", termsAgg.getName()); assertEquals(2, termsAgg.getBuckets().size()); @@ -375,7 +375,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); assertEquals(5, searchResponse.getHits().totalHits); assertEquals(0, searchResponse.getHits().getHits().length); - assertEquals(0f, 
searchResponse.getHits().getMaxScore(), 0f); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); assertEquals(1, searchResponse.getAggregations().asList().size()); MatrixStats matrixStats = searchResponse.getAggregations().get("agg1"); assertEquals(5, matrixStats.getFieldCount("num")); @@ -474,7 +474,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); assertEquals(3, searchResponse.getHits().totalHits); assertEquals(0, searchResponse.getHits().getHits().length); - assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); assertEquals(1, searchResponse.getAggregations().asList().size()); Terms terms = searchResponse.getAggregations().get("top-tags"); assertEquals(0, terms.getDocCountError()); @@ -513,7 +513,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertNull(searchResponse.getAggregations()); assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); assertEquals(0, searchResponse.getHits().totalHits); - assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); assertEquals(0, searchResponse.getHits().getHits().length); assertEquals(1, searchResponse.getSuggest().size()); diff --git a/docs/reference/aggregations/bucket/children-aggregation.asciidoc b/docs/reference/aggregations/bucket/children-aggregation.asciidoc index 3805b2e564c..e2b3c8ec591 100644 --- a/docs/reference/aggregations/bucket/children-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/children-aggregation.asciidoc @@ -144,7 +144,7 @@ Possible response: }, "hits": { "total": 3, - "max_score": 0.0, + "max_score": null, "hits": [] }, "aggregations": { diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 8229f74bdd0..c69597e74fd 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -1141,7 +1141,7 @@ And the response (partially shown): }, "hits" : { "total" : 1000, - "max_score" : 0.0, + "max_score" : null, "hits" : [ ] }, "aggregations" : { diff --git a/docs/reference/mapping/params/normalizer.asciidoc b/docs/reference/mapping/params/normalizer.asciidoc index 3688a0e9454..73110cd11f5 100644 --- a/docs/reference/mapping/params/normalizer.asciidoc +++ b/docs/reference/mapping/params/normalizer.asciidoc @@ -151,7 +151,7 @@ returns }, "hits": { "total": 3, - "max_score": 0.0, + "max_score": null, "hits": [] }, "aggregations": { diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index 094294d8530..76367115e13 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -100,3 +100,8 @@ and the context is only accepted if `path` points to a field with `geo_point` ty `max_concurrent_shard_requests` used to limit the total number of concurrent shard requests a single high level search request can execute. In 7.0 this changed to be the max number of concurrent shard requests per node. The default is now `5`. + +==== `max_score` set to `null` when scores are not tracked + +`max_score` used to be set to `0` whenever scores are not tracked. `null` is now used +instead which is a more appropriate value for a scenario where scores are not available. 
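As a practical companion to the migration note above, here is a hedged sketch of how client code might read max_score defensively once it can be NaN; searchResponse stands for any org.elasticsearch.action.search.SearchResponse, as exercised in the SearchIT changes above.

    import org.elasticsearch.action.search.SearchResponse;

    final class MaxScoreHandlingSketch {
        static String describeMaxScore(SearchResponse searchResponse) {
            float maxScore = searchResponse.getHits().getMaxScore();
            if (Float.isNaN(maxScore)) {
                // scores were not tracked for this request (for example size: 0,
                // or a sorted search without track_scores); over REST this
                // surfaces as "max_score": null
                return "no scores tracked";
            }
            return "best hit scored " + maxScore;
        }
    }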
diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index e7c9b593af3..ad24d9c93c6 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -161,7 +161,7 @@ be set to `true` in the response. }, "hits": { "total": 1, - "max_score": 0.0, + "max_score": null, "hits": [] } } diff --git a/docs/reference/search/suggesters/completion-suggest.asciidoc b/docs/reference/search/suggesters/completion-suggest.asciidoc index 9f9833bde9d..c52f28bc7be 100644 --- a/docs/reference/search/suggesters/completion-suggest.asciidoc +++ b/docs/reference/search/suggesters/completion-suggest.asciidoc @@ -258,7 +258,7 @@ Which should look like: }, "hits": { "total" : 0, - "max_score" : 0.0, + "max_score" : null, "hits" : [] }, "suggest": { diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 6593c7efb9f..5e57a277405 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -131,7 +131,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) { intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx); } - result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0); + result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, Float.NaN); } else { int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc()); TopDocsCollector topDocsCollector; diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml index 5ecc357e0e1..6ab18146bba 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml @@ -233,3 +233,51 @@ query: match_all: {} size: 0 + +--- +"Scroll max_score is null": + - skip: + version: " - 6.99.99" + reason: max_score was set to 0 rather than null before 7.0 + + - do: + indices.create: + index: test_scroll + - do: + index: + index: test_scroll + type: test + id: 42 + body: { foo: 1 } + + - do: + index: + index: test_scroll + type: test + id: 43 + body: { foo: 2 } + + - do: + indices.refresh: {} + + - do: + search: + index: test_scroll + size: 1 + scroll: 1m + sort: foo + body: + query: + match_all: {} + + - set: {_scroll_id: scroll_id} + - length: {hits.hits: 1 } + - match: { hits.max_score: null } + + - do: + scroll: + scroll_id: $scroll_id + scroll: 1m + + - length: {hits.hits: 1 } + - match: { hits.max_score: null } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml index 521dc4c1cac..dad05cce4eb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -244,6 +244,23 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 0 } +--- +"no hits and inner_hits max_score null": + + - skip: + version: " - 6.99.99" + reason: 
max_score was set to 0 rather than null before 7.0 + + - do: + search: + index: test + body: + size: 0 + collapse: { field: numeric_group, inner_hits: { name: sub_hits, size: 1} } + sort: [{ sort: desc }] + + - match: { hits.max_score: null } + --- "field collapsing and multiple inner_hits": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index dc6b130b289..c63dee2e211 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -128,7 +128,6 @@ setup: - match: { hits.total: 2 } - match: { aggregations.some_agg.doc_count: 3 } - - do: search: pre_filter_shard_size: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml index dfe0b6825cd..62770e2915d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml @@ -39,6 +39,7 @@ setup: df: text - match: {hits.total: 1} + - match: {hits.max_score: 1} - match: {hits.hits.0._score: 1} - do: @@ -52,6 +53,7 @@ setup: boost: 2 - match: {hits.total: 1} + - match: {hits.max_score: 2} - match: {hits.hits.0._score: 2} - do: @@ -61,6 +63,7 @@ setup: df: text - match: {hits.total: 1} + - match: {hits.max_score: 1} - match: {hits.hits.0._score: 1} --- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml index 24920580c45..4d7ee91bef5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml @@ -29,6 +29,7 @@ query_weight: 5 rescore_query_weight: 10 + - match: {hits.max_score: 15} - match: { hits.hits.0._score: 15 } - match: { hits.hits.0._explanation.value: 15 } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index ebd0d5ba2ef..a24a6aea07f 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -101,7 +101,7 @@ public class Lucene { public static final ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0]; - public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(0, EMPTY_SCORE_DOCS, 0.0f); + public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(0, EMPTY_SCORE_DOCS, Float.NaN); public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) { if (version == null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 889f41a037f..99162857894 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -398,7 +398,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder if (size() == 0) { TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); intersect(weight, 
innerHitQueryWeight, totalHitCountCollector, ctx); - result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0); + result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, Float.NaN); } else { int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc()); TopDocsCollector topDocsCollector; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index ca06005448c..84c76e85f3d 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -95,7 +95,7 @@ public class QueryPhase implements SearchPhase { suggestPhase.execute(searchContext); // TODO: fix this once we can fetch docs for suggestions searchContext.queryResult().topDocs( - new TopDocs(0, Lucene.EMPTY_SCORE_DOCS, 0), + new TopDocs(0, Lucene.EMPTY_SCORE_DOCS, Float.NaN), new DocValueFormat[0]); return; } diff --git a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java index dc110b27977..8d40cc802ff 100644 --- a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java @@ -120,7 +120,7 @@ abstract class TopDocsCollectorContext extends QueryCollectorContext { @Override void postProcess(QuerySearchResult result) { final int totalHitCount = hitCountSupplier.getAsInt(); - result.topDocs(new TopDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS, 0), null); + result.topDocs(new TopDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS, Float.NaN), null); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 952eb22848e..a74734c622f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -321,7 +321,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(response.getHits().getTotalHits(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); - assertThat(response.getHits().getMaxScore(), equalTo(0f)); + assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -356,7 +356,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(response.getHits().getTotalHits(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); - assertThat(response.getHits().getMaxScore(), equalTo(0f)); + assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 16365d829a8..872267417c3 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -67,6 +67,7 @@ import java.util.List; import static org.hamcrest.Matchers.anyOf; import 
static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; @@ -103,6 +104,7 @@ public class QueryPhaseTests extends IndexShardTestCase { final boolean rescore = QueryPhase.execute(context, searcher, checkCancelled -> {}); assertFalse(rescore); assertEquals(searcher.count(query), context.queryResult().topDocs().totalHits); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); } private void countTestCase(boolean withDeletions) throws Exception { @@ -172,11 +174,14 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertEquals(1, context.queryResult().topDocs().totalHits); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); contextSearcher = new IndexSearcher(reader); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertEquals(0, context.queryResult().topDocs().totalHits); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); reader.close(); dir.close(); } @@ -205,13 +210,13 @@ public class QueryPhaseTests extends IndexShardTestCase { context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", Integer.toString(i))))); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertEquals(1, context.queryResult().topDocs().totalHits); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); } reader.close(); dir.close(); } - public void testMinScoreDisablesCountOptimization() throws Exception { Directory dir = newDirectory(); final Sort sort = new Sort(new SortField("rank", SortField.Type.INT)); @@ -230,11 +235,13 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setTask(new SearchTask(123L, "", "", "", null, Collections.emptyMap())); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertEquals(1, context.queryResult().topDocs().totalHits); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); contextSearcher = new IndexSearcher(reader); context.minimumScore(100); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertEquals(0, context.queryResult().topDocs().totalHits); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); reader.close(); dir.close(); } @@ -289,6 +296,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); assertThat(context.queryResult().getTotalHits(), equalTo((long) numDocs)); @@ -296,9 +304,11 @@ public class QueryPhaseTests extends IndexShardTestCase { contextSearcher = getAssertingEarlyTerminationSearcher(reader, size); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); 
assertTrue(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(size)); assertThat(context.queryResult().getTotalHits(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); assertThat(context.queryResult().topDocs().scoreDocs[0].doc, greaterThanOrEqualTo(size)); reader.close(); dir.close(); @@ -334,12 +344,14 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); context.setSize(0); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); } @@ -348,6 +360,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); } { @@ -360,6 +373,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), greaterThan(0f)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); context.setSize(0); @@ -367,6 +381,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); } { @@ -376,6 +391,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), greaterThan(0f)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); assertThat(collector.getTotalHits(), equalTo(1)); context.queryCollectors().clear(); @@ -387,6 +403,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); + assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); assertThat(collector.getTotalHits(), equalTo(1)); } @@ -539,19 +556,19 @@ public class QueryPhaseTests extends IndexShardTestCase { dir.close(); } - static IndexSearcher 
getAssertingEarlyTerminationSearcher(IndexReader reader, int size) { + private static IndexSearcher getAssertingEarlyTerminationSearcher(IndexReader reader, int size) { return new IndexSearcher(reader) { protected void search(List leaves, Weight weight, Collector collector) throws IOException { - final Collector in = new AssertingEalyTerminationFilterCollector(collector, size); + final Collector in = new AssertingEarlyTerminationFilterCollector(collector, size); super.search(leaves, weight, in); } }; } - private static class AssertingEalyTerminationFilterCollector extends FilterCollector { + private static class AssertingEarlyTerminationFilterCollector extends FilterCollector { private final int size; - AssertingEalyTerminationFilterCollector(Collector in, int size) { + AssertingEarlyTerminationFilterCollector(Collector in, int size) { super(in); this.size = size; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java index 82d8dbeebe6..6ecaae75a8e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java @@ -32,6 +32,7 @@ import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; /** @@ -70,8 +71,13 @@ public class MatchAssertion extends Assertion { } } - assertNotNull("field [" + getField() + "] is null", actualValue); logger.trace("assert that [{}] matches [{}] (field [{}])", actualValue, expectedValue, getField()); + if (expectedValue == null) { + assertNull("field [" + getField() + "] should be null but was [" + actualValue + "]", actualValue); + return; + } + assertNotNull("field [" + getField() + "] is null", actualValue); + if (actualValue.getClass().equals(safeClass(expectedValue)) == false) { if (actualValue instanceof Number && expectedValue instanceof Number) { //Double 1.0 is equal to Integer 1 diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java new file mode 100644 index 00000000000..2bd72347441 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/MatchAssertionTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.test.rest.yaml.section; + +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.test.ESTestCase; + +public class MatchAssertionTests extends ESTestCase { + + public void testNull() { + XContentLocation xContentLocation = new XContentLocation(0, 0); + { + MatchAssertion matchAssertion = new MatchAssertion(xContentLocation, "field", null); + matchAssertion.doAssert(null, null); + expectThrows(AssertionError.class, () -> matchAssertion.doAssert("non-null", null)); + } + { + MatchAssertion matchAssertion = new MatchAssertion(xContentLocation, "field", "non-null"); + expectThrows(AssertionError.class, () -> matchAssertion.doAssert(null, "non-null")); + } + { + MatchAssertion matchAssertion = new MatchAssertion(xContentLocation, "field", "/exp/"); + expectThrows(AssertionError.class, () -> matchAssertion.doAssert(null, "/exp/")); + } + } +} From c3438bc8d87ddb1786646505d84880b2018623c5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 22 Aug 2018 14:02:39 -0400 Subject: [PATCH 33/48] Switch some watcher tests to new style Requests (#33044) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `x-pack/qa/smoke-test-monitoring-with-watcher`, `x-pack/qa/smoke-test-watcher`, and `x-pack/qa/smoke-test-watcher-with-security` projects to use the new versions. --- .../MonitoringWithWatcherRestIT.java | 69 +++++++++---------- ...cherWithSecurityClientYamlTestSuiteIT.java | 11 ++- .../SmokeTestWatcherWithSecurityIT.java | 56 +++++++-------- .../SmokeTestWatcherTestSuiteIT.java | 38 +++++----- 4 files changed, 81 insertions(+), 93 deletions(-) diff --git a/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java b/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java index d89d558f02f..d3b9a974398 100644 --- a/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java +++ b/x-pack/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java @@ -5,12 +5,10 @@ */ package org.elasticsearch.smoketest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.lucene.util.LuceneTestCase.AwaitsFix; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.rest.ESRestTestCase; @@ -23,7 +21,6 @@ import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.junit.After; import java.io.IOException; -import java.util.Collections; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput; @@ -36,25 +33,25 @@ public class MonitoringWithWatcherRestIT extends ESRestTestCase { @After public void cleanExporters() throws Exception { - String body = Strings.toString(jsonBuilder().startObject().startObject("transient") - .nullField("xpack.monitoring.exporters.*") - .endObject().endObject()); - assertOK(adminClient().performRequest("PUT", "_cluster/settings", Collections.emptyMap(), 
- new StringEntity(body, ContentType.APPLICATION_JSON))); - - assertOK(adminClient().performRequest("DELETE", ".watch*", Collections.emptyMap())); + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity(Strings.toString(jsonBuilder().startObject() + .startObject("transient") + .nullField("xpack.monitoring.exporters.*") + .endObject().endObject())); + adminClient().performRequest(request); + adminClient().performRequest(new Request("DELETE", "/.watch*")); } public void testThatLocalExporterAddsWatches() throws Exception { String watchId = createMonitoringWatch(); - String body = BytesReference.bytes(jsonBuilder().startObject().startObject("transient") - .field("xpack.monitoring.exporters.my_local_exporter.type", "local") - .field("xpack.monitoring.exporters.my_local_exporter.cluster_alerts.management.enabled", true) - .endObject().endObject()).utf8ToString(); - - adminClient().performRequest("PUT", "_cluster/settings", Collections.emptyMap(), - new StringEntity(body, ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity(Strings.toString(jsonBuilder().startObject() + .startObject("transient") + .field("xpack.monitoring.exporters.my_local_exporter.type", "local") + .field("xpack.monitoring.exporters.my_local_exporter.cluster_alerts.management.enabled", true) + .endObject().endObject())); + adminClient().performRequest(request); assertTotalWatchCount(ClusterAlertsUtil.WATCH_IDS.length); @@ -65,14 +62,14 @@ public class MonitoringWithWatcherRestIT extends ESRestTestCase { String watchId = createMonitoringWatch(); String httpHost = getHttpHost(); - String body = BytesReference.bytes(jsonBuilder().startObject().startObject("transient") - .field("xpack.monitoring.exporters.my_http_exporter.type", "http") - .field("xpack.monitoring.exporters.my_http_exporter.host", httpHost) - .field("xpack.monitoring.exporters.my_http_exporter.cluster_alerts.management.enabled", true) - .endObject().endObject()).utf8ToString(); - - adminClient().performRequest("PUT", "_cluster/settings", Collections.emptyMap(), - new StringEntity(body, ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity(Strings.toString(jsonBuilder().startObject() + .startObject("transient") + .field("xpack.monitoring.exporters.my_http_exporter.type", "http") + .field("xpack.monitoring.exporters.my_http_exporter.host", httpHost) + .field("xpack.monitoring.exporters.my_http_exporter.cluster_alerts.management.enabled", true) + .endObject().endObject())); + adminClient().performRequest(request); assertTotalWatchCount(ClusterAlertsUtil.WATCH_IDS.length); @@ -80,15 +77,15 @@ public class MonitoringWithWatcherRestIT extends ESRestTestCase { } private void assertMonitoringWatchHasBeenOverWritten(String watchId) throws Exception { - ObjectPath path = ObjectPath.createFromResponse(client().performRequest("GET", "_xpack/watcher/watch/" + watchId)); + ObjectPath path = ObjectPath.createFromResponse(client().performRequest(new Request("GET", "/_xpack/watcher/watch/" + watchId))); String interval = path.evaluate("watch.trigger.schedule.interval"); assertThat(interval, is("1m")); } private void assertTotalWatchCount(int expectedWatches) throws Exception { assertBusy(() -> { - assertOK(client().performRequest("POST", ".watches/_refresh")); - ObjectPath path = ObjectPath.createFromResponse(client().performRequest("POST", ".watches/_count")); + assertOK(client().performRequest(new Request("POST", 
"/.watches/_refresh"))); + ObjectPath path = ObjectPath.createFromResponse(client().performRequest(new Request("POST", "/.watches/_count"))); int count = path.evaluate("count"); assertThat(count, is(expectedWatches)); }); @@ -97,28 +94,28 @@ public class MonitoringWithWatcherRestIT extends ESRestTestCase { private String createMonitoringWatch() throws Exception { String clusterUUID = getClusterUUID(); String watchId = clusterUUID + "_kibana_version_mismatch"; - String sampleWatch = WatchSourceBuilders.watchBuilder() + Request request = new Request("PUT", "/_xpack/watcher/watch/" + watchId); + request.setJsonEntity(WatchSourceBuilders.watchBuilder() .trigger(TriggerBuilders.schedule(new IntervalSchedule(new IntervalSchedule.Interval(1000, MINUTES)))) .input(simpleInput()) .addAction("logme", ActionBuilders.loggingAction("foo")) - .buildAsBytes(XContentType.JSON).utf8ToString(); - client().performRequest("PUT", "_xpack/watcher/watch/" + watchId, Collections.emptyMap(), - new StringEntity(sampleWatch, ContentType.APPLICATION_JSON)); + .buildAsBytes(XContentType.JSON).utf8ToString()); + client().performRequest(request); return watchId; } private String getClusterUUID() throws Exception { - Response response = client().performRequest("GET", "_cluster/state/metadata", Collections.emptyMap()); + Response response = client().performRequest(new Request("GET", "/_cluster/state/metadata")); ObjectPath objectPath = ObjectPath.createFromResponse(response); String clusterUUID = objectPath.evaluate("metadata.cluster_uuid"); return clusterUUID; } public String getHttpHost() throws IOException { - ObjectPath path = ObjectPath.createFromResponse(client().performRequest("GET", "_cluster/state", Collections.emptyMap())); + ObjectPath path = ObjectPath.createFromResponse(client().performRequest(new Request("GET", "/_cluster/state"))); String masterNodeId = path.evaluate("master_node"); - ObjectPath nodesPath = ObjectPath.createFromResponse(client().performRequest("GET", "_nodes", Collections.emptyMap())); + ObjectPath nodesPath = ObjectPath.createFromResponse(client().performRequest(new Request("GET", "/_nodes"))); String httpHost = nodesPath.evaluate("nodes." 
+ masterNodeId + ".http.publish_address"); return httpHost; } diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java index a989bb47611..0c4afff509e 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java @@ -7,9 +7,7 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -49,9 +47,9 @@ public class SmokeTestWatcherWithSecurityClientYamlTestSuiteIT extends ESClientY emptyList(), emptyMap()); // create one document in this index, so we can test in the YAML tests, that the index cannot be accessed - Response resp = adminClient().performRequest("PUT", "/index_not_allowed_to_read/doc/1", Collections.emptyMap(), - new StringEntity("{\"foo\":\"bar\"}", ContentType.APPLICATION_JSON)); - assertThat(resp.getStatusLine().getStatusCode(), is(201)); + Request request = new Request("PUT", "/index_not_allowed_to_read/doc/1"); + request.setJsonEntity("{\"foo\":\"bar\"}"); + adminClient().performRequest(request); assertBusy(() -> { ClientYamlTestResponse response = @@ -129,4 +127,3 @@ public class SmokeTestWatcherWithSecurityClientYamlTestSuiteIT extends ESClientY .build(); } } - diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index 1c8204aa1ec..665b92bbc0e 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -5,9 +5,8 @@ */ package org.elasticsearch.smoketest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -21,7 +20,6 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; -import java.util.Collections; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -41,27 +39,28 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { @Before public void startWatcher() throws Exception { - StringEntity entity = new StringEntity("{ \"value\" : \"15\" }", ContentType.APPLICATION_JSON); - assertOK(adminClient().performRequest("PUT", "my_test_index/doc/1", Collections.singletonMap("refresh", "true"), entity)); + Request createAllowedDoc = new 
Request("PUT", "/my_test_index/doc/1"); + createAllowedDoc.setJsonEntity("{ \"value\" : \"15\" }"); + createAllowedDoc.addParameter("refresh", "true"); + adminClient().performRequest(createAllowedDoc); // delete the watcher history to not clutter with entries from other test - adminClient().performRequest("DELETE", ".watcher-history-*", Collections.emptyMap()); + adminClient().performRequest(new Request("DELETE", ".watcher-history-*")); // create one document in this index, so we can test in the YAML tests, that the index cannot be accessed - Response resp = adminClient().performRequest("PUT", "/index_not_allowed_to_read/doc/1", Collections.emptyMap(), - new StringEntity("{\"foo\":\"bar\"}", ContentType.APPLICATION_JSON)); - assertThat(resp.getStatusLine().getStatusCode(), is(201)); + Request createNotAllowedDoc = new Request("PUT", "/index_not_allowed_to_read/doc/1"); + createNotAllowedDoc.setJsonEntity("{\"foo\":\"bar\"}"); + adminClient().performRequest(createNotAllowedDoc); assertBusy(() -> { try { - Response statsResponse = adminClient().performRequest("GET", "_xpack/watcher/stats"); + Response statsResponse = adminClient().performRequest(new Request("GET", "/_xpack/watcher/stats")); ObjectPath objectPath = ObjectPath.createFromResponse(statsResponse); String state = objectPath.evaluate("stats.0.watcher_state"); switch (state) { case "stopped": - Response startResponse = adminClient().performRequest("POST", "_xpack/watcher/_start"); - assertOK(startResponse); + Response startResponse = adminClient().performRequest(new Request("POST", "/_xpack/watcher/_start")); String body = EntityUtils.toString(startResponse.getEntity()); assertThat(body, containsString("\"acknowledged\":true")); break; @@ -82,18 +81,18 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertBusy(() -> { for (String template : WatcherIndexTemplateRegistryField.TEMPLATE_NAMES) { - assertOK(adminClient().performRequest("HEAD", "_template/" + template)); + assertOK(adminClient().performRequest(new Request("HEAD", "_template/" + template))); } }); } @After public void stopWatcher() throws Exception { - assertOK(adminClient().performRequest("DELETE", "my_test_index")); + adminClient().performRequest(new Request("DELETE", "/my_test_index")); assertBusy(() -> { try { - Response statsResponse = adminClient().performRequest("GET", "_xpack/watcher/stats"); + Response statsResponse = adminClient().performRequest(new Request("GET", "/_xpack/watcher/stats")); ObjectPath objectPath = ObjectPath.createFromResponse(statsResponse); String state = objectPath.evaluate("stats.0.watcher_state"); @@ -106,8 +105,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { case "starting": throw new AssertionError("waiting until starting state reached started state to stop"); case "started": - Response stopResponse = adminClient().performRequest("POST", "_xpack/watcher/_stop", Collections.emptyMap()); - assertOK(stopResponse); + Response stopResponse = adminClient().performRequest(new Request("POST", "/_xpack/watcher/_stop")); String body = EntityUtils.toString(stopResponse.getEntity()); assertThat(body, containsString("\"acknowledged\":true")); break; @@ -210,7 +208,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met"); assertThat(conditionMet, is(true)); - ObjectPath getObjectPath = ObjectPath.createFromResponse(client().performRequest("GET", "my_test_index/doc/my-id")); + ObjectPath getObjectPath = 
ObjectPath.createFromResponse(client().performRequest(new Request("GET", "/my_test_index/doc/my-id"))); String value = getObjectPath.evaluate("_source.hits.hits.0._source.value"); assertThat(value, is("15")); } @@ -238,8 +236,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { getWatchHistoryEntry(watchId); - Response response = adminClient().performRequest("GET", "my_test_index/doc/some-id", - Collections.singletonMap("ignore", "404")); + Response response = adminClient().performRequest(new Request("HEAD", "/my_test_index/doc/some-id")); assertThat(response.getStatusLine().getStatusCode(), is(404)); } @@ -262,7 +259,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met"); assertThat(conditionMet, is(true)); - ObjectPath getObjectPath = ObjectPath.createFromResponse(client().performRequest("GET", "my_test_index/doc/my-id")); + ObjectPath getObjectPath = ObjectPath.createFromResponse(client().performRequest(new Request("GET", "/my_test_index/doc/my-id"))); String spam = getObjectPath.evaluate("_source.spam"); assertThat(spam, is("eggs")); } @@ -286,16 +283,14 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met"); assertThat(conditionMet, is(true)); - Response response = adminClient().performRequest("GET", "index_not_allowed_to_read/doc/my-id", - Collections.singletonMap("ignore", "404")); + Response response = adminClient().performRequest(new Request("HEAD", "/index_not_allowed_to_read/doc/my-id")); assertThat(response.getStatusLine().getStatusCode(), is(404)); } private void indexWatch(String watchId, XContentBuilder builder) throws Exception { - StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - - Response response = client().performRequest("PUT", "_xpack/watcher/watch/" + watchId, Collections.emptyMap(), entity); - assertOK(response); + Request request = new Request("PUT", "/_xpack/watcher/watch/" + watchId); + request.setJsonEntity(Strings.toString(builder)); + Response response = client().performRequest(request); Map responseMap = entityAsMap(response); assertThat(responseMap, hasEntry("_id", watchId)); } @@ -307,7 +302,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { private ObjectPath getWatchHistoryEntry(String watchId, String state) throws Exception { final AtomicReference objectPathReference = new AtomicReference<>(); assertBusy(() -> { - client().performRequest("POST", ".watcher-history-*/_refresh"); + client().performRequest(new Request("POST", "/.watcher-history-*/_refresh")); try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -323,8 +318,9 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { .endObject().endArray(); builder.endObject(); - StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - Response response = client().performRequest("POST", ".watcher-history-*/_search", Collections.emptyMap(), entity); + Request searchRequest = new Request("POST", "/.watcher-history-*/_search"); + searchRequest.setJsonEntity(Strings.toString(builder)); + Response response = client().performRequest(searchRequest); ObjectPath objectPath = ObjectPath.createFromResponse(response); int totalHits = objectPath.evaluate("hits.total"); assertThat(totalHits, is(greaterThanOrEqualTo(1))); diff --git 
a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java index 86d97d01904..f56f96efc78 100644 --- a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java +++ b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.smoketest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -23,7 +22,6 @@ import java.io.IOException; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; -import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -39,15 +37,15 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { @Before public void startWatcher() throws Exception { // delete the watcher history to not clutter with entries from other test - assertOK(adminClient().performRequest("DELETE", ".watcher-history-*")); + assertOK(adminClient().performRequest(new Request("DELETE", "/.watcher-history-*"))); assertBusy(() -> { - Response response = adminClient().performRequest("GET", "_xpack/watcher/stats"); + Response response = adminClient().performRequest(new Request("GET", "/_xpack/watcher/stats")); String state = ObjectPath.createFromResponse(response).evaluate("stats.0.watcher_state"); switch (state) { case "stopped": - Response startResponse = adminClient().performRequest("POST", "/_xpack/watcher/_start"); + Response startResponse = adminClient().performRequest(new Request("POST", "/_xpack/watcher/_start")); boolean isAcknowledged = ObjectPath.createFromResponse(startResponse).evaluate("acknowledged"); assertThat(isAcknowledged, is(true)); break; @@ -65,7 +63,7 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { assertBusy(() -> { for (String template : WatcherIndexTemplateRegistryField.TEMPLATE_NAMES) { - Response templateExistsResponse = adminClient().performRequest("HEAD", "_template/" + template, emptyMap()); + Response templateExistsResponse = adminClient().performRequest(new Request("HEAD", "/_template/" + template)); assertThat(templateExistsResponse.getStatusLine().getStatusCode(), is(200)); } }); @@ -74,7 +72,7 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { @After public void stopWatcher() throws Exception { assertBusy(() -> { - Response response = adminClient().performRequest("GET", "_xpack/watcher/stats", emptyMap()); + Response response = adminClient().performRequest(new Request("GET", "/_xpack/watcher/stats")); String state = ObjectPath.createFromResponse(response).evaluate("stats.0.watcher_state"); switch (state) { @@ -86,7 +84,7 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { case "starting": throw new AssertionError("waiting until starting state reached started state to stop"); case "started": - Response stopResponse = adminClient().performRequest("POST", "/_xpack/watcher/_stop", emptyMap()); + Response stopResponse = adminClient().performRequest(new 
Request("POST", "/_xpack/watcher/_stop")); boolean isAcknowledged = ObjectPath.createFromResponse(stopResponse).evaluate("acknowledged"); assertThat(isAcknowledged, is(true)); break; @@ -112,12 +110,12 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { String watchId = "cluster_health_watch"; // get master publish address - Response clusterStateResponse = adminClient().performRequest("GET", "_cluster/state"); + Response clusterStateResponse = adminClient().performRequest(new Request("GET", "/_cluster/state")); ObjectPath clusterState = ObjectPath.createFromResponse(clusterStateResponse); String masterNode = clusterState.evaluate("master_node"); assertThat(masterNode, is(notNullValue())); - Response statsResponse = adminClient().performRequest("GET", "_nodes"); + Response statsResponse = adminClient().performRequest(new Request("GET", "/_nodes")); ObjectPath stats = ObjectPath.createFromResponse(statsResponse); String address = stats.evaluate("nodes." + masterNode + ".http.publish_address"); assertThat(address, is(notNullValue())); @@ -163,16 +161,15 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { } private void indexWatch(String watchId, XContentBuilder builder) throws Exception { - StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - - Response response = client().performRequest("PUT", "_xpack/watcher/watch/" + watchId, emptyMap(), entity); - assertOK(response); + Request request = new Request("PUT", "/_xpack/watcher/watch/" + watchId); + request.setJsonEntity(Strings.toString(builder)); + Response response = client().performRequest(request); Map responseMap = entityAsMap(response); assertThat(responseMap, hasEntry("_id", watchId)); } private void deleteWatch(String watchId) throws IOException { - Response response = client().performRequest("DELETE", "_xpack/watcher/watch/" + watchId); + Response response = client().performRequest(new Request("DELETE", "/_xpack/watcher/watch/" + watchId)); assertOK(response); ObjectPath path = ObjectPath.createFromResponse(response); boolean found = path.evaluate("found"); @@ -182,7 +179,7 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { private ObjectPath getWatchHistoryEntry(String watchId) throws Exception { final AtomicReference objectPathReference = new AtomicReference<>(); assertBusy(() -> { - client().performRequest("POST", ".watcher-history-*/_refresh"); + client().performRequest(new Request("POST", "/.watcher-history-*/_refresh")); try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -194,8 +191,9 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { .endObject().endArray(); builder.endObject(); - StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - Response response = client().performRequest("POST", ".watcher-history-*/_search", emptyMap(), entity); + Request searchRequest = new Request("POST", "/.watcher-history-*/_search"); + searchRequest.setJsonEntity(Strings.toString(builder)); + Response response = client().performRequest(searchRequest); ObjectPath objectPath = ObjectPath.createFromResponse(response); int totalHits = objectPath.evaluate("hits.total"); assertThat(totalHits, is(greaterThanOrEqualTo(1))); @@ -208,7 +206,7 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase { } private void assertWatchCount(int expectedWatches) throws IOException { - Response watcherStatsResponse = adminClient().performRequest("GET", "_xpack/watcher/stats"); + 
Response watcherStatsResponse = adminClient().performRequest(new Request("GET", "/_xpack/watcher/stats")); ObjectPath objectPath = ObjectPath.createFromResponse(watcherStatsResponse); int watchCount = objectPath.evaluate("stats.0.watch_count"); assertThat(watchCount, is(expectedWatches)); From 0cc99d270c6ad290efd600444cfa4dacda79c57d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 22 Aug 2018 14:23:43 -0400 Subject: [PATCH 34/48] Switch ml basic tests to new style Requests (#32483) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `x-pack/qa/ml-basic-multi-node` project to use the new versions. --- .../ml/integration/MlBasicMultiNodeIT.java | 292 +++++++++--------- 1 file changed, 143 insertions(+), 149 deletions(-) diff --git a/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java index e7381050260..6e22e5b3f18 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java +++ b/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java @@ -6,12 +6,12 @@ package org.elasticsearch.xpack.ml.integration; import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.ml.MachineLearning; @@ -22,18 +22,15 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.common.xcontent.XContentType.JSON; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class MlBasicMultiNodeIT extends ESRestTestCase { - @SuppressWarnings("unchecked") public void testMachineLearningInstalled() throws Exception { - Response response = client().performRequest("get", "/_xpack"); - assertEquals(200, response.getStatusLine().getStatusCode()); - Map features = (Map) responseEntityToMap(response).get("features"); - Map ml = (Map) features.get("ml"); + Response response = client().performRequest(new Request("GET", "/_xpack")); + Map features = (Map) entityAsMap(response).get("features"); + Map ml = (Map) features.get("ml"); assertNotNull(ml); assertTrue((Boolean) ml.get("available")); assertTrue((Boolean) ml.get("enabled")); @@ -55,18 +52,18 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { String jobId = "mini-farequote-job"; createFarequoteJob(jobId); - Response response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open"); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("opened", true), responseEntityToMap(response)); + Response openResponse = client().performRequest( + new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open")); + assertEquals(Collections.singletonMap("opened", true), 
entityAsMap(openResponse)); - String postData = + Request addData = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_data"); + addData.setEntity(new NStringEntity( "{\"airline\":\"AAL\",\"responsetime\":\"132.2046\",\"sourcetype\":\"farequote\",\"time\":\"1403481600\"}\n" + - "{\"airline\":\"JZA\",\"responsetime\":\"990.4628\",\"sourcetype\":\"farequote\",\"time\":\"1403481700\"}"; - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_data", - Collections.emptyMap(), - new StringEntity(postData, randomFrom(ContentType.APPLICATION_JSON, ContentType.create("application/x-ndjson")))); - assertEquals(202, response.getStatusLine().getStatusCode()); - Map responseBody = responseEntityToMap(response); + "{\"airline\":\"JZA\",\"responsetime\":\"990.4628\",\"sourcetype\":\"farequote\",\"time\":\"1403481700\"}", + randomFrom(ContentType.APPLICATION_JSON, ContentType.create("application/x-ndjson")))); + Response addDataResponse = client().performRequest(addData); + assertEquals(202, addDataResponse.getStatusLine().getStatusCode()); + Map responseBody = entityAsMap(addDataResponse); assertEquals(2, responseBody.get("processed_record_count")); assertEquals(4, responseBody.get("processed_field_count")); assertEquals(177, responseBody.get("input_bytes")); @@ -78,20 +75,19 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { assertEquals(1403481600000L, responseBody.get("earliest_record_timestamp")); assertEquals(1403481700000L, responseBody.get("latest_record_timestamp")); - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertFlushResponse(response, true, 1403481600000L); + Response flushResponse = client().performRequest( + new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush")); + assertFlushResponse(flushResponse, true, 1403481600000L); - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close", - Collections.singletonMap("timeout", "20s")); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("closed", true), responseEntityToMap(response)); + Request closeRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close"); + closeRequest.addParameter("timeout", "20s"); + Response closeResponse = client().performRequest(closeRequest); + assertEquals(Collections.singletonMap("closed", true), entityAsMap(closeResponse)); - response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); - assertEquals(200, response.getStatusLine().getStatusCode()); - @SuppressWarnings("unchecked") - Map dataCountsDoc = (Map) - ((Map)((List) responseEntityToMap(response).get("jobs")).get(0)).get("data_counts"); + Response statsResponse = client().performRequest( + new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); + Map dataCountsDoc = (Map) + ((Map)((List) entityAsMap(statsResponse).get("jobs")).get(0)).get("data_counts"); assertEquals(2, dataCountsDoc.get("processed_record_count")); assertEquals(4, dataCountsDoc.get("processed_field_count")); assertEquals(177, dataCountsDoc.get("input_bytes")); @@ -103,12 +99,12 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { assertEquals(1403481600000L, 
dataCountsDoc.get("earliest_record_timestamp")); assertEquals(1403481700000L, dataCountsDoc.get("latest_record_timestamp")); - response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); - assertEquals(200, response.getStatusLine().getStatusCode()); + client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); } public void testMiniFarequoteWithDatafeeder() throws Exception { - String mappings = "{" + Request createAirlineDataRequest = new Request("PUT", "/airline-data"); + createAirlineDataRequest.setJsonEntity("{" + " \"mappings\": {" + " \"response\": {" + " \"properties\": {" @@ -118,40 +114,38 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { + " }" + " }" + " }" - + "}"; - client().performRequest("put", "airline-data", Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON)); - client().performRequest("put", "airline-data/response/1", Collections.emptyMap(), - new StringEntity("{\"time\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}", - ContentType.APPLICATION_JSON)); - client().performRequest("put", "airline-data/response/2", Collections.emptyMap(), - new StringEntity("{\"time\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}", - ContentType.APPLICATION_JSON)); + + "}"); + client().performRequest(createAirlineDataRequest); + Request airlineData1 = new Request("PUT", "/airline-data/response/1"); + airlineData1.setJsonEntity("{\"time\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}"); + client().performRequest(airlineData1); + Request airlineData2 = new Request("PUT", "/airline-data/response/2"); + airlineData2.setJsonEntity("{\"time\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}"); + client().performRequest(airlineData2); // Ensure all data is searchable - client().performRequest("post", "_refresh"); + client().performRequest(new Request("POST", "/_refresh")); String jobId = "mini-farequote-with-data-feeder-job"; createFarequoteJob(jobId); String datafeedId = "bar"; createDatafeed(datafeedId, jobId); - Response response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open"); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("opened", true), responseEntityToMap(response)); + Response openResponse = client().performRequest( + new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open")); + assertEquals(Collections.singletonMap("opened", true), entityAsMap(openResponse)); - response = client().performRequest("post", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start", - Collections.singletonMap("start", "0")); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("started", true), responseEntityToMap(response)); + Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start"); + startRequest.addParameter("start", "0"); + Response startResponse = client().performRequest(startRequest); + assertEquals(Collections.singletonMap("started", true), entityAsMap(startResponse)); assertBusy(() -> { try { - Response statsResponse = - client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); - assertEquals(200, statsResponse.getStatusLine().getStatusCode()); - @SuppressWarnings("unchecked") - Map 
dataCountsDoc = (Map) - ((Map)((List) responseEntityToMap(statsResponse).get("jobs")).get(0)).get("data_counts"); + Response statsResponse = client().performRequest( + new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); + Map dataCountsDoc = (Map) + ((Map)((List) entityAsMap(statsResponse).get("jobs")).get(0)).get("data_counts"); assertEquals(2, dataCountsDoc.get("input_record_count")); assertEquals(2, dataCountsDoc.get("processed_record_count")); } catch (IOException e) { @@ -159,41 +153,38 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { } }); - response = client().performRequest("post", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stop"); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("stopped", true), responseEntityToMap(response)); + Response stopResponse = client().performRequest( + new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stop")); + assertEquals(Collections.singletonMap("stopped", true), entityAsMap(stopResponse)); - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close", - Collections.singletonMap("timeout", "20s")); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("closed", true), responseEntityToMap(response)); + Request closeRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close"); + closeRequest.addParameter("timeout", "20s"); + assertEquals(Collections.singletonMap("closed", true), + entityAsMap(client().performRequest(closeRequest))); - response = client().performRequest("delete", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId); - assertEquals(200, response.getStatusLine().getStatusCode()); - - response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); - assertEquals(200, response.getStatusLine().getStatusCode()); + client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId)); + client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); } public void testMiniFarequoteReopen() throws Exception { String jobId = "mini-farequote-reopen"; createFarequoteJob(jobId); - Response response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open"); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("opened", true), responseEntityToMap(response)); + Response openResponse = client().performRequest( + new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open")); + assertEquals(Collections.singletonMap("opened", true), entityAsMap(openResponse)); - String postData = + Request addDataRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_data"); + addDataRequest.setEntity(new NStringEntity( "{\"airline\":\"AAL\",\"responsetime\":\"132.2046\",\"sourcetype\":\"farequote\",\"time\":\"1403481600\"}\n" + "{\"airline\":\"JZA\",\"responsetime\":\"990.4628\",\"sourcetype\":\"farequote\",\"time\":\"1403481700\"}\n" + "{\"airline\":\"JBU\",\"responsetime\":\"877.5927\",\"sourcetype\":\"farequote\",\"time\":\"1403481800\"}\n" + "{\"airline\":\"KLM\",\"responsetime\":\"1355.4812\",\"sourcetype\":\"farequote\",\"time\":\"1403481900\"}\n" + - 
"{\"airline\":\"NKS\",\"responsetime\":\"9991.3981\",\"sourcetype\":\"farequote\",\"time\":\"1403482000\"}"; - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_data", - Collections.emptyMap(), - new StringEntity(postData, randomFrom(ContentType.APPLICATION_JSON, ContentType.create("application/x-ndjson")))); - assertEquals(202, response.getStatusLine().getStatusCode()); - Map responseBody = responseEntityToMap(response); + "{\"airline\":\"NKS\",\"responsetime\":\"9991.3981\",\"sourcetype\":\"farequote\",\"time\":\"1403482000\"}", + randomFrom(ContentType.APPLICATION_JSON, ContentType.create("application/x-ndjson")))); + Response addDataResponse = client().performRequest(addDataRequest); + assertEquals(202, addDataResponse.getStatusLine().getStatusCode()); + Map responseBody = entityAsMap(addDataResponse); assertEquals(5, responseBody.get("processed_record_count")); assertEquals(10, responseBody.get("processed_field_count")); assertEquals(446, responseBody.get("input_bytes")); @@ -205,60 +196,56 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { assertEquals(1403481600000L, responseBody.get("earliest_record_timestamp")); assertEquals(1403482000000L, responseBody.get("latest_record_timestamp")); - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertFlushResponse(response, true, 1403481600000L); + Response flushResponse = client().performRequest( + new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush")); + assertFlushResponse(flushResponse, true, 1403481600000L); - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close", - Collections.singletonMap("timeout", "20s")); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("closed", true), responseEntityToMap(response)); + Request closeRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close"); + closeRequest.addParameter("timeout", "20s"); + assertEquals(Collections.singletonMap("closed", true), + entityAsMap(client().performRequest(closeRequest))); - response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); - assertEquals(200, response.getStatusLine().getStatusCode()); - - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open", - Collections.singletonMap("timeout", "20s")); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("opened", true), responseEntityToMap(response)); + Request statsRequest = new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); + client().performRequest(statsRequest); + + Request openRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open"); + openRequest.addParameter("timeout", "20s"); + Response openResponse2 = client().performRequest(openRequest); + assertEquals(Collections.singletonMap("opened", true), entityAsMap(openResponse2)); // feed some more data points - postData = + Request addDataRequest2 = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_data"); + addDataRequest2.setEntity(new NStringEntity( 
"{\"airline\":\"AAL\",\"responsetime\":\"136.2361\",\"sourcetype\":\"farequote\",\"time\":\"1407081600\"}\n" + "{\"airline\":\"VRD\",\"responsetime\":\"282.9847\",\"sourcetype\":\"farequote\",\"time\":\"1407081700\"}\n" + "{\"airline\":\"JAL\",\"responsetime\":\"493.0338\",\"sourcetype\":\"farequote\",\"time\":\"1407081800\"}\n" + "{\"airline\":\"UAL\",\"responsetime\":\"8.4275\",\"sourcetype\":\"farequote\",\"time\":\"1407081900\"}\n" + - "{\"airline\":\"FFT\",\"responsetime\":\"221.8693\",\"sourcetype\":\"farequote\",\"time\":\"1407082000\"}"; - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_data", - Collections.emptyMap(), - new StringEntity(postData, randomFrom(ContentType.APPLICATION_JSON, ContentType.create("application/x-ndjson")))); - assertEquals(202, response.getStatusLine().getStatusCode()); - responseBody = responseEntityToMap(response); - assertEquals(5, responseBody.get("processed_record_count")); - assertEquals(10, responseBody.get("processed_field_count")); - assertEquals(442, responseBody.get("input_bytes")); - assertEquals(15, responseBody.get("input_field_count")); - assertEquals(0, responseBody.get("invalid_date_count")); - assertEquals(0, responseBody.get("missing_field_count")); - assertEquals(0, responseBody.get("out_of_order_timestamp_count")); - assertEquals(1000, responseBody.get("bucket_count")); - + "{\"airline\":\"FFT\",\"responsetime\":\"221.8693\",\"sourcetype\":\"farequote\",\"time\":\"1407082000\"}", + randomFrom(ContentType.APPLICATION_JSON, ContentType.create("application/x-ndjson")))); + Response addDataResponse2 = client().performRequest(addDataRequest2); + assertEquals(202, addDataResponse2.getStatusLine().getStatusCode()); + Map responseBody2 = entityAsMap(addDataResponse2); + assertEquals(5, responseBody2.get("processed_record_count")); + assertEquals(10, responseBody2.get("processed_field_count")); + assertEquals(442, responseBody2.get("input_bytes")); + assertEquals(15, responseBody2.get("input_field_count")); + assertEquals(0, responseBody2.get("invalid_date_count")); + assertEquals(0, responseBody2.get("missing_field_count")); + assertEquals(0, responseBody2.get("out_of_order_timestamp_count")); + assertEquals(1000, responseBody2.get("bucket_count")); + // unintuitive: should return the earliest record timestamp of this feed??? 
- assertEquals(null, responseBody.get("earliest_record_timestamp")); - assertEquals(1407082000000L, responseBody.get("latest_record_timestamp")); - - response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close", - Collections.singletonMap("timeout", "20s")); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("closed", true), responseEntityToMap(response)); + assertEquals(null, responseBody2.get("earliest_record_timestamp")); + assertEquals(1407082000000L, responseBody2.get("latest_record_timestamp")); + + assertEquals(Collections.singletonMap("closed", true), + entityAsMap(client().performRequest(closeRequest))); // counts should be summed up - response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); - assertEquals(200, response.getStatusLine().getStatusCode()); - - @SuppressWarnings("unchecked") - Map dataCountsDoc = (Map) - ((Map)((List) responseEntityToMap(response).get("jobs")).get(0)).get("data_counts"); + Response statsResponse = client().performRequest(statsRequest); + + Map dataCountsDoc = (Map) + ((Map)((List) entityAsMap(statsResponse).get("jobs")).get(0)).get("data_counts"); assertEquals(10, dataCountsDoc.get("processed_record_count")); assertEquals(20, dataCountsDoc.get("processed_field_count")); assertEquals(888, dataCountsDoc.get("input_bytes")); @@ -270,8 +257,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { assertEquals(1403481600000L, dataCountsDoc.get("earliest_record_timestamp")); assertEquals(1407082000000L, dataCountsDoc.get("latest_record_timestamp")); - response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); - assertEquals(200, response.getStatusLine().getStatusCode()); + client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); } private Response createDatafeed(String datafeedId, String jobId) throws Exception { @@ -282,45 +268,53 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { xContentBuilder.array("types", "response"); xContentBuilder.field("_source", true); xContentBuilder.endObject(); - return client().performRequest("put", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId, - Collections.emptyMap(), new StringEntity(Strings.toString(xContentBuilder), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId); + request.setJsonEntity(Strings.toString(xContentBuilder)); + return client().performRequest(request); } private Response createFarequoteJob(String jobId) throws Exception { XContentBuilder xContentBuilder = jsonBuilder(); xContentBuilder.startObject(); - xContentBuilder.field("job_id", jobId); - xContentBuilder.field("description", "Analysis of response time by airline"); + { + xContentBuilder.field("job_id", jobId); + xContentBuilder.field("description", "Analysis of response time by airline"); - xContentBuilder.startObject("analysis_config"); - xContentBuilder.field("bucket_span", "3600s"); - xContentBuilder.startArray("detectors"); - xContentBuilder.startObject(); - xContentBuilder.field("function", "metric"); - xContentBuilder.field("field_name", "responsetime"); - xContentBuilder.field("by_field_name", "airline"); - xContentBuilder.endObject(); - xContentBuilder.endArray(); + xContentBuilder.startObject("analysis_config"); + { + xContentBuilder.field("bucket_span", "3600s"); + 
xContentBuilder.startArray("detectors"); + { + xContentBuilder.startObject(); + { + xContentBuilder.field("function", "metric"); + xContentBuilder.field("field_name", "responsetime"); + xContentBuilder.field("by_field_name", "airline"); + } + xContentBuilder.endObject(); + } + xContentBuilder.endArray(); + } + xContentBuilder.endObject(); + + xContentBuilder.startObject("data_description"); + { + xContentBuilder.field("format", "xcontent"); + xContentBuilder.field("time_field", "time"); + xContentBuilder.field("time_format", "epoch"); + } + xContentBuilder.endObject(); + } xContentBuilder.endObject(); - xContentBuilder.startObject("data_description"); - xContentBuilder.field("format", "xcontent"); - xContentBuilder.field("time_field", "time"); - xContentBuilder.field("time_format", "epoch"); - xContentBuilder.endObject(); - xContentBuilder.endObject(); - - return client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + URLEncoder.encode(jobId, "UTF-8"), - Collections.emptyMap(), new StringEntity(Strings.toString(xContentBuilder), ContentType.APPLICATION_JSON)); - } - - private static Map responseEntityToMap(Response response) throws IOException { - return XContentHelper.convertToMap(JSON.xContent(), response.getEntity().getContent(), false); + Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + URLEncoder.encode(jobId, "UTF-8")); + request.setJsonEntity(Strings.toString(xContentBuilder)); + return client().performRequest(request); } private static void assertFlushResponse(Response response, boolean expectedFlushed, long expectedLastFinalizedBucketEnd) throws IOException { - Map asMap = responseEntityToMap(response); + Map asMap = entityAsMap(response); assertThat(asMap.size(), equalTo(2)); assertThat(asMap.get("flushed"), is(true)); assertThat(asMap.get("last_finalized_bucket_end"), equalTo(expectedLastFinalizedBucketEnd)); From edd477a15e07f021f40e496379df09c4e2e792c1 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Wed, 22 Aug 2018 21:53:42 +0300 Subject: [PATCH 35/48] Fix the default pom file name (#33063) Before this change the default was fixed at compile time and did not pick up changes in the build script. --- .../groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 7713d5c64df..fb979a77dac 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -531,12 +531,16 @@ class BuildPlugin implements Plugin { project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask -> // The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it, // just make a copy. - generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-${project.version}.pom" + generatePOMTask.ext.pomFileName = null doLast { project.copy { from generatePOMTask.destination into "${project.buildDir}/distributions" - rename { generatePOMTask.ext.pomFileName } + rename { + generatePOMTask.ext.pomFileName == null ?
+ "${project.archivesBaseName}-${project.version}.pom" : + generatePOMTask.ext.pomFileName + } } } // build poms with assemble (if the assemble task exists) From de95dead2dae32ffcdf7cc22e1c37ea4896d1f5a Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Thu, 23 Aug 2018 00:21:38 +0300 Subject: [PATCH 36/48] SQL: skip uppercasing/lowercasing function tests for AZ locales as well (#32910) * Added the rest of the Locales that have different behavior for uppercasing/lowercasing scenarios to the skip list --- .../string/StringFunctionProcessorTests.java | 51 +++++++++++++------ .../xpack/qa/sql/jdbc/SqlSpecTestCase.java | 11 ++-- 2 files changed, 42 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java index a4d9d4cb57a..d3336ec89a8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java @@ -75,17 +75,27 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas stringCharInputValidation(proc); } - public void testLCaseWithTRLocale() { + public void testLCaseWithAZTRLocale() { + Locale initialLocale = Locale.getDefault(); Locale.setDefault(Locale.forLanguageTag("tr")); - StringProcessor proc = new StringProcessor(StringOperation.LCASE); - // ES-SQL is not locale sensitive (so far). The obvious test for this is the Turkish language, uppercase letter I conversion - // in non-Turkish locale the lowercasing would create i and an additional dot, while in Turkish Locale it would only create "i" - // unicode 0069 = i - assertEquals("\u0069\u0307", proc.process("\u0130")); - // unicode 0049 = I (regular capital letter i) - // in Turkish locale this would be lowercased to a "i" without dot (unicode 0131) - assertEquals("\u0069", proc.process("\u0049")); + try { + StringProcessor proc = new StringProcessor(StringOperation.LCASE); + // ES-SQL is not locale sensitive (so far). The obvious test for this is the Turkish language, uppercase letter I conversion + // in non-Turkish locale the lowercasing would create i and an additional dot, while in Turkish Locale it would only create "i" + // unicode 0069 = i + assertEquals("\u0069\u0307", proc.process("\u0130")); + // unicode 0049 = I (regular capital letter i) + // in Turkish locale this would be lowercased to a "i" without dot (unicode 0131) + assertEquals("\u0069", proc.process("\u0049")); + + Locale.setDefault(Locale.forLanguageTag("az")); + assertEquals("\u0069\u0307", proc.process("\u0130")); + assertEquals("\u0069", proc.process("\u0049")); + } finally { + // restore the original Locale + Locale.setDefault(initialLocale); + } } public void testUCase() { @@ -102,13 +112,22 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas stringCharInputValidation(proc); } - public void testUCaseWithTRLocale() { + public void testUCaseWithAZTRLocale() { + Locale initialLocale = Locale.getDefault(); Locale.setDefault(Locale.forLanguageTag("tr")); - StringProcessor proc = new StringProcessor(StringOperation.UCASE); - - // ES-SQL is not Locale sensitive (so far). 
- // in Turkish locale, small letter "i" is uppercased to "I" with a dot above (unicode 130), otherwise in "i" (unicode 49) - assertEquals("\u0049", proc.process("\u0069")); + + try { + StringProcessor proc = new StringProcessor(StringOperation.UCASE); + // ES-SQL is not Locale sensitive (so far). + // in Turkish locale, small letter "i" is uppercased to "I" with a dot above (unicode 130), otherwise in "i" (unicode 49) + assertEquals("\u0049", proc.process("\u0069")); + + Locale.setDefault(Locale.forLanguageTag("az")); + assertEquals("\u0049", proc.process("\u0069")); + } finally { + // restore the original Locale + Locale.setDefault(initialLocale); + } } public void testLength() { @@ -179,7 +198,7 @@ public class StringFunctionProcessorTests extends AbstractWireSerializingTestCas assertEquals(7, proc.process("foo bar")); assertEquals(0, proc.process("")); assertEquals(1, proc.process('f')); - assertEquals(1, proc.process('€')); + assertEquals(1, proc.process('\u20ac')); // euro symbol stringCharInputValidation(proc); } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java index d61b4b9a946..4d90c9cce50 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java @@ -13,6 +13,7 @@ import org.junit.ClassRule; import java.sql.Connection; import java.sql.ResultSet; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -37,8 +38,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { tests.addAll(readScriptSpec("/agg.sql-spec", parser)); tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser)); tests.addAll(readScriptSpec("/string-functions.sql-spec", parser)); - // AwaitsFix: https://github.com/elastic/elasticsearch/issues/32589 - // tests.addAll(readScriptSpec("/case-functions.sql-spec", parser)); + tests.addAll(readScriptSpec("/case-functions.sql-spec", parser)); return tests; } @@ -60,8 +60,11 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { @Override protected final void doTest() throws Throwable { - boolean goodLocale = !(Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr").build()) - || Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr-TR").build())); + // we skip the tests in case of these locales because ES-SQL is Locale-insensitive for now + // while H2 does take the Locale into consideration + String[] h2IncompatibleLocales = new String[] {"tr", "az", "tr-TR", "tr-CY", "az-Latn", "az-Cyrl", "az-Latn-AZ", "az-Cyrl-AZ"}; + boolean goodLocale = !Arrays.stream(h2IncompatibleLocales) + .anyMatch((l) -> Locale.getDefault().equals(new Locale.Builder().setLanguageTag(l).build())); if (fileName.startsWith("case-functions")) { Assume.assumeTrue(goodLocale); } From 46247ff1f9788ca46958833fbb15e7afcc8f6f4e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 23 Aug 2018 07:43:36 +0200 Subject: [PATCH 37/48] INGEST: Cleanup Redundant Put Method (#33034) --- .../elasticsearch/ingest/IngestService.java | 38 ++++++++----------- 1 file changed, 15 insertions(+), 23 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 4cc4fb69a54..ae3416ef3b0 100644 --- 
a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -207,7 +207,21 @@ public class IngestService implements ClusterStateApplier { */ public void putPipeline(Map ingestInfos, PutPipelineRequest request, ActionListener listener) throws Exception { - put(clusterService, ingestInfos, request, listener); + // validates the pipeline and processor configuration before submitting a cluster update task: + validatePipeline(ingestInfos, request); + clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), + new AckedClusterStateUpdateTask(request, listener) { + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) { + return innerPut(request, currentState); + } + }); } /** @@ -280,28 +294,6 @@ public class IngestService implements ClusterStateApplier { return newState.build(); } - /** - * Stores the specified pipeline definition in the request. - */ - public void put(ClusterService clusterService, Map ingestInfos, PutPipelineRequest request, - ActionListener listener) throws Exception { - // validates the pipeline and processor configuration before submitting a cluster update task: - validatePipeline(ingestInfos, request); - clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), - new AckedClusterStateUpdateTask(request, listener) { - - @Override - protected AcknowledgedResponse newResponse(boolean acknowledged) { - return new AcknowledgedResponse(acknowledged); - } - - @Override - public ClusterState execute(ClusterState currentState) { - return innerPut(request, currentState); - } - }); - } - void validatePipeline(Map ingestInfos, PutPipelineRequest request) throws Exception { if (ingestInfos.isEmpty()) { throw new IllegalStateException("Ingest info is empty"); From ffe895e16e38917f88789443ed975ad1fc35056c Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 23 Aug 2018 09:52:48 +0200 Subject: [PATCH 38/48] Change query field expansion (#33020) This commit changes the query field expansion for query parsers to not rely on a hardcoded list of field types. Instead we rely on the type of exception that is thrown by MappedFieldType#termQuery to include/exclude an expanded field.
Supersedes #31655 Closes #31798 --- .../index/mapper/MappedFieldType.java | 9 ++- .../index/search/QueryParserHelper.java | 72 ++++--------------- .../search/query/QueryStringIT.java | 4 +- .../search/query/all-query-index.json | 8 +-- 4 files changed, 29 insertions(+), 64 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 5f3f4a4de49..4a3fa852e7f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.joda.DateMathParser; @@ -314,7 +315,13 @@ public abstract class MappedFieldType extends FieldType { /** Generates a query that will only match documents that contain the given value. * The default implementation returns a {@link TermQuery} over the value bytes, * boosted by {@link #boost()}. - * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type */ + * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type or if the field is not searchable + * due to the way it is configured (eg. not indexed) + * @throws ElasticsearchParseException if {@code value} cannot be converted to the expected data type + * @throws UnsupportedOperationException if the field is not searchable regardless of options + * @throws QueryShardException if the field is not searchable regardless of options + */ + // TODO: Standardize exception types public abstract Query termQuery(Object value, @Nullable QueryShardContext context); /** Build a constant-scoring query that matches all values. 
The default implementation uses a diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index df96ff87ec2..d3bac583eac 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -19,47 +19,21 @@ package org.elasticsearch.index.search; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.IpFieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MetadataFieldMapper; -import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; /** * Helpers to extract and expand field names and boosts */ public final class QueryParserHelper { - // Mapping types the "all-ish" query can be executed against - // TODO: Fix the API so that we don't need a hardcoded list of types - private static final Set ALLOWED_QUERY_MAPPER_TYPES; - - static { - ALLOWED_QUERY_MAPPER_TYPES = new HashSet<>(); - ALLOWED_QUERY_MAPPER_TYPES.add(DateFieldMapper.CONTENT_TYPE); - ALLOWED_QUERY_MAPPER_TYPES.add(IpFieldMapper.CONTENT_TYPE); - ALLOWED_QUERY_MAPPER_TYPES.add(KeywordFieldMapper.CONTENT_TYPE); - for (NumberFieldMapper.NumberType nt : NumberFieldMapper.NumberType.values()) { - ALLOWED_QUERY_MAPPER_TYPES.add(nt.typeName()); - } - ALLOWED_QUERY_MAPPER_TYPES.add("scaled_float"); - ALLOWED_QUERY_MAPPER_TYPES.add(TextFieldMapper.CONTENT_TYPE); - } - private QueryParserHelper() {} /** @@ -85,22 +59,6 @@ public final class QueryParserHelper { return fieldsAndWeights; } - /** - * Get a {@link FieldMapper} associated with a field name or null. - * @param mapperService The mapper service where to find the mapping. - * @param field The field name to search. - */ - public static Mapper getFieldMapper(MapperService mapperService, String field) { - DocumentMapper mapper = mapperService.documentMapper(); - if (mapper != null) { - Mapper fieldMapper = mapper.mappers().getMapper(field); - if (fieldMapper != null) { - return fieldMapper; - } - } - return null; - } - public static Map resolveMappingFields(QueryShardContext context, Map fieldsAndWeights) { return resolveMappingFields(context, fieldsAndWeights, null); @@ -138,8 +96,7 @@ public final class QueryParserHelper { * @param fieldOrPattern The field name or the pattern to resolve * @param weight The weight for the field * @param acceptAllTypes Whether all field type should be added when a pattern is expanded. - * If false, only {@link #ALLOWED_QUERY_MAPPER_TYPES} are accepted and other field types - * are discarded from the query. + * If false, only searchable field types are added. * @param acceptMetadataField Whether metadata fields should be added when a pattern is expanded. 
*/ public static Map resolveMappingField(QueryShardContext context, String fieldOrPattern, float weight, @@ -154,8 +111,7 @@ public final class QueryParserHelper { * @param fieldOrPattern The field name or the pattern to resolve * @param weight The weight for the field * @param acceptAllTypes Whether all field type should be added when a pattern is expanded. - * If false, only {@link #ALLOWED_QUERY_MAPPER_TYPES} are accepted and other field types - * are discarded from the query. + * If false, only searchable field types are added. * @param acceptMetadataField Whether metadata fields should be added when a pattern is expanded. * @param fieldSuffix The suffix name to add to the expanded field names if a mapping exists for that name. * The original name of the field is kept if adding the suffix to the field name does not point to a valid field @@ -177,18 +133,20 @@ public final class QueryParserHelper { continue; } - // Ignore fields that are not in the allowed mapper types. Some - // types do not support term queries, and thus we cannot generate - // a special query for them. - String mappingType = fieldType.typeName(); - if (acceptAllTypes == false && ALLOWED_QUERY_MAPPER_TYPES.contains(mappingType) == false) { + if (acceptMetadataField == false && fieldType.name().startsWith("_")) { + // Ignore metadata fields continue; } - // Ignore metadata fields. - Mapper mapper = getFieldMapper(context.getMapperService(), fieldName); - if (acceptMetadataField == false && mapper instanceof MetadataFieldMapper) { - continue; + if (acceptAllTypes == false) { + try { + fieldType.termQuery("", context); + } catch (QueryShardException |UnsupportedOperationException e) { + // field type is never searchable with term queries (eg. geo point): ignore + continue; + } catch (IllegalArgumentException |ElasticsearchParseException e) { + // other exceptions are parsing errors or not indexed fields: keep + } } fields.put(fieldName, weight); } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index 5caab8c9dfe..a90e98a38ee 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -430,8 +430,8 @@ public class QueryStringIT extends ESIntegTestCase { indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); indexRandom(true, false, indexRequests); - // The wildcard field matches aliases for both a text and boolean field. - // By default, the boolean field should be ignored when building the query. + // The wildcard field matches aliases for both a text and geo_point field. + // By default, the geo_point field should be ignored when building the query. 
SearchResponse response = client().prepareSearch("test") .setQuery(queryStringQuery("text").field("f*_alias")) .execute().actionGet(); diff --git a/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json b/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json index abdc1192822..9ab8995813e 100644 --- a/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json +++ b/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json @@ -46,10 +46,6 @@ "format": "yyyy/MM/dd||epoch_millis" }, "f_bool": {"type": "boolean"}, - "f_bool_alias": { - "type": "alias", - "path": "f_bool" - }, "f_byte": {"type": "byte"}, "f_short": {"type": "short"}, "f_int": {"type": "integer"}, @@ -60,6 +56,10 @@ "f_binary": {"type": "binary"}, "f_suggest": {"type": "completion"}, "f_geop": {"type": "geo_point"}, + "f_geop_alias": { + "type": "alias", + "path": "f_geop" + }, "f_geos": {"type": "geo_shape"} } } From d7219c05a27ebdee7f0a56ed52d5f625bfb11f5a Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 23 Aug 2018 10:04:00 +0200 Subject: [PATCH 39/48] Search: Support of wildcard on docvalue_fields (#32980) * Search: Support of wildcard on docvalue_fields For consistency with stored_fields, docvalue_fields should support the use of wildcards. Documentation of doc values fields is updated accordingly. See also: #26390 Closes #26299 --- .../search/request/docvalue-fields.asciidoc | 21 +++++ .../elasticsearch/search/SearchService.java | 23 +++-- .../search/fields/SearchFieldsIT.java | 90 +++++++++++++++++++ 3 files changed, 127 insertions(+), 7 deletions(-) diff --git a/docs/reference/search/request/docvalue-fields.asciidoc b/docs/reference/search/request/docvalue-fields.asciidoc index fa5baf1db22..bcfcb20d1d5 100644 --- a/docs/reference/search/request/docvalue-fields.asciidoc +++ b/docs/reference/search/request/docvalue-fields.asciidoc @@ -30,6 +30,27 @@ GET /_search Doc value fields can work on fields that are not stored. +`*` can be used as a wild card, for example: + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query" : { + "match_all": {} + }, + "docvalue_fields" : [ + { + "field": "*field", <1> + "format": "use_field_mapping" <2> + } + ] +} +-------------------------------------------------- +// CONSOLE +<1> Match all fields ending with `field` +<2> Format to be applied to all matching fields. + Note that if the fields parameter specifies fields without docvalues it will try to load the value from the fielddata cache causing the terms for that field to be loaded to memory (cached), which will result in more memory consumption. 
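To make the wildcard expansion described above concrete, here is a small, self-contained Java sketch of the matching step: each docvalue_fields entry is expanded against the concrete mapped field names, and every match inherits the requested format. The field names and the single-'*' simpleMatch helper are illustrative stand-ins for MapperService#simpleMatchToFullName, not the actual implementation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch of the expansion step: every docvalue_fields entry is
// matched against the concrete mapped field names, and each match inherits
// the requested format. This is a stand-in, not the real ES code.
public class DocValueFieldExpansionSketch {

    // Supports a single '*' wildcard, which covers patterns such as "*field".
    static boolean simpleMatch(String pattern, String fieldName) {
        int star = pattern.indexOf('*');
        if (star == -1) {
            return pattern.equals(fieldName);
        }
        String prefix = pattern.substring(0, star);
        String suffix = pattern.substring(star + 1);
        return fieldName.length() >= prefix.length() + suffix.length()
                && fieldName.startsWith(prefix)
                && fieldName.endsWith(suffix);
    }

    public static void main(String[] args) {
        // Hypothetical mapped fields, echoing the names used in the tests below.
        List<String> mappedFields = Arrays.asList("text_field", "keyword_field", "ip_field", "f_geop");
        String pattern = "*field"; // as in the documentation example above
        String format = "use_field_mapping"; // applied to every expanded field

        List<String> expanded = new ArrayList<>();
        for (String fieldName : mappedFields) {
            if (simpleMatch(pattern, fieldName)) {
                expanded.add(fieldName + " [" + format + "]");
            }
        }
        // Prints: [text_field [use_field_mapping], keyword_field [use_field_mapping], ip_field [use_field_mapping]]
        System.out.println(expanded);
    }
}

Note that, as the SearchService change below shows, expansion happens before the index.max_docvalue_fields_search limit is enforced, so a broad pattern can exceed the limit even though only a single docvalue_fields entry was requested.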
diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 4bf5e03b8a7..a7db2c55fe1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -98,6 +98,8 @@ import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportRequest; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -789,14 +791,21 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv context.fetchSourceContext(source.fetchSource()); } if (source.docValueFields() != null) { - int maxAllowedDocvalueFields = context.mapperService().getIndexSettings().getMaxDocvalueFields(); - if (source.docValueFields().size() > maxAllowedDocvalueFields) { - throw new IllegalArgumentException( - "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [" + maxAllowedDocvalueFields - + "] but was [" + source.docValueFields().size() + "]. This limit can be set by changing the [" - + IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.getKey() + "] index level setting."); + List docValueFields = new ArrayList<>(); + for (DocValueFieldsContext.FieldAndFormat format : source.docValueFields()) { + Collection fieldNames = context.mapperService().simpleMatchToFullName(format.field); + for (String fieldName: fieldNames) { + docValueFields.add(new DocValueFieldsContext.FieldAndFormat(fieldName, format.format)); + } } - context.docValueFieldsContext(new DocValueFieldsContext(source.docValueFields())); + int maxAllowedDocvalueFields = context.mapperService().getIndexSettings().getMaxDocvalueFields(); + if (docValueFields.size() > maxAllowedDocvalueFields) { + throw new IllegalArgumentException( + "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [" + maxAllowedDocvalueFields + + "] but was [" + docValueFields.size() + "]. 
This limit can be set by changing the [" + + IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.getKey() + "] index level setting."); + } + context.docValueFieldsContext(new DocValueFieldsContext(docValueFields)); } if (source.highlighter() != null) { HighlightBuilder highlightBuilder = source.highlighter(); diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index aea0243a399..45b6340ba6f 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -810,6 +810,32 @@ public class SearchFieldsIT extends ESIntegTestCase { equalTo(new BytesRef(new byte[] {42, 100}))); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); + builder = client().prepareSearch().setQuery(matchAllQuery()) + .addDocValueField("*field"); + searchResponse = builder.execute().actionGet(); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet()); + assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", + "float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field", + "binary_field", "ip_field"))); + + assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue().toString(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue().toString(), equalTo("2")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo((Object) 3L)); + assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L)); + assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0)); + assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d)); + dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(); + assertThat(dateField.toInstant().toEpochMilli(), equalTo(date.toInstant().toEpochMilli())); + assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true)); + assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), + equalTo(new BytesRef(new byte[] {42, 100}))); + assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); + builder = client().prepareSearch().setQuery(matchAllQuery()) .addDocValueField("text_field", "use_field_mapping") .addDocValueField("keyword_field", "use_field_mapping") @@ -977,6 +1003,70 @@ public class SearchFieldsIT extends ESIntegTestCase { assertThat(fetchedDate, equalTo(date)); } + public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + 
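+                        // text fields have no doc values; enabling fielddata lets
+                        // docvalue_fields fall back to the fielddata cache for this field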
.field("fielddata", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("format", "yyyy-MM-dd") + .endObject() + .startObject("text_field_alias") + .field("type", "alias") + .field("path", "text_field") + .endObject() + .startObject("date_field_alias") + .field("type", "alias") + .field("path", "date_field") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + ensureGreen("test"); + + ZonedDateTime date = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ROOT); + + index("test", "type", "1", "text_field", "foo", "date_field", formatter.format(date)); + refresh("test"); + + SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) + .addDocValueField("*alias", "use_field_mapping") + .addDocValueField("date_field"); + SearchResponse searchResponse = builder.execute().actionGet(); + + assertNoFailures(searchResponse); + assertHitCount(searchResponse, 1); + SearchHit hit = searchResponse.getHits().getAt(0); + + Map fields = hit.getFields(); + assertThat(fields.keySet(), equalTo(newHashSet("text_field_alias", "date_field_alias", "date_field"))); + + DocumentField textFieldAlias = fields.get("text_field_alias"); + assertThat(textFieldAlias.getName(), equalTo("text_field_alias")); + assertThat(textFieldAlias.getValue(), equalTo("foo")); + + DocumentField dateFieldAlias = fields.get("date_field_alias"); + assertThat(dateFieldAlias.getName(), equalTo("date_field_alias")); + assertThat(dateFieldAlias.getValue(), + equalTo("1990-12-29")); + + DocumentField dateField = fields.get("date_field"); + assertThat(dateField.getName(), equalTo("date_field")); + + ZonedDateTime fetchedDate = dateField.getValue(); + assertThat(fetchedDate, equalTo(date)); + } + public void testStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() From f84ed14294db30ec2fb068438f5a9082ef0302f1 Mon Sep 17 00:00:00 2001 From: lipsill <39668292+lipsill@users.noreply.github.com> Date: Thu, 23 Aug 2018 10:07:59 +0200 Subject: [PATCH 40/48] Watcher: Improve error messages for CronEvalTool (#32800) CronEvalTool prints an error only for cron expressions that result in no upcoming time events. If a cron expression results in less than the specified count (default 10) time events, now all the coming times are printed without displaying error message. 
Closes #32735 --- .../trigger/schedule/tool/CronEvalTool.java | 10 +++++-- .../schedule/tool/CronEvalToolTests.java | 30 ++++++++++++++++++- 2 files changed, 36 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java index 33b1217895d..d22d402aa15 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java @@ -61,14 +61,18 @@ public class CronEvalTool extends LoggingAwareCommand { Cron cron = new Cron(expression); long time = date.getMillis(); + for (int i = 0; i < count; i++) { long prevTime = time; time = cron.getNextValidTimeAfter(time); if (time < 0) { - throw new UserException(ExitCodes.OK, (i + 1) + ".\t Could not compute future times since [" - + formatter.print(prevTime) + "] " + "(perhaps the cron expression only points to times in the past?)"); + if (i == 0) { + throw new UserException(ExitCodes.OK, "Could not compute future times since [" + + formatter.print(prevTime) + "] " + "(perhaps the cron expression only points to times in the past?)"); + } + break; } - terminal.println((i+1) + ".\t" + formatter.print(time)); + terminal.println((i + 1) + ".\t" + formatter.print(time)); } } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalToolTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalToolTests.java index 22388424948..f1e864d547c 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalToolTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalToolTests.java @@ -8,6 +8,13 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.tool; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.CommandTestCase; +import java.util.Calendar; +import java.util.Locale; +import java.util.TimeZone; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + public class CronEvalToolTests extends CommandTestCase { @Override protected Command newCommand() { @@ -18,6 +25,27 @@ public class CronEvalToolTests extends CommandTestCase { String countOption = randomBoolean() ? "-c" : "--count"; int count = randomIntBetween(1, 100); String output = execute(countOption, Integer.toString(count), "0 0 0 1-6 * ?"); - assertTrue(output, output.contains("Here are the next " + count + " times this cron expression will trigger")); + assertThat(output, containsString("Here are the next " + count + " times this cron expression will trigger")); + } + + public void testGetNextValidTimes() throws Exception { + final int year = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT).get(Calendar.YEAR) + 1; + { + String output = execute("0 3 23 8 9 ? " + year); + assertThat(output, containsString("Here are the next 10 times this cron expression will trigger:")); + assertThat(output, not(containsString("ERROR"))); + assertThat(output, not(containsString("2.\t"))); + } + { + String output = execute("0 3 23 */4 9 ? 
" + year); + assertThat(output, containsString("Here are the next 10 times this cron expression will trigger:")); + assertThat(output, not(containsString("ERROR"))); + } + { + Exception expectThrows = expectThrows(Exception.class, () -> execute("0 3 23 */4 9 ? 2017")); + String message = expectThrows.getMessage(); + assertThat(message, containsString("Could not compute future times since")); + assertThat(message, containsString("(perhaps the cron expression only points to times in the past?)")); + } } } From 50441f97ae745814db96c262e99d0f465aca5b2c Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 23 Aug 2018 09:35:06 +0100 Subject: [PATCH 41/48] HLRC: Add ML Get Buckets API (#33056) Relates #29827 --- .../client/MLRequestConverters.java | 17 +- .../client/MachineLearningClient.java | 58 +++- .../client/MLRequestConvertersTests.java | 21 ++ .../client/MachineLearningGetResultsIT.java | 217 ++++++++++++++ .../MlClientDocumentationIT.java | 111 +++++++- .../high-level/ml/get-buckets.asciidoc | 125 ++++++++ .../high-level/supported-apis.asciidoc | 2 + .../protocol/xpack/ml/GetBucketsRequest.java | 268 ++++++++++++++++++ .../protocol/xpack/ml/GetBucketsResponse.java | 78 +++++ .../protocol/xpack/ml/PutJobResponse.java | 3 - .../xpack/ml/job/util/PageParams.java | 99 +++++++ .../xpack/ml/GetBucketsRequestTests.java | 78 +++++ .../xpack/ml/GetBucketsResponseTests.java | 53 ++++ .../xpack/ml/job/results/BucketTests.java | 2 +- .../xpack/ml/util/PageParamsTests.java | 43 +++ 15 files changed, 1159 insertions(+), 16 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java create mode 100644 docs/java-rest/high-level/ml/get-buckets.asciidoc create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 1c2b6f79eaf..024ba9e2fca 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -69,7 +70,7 @@ final class MLRequestConverters { return request; } - static Request openJob(OpenJobRequest openJobRequest) throws IOException { + static Request openJob(OpenJobRequest openJobRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") .addPathPartAsIs("ml") @@ -109,4 +110,18 @@ 
final class MLRequestConverters { return request; } + + static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(getBucketsRequest.getJobId()) + .addPathPartAsIs("results") + .addPathPartAsIs("buckets") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 90acabfbdd8..c4dcc1eaffc 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -23,6 +23,8 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; @@ -54,7 +56,7 @@ public final class MachineLearningClient { * For additional info * see ML PUT job documentation * - * @param request the PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings + * @param request The PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return PutJobResponse with enclosed {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} object * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -73,7 +75,7 @@ public final class MachineLearningClient { * For additional info * see ML PUT job documentation * - * @param request the request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings + * @param request The request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -93,7 +95,7 @@ public final class MachineLearningClient { * For additional info * see *

- * @param request {@link GetJobRequest} request containing a list of jobId(s) and additional options + * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return {@link GetJobResponse} response object containing * the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} objects and the number of jobs found @@ -114,7 +116,7 @@ public final class MachineLearningClient { * For additional info * see *

- * @param request {@link GetJobRequest} request containing a list of jobId(s) and additional options + * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified with {@link GetJobResponse} upon request completion */ @@ -133,7 +135,7 @@ public final class MachineLearningClient { * For additional info * see ML Delete Job documentation *

- * @param request the request to delete the job + * @param request The request to delete the job * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return action acknowledgement * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -152,7 +154,7 @@ public final class MachineLearningClient { * For additional info * see ML Delete Job documentation *

- * @param request the request to delete the job + * @param request The request to delete the job * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -176,7 +178,7 @@ public final class MachineLearningClient { * For additional info * see *

- * @param request request containing job_id and additional optional options + * @param request Request containing job_id and additional optional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return response containing if the job was successfully opened or not. * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -199,7 +201,7 @@ public final class MachineLearningClient { * For additional info * see *

- * @param request request containing job_id and additional optional options + * @param request Request containing job_id and additional optional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -217,7 +219,7 @@ public final class MachineLearningClient { * * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. * - * @param request request containing job_ids and additional options. See {@link CloseJobRequest} + * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return response containing if the job was successfully closed or not. * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -235,7 +237,7 @@ public final class MachineLearningClient { * * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. * - * @param request request containing job_ids and additional options. See {@link CloseJobRequest} + * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -247,4 +249,40 @@ public final class MachineLearningClient { listener, Collections.emptySet()); } + + /** + * Gets the buckets for a Machine Learning Job. + *

+ * For additional info + * see ML GET buckets documentation + * + * @param request The request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + */ + public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::getBuckets, + options, + GetBucketsResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved. + *

+ * For additional info + * see ML GET buckets documentation + * + * @param request The request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::getBuckets, + options, + GetBucketsResponse::fromXContent, + listener, + Collections.emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 0d95c359658..9065cda9cd6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -22,17 +22,20 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.protocol.xpack.ml.job.config.Detector; import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayOutputStream; @@ -49,6 +52,7 @@ public class MLRequestConvertersTests extends ESTestCase { Request request = MLRequestConverters.putJob(putJobRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo")); try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { Job parsedJob = Job.PARSER.apply(parser, null).build(); @@ -118,6 +122,23 @@ public class MLRequestConvertersTests extends ESTestCase { assertEquals(Boolean.toString(true), request.getParameters().get("force")); } + public void testGetBuckets() throws IOException { + String jobId = randomAlphaOfLength(10); + GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId); + getBucketsRequest.setPageParams(new PageParams(100, 300)); + getBucketsRequest.setAnomalyScore(75.0); + getBucketsRequest.setSort("anomaly_score"); + getBucketsRequest.setDescending(true); + + Request request = MLRequestConverters.getBuckets(getBucketsRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null); + assertThat(parsedRequest, 
equalTo(getBucketsRequest)); + } + } + private static Job createValidJob(String jobId) { AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList( Detector.builder().setFunction("count").build())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java new file mode 100644 index 00000000000..a4f83c347ad --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -0,0 +1,217 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase { + + private static final String RESULTS_INDEX = ".ml-anomalies-shared"; + private static final String DOC = "doc"; + + private static final String JOB_ID = "get-results-it-job"; + + // 2018-08-01T00:00:00Z + private static final long START_TIME_EPOCH_MS = 1533081600000L; + + private BucketStats bucketStats = new BucketStats(); + + @Before + public void createJobAndIndexResults() throws IOException { + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + Job job = MachineLearningIT.buildJob(JOB_ID); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + long time = START_TIME_EPOCH_MS; + long endTime = time + 3600000L * 24 * 10; // 10 days of hourly buckets + while (time < endTime) { + addBucketIndexRequest(time, false, bulkRequest); + addRecordIndexRequests(time, false, bulkRequest); + time += 3600000L; + } + + // Also index an interim bucket + addBucketIndexRequest(time, true, bulkRequest); + addRecordIndexRequests(time, true, bulkRequest); + + highLevelClient().bulk(bulkRequest, 
RequestOptions.DEFAULT); + } + + private void addBucketIndexRequest(long timestamp, boolean isInterim, BulkRequest bulkRequest) { + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + double bucketScore = randomDoubleBetween(0.0, 100.0, true); + bucketStats.report(bucketScore); + indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"bucket\", \"timestamp\": " + timestamp + "," + + "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"anomaly_score\": " + bucketScore + + ", \"bucket_influencers\":[{\"job_id\": \"" + JOB_ID + "\", \"result_type\":\"bucket_influencer\", " + + "\"influencer_field_name\": \"bucket_time\", \"timestamp\": " + timestamp + ", \"bucket_span\": 3600, " + + "\"is_interim\": " + isInterim + "}]}", XContentType.JSON); + bulkRequest.add(indexRequest); + } + + private void addRecordIndexRequests(long timestamp, boolean isInterim, BulkRequest bulkRequest) { + if (randomBoolean()) { + return; + } + int recordCount = randomIntBetween(1, 3); + for (int i = 0; i < recordCount; ++i) { + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + double recordScore = randomDoubleBetween(0.0, 100.0, true); + double p = randomDoubleBetween(0.0, 0.05, false); + indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"record\", \"timestamp\": " + timestamp + "," + + "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"record_score\": " + recordScore + ", \"probability\": " + + p + "}", XContentType.JSON); + bulkRequest.add(indexRequest); + } + } + + @After + public void deleteJob() throws IOException { + new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + } + + public void testGetBuckets() throws IOException { + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(241L)); + assertThat(response.buckets().size(), equalTo(100)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setTimestamp("1533081600000"); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(1L)); + assertThat(response.buckets().size(), equalTo(1)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setAnomalyScore(75.0); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(bucketStats.criticalCount)); + assertThat(response.buckets().size(), equalTo((int) Math.min(100, bucketStats.criticalCount))); + assertThat(response.buckets().stream().anyMatch(b -> b.getAnomalyScore() < 75.0), is(false)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setExcludeInterim(true); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(240L)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setStart("1533081600000"); + request.setEnd("1533092400000"); + 
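+            // start is inclusive and end is exclusive: with hourly buckets beginning
+            // at 2018-08-01T00:00:00Z this window covers the 00:00, 01:00 and 02:00 buckets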
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(3L)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS)); + assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3600000L)); + assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 2 * + 3600000L)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setPageParams(new PageParams(3, 3)); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.buckets().size(), equalTo(3)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3 * 3600000L)); + assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 4 * 3600000L)); + assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 5 * 3600000L)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setSort("anomaly_score"); + request.setDescending(true); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + double previousScore = 100.0; + for (Bucket bucket : response.buckets()) { + assertThat(bucket.getAnomalyScore(), lessThanOrEqualTo(previousScore)); + previousScore = bucket.getAnomalyScore(); + } + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + // Make sure we get all buckets + request.setPageParams(new PageParams(0, 10000)); + request.setExpand(true); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.buckets().stream().anyMatch(b -> b.getRecords().size() > 0), is(true)); + } + } + + private static class BucketStats { + // score < 50.0 + private long minorCount; + + // score < 75.0 + private long majorCount; + + // score > 75.0 + private long criticalCount; + + private void report(double anomalyScore) { + if (anomalyScore < 50.0) { + minorCount++; + } else if (anomalyScore < 75.0) { + majorCount++; + } else { + criticalCount++; + } + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 73531bae553..683f91dae2e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -20,16 +20,21 @@ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningIT; import org.elasticsearch.client.MlRestTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; @@ -40,6 +45,8 @@ import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription; import org.elasticsearch.protocol.xpack.ml.job.config.Detector; import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; import org.junit.After; import java.io.IOException; @@ -293,7 +300,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } - + public void testCloseJob() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -334,6 +341,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { }; //end::x-pack-ml-close-job-listener CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job"); + // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); @@ -345,4 +353,105 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testGetBuckets() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + String jobId = "test-get-buckets"; + Job job = MachineLearningIT.buildJob(jobId); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + // Let us index a bucket + IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc"); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + indexRequest.source("{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," + + "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}", XContentType.JSON); + client.index(indexRequest, RequestOptions.DEFAULT); + + { + // tag::x-pack-ml-get-buckets-request + GetBucketsRequest request = new GetBucketsRequest(jobId); // <1> + // end::x-pack-ml-get-buckets-request + + // tag::x-pack-ml-get-buckets-timestamp + request.setTimestamp("2018-08-17T00:00:00Z"); // <1> + // end::x-pack-ml-get-buckets-timestamp + + // Set timestamp to null as it is incompatible with other args + request.setTimestamp(null); + + // tag::x-pack-ml-get-buckets-anomaly-score + request.setAnomalyScore(75.0); // <1> + // end::x-pack-ml-get-buckets-anomaly-score + + // tag::x-pack-ml-get-buckets-desc + request.setDescending(true); // <1> + // end::x-pack-ml-get-buckets-desc + + // tag::x-pack-ml-get-buckets-end + request.setEnd("2018-08-21T00:00:00Z"); // <1> + // end::x-pack-ml-get-buckets-end + + // tag::x-pack-ml-get-buckets-exclude-interim + request.setExcludeInterim(true); // <1> + // end::x-pack-ml-get-buckets-exclude-interim + + // tag::x-pack-ml-get-buckets-expand + request.setExpand(true); // <1> + // 
end::x-pack-ml-get-buckets-expand + + // tag::x-pack-ml-get-buckets-page + request.setPageParams(new PageParams(100, 200)); // <1> + // end::x-pack-ml-get-buckets-page + + // Set page params back to null so the response contains the bucket we indexed + request.setPageParams(null); + + // tag::x-pack-ml-get-buckets-sort + request.setSort("anomaly_score"); // <1> + // end::x-pack-ml-get-buckets-sort + + // tag::x-pack-ml-get-buckets-start + request.setStart("2018-08-01T00:00:00Z"); // <1> + // end::x-pack-ml-get-buckets-start + + // tag::x-pack-ml-get-buckets-execute + GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT); + // end::x-pack-ml-get-buckets-execute + + // tag::x-pack-ml-get-buckets-response + long count = response.count(); // <1> + List buckets = response.buckets(); // <2> + // end::x-pack-ml-get-buckets-response + assertEquals(1, buckets.size()); + } + { + GetBucketsRequest request = new GetBucketsRequest(jobId); + + // tag::x-pack-ml-get-buckets-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetBucketsResponse getBucketsResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-ml-get-buckets-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-get-buckets-execute-async + client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-ml-get-buckets-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } } diff --git a/docs/java-rest/high-level/ml/get-buckets.asciidoc b/docs/java-rest/high-level/ml/get-buckets.asciidoc new file mode 100644 index 00000000000..81a21d3d18a --- /dev/null +++ b/docs/java-rest/high-level/ml/get-buckets.asciidoc @@ -0,0 +1,125 @@ +[[java-rest-high-x-pack-ml-get-buckets]] +=== Get Buckets API + +The Get Buckets API retrieves one or more bucket results. +It accepts a `GetBucketsRequest` object and responds +with a `GetBucketsResponse` object. + +[[java-rest-high-x-pack-ml-get-buckets-request]] +==== Get Buckets Request + +A `GetBucketsRequest` object gets created with an existing non-null `jobId`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-request] +-------------------------------------------------- +<1> Constructing a new request referencing an existing `jobId` + +==== Optional Arguments +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-timestamp] +-------------------------------------------------- +<1> The timestamp of the bucket to get. Otherwise it will return all buckets. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-anomaly-score] +-------------------------------------------------- +<1> Buckets with anomaly scores greater or equal than this value will be returned. 
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-desc]
+--------------------------------------------------
+<1> If `true`, the buckets are sorted in descending order. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-end]
+--------------------------------------------------
+<1> Buckets with timestamps earlier than this time will be returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-exclude-interim]
+--------------------------------------------------
+<1> If `true`, interim results will be excluded. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-expand]
+--------------------------------------------------
+<1> If `true`, buckets will include their anomaly records. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-page]
+--------------------------------------------------
+<1> The page parameters `from` and `size`. `from` specifies the number of buckets to skip.
+`size` specifies the maximum number of buckets to get. Defaults to `0` and `100` respectively.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-sort]
+--------------------------------------------------
+<1> The field to sort buckets on. Defaults to `timestamp`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-start]
+--------------------------------------------------
+<1> Buckets with timestamps on or after this time will be returned.
+
+[[java-rest-high-x-pack-ml-get-buckets-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute]
+--------------------------------------------------
+
+
+[[java-rest-high-x-pack-ml-get-buckets-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute-async]
+--------------------------------------------------
+<1> The `GetBucketsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately.
Once it is +completed the `ActionListener` is called back with the `onResponse` method +if the execution is successful or the `onFailure` method if the execution +failed. + +A typical listener for `GetBucketsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs + +[[java-rest-high-snapshot-ml-get-buckets-response]] +==== Get Buckets Response + +The returned `GetBucketsResponse` contains the requested buckets: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-response] +-------------------------------------------------- +<1> The count of buckets that were matched +<2> The buckets retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index c7b46b39962..e04e391f3e0 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -209,12 +209,14 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> include::ml/put-job.asciidoc[] include::ml/get-job.asciidoc[] include::ml/delete-job.asciidoc[] include::ml/open-job.asciidoc[] include::ml/close-job.asciidoc[] +include::ml/get-buckets.asciidoc[] == Migration APIs diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java new file mode 100644 index 00000000000..4957f9b6ff6 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java @@ -0,0 +1,268 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Result; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; + +import java.io.IOException; +import java.util.Objects; + +/** + * A request to retrieve buckets of a given job + */ +public class GetBucketsRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField EXPAND = new ParseField("expand"); + public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); + public static final ParseField START = new ParseField("start"); + public static final ParseField END = new ParseField("end"); + public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); + public static final ParseField TIMESTAMP = new ParseField("timestamp"); + public static final ParseField SORT = new ParseField("sort"); + public static final ParseField DESCENDING = new ParseField("desc"); + + public static final ObjectParser PARSER = new ObjectParser<>("get_buckets_request", GetBucketsRequest::new); + + static { + PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); + PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP); + PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND); + PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); + PARSER.declareStringOrNull(GetBucketsRequest::setStart, START); + PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END); + PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); + PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE); + PARSER.declareString(GetBucketsRequest::setSort, SORT); + PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING); + } + + private String jobId; + private String timestamp; + private Boolean expand; + private Boolean excludeInterim; + private String start; + private String end; + private PageParams pageParams; + private Double anomalyScore; + private String sort; + private Boolean descending; + + private GetBucketsRequest() {} + + /** + * Constructs a request to retrieve buckets of a given job + * @param jobId id of the job to retrieve buckets of + */ + public GetBucketsRequest(String jobId) { + this.jobId = Objects.requireNonNull(jobId); + } + + public String getJobId() { + return jobId; + } + + /** + * Sets the timestamp of a specific bucket to be retrieved. + * @param timestamp the timestamp of a specific bucket to be retrieved + */ + public void setTimestamp(String timestamp) { + this.timestamp = timestamp; + } + + public String getTimestamp() { + return timestamp; + } + + public boolean isExpand() { + return expand; + } + + /** + * Sets the value of "expand". + * When {@code true}, buckets will be expanded to include their records. + * @param expand value of "expand" to be set + */ + public void setExpand(boolean expand) { + this.expand = expand; + } + + public boolean isExcludeInterim() { + return excludeInterim; + } + + /** + * Sets the value of "exclude_interim". + * When {@code true}, interim buckets will be filtered out. 
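+     * Defaults to {@code false}, meaning interim buckets are included.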
+ * @param excludeInterim value of "exclude_interim" to be set + */ + public void setExcludeInterim(boolean excludeInterim) { + this.excludeInterim = excludeInterim; + } + + public String getStart() { + return start; + } + + /** + * Sets the value of "start" which is a timestamp. + * Only buckets whose timestamp is on or after the "start" value will be returned. + * @param start value of "start" to be set + */ + public void setStart(String start) { + this.start = start; + } + + public String getEnd() { + return end; + } + + /** + * Sets the value of "end" which is a timestamp. + * Only buckets whose timestamp is before the "end" value will be returned. + * @param end value of "end" to be set + */ + public void setEnd(String end) { + this.end = end; + } + + public PageParams getPageParams() { + return pageParams; + } + + /** + * Sets the paging parameters + * @param pageParams the paging parameters + */ + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + public Double getAnomalyScore() { + return anomalyScore; + } + + /** + * Sets the value of "anomaly_score". + * Only buckets with "anomaly_score" equal or greater will be returned. + * @param anomalyScore value of "anomaly_score". + */ + public void setAnomalyScore(double anomalyScore) { + this.anomalyScore = anomalyScore; + } + + public String getSort() { + return sort; + } + + /** + * Sets the value of "sort". + * Specifies the bucket field to sort on. + * @param sort value of "sort". + */ + public void setSort(String sort) { + this.sort = sort; + } + + public boolean isDescending() { + return descending; + } + + /** + * Sets the value of "desc". + * Specifies the sorting order. + * @param descending value of "desc" + */ + public void setDescending(boolean descending) { + this.descending = descending; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (timestamp != null) { + builder.field(Result.TIMESTAMP.getPreferredName(), timestamp); + } + if (expand != null) { + builder.field(EXPAND.getPreferredName(), expand); + } + if (excludeInterim != null) { + builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); + } + if (start != null) { + builder.field(START.getPreferredName(), start); + } + if (end != null) { + builder.field(END.getPreferredName(), end); + } + if (pageParams != null) { + builder.field(PageParams.PAGE.getPreferredName(), pageParams); + } + if (anomalyScore != null) { + builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); + } + if (sort != null) { + builder.field(SORT.getPreferredName(), sort); + } + if (descending != null) { + builder.field(DESCENDING.getPreferredName(), descending); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetBucketsRequest other = (GetBucketsRequest) obj; + return Objects.equals(jobId, other.jobId) && + Objects.equals(timestamp, other.timestamp) && + Objects.equals(expand, other.expand) && + Objects.equals(excludeInterim, other.excludeInterim) && + Objects.equals(anomalyScore, other.anomalyScore) 
&& + Objects.equals(pageParams, other.pageParams) && + Objects.equals(start, other.start) && + Objects.equals(end, other.end) && + Objects.equals(sort, other.sort) && + Objects.equals(descending, other.descending); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java new file mode 100644 index 00000000000..4350661f68b --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * A response containing the requested buckets + */ +public class GetBucketsResponse extends AbstractResultResponse { + + public static final ParseField BUCKETS = new ParseField("buckets"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("get_buckets_response", + true, a -> new GetBucketsResponse((List) a[0], (long) a[1])); + + static { + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); + } + + public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + GetBucketsResponse(List buckets, long count) { + super(BUCKETS, buckets, count); + } + + /** + * The retrieved buckets + * @return the retrieved buckets + */ + public List buckets() { + return results; + } + + @Override + public int hashCode() { + return Objects.hash(count, results); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetBucketsResponse other = (GetBucketsResponse) obj; + return count == other.count && Objects.equals(results, other.results); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java index b37bd35d6b1..1bd9e87f654 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java @@ -39,9 +39,6 @@ public class PutJobResponse implements ToXContentObject 
{ this.job = job; } - public PutJobResponse() { - } - public Job getResponse() { return job; } diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java new file mode 100644 index 00000000000..2e20e84d7b8 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.util; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Paging parameters for GET requests + */ +public class PageParams implements ToXContentObject { + + public static final ParseField PAGE = new ParseField("page"); + public static final ParseField FROM = new ParseField("from"); + public static final ParseField SIZE = new ParseField("size"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(PAGE.getPreferredName(), + a -> new PageParams((Integer) a[0], (Integer) a[1])); + + static { + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), FROM); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), SIZE); + } + + private final Integer from; + private final Integer size; + + /** + * Constructs paging parameters + * @param from skips the specified number of items. When {@code null} the default value will be used. + * @param size specifies the maximum number of items to obtain. When {@code null} the default value will be used. 
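+     *
+     * For example, {@code new PageParams(100, 50)} would skip the first 100 results
+     * and return at most the next 50.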
+ */ + public PageParams(@Nullable Integer from, @Nullable Integer size) { + this.from = from; + this.size = size; + } + + public int getFrom() { + return from; + } + + public int getSize() { + return size; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (from != null) { + builder.field(FROM.getPreferredName(), from); + } + if (size != null) { + builder.field(SIZE.getPreferredName(), size); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(from, size); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + PageParams other = (PageParams) obj; + return Objects.equals(from, other.from) && + Objects.equals(size, other.size); + } + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java new file mode 100644 index 00000000000..6364ad339b1 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class GetBucketsRequestTests extends AbstractXContentTestCase { + + @Override + protected GetBucketsRequest createTestInstance() { + GetBucketsRequest request = new GetBucketsRequest(randomAlphaOfLengthBetween(1, 20)); + + if (randomBoolean()) { + request.setTimestamp(String.valueOf(randomLong())); + } else { + if (randomBoolean()) { + request.setStart(String.valueOf(randomLong())); + } + if (randomBoolean()) { + request.setEnd(String.valueOf(randomLong())); + } + if (randomBoolean()) { + request.setExcludeInterim(randomBoolean()); + } + if (randomBoolean()) { + request.setAnomalyScore(randomDouble()); + } + if (randomBoolean()) { + int from = randomInt(10000); + int size = randomInt(10000); + request.setPageParams(new PageParams(from, size)); + } + if (randomBoolean()) { + request.setSort("anomaly_score"); + } + if (randomBoolean()) { + request.setDescending(randomBoolean()); + } + } + if (randomBoolean()) { + request.setExpand(randomBoolean()); + } + if (randomBoolean()) { + request.setExcludeInterim(randomBoolean()); + } + return request; + } + + @Override + protected GetBucketsRequest doParseInstance(XContentParser parser) throws IOException { + return GetBucketsRequest.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java new file mode 100644 index 00000000000..889c3e93bc7 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; +import org.elasticsearch.protocol.xpack.ml.job.results.BucketTests; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetBucketsResponseTests extends AbstractXContentTestCase { + + @Override + protected GetBucketsResponse createTestInstance() { + String jobId = randomAlphaOfLength(20); + int listSize = randomInt(10); + List buckets = new ArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + Bucket bucket = BucketTests.createTestInstance(jobId); + buckets.add(bucket); + } + return new GetBucketsResponse(buckets, listSize); + } + + @Override + protected GetBucketsResponse doParseInstance(XContentParser parser) throws IOException { + return GetBucketsResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java index 28b1893afe1..0eb988d8eb8 100644 --- a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java @@ -35,7 +35,7 @@ public class BucketTests extends AbstractXContentTestCase { return createTestInstance("foo"); } - public Bucket createTestInstance(String jobId) { + public static Bucket createTestInstance(String jobId) { Bucket bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong()); if (randomBoolean()) { bucket.setAnomalyScore(randomDouble()); diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java new file mode 100644 index 00000000000..6bd51e93c6f --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.protocol.xpack.ml.util;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.ml.job.util.PageParams;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class PageParamsTests extends AbstractXContentTestCase<PageParams> {
+
+    @Override
+    protected PageParams doParseInstance(XContentParser parser) {
+        return PageParams.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+
+    @Override
+    protected PageParams createTestInstance() {
+        Integer from = randomBoolean() ? randomInt() : null;
+        Integer size = randomBoolean() ? randomInt() : null;
+        return new PageParams(from, size);
+    }
+}

From 07cce953059e6e0001366405d555187c8498f7e3 Mon Sep 17 00:00:00 2001
From: Albert Zaharovits
Date: Thu, 23 Aug 2018 12:08:06 +0300
Subject: [PATCH 42/48] [DOCS] Remove reload password from docs cf. #32889

The reload call `_nodes/reload_secure_settings` no longer requires an
empty password (#32889). Reflect this in the docs.
---
 docs/reference/setup/secure-settings.asciidoc | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/docs/reference/setup/secure-settings.asciidoc b/docs/reference/setup/secure-settings.asciidoc
index 2177440457a..6abf5dea14d 100644
--- a/docs/reference/setup/secure-settings.asciidoc
+++ b/docs/reference/setup/secure-settings.asciidoc
@@ -91,9 +91,6 @@ using the `bin/elasticsearch-keystore add` command, call:
 [source,js]
 ----
 POST _nodes/reload_secure_settings
-{
-  "secure_settings_password": ""
-}
 ----
 // CONSOLE
 This API will decrypt and re-read the entire keystore, on every cluster node,

From 61f5c188e0a577555db382dd0fe9a4222da9df1a Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Thu, 23 Aug 2018 12:24:32 +0200
Subject: [PATCH 43/48] HLRC: Fix Compile Error From Missing Throws (#33083)

* 50441f97ae745814db96c262e99d0f465aca5b2c#diff-53a95fe7ded21313483f1b2f15977395L72
  removed the throws, breaking compilation here
---
 .../main/java/org/elasticsearch/client/MLRequestConverters.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
index 024ba9e2fca..6c1cc205701 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
@@ -70,7 +70,7 @@ final class MLRequestConverters {
         return request;
     }
 
-    static Request openJob(OpenJobRequest openJobRequest) {
+    static Request openJob(OpenJobRequest openJobRequest) throws IOException {
         String endpoint = new EndpointBuilder()
                 .addPathPartAsIs("_xpack")
                 .addPathPartAsIs("ml")

From 917e5a8c949824e653d92b443768e3909c375992 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Thu, 23 Aug 2018 13:19:21 +0200
Subject: [PATCH 44/48] TESTS: Fix Random Fail in MockTcpTransportTests (#33061)

* `foobar.txGet()` appears to return before `serviceB.stop()` returns,
  causing `serviceB.close()` to run concurrently with the `stop` call
  and run into a race condition
* Closes #32863
---
 .../transport/AbstractSimpleTransportTestCase.java | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index 21dbc561c6b..c485f9d45bd 100644
---
a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -768,6 +768,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { public void testNotifyOnShutdown() throws Exception { final CountDownLatch latch2 = new CountDownLatch(1); + final CountDownLatch latch3 = new CountDownLatch(1); try { serviceA.registerRequestHandler("internal:foobar", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel, task) -> { @@ -777,6 +778,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceB.stop(); } catch (Exception e) { fail(e.getMessage()); + } finally { + latch3.countDown(); } }); TransportFuture foobar = serviceB.submitRequest(nodeA, "internal:foobar", @@ -788,6 +791,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (TransportException ex) { } + latch3.await(); } finally { serviceB.close(); // make sure we are fully closed here otherwise we might run into assertions down the road serviceA.disconnectFromNode(nodeB); From f3cfd4504f4828db2f2b694fd0bba5654121cbfc Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 23 Aug 2018 13:33:39 +0200 Subject: [PATCH 45/48] Use `addIfAbsent` instead of checking if an element is contained Relates to #32988 --- .../org/elasticsearch/transport/ConnectionManager.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java index 0c6be15fd92..da8faaf3c33 100644 --- a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java @@ -38,7 +38,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; @@ -85,9 +84,7 @@ public class ConnectionManager implements Closeable { } public void addListener(TransportConnectionListener listener) { - if (connectionListener.listeners.contains(listener) == false) { - this.connectionListener.listeners.add(listener); - } + this.connectionListener.listeners.addIfAbsent(listener); } public void removeListener(TransportConnectionListener listener) { @@ -297,7 +294,7 @@ public class ConnectionManager implements Closeable { private static final class DelegatingNodeConnectionListener implements TransportConnectionListener { - private final List listeners = new CopyOnWriteArrayList<>(); + private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); @Override public void onNodeDisconnected(DiscoveryNode key) { From f860e589a629bd89f24f022a50c8a2fcb7a77673 Mon Sep 17 00:00:00 2001 From: markharwood Date: Thu, 23 Aug 2018 15:00:30 +0100 Subject: [PATCH 46/48] Test fix - GraphExploreResponseTests should not randomise array elements Closes #33086 --- .../protocol/xpack/graph/GraphExploreResponseTests.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java index 74b43458178..0f8f055049b 100644 --- 
a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
@@ -87,6 +87,11 @@ public class GraphExploreResponseTests extends AbstractXContentTestCase< GraphEx
     protected boolean supportsUnknownFields() {
         return true;
     }
+
+    @Override
+    protected String[] getShuffleFieldsExceptions() {
+        return new String[]{"vertices"};
+    }
 
     protected Predicate<String> getRandomFieldsExcludeFilterWhenResultHasErrors() {
         return field -> field.startsWith("responses");

From 644c0de5ec0e14ca6ab3abc8d4a18b96f95a023b Mon Sep 17 00:00:00 2001
From: Michael Basnight
Date: Thu, 23 Aug 2018 09:48:53 -0500
Subject: [PATCH 47/48] Move non-duplicated actions back into xpack core (#32952)

Most actions' requests and responses were moved from xpack core into
protocol. We have decided to duplicate the actions in the HLRC instead
of trying to reuse them. This commit moves the non-duplicated actions
back into xpack core and severs the tie between xpack core and protocol
so no other actions can be moved without being duplicated.
---
 x-pack/plugin/core/build.gradle | 7 +-
 .../protocol/xpack/XPackInfoRequest.java | 86 ++++
 .../protocol/xpack/XPackInfoResponse.java | 487 ++++++++++++++++++
 .../protocol/xpack/XPackUsageRequest.java | 18 +
 .../protocol/xpack/XPackUsageResponse.java | 43 ++
 .../protocol/xpack/common/ProtocolUtils.java | 58 +++
 .../protocol/xpack/graph/Connection.java | 216 ++++++++
 .../xpack/graph/GraphExploreRequest.java | 388 ++++++++++++++
 .../xpack/graph/GraphExploreResponse.java | 248 +++++++++
 .../protocol/xpack/graph/Hop.java | 160 ++++++
 .../protocol/xpack/graph/Vertex.java | 255 +++++++++
 .../protocol/xpack/graph/VertexRequest.java | 235 +++++++++
 .../protocol/xpack/graph/package-info.java | 11 +
 .../xpack/license/DeleteLicenseRequest.java | 18 +
 .../xpack/license/GetLicenseRequest.java | 28 +
 .../xpack/license/GetLicenseResponse.java | 25 +
 .../protocol/xpack/license/LicenseStatus.java | 54 ++
 .../xpack/license/LicensesStatus.java | 55 ++
 .../xpack/license/PutLicenseRequest.java | 40 ++
 .../xpack/license/PutLicenseResponse.java | 195 +++++++
 .../protocol/xpack/license/package-info.java | 11 +
 .../migration/IndexUpgradeInfoRequest.java | 85 +++
 .../migration/IndexUpgradeInfoResponse.java | 120 +++++
 .../migration/UpgradeActionRequired.java | 42 ++
 .../xpack/migration/package-info.java | 11 +
 .../protocol/xpack/package-info.java | 10 +
 .../protocol/xpack/security/User.java | 246 +++++++++
 .../protocol/xpack/security/package-info.java | 11 +
 .../xpack/watcher/DeleteWatchRequest.java | 76 +++
 .../xpack/watcher/DeleteWatchResponse.java | 110 ++++
 .../xpack/watcher/PutWatchRequest.java | 145 ++++++
 .../xpack/watcher/PutWatchResponse.java | 111 ++++
 .../protocol/xpack/watcher/package-info.java | 11 +
 .../xpack/XPackInfoResponseTests.java | 146 ++++++
 .../xpack/common/ProtocolUtilsTests.java | 58 +++
 .../graph/GraphExploreResponseTests.java | 118 +++++
 .../xpack/license/LicenseStatusTests.java | 17 +
 .../license/PutLicenseResponseTests.java | 112 ++++
 .../IndexUpgradeInfoRequestTests.java | 36 ++
 .../IndexUpgradeInfoResponseTests.java | 54 ++
 .../protocol/xpack/security/UserTests.java | 25 +
 .../watcher/DeleteWatchResponseTests.java | 32 ++
 .../xpack/watcher/PutWatchResponseTests.java | 32 ++
 43 files changed, 4241 insertions(+), 131 deletions(-)
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java
 create mode 100644
x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index ef428bdd73d..a58500b880f 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -8,7 +8,6 @@ import java.nio.file.StandardCopyOption apply plugin: 'elasticsearch.esplugin' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -apply plugin: 'com.github.johnrengelman.shadow' archivesBaseName = 'x-pack-core' @@ -27,7 +26,6 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - bundle project(':x-pack:protocol') compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" @@ -112,8 +110,7 @@ test { // TODO: don't publish test artifacts just to run messy tests, fix the tests! // https://github.com/elastic/x-plugins/issues/724 configurations { - testArtifacts.extendsFrom(testRuntime, shadow) - testArtifacts.exclude(group: project(':x-pack:protocol').group, module: project(':x-pack:protocol').name) + testArtifacts.extendsFrom testRuntime } task testJar(type: Jar) { appendix 'test' @@ -122,7 +119,7 @@ task testJar(type: Jar) { artifacts { // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions - archives shadowJar + archives jar testArtifacts testJar } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java new file mode 100644 index 00000000000..41f066daf93 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.Locale; + +/** + * Fetch information about X-Pack from the cluster. + */ +public class XPackInfoRequest extends ActionRequest { + + public enum Category { + BUILD, LICENSE, FEATURES; + + public static EnumSet toSet(String... categories) { + EnumSet set = EnumSet.noneOf(Category.class); + for (String category : categories) { + switch (category) { + case "_all": + return EnumSet.allOf(Category.class); + case "_none": + return EnumSet.noneOf(Category.class); + default: + set.add(Category.valueOf(category.toUpperCase(Locale.ROOT))); + } + } + return set; + } + } + + private boolean verbose; + private EnumSet categories = EnumSet.noneOf(Category.class); + + public XPackInfoRequest() {} + + public void setVerbose(boolean verbose) { + this.verbose = verbose; + } + + public boolean isVerbose() { + return verbose; + } + + public void setCategories(EnumSet categories) { + this.categories = categories; + } + + public EnumSet getCategories() { + return categories; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.verbose = in.readBoolean(); + EnumSet categories = EnumSet.noneOf(Category.class); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + categories.add(Category.valueOf(in.readString())); + } + this.categories = categories; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(verbose); + out.writeVInt(categories.size()); + for (Category category : categories) { + out.writeString(category.name()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java new file mode 100644 index 00000000000..2a7eddcf353 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -0,0 +1,487 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class XPackInfoResponse extends ActionResponse implements ToXContentObject { + /** + * Value of the license's expiration time if it should never expire. + */ + public static final long BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS = Long.MAX_VALUE - TimeUnit.HOURS.toMillis(24 * 365); + // TODO move this constant to License.java once we move License.java to the protocol jar + + @Nullable private BuildInfo buildInfo; + @Nullable private LicenseInfo licenseInfo; + @Nullable private FeatureSetsInfo featureSetsInfo; + + public XPackInfoResponse() {} + + public XPackInfoResponse(@Nullable BuildInfo buildInfo, @Nullable LicenseInfo licenseInfo, @Nullable FeatureSetsInfo featureSetsInfo) { + this.buildInfo = buildInfo; + this.licenseInfo = licenseInfo; + this.featureSetsInfo = featureSetsInfo; + } + + /** + * @return The build info (incl. build hash and timestamp) + */ + public BuildInfo getBuildInfo() { + return buildInfo; + } + + /** + * @return The current license info (incl. UID, type/mode. status and expiry date). May return {@code null} when no + * license is currently installed. + */ + public LicenseInfo getLicenseInfo() { + return licenseInfo; + } + + /** + * @return The current status of the feature sets in X-Pack. Feature sets describe the features available/enabled in X-Pack. 
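+     * A feature set may be reported as available (permitted by the current license)
+     * yet still be disabled through its settings, so the two flags are reported
+     * independently.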
+ */ + public FeatureSetsInfo getFeatureSetsInfo() { + return featureSetsInfo; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(buildInfo); + out.writeOptionalWriteable(licenseInfo); + out.writeOptionalWriteable(featureSetsInfo); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.buildInfo = in.readOptionalWriteable(BuildInfo::new); + this.licenseInfo = in.readOptionalWriteable(LicenseInfo::new); + this.featureSetsInfo = in.readOptionalWriteable(FeatureSetsInfo::new); + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + XPackInfoResponse rhs = (XPackInfoResponse) other; + return Objects.equals(buildInfo, rhs.buildInfo) + && Objects.equals(licenseInfo, rhs.licenseInfo) + && Objects.equals(featureSetsInfo, rhs.featureSetsInfo); + } + + @Override + public int hashCode() { + return Objects.hash(buildInfo, licenseInfo, featureSetsInfo); + } + + @Override + public String toString() { + return Strings.toString(this, true, false); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "xpack_info_response", true, (a, v) -> { + BuildInfo buildInfo = (BuildInfo) a[0]; + LicenseInfo licenseInfo = (LicenseInfo) a[1]; + @SuppressWarnings("unchecked") // This is how constructing object parser works + List featureSets = (List) a[2]; + FeatureSetsInfo featureSetsInfo = featureSets == null ? null : new FeatureSetsInfo(new HashSet<>(featureSets)); + return new XPackInfoResponse(buildInfo, licenseInfo, featureSetsInfo); + }); + static { + PARSER.declareObject(optionalConstructorArg(), BuildInfo.PARSER, new ParseField("build")); + /* + * licenseInfo is sort of "double optional" because it is + * optional but it can also be send as `null`. + */ + PARSER.declareField(optionalConstructorArg(), (p, v) -> { + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return LicenseInfo.PARSER.parse(p, v); + }, + new ParseField("license"), ValueType.OBJECT_OR_NULL); + PARSER.declareNamedObjects(optionalConstructorArg(), + (p, c, name) -> FeatureSetsInfo.FeatureSet.PARSER.parse(p, name), + new ParseField("features")); + } + public static XPackInfoResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (buildInfo != null) { + builder.field("build", buildInfo, params); + } + + EnumSet categories = XPackInfoRequest.Category + .toSet(Strings.splitStringByCommaToArray(params.param("categories", "_all"))); + if (licenseInfo != null) { + builder.field("license", licenseInfo, params); + } else if (categories.contains(XPackInfoRequest.Category.LICENSE)) { + // if the user requested the license info, and there is no license, we should send + // back an explicit null value (indicating there is no license). 
This is different + // than not adding the license info at all + builder.nullField("license"); + } + + if (featureSetsInfo != null) { + builder.field("features", featureSetsInfo, params); + } + + if (params.paramAsBoolean("human", true)) { + builder.field("tagline", "You know, for X"); + } + + return builder.endObject(); + } + + public static class LicenseInfo implements ToXContentObject, Writeable { + private final String uid; + private final String type; + private final String mode; + private final LicenseStatus status; + private final long expiryDate; + + public LicenseInfo(String uid, String type, String mode, LicenseStatus status, long expiryDate) { + this.uid = uid; + this.type = type; + this.mode = mode; + this.status = status; + this.expiryDate = expiryDate; + } + + public LicenseInfo(StreamInput in) throws IOException { + this(in.readString(), in.readString(), in.readString(), LicenseStatus.readFrom(in), in.readLong()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uid); + out.writeString(type); + out.writeString(mode); + status.writeTo(out); + out.writeLong(expiryDate); + } + + public String getUid() { + return uid; + } + + public String getType() { + return type; + } + + public String getMode() { + return mode; + } + + public long getExpiryDate() { + return expiryDate; + } + + public LicenseStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + LicenseInfo rhs = (LicenseInfo) other; + return Objects.equals(uid, rhs.uid) + && Objects.equals(type, rhs.type) + && Objects.equals(mode, rhs.mode) + && Objects.equals(status, rhs.status) + && expiryDate == rhs.expiryDate; + } + + @Override + public int hashCode() { + return Objects.hash(uid, type, mode, status, expiryDate); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "license_info", true, (a, v) -> { + String uid = (String) a[0]; + String type = (String) a[1]; + String mode = (String) a[2]; + LicenseStatus status = LicenseStatus.fromString((String) a[3]); + Long expiryDate = (Long) a[4]; + long primitiveExpiryDate = expiryDate == null ? 
BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS : expiryDate; + return new LicenseInfo(uid, type, mode, status, primitiveExpiryDate); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("uid")); + PARSER.declareString(constructorArg(), new ParseField("type")); + PARSER.declareString(constructorArg(), new ParseField("mode")); + PARSER.declareString(constructorArg(), new ParseField("status")); + PARSER.declareLong(optionalConstructorArg(), new ParseField("expiry_date_in_millis")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field("uid", uid) + .field("type", type) + .field("mode", mode) + .field("status", status.label()); + if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { + builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate); + } + return builder.endObject(); + } + } + + public static class BuildInfo implements ToXContentObject, Writeable { + private final String hash; + private final String timestamp; + + public BuildInfo(String hash, String timestamp) { + this.hash = hash; + this.timestamp = timestamp; + } + + public BuildInfo(StreamInput input) throws IOException { + this(input.readString(), input.readString()); + } + + @Override + public void writeTo(StreamOutput output) throws IOException { + output.writeString(hash); + output.writeString(timestamp); + } + + public String getHash() { + return hash; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + BuildInfo rhs = (BuildInfo) other; + return Objects.equals(hash, rhs.hash) + && Objects.equals(timestamp, rhs.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(hash, timestamp); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "build_info", true, (a, v) -> new BuildInfo((String) a[0], (String) a[1])); + static { + PARSER.declareString(constructorArg(), new ParseField("hash")); + PARSER.declareString(constructorArg(), new ParseField("date")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("hash", hash) + .field("date", timestamp) + .endObject(); + } + } + + public static class FeatureSetsInfo implements ToXContentObject, Writeable { + private final Map featureSets; + + public FeatureSetsInfo(Set featureSets) { + Map map = new HashMap<>(featureSets.size()); + for (FeatureSet featureSet : featureSets) { + map.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(map); + } + + public FeatureSetsInfo(StreamInput in) throws IOException { + int size = in.readVInt(); + Map featureSets = new HashMap<>(size); + for (int i = 0; i < size; i++) { + FeatureSet featureSet = new FeatureSet(in); + featureSets.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(featureSets); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(featureSets.size()); + for (FeatureSet featureSet : featureSets.values()) { + featureSet.writeTo(out); + } + } + + public Map getFeatureSets() { + return featureSets; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + 
FeatureSetsInfo rhs = (FeatureSetsInfo) other; + return Objects.equals(featureSets, rhs.featureSets); + } + + @Override + public int hashCode() { + return Objects.hash(featureSets); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + List names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList()); + for (String name : names) { + builder.field(name, featureSets.get(name), params); + } + return builder.endObject(); + } + + public static class FeatureSet implements ToXContentObject, Writeable { + private final String name; + @Nullable private final String description; + private final boolean available; + private final boolean enabled; + @Nullable private final Map nativeCodeInfo; + + public FeatureSet(String name, @Nullable String description, boolean available, boolean enabled, + @Nullable Map nativeCodeInfo) { + this.name = name; + this.description = description; + this.available = available; + this.enabled = enabled; + this.nativeCodeInfo = nativeCodeInfo; + } + + public FeatureSet(StreamInput in) throws IOException { + this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), + in.getVersion().onOrAfter(Version.V_5_4_0) ? in.readMap() : null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeOptionalString(description); + out.writeBoolean(available); + out.writeBoolean(enabled); + if (out.getVersion().onOrAfter(Version.V_5_4_0)) { + out.writeMap(nativeCodeInfo); + } + } + + public String name() { + return name; + } + + @Nullable + public String description() { + return description; + } + + public boolean available() { + return available; + } + + public boolean enabled() { + return enabled; + } + + @Nullable + public Map nativeCodeInfo() { + return nativeCodeInfo; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + FeatureSet rhs = (FeatureSet) other; + return Objects.equals(name, rhs.name) + && Objects.equals(description, rhs.description) + && available == rhs.available + && enabled == rhs.enabled + && Objects.equals(nativeCodeInfo, rhs.nativeCodeInfo); + } + + @Override + public int hashCode() { + return Objects.hash(name, description, available, enabled, nativeCodeInfo); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "feature_set", true, (a, name) -> { + String description = (String) a[0]; + boolean available = (Boolean) a[1]; + boolean enabled = (Boolean) a[2]; + @SuppressWarnings("unchecked") // Matches up with declaration below + Map nativeCodeInfo = (Map) a[3]; + return new FeatureSet(name, description, available, enabled, nativeCodeInfo); + }); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("description")); + PARSER.declareBoolean(constructorArg(), new ParseField("available")); + PARSER.declareBoolean(constructorArg(), new ParseField("enabled")); + PARSER.declareObject(optionalConstructorArg(), (p, name) -> p.map(), new ParseField("native_code_info")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (description != null) { + builder.field("description", description); + } + builder.field("available", available); + builder.field("enabled", enabled); + if (nativeCodeInfo != null) { + 
builder.field("native_code_info", nativeCodeInfo); + } + return builder.endObject(); + } + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java new file mode 100644 index 00000000000..83621a9ac3d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeRequest; + +public class XPackUsageRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java new file mode 100644 index 00000000000..ccf681837fd --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Response object from calling the xpack usage api. + * + * Usage information for each feature is accessible through {@link #getUsages()}. + */ +public class XPackUsageResponse { + + private final Map> usages; + + private XPackUsageResponse(Map> usages) throws IOException { + this.usages = usages; + } + + @SuppressWarnings("unchecked") + private static Map castMap(Object value) { + return (Map)value; + } + + /** Return a map from feature name to usage information for that feature. */ + public Map> getUsages() { + return usages; + } + + public static XPackUsageResponse fromXContent(XContentParser parser) throws IOException { + Map rawMap = parser.map(); + Map> usages = rawMap.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> castMap(e.getValue()))); + return new XPackUsageResponse(usages); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java new file mode 100644 index 00000000000..39340955121 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.common;
+
+import java.util.Arrays;
+import java.util.Map;
+
+/**
+ * Common utilities used for XPack protocol classes
+ */
+public final class ProtocolUtils {
+
+    /**
+     * Implements equals for a map of string arrays.
+     *
+     * The map of string arrays is used in some XPack protocol classes but doesn't work with equals.
+     */
+    public static boolean equals(Map<String, String[]> a, Map<String, String[]> b) {
+        if (a == null) {
+            return b == null;
+        }
+        if (b == null) {
+            return false;
+        }
+        if (a.size() != b.size()) {
+            return false;
+        }
+        for (Map.Entry<String, String[]> entry : a.entrySet()) {
+            String[] val = entry.getValue();
+            String key = entry.getKey();
+            if (val == null) {
+                if (b.get(key) != null || b.containsKey(key) == false) {
+                    return false;
+                }
+            } else {
+                if (Arrays.equals(val, b.get(key)) == false) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Implements hashCode for a map of string arrays.
+     *
+     * The map of string arrays doesn't work with hashCode.
+     */
+    public static int hashCode(Map<String, String[]> a) {
+        int hash = 0;
+        for (Map.Entry<String, String[]> entry : a.entrySet())
+            hash += Arrays.hashCode(entry.getValue());
+        return hash;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java
new file mode 100644
index 00000000000..994c7e2c2d5
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.graph;
+
+import com.carrotsearch.hppc.ObjectIntHashMap;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent.Params;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * A Connection links exactly two {@link Vertex} objects. The basis of a
+ * connection is that one or more documents have been found that contain
+ * this pair of terms, and the strength of the connection is recorded
+ * as a weight.
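+ *
+ * For example, a connection with a doc count of 3 indicates that three documents
+ * in the sampled set contained both of the connected terms; the weight is a
+ * relative measure of the strength of that association.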
+ */ +public class Connection { + private Vertex from; + private Vertex to; + private double weight; + private long docCount; + + public Connection(Vertex from, Vertex to, double weight, long docCount) { + this.from = from; + this.to = to; + this.weight = weight; + this.docCount = docCount; + } + + public Connection(StreamInput in, Map vertices) throws IOException { + from = vertices.get(new VertexId(in.readString(), in.readString())); + to = vertices.get(new VertexId(in.readString(), in.readString())); + weight = in.readDouble(); + docCount = in.readVLong(); + } + + Connection() { + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(from.getField()); + out.writeString(from.getTerm()); + out.writeString(to.getField()); + out.writeString(to.getTerm()); + out.writeDouble(weight); + out.writeVLong(docCount); + } + + public ConnectionId getId() { + return new ConnectionId(from.getId(), to.getId()); + } + + public Vertex getFrom() { + return from; + } + + public Vertex getTo() { + return to; + } + + /** + * @return a measure of the relative connectedness between a pair of {@link Vertex} objects + */ + public double getWeight() { + return weight; + } + + /** + * @return the number of documents in the sampled set that contained this + * pair of {@link Vertex} objects. + */ + public long getDocCount() { + return docCount; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Connection other = (Connection) obj; + return docCount == other.docCount && + weight == other.weight && + Objects.equals(to, other.to) && + Objects.equals(from, other.from); + } + + @Override + public int hashCode() { + return Objects.hash(docCount, weight, from, to); + } + + + private static final ParseField SOURCE = new ParseField("source"); + private static final ParseField TARGET = new ParseField("target"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DOC_COUNT = new ParseField("doc_count"); + + + void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap vertexNumbers) throws IOException { + builder.field(SOURCE.getPreferredName(), vertexNumbers.get(from)); + builder.field(TARGET.getPreferredName(), vertexNumbers.get(to)); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DOC_COUNT.getPreferredName(), docCount); + } + + //When deserializing from XContent we need to wait for all vertices to be loaded before + // Connection objects can be created that reference them. This class provides the interim + // state for connections. 
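+    // A sketch of the expected flow: parse all vertices from the response first,
+    // collect each connection as an UnresolvedConnection of vertex indices, and
+    // then call resolve(vertices) on each one to build the final Connection list.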
+ static class UnresolvedConnection { + int fromIndex; + int toIndex; + double weight; + long docCount; + UnresolvedConnection(int fromIndex, int toIndex, double weight, long docCount) { + super(); + this.fromIndex = fromIndex; + this.toIndex = toIndex; + this.weight = weight; + this.docCount = docCount; + } + public Connection resolve(List vertices) { + return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "ConnectionParser", true, + args -> { + int source = (Integer) args[0]; + int target = (Integer) args[1]; + double weight = (Double) args[2]; + long docCount = (Long) args[3]; + return new UnresolvedConnection(source, target, weight, docCount); + }); + + static { + PARSER.declareInt(constructorArg(), SOURCE); + PARSER.declareInt(constructorArg(), TARGET); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareLong(constructorArg(), DOC_COUNT); + } + static UnresolvedConnection fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + } + + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Connection} + */ + public static class ConnectionId { + private final VertexId source; + private final VertexId target; + + public ConnectionId(VertexId source, VertexId target) { + this.source = source; + this.target = target; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ConnectionId vertexId = (ConnectionId) o; + + if (source != null ? !source.equals(vertexId.source) : vertexId.source != null) + return false; + if (target != null ? !target.equals(vertexId.target) : vertexId.target != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = source != null ? source.hashCode() : 0; + result = 31 * result + (target != null ? target.hashCode() : 0); + return result; + } + + public VertexId getSource() { + return source; + } + + public VertexId getTarget() { + return target; + } + + @Override + public String toString() { + return getSource() + "->" + getTarget(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java new file mode 100644 index 00000000000..196982c0a35 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java @@ -0,0 +1,388 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +/** + * Holds the criteria required to guide the exploration of connected terms which + * can be returned as a graph. + */ +public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject { + + public static final String NO_HOPS_ERROR_MESSAGE = "Graph explore request must have at least one hop"; + public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest"; + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); + private String[] types = Strings.EMPTY_ARRAY; + private String routing; + private TimeValue timeout; + + private int sampleSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE; + private String sampleDiversityField; + private int maxDocsPerDiversityValue; + private boolean useSignificance = true; + private boolean returnDetailedInfo; + + private List hops = new ArrayList<>(); + + public GraphExploreRequest() { + } + + /** + * Constructs a new graph request to run against the provided indices. No + * indices means it will run against all indices. + */ + public GraphExploreRequest(String... indices) { + this.indices = indices; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (hops.size() == 0) { + validationException = ValidateActions.addValidationError(NO_HOPS_ERROR_MESSAGE, validationException); + } + for (Hop hop : hops) { + validationException = hop.validate(validationException); + } + return validationException; + } + + @Override + public String[] indices() { + return this.indices; + } + + @Override + public GraphExploreRequest indices(String... indices) { + this.indices = indices; + return this; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public GraphExploreRequest indicesOptions(IndicesOptions indicesOptions) { + if (indicesOptions == null) { + throw new IllegalArgumentException("IndicesOptions must not be null"); + } + this.indicesOptions = indicesOptions; + return this; + } + + public String[] types() { + return this.types; + } + + public GraphExploreRequest types(String... 
types) { + this.types = types; + return this; + } + + public String routing() { + return this.routing; + } + + public GraphExploreRequest routing(String routing) { + this.routing = routing; + return this; + } + + public GraphExploreRequest routing(String... routings) { + this.routing = Strings.arrayToCommaDelimitedString(routings); + return this; + } + + public TimeValue timeout() { + return timeout; + } + + /** + * Graph exploration can be set to timeout after the given period. Search + * operations involved in each hop are limited to the remaining time + * available but can still overrun due to the nature of their "best efforts" + * timeout support. When a timeout occurs partial results are returned. + * + * @param timeout + * a {@link TimeValue} object which determines the maximum length + * of time to spend exploring + */ + public GraphExploreRequest timeout(TimeValue timeout) { + if (timeout == null) { + throw new IllegalArgumentException("timeout must not be null"); + } + this.timeout = timeout; + return this; + } + + public GraphExploreRequest timeout(String timeout) { + timeout(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout")); + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + types = in.readStringArray(); + routing = in.readOptionalString(); + timeout = in.readOptionalTimeValue(); + sampleSize = in.readInt(); + sampleDiversityField = in.readOptionalString(); + maxDocsPerDiversityValue = in.readInt(); + + useSignificance = in.readBoolean(); + returnDetailedInfo = in.readBoolean(); + + int numHops = in.readInt(); + Hop parentHop = null; + for (int i = 0; i < numHops; i++) { + Hop hop = new Hop(parentHop); + hop.readFrom(in); + hops.add(hop); + parentHop = hop; + } + + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + out.writeStringArray(types); + out.writeOptionalString(routing); + out.writeOptionalTimeValue(timeout); + + out.writeInt(sampleSize); + out.writeOptionalString(sampleDiversityField); + out.writeInt(maxDocsPerDiversityValue); + + out.writeBoolean(useSignificance); + out.writeBoolean(returnDetailedInfo); + out.writeInt(hops.size()); + for (Iterator iterator = hops.iterator(); iterator.hasNext();) { + Hop hop = iterator.next(); + hop.writeTo(out); + } + } + + @Override + public String toString() { + return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]"; + } + + /** + * The number of top-matching documents that are considered during each hop + * (default is {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE} + * Very small values (less than 50) may not provide sufficient + * weight-of-evidence to identify significant connections between terms. + *

+     * Very large values (many thousands) are not recommended with loosely
+     * defined queries (fuzzy queries or those with many OR clauses). This is
+     * because any useful signals in the best documents are diluted with
+     * irrelevant noise from low-quality matches. Performance is also typically
+     * better with smaller samples, as fewer look-ups are required for the
+     * background frequencies of terms found in the documents.
+     * <p>
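+     * For example, a sketch (the request variable and sample size are hypothetical):
+     * <pre>
+     * request.sampleSize(500); // shrink the per-shard sample for a noisy, fuzzy query
+     * </pre>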

+     *
+     * @param maxNumberOfDocsPerHop
+     *            shard-level sample size in documents
+     */
+    public void sampleSize(int maxNumberOfDocsPerHop) {
+        sampleSize = maxNumberOfDocsPerHop;
+    }
+
+    public int sampleSize() {
+        return sampleSize;
+    }
+
+    /**
+     * Optional choice of single-value field on which to diversify sampled
+     * search results
+     */
+    public void sampleDiversityField(String name) {
+        sampleDiversityField = name;
+    }
+
+    public String sampleDiversityField() {
+        return sampleDiversityField;
+    }
+
+    /**
+     * Optional number of permitted docs with the same value in sampled search
+     * results. The field must also be declared using {@link #sampleDiversityField(String)}.
+     */
+    public void maxDocsPerDiversityValue(int maxDocs) {
+        this.maxDocsPerDiversityValue = maxDocs;
+    }
+
+    public int maxDocsPerDiversityValue() {
+        return maxDocsPerDiversityValue;
+    }
+
+    /**
+     * Controls the choice of algorithm used to select interesting terms. The
+     * default value is true, which means terms are selected based on
+     * significance (see the {@link SignificantTerms} aggregation) rather than
+     * popularity (using the {@link TermsAggregator}).
+     *
+     * @param value
+     *            true if the significant_terms algorithm should be used.
+     */
+    public void useSignificance(boolean value) {
+        this.useSignificance = value;
+    }
+
+    public boolean useSignificance() {
+        return useSignificance;
+    }
+
+    /**
+     * Return detailed information about vertex frequencies as part of JSON
+     * results - defaults to false
+     *
+     * @param value
+     *            true if detailed information is required in JSON responses
+     */
+    public void returnDetailedInfo(boolean value) {
+        this.returnDetailedInfo = value;
+    }
+
+    public boolean returnDetailedInfo() {
+        return returnDetailedInfo;
+    }
+
+    /**
+     * Add a stage in the graph exploration. Each hop represents a stage of
+     * querying Elasticsearch to identify terms which can then be connected to
+     * other terms in a subsequent hop.
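+     * <p>
+     * A sketch of a two-hop exploration (the index and field names are hypothetical,
+     * mirroring the weblog example in {@link Hop}):
+     * <pre>
+     * GraphExploreRequest request = new GraphExploreRequest("weblogs");
+     * Hop hop1 = request.createNextHop(QueryBuilders.termQuery("url", "admin"));
+     * hop1.addVertexRequest("ip").size(10);
+     * Hop hop2 = request.createNextHop(QueryBuilders.termQuery("status", 404));
+     * hop2.addVertexRequest("url");
+     * </pre>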
+ * + * @param guidingQuery + * optional choice of query which influences which documents are + * considered in this stage + * @return a {@link Hop} object that holds settings for a stage in the graph + * exploration + */ + public Hop createNextHop(QueryBuilder guidingQuery) { + Hop parent = null; + if (hops.size() > 0) { + parent = hops.get(hops.size() - 1); + } + Hop newHop = new Hop(parent); + newHop.guidingQuery = guidingQuery; + hops.add(newHop); + return newHop; + } + + public int getHopNumbers() { + return hops.size(); + } + + public Hop getHop(int hopNumber) { + return hops.get(hopNumber); + } + + public static class TermBoost { + String term; + float boost; + + public TermBoost(String term, float boost) { + super(); + this.term = term; + if (boost <= 0) { + throw new IllegalArgumentException("Boosts must be a positive non-zero number"); + } + this.boost = boost; + } + + TermBoost() { + } + + public String getTerm() { + return term; + } + + public float getBoost() { + return boost; + } + + void readFrom(StreamInput in) throws IOException { + this.term = in.readString(); + this.boost = in.readFloat(); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(term); + out.writeFloat(boost); + } + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startObject("controls"); + { + if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) { + builder.field("sample_size", sampleSize); + } + if (sampleDiversityField != null) { + builder.startObject("sample_diversity"); + builder.field("field", sampleDiversityField); + builder.field("max_docs_per_value", maxDocsPerDiversityValue); + builder.endObject(); + } + builder.field("use_significance", useSignificance); + if (returnDetailedInfo) { + builder.field("return_detailed_stats", returnDetailedInfo); + } + } + builder.endObject(); + + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.startObject("connections"); + } + hop.toXContent(builder, params); + } + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.endObject(); + } + } + builder.endObject(); + + return builder; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java new file mode 100644 index 00000000000..12eb20617ff --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import com.carrotsearch.hppc.ObjectIntHashMap; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; +import org.elasticsearch.protocol.xpack.graph.Connection.UnresolvedConnection; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects + * (nodes and edges in common graph parlance). + * + * @see GraphExploreRequest + */ +public class GraphExploreResponse extends ActionResponse implements ToXContentObject { + + private long tookInMillis; + private boolean timedOut = false; + private ShardOperationFailedException[] shardFailures = ShardSearchFailure.EMPTY_ARRAY; + private Map vertices; + private Map connections; + private boolean returnDetailedInfo; + static final String RETURN_DETAILED_INFO_PARAM = "returnDetailedInfo"; + + public GraphExploreResponse() { + } + + public GraphExploreResponse(long tookInMillis, boolean timedOut, ShardOperationFailedException[] shardFailures, + Map vertices, Map connections, boolean returnDetailedInfo) { + this.tookInMillis = tookInMillis; + this.timedOut = timedOut; + this.shardFailures = shardFailures; + this.vertices = vertices; + this.connections = connections; + this.returnDetailedInfo = returnDetailedInfo; + } + + + public TimeValue getTook() { + return new TimeValue(tookInMillis); + } + + public long getTookInMillis() { + return tookInMillis; + } + + /** + * @return true if the time stated in {@link GraphExploreRequest#timeout(TimeValue)} was exceeded + * (not all hops may have been completed in this case) + */ + public boolean isTimedOut() { + return this.timedOut; + } + public ShardOperationFailedException[] getShardFailures() { + return shardFailures; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + tookInMillis = in.readVLong(); + timedOut = in.readBoolean(); + + int size = in.readVInt(); + if (size == 0) { + shardFailures = ShardSearchFailure.EMPTY_ARRAY; + } else { + shardFailures = new ShardSearchFailure[size]; + for (int i = 0; i < shardFailures.length; i++) { + shardFailures[i] = readShardSearchFailure(in); + } + } + // read vertices + size = in.readVInt(); + vertices = new HashMap<>(); + for (int i = 0; i < size; i++) { + Vertex n = Vertex.readFrom(in); + vertices.put(n.getId(), n); + } + + size = in.readVInt(); + + connections = new HashMap<>(); + for (int i = 0; i < size; i++) { + 
Connection e = new Connection(in, vertices); + connections.put(e.getId(), e); + } + + returnDetailedInfo = in.readBoolean(); + + } + + public Collection getConnections() { + return connections.values(); + } + + public Collection getVertices() { + return vertices.values(); + } + + public Vertex getVertex(VertexId id) { + return vertices.get(id); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVLong(tookInMillis); + out.writeBoolean(timedOut); + + out.writeVInt(shardFailures.length); + for (ShardOperationFailedException shardSearchFailure : shardFailures) { + shardSearchFailure.writeTo(out); + } + + out.writeVInt(vertices.size()); + for (Vertex vertex : vertices.values()) { + vertex.writeTo(out); + } + + out.writeVInt(connections.size()); + for (Connection connection : connections.values()) { + connection.writeTo(out); + } + + out.writeBoolean(returnDetailedInfo); + + } + + private static final ParseField TOOK = new ParseField("took"); + private static final ParseField TIMED_OUT = new ParseField("timed_out"); + private static final ParseField VERTICES = new ParseField("vertices"); + private static final ParseField CONNECTIONS = new ParseField("connections"); + private static final ParseField FAILURES = new ParseField("failures"); + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOOK.getPreferredName(), tookInMillis); + builder.field(TIMED_OUT.getPreferredName(), timedOut); + + builder.startArray(FAILURES.getPreferredName()); + if (shardFailures != null) { + for (ShardOperationFailedException shardFailure : shardFailures) { + builder.startObject(); + shardFailure.toXContent(builder, params); + builder.endObject(); + } + } + builder.endArray(); + + ObjectIntHashMap vertexNumbers = new ObjectIntHashMap<>(vertices.size()); + + Map extraParams = new HashMap<>(); + extraParams.put(RETURN_DETAILED_INFO_PARAM, Boolean.toString(returnDetailedInfo)); + Params extendedParams = new DelegatingMapParams(extraParams, params); + + builder.startArray(VERTICES.getPreferredName()); + for (Vertex vertex : vertices.values()) { + builder.startObject(); + vertexNumbers.put(vertex, vertexNumbers.size()); + vertex.toXContent(builder, extendedParams); + builder.endObject(); + } + builder.endArray(); + + builder.startArray(CONNECTIONS.getPreferredName()); + for (Connection connection : connections.values()) { + builder.startObject(); + connection.toXContent(builder, extendedParams, vertexNumbers); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "GraphExploreResponsenParser", true, + args -> { + GraphExploreResponse result = new GraphExploreResponse(); + result.vertices = new HashMap<>(); + result.connections = new HashMap<>(); + + result.tookInMillis = (Long) args[0]; + result.timedOut = (Boolean) args[1]; + + @SuppressWarnings("unchecked") + List vertices = (List) args[2]; + @SuppressWarnings("unchecked") + List unresolvedConnections = (List) args[3]; + @SuppressWarnings("unchecked") + List failures = (List) args[4]; + for (Vertex vertex : vertices) { + // reverse-engineer if detailed stats were requested - + // mainly here for testing framework's equality tests + result.returnDetailedInfo = result.returnDetailedInfo || vertex.getFg() > 0; + result.vertices.put(vertex.getId(), vertex); + } + for (UnresolvedConnection 
unresolvedConnection : unresolvedConnections) { + Connection resolvedConnection = unresolvedConnection.resolve(vertices); + result.connections.put(resolvedConnection.getId(), resolvedConnection); + } + if (failures.size() > 0) { + result.shardFailures = failures.toArray(new ShardSearchFailure[failures.size()]); + } + return result; + }); + + static { + PARSER.declareLong(constructorArg(), TOOK); + PARSER.declareBoolean(constructorArg(), TIMED_OUT); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Vertex.fromXContent(p), VERTICES); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> UnresolvedConnection.fromXContent(p), CONNECTIONS); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ShardSearchFailure.fromXContent(p), FAILURES); + } + + public static GraphExploreResponse fromXContext(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java new file mode 100644 index 00000000000..e61403e8b37 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * A Hop represents one of potentially many stages in a graph exploration. + * Each Hop identifies one or more fields in which it will attempt to find + * terms that are significantly connected to the previous Hop. Each field is identified + * using a {@link VertexRequest} + * + *

An example series of Hops on webserver logs would be:
+ * <ol>
+ * <li>an initial Hop to find the top ten IP addresses trying to access
+ * URLs containing the word "admin"</li>
+ * <li>a secondary Hop to see which other URLs those IP addresses were
+ * trying to access</li>
+ * </ol>
+ *
+ * <p>
+ * Optionally, each hop can contain a "guiding query" that further limits the set of documents considered.
+ * In our weblog example above we might choose to constrain the second hop to only look at log records that
+ * had a response code of 404.
+ * </p>
+ * <p>
+ * If absent, the list of {@link VertexRequest}s is inherited from the prior Hop's list to avoid repeating
+ * the fields that will be examined at each stage.
+ * </p>
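+ * <p>
+ * A sketch of the inheritance rule (the request variable and field names are
+ * hypothetical): if the first hop declares a vertex request for "ip" and the second
+ * hop declares none, the second hop explores "ip" as well:
+ * <pre>
+ * Hop first = request.createNextHop(null);
+ * first.addVertexRequest("ip");
+ * Hop second = request.createNextHop(QueryBuilders.termQuery("status", 404));
+ * assert second.getNumberVertexRequests() == 1; // inherited from the first hop
+ * </pre>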
+ * + */ +public class Hop implements ToXContentFragment{ + final Hop parentHop; + List vertices = null; + QueryBuilder guidingQuery = null; + + public Hop(Hop parent) { + this.parentHop = parent; + } + + public ActionRequestValidationException validate(ActionRequestValidationException validationException) { + + if (getEffectiveVertexRequests().size() == 0) { + validationException = ValidateActions.addValidationError(GraphExploreRequest.NO_VERTICES_ERROR_MESSAGE, validationException); + } + return validationException; + + } + + public Hop getParentHop() { + return parentHop; + } + + void writeTo(StreamOutput out) throws IOException { + out.writeOptionalNamedWriteable(guidingQuery); + if (vertices == null) { + out.writeVInt(0); + } else { + out.writeVInt(vertices.size()); + for (VertexRequest vr : vertices) { + vr.writeTo(out); + } + } + } + + void readFrom(StreamInput in) throws IOException { + guidingQuery = in.readOptionalNamedWriteable(QueryBuilder.class); + int size = in.readVInt(); + if (size > 0) { + vertices = new ArrayList<>(); + for (int i = 0; i < size; i++) { + VertexRequest vr = new VertexRequest(); + vr.readFrom(in); + vertices.add(vr); + } + } + } + + public QueryBuilder guidingQuery() { + if (guidingQuery != null) { + return guidingQuery; + } + return QueryBuilders.matchAllQuery(); + } + + /** + * Add a field in which this {@link Hop} will look for terms that are highly linked to + * previous hops and optionally the guiding query. + * + * @param fieldName a field in the chosen index + */ + public VertexRequest addVertexRequest(String fieldName) { + if (vertices == null) { + vertices = new ArrayList<>(); + } + VertexRequest vr = new VertexRequest(); + vr.fieldName(fieldName); + vertices.add(vr); + return vr; + } + + /** + * An optional parameter that focuses the exploration on documents that + * match the given query. + * + * @param queryBuilder any query + */ + public void guidingQuery(QueryBuilder queryBuilder) { + guidingQuery = queryBuilder; + } + + protected List getEffectiveVertexRequests() { + if (vertices != null) { + return vertices; + } + if (parentHop == null) { + return Collections.emptyList(); + } + // otherwise inherit settings from parent + return parentHop.getEffectiveVertexRequests(); + } + + public int getNumberVertexRequests() { + return getEffectiveVertexRequests().size(); + } + + public VertexRequest getVertexRequest(int requestNumber) { + return getEffectiveVertexRequests().get(requestNumber); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (guidingQuery != null) { + builder.field("query"); + guidingQuery.toXContent(builder, params); + } + if(vertices != null && vertices.size()>0) { + builder.startArray("vertices"); + for (VertexRequest vertexRequest : vertices) { + vertexRequest.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java new file mode 100644 index 00000000000..f17812a6396 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java @@ -0,0 +1,255 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * A vertex in a graph response represents a single term (a field and value pair) + * which appears in one or more documents found as part of the graph exploration. + * + * A vertex term could be a bank account number, an email address, a hashtag or any + * other term that appears in documents and is interesting to represent in a network. + */ +public class Vertex implements ToXContentFragment { + + private final String field; + private final String term; + private double weight; + private final int depth; + private final long bg; + private long fg; + private static final ParseField FIELD = new ParseField("field"); + private static final ParseField TERM = new ParseField("term"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DEPTH = new ParseField("depth"); + private static final ParseField FG = new ParseField("fg"); + private static final ParseField BG = new ParseField("bg"); + + + public Vertex(String field, String term, double weight, int depth, long bg, long fg) { + super(); + this.field = field; + this.term = term; + this.weight = weight; + this.depth = depth; + this.bg = bg; + this.fg = fg; + } + + static Vertex readFrom(StreamInput in) throws IOException { + return new Vertex(in.readString(), in.readString(), in.readDouble(), in.readVInt(), in.readVLong(), in.readVLong()); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeString(term); + out.writeDouble(weight); + out.writeVInt(depth); + out.writeVLong(bg); + out.writeVLong(fg); + } + + @Override + public int hashCode() { + return Objects.hash(field, term, weight, depth, bg, fg); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Vertex other = (Vertex) obj; + return depth == other.depth && + weight == other.weight && + bg == other.bg && + fg == other.fg && + Objects.equals(field, other.field) && + Objects.equals(term, other.term); + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + boolean returnDetailedInfo = params.paramAsBoolean(GraphExploreResponse.RETURN_DETAILED_INFO_PARAM, false); + builder.field(FIELD.getPreferredName(), field); + builder.field(TERM.getPreferredName(), term); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DEPTH.getPreferredName(), depth); + if (returnDetailedInfo) { + builder.field(FG.getPreferredName(), fg); + builder.field(BG.getPreferredName(), bg); + } + return builder; + } + + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "VertexParser", true, + args -> { + String field = (String) args[0]; + String term = (String) args[1]; + double weight = 
(Double) args[2]; + int depth = (Integer) args[3]; + Long optionalBg = (Long) args[4]; + Long optionalFg = (Long) args[5]; + long bg = optionalBg == null ? 0 : optionalBg; + long fg = optionalFg == null ? 0 : optionalFg; + return new Vertex(field, term, weight, depth, bg, fg); + }); + + static { + PARSER.declareString(constructorArg(), FIELD); + PARSER.declareString(constructorArg(), TERM); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareInt(constructorArg(), DEPTH); + PARSER.declareLong(optionalConstructorArg(), BG); + PARSER.declareLong(optionalConstructorArg(), FG); + } + + static Vertex fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + + + /** + * @return a {@link VertexId} object that uniquely identifies this Vertex + */ + public VertexId getId() { + return createId(field, term); + } + + /** + * A convenience method for creating a {@link VertexId} + * @param field the field + * @param term the term + * @return a {@link VertexId} that can be used for looking up vertices + */ + public static VertexId createId(String field, String term) { + return new VertexId(field,term); + } + + @Override + public String toString() { + return getId().toString(); + } + + public String getField() { + return field; + } + + public String getTerm() { + return term; + } + + /** + * The weight of a vertex is an accumulation of all of the {@link Connection}s + * that are linked to this {@link Vertex} as part of a graph exploration. + * It is used internally to identify the most interesting vertices to be returned. + * @return a measure of the {@link Vertex}'s relative importance. + */ + public double getWeight() { + return weight; + } + + public void setWeight(final double weight) { + this.weight = weight; + } + + /** + * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) + * this statistic is available. + * @return the number of documents in the index that contain this term (see bg_count in + * + * the significant_terms aggregation) + */ + public long getBg() { + return bg; + } + + /** + * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) + * this statistic is available. + * Together with {@link #getBg()} these numbers are used to derive the significance of a term. + * @return the number of documents in the sample of best matching documents that contain this term (see fg_count in + * + * the significant_terms aggregation) + */ + public long getFg() { + return fg; + } + + public void setFg(final long fg) { + this.fg = fg; + } + + /** + * @return the sequence number in the series of hops where this Vertex term was first encountered + */ + public int getHopDepth() { + return depth; + } + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Vertex} + */ + public static class VertexId { + private final String field; + private final String term; + + public VertexId(String field, String term) { + this.field = field; + this.term = term; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + VertexId vertexId = (VertexId) o; + + if (field != null ? !field.equals(vertexId.field) : vertexId.field != null) + return false; + if (term != null ? !term.equals(vertexId.term) : vertexId.term != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = field != null ? 
field.hashCode() : 0; + result = 31 * result + (term != null ? term.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return field + ":" + term; + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java new file mode 100644 index 00000000000..63d2c616547 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * A request to identify terms from a choice of field as part of a {@link Hop}. + * Optionally, a set of terms can be provided that are used as an exclusion or + * inclusion list to filter which terms are considered. + * + */ +public class VertexRequest implements ToXContentObject { + private String fieldName; + private int size = DEFAULT_SIZE; + public static final int DEFAULT_SIZE = 5; + private Map includes; + private Set excludes; + public static final int DEFAULT_MIN_DOC_COUNT = 3; + private int minDocCount = DEFAULT_MIN_DOC_COUNT; + public static final int DEFAULT_SHARD_MIN_DOC_COUNT = 2; + private int shardMinDocCount = DEFAULT_SHARD_MIN_DOC_COUNT; + + + public VertexRequest() { + + } + + void readFrom(StreamInput in) throws IOException { + fieldName = in.readString(); + size = in.readVInt(); + minDocCount = in.readVInt(); + shardMinDocCount = in.readVInt(); + + int numIncludes = in.readVInt(); + if (numIncludes > 0) { + includes = new HashMap<>(); + for (int i = 0; i < numIncludes; i++) { + TermBoost tb = new TermBoost(); + tb.readFrom(in); + includes.put(tb.term, tb); + } + } + + int numExcludes = in.readVInt(); + if (numExcludes > 0) { + excludes = new HashSet<>(); + for (int i = 0; i < numExcludes; i++) { + excludes.add(in.readString()); + } + } + + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeVInt(size); + out.writeVInt(minDocCount); + out.writeVInt(shardMinDocCount); + + if (includes != null) { + out.writeVInt(includes.size()); + for (TermBoost tb : includes.values()) { + tb.writeTo(out); + } + } else { + out.writeVInt(0); + } + + if (excludes != null) { + out.writeVInt(excludes.size()); + for (String term : excludes) { + out.writeString(term); + } + } else { + out.writeVInt(0); + } + } + + public String fieldName() { + return fieldName; + } + + public VertexRequest fieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + public int size() { + return size; + } + + /** + * @param size The maximum number of terms that should be returned from this field as part of this {@link Hop} + */ + public VertexRequest size(int size) { + this.size = size; + return this; + } + + public boolean hasIncludeClauses() { + 
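+        // note: include and exclude clauses are mutually exclusive - addInclude and
+        // addExclude below throw IllegalArgumentException if the other kind is present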
return includes != null && includes.size() > 0; + } + + public boolean hasExcludeClauses() { + return excludes != null && excludes.size() > 0; + } + + /** + * Adds a term that should be excluded from results + * @param term A term to be excluded + */ + public void addExclude(String term) { + if (includes != null) { + throw new IllegalArgumentException("Cannot have both include and exclude clauses"); + } + if (excludes == null) { + excludes = new HashSet<>(); + } + excludes.add(term); + } + + /** + * Adds a term to the set of allowed values - the boost defines the relative + * importance when pursuing connections in subsequent {@link Hop}s. The boost value + * appears as part of the query. + * @param term a required term + * @param boost an optional boost + */ + public void addInclude(String term, float boost) { + if (excludes != null) { + throw new IllegalArgumentException("Cannot have both include and exclude clauses"); + } + if (includes == null) { + includes = new HashMap<>(); + } + includes.put(term, new TermBoost(term, boost)); + } + + public TermBoost[] includeValues() { + return includes.values().toArray(new TermBoost[includes.size()]); + } + + public String[] includeValuesAsStringArray() { + String[] result = new String[includes.size()]; + int i = 0; + for (TermBoost tb : includes.values()) { + result[i++] = tb.term; + } + return result; + } + + public String[] excludesAsArray() { + return excludes.toArray(new String[excludes.size()]); + } + + public int minDocCount() { + return minDocCount; + } + + /** + * A "certainty" threshold which defines the weight-of-evidence required before + * a term found in this field is identified as a useful connection + * + * @param value The minimum number of documents that contain this term found in the samples used across all shards + */ + public VertexRequest minDocCount(int value) { + minDocCount = value; + return this; + } + + + public int shardMinDocCount() { + return Math.min(shardMinDocCount, minDocCount); + } + + /** + * A "certainty" threshold which defines the weight-of-evidence required before + * a term found in this field is identified as a useful connection + * + * @param value The minimum number of documents that contain this term found in the samples used across all shards + */ + public VertexRequest shardMinDocCount(int value) { + shardMinDocCount = value; + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("field", fieldName); + if (size != DEFAULT_SIZE) { + builder.field("size", size); + } + if (minDocCount != DEFAULT_MIN_DOC_COUNT) { + builder.field("min_doc_count", minDocCount); + } + if (shardMinDocCount != DEFAULT_SHARD_MIN_DOC_COUNT) { + builder.field("shard_min_doc_count", shardMinDocCount); + } + if(includes!=null) { + builder.startArray("include"); + for (TermBoost tb : includes.values()) { + builder.startObject(); + builder.field("term", tb.term); + builder.field("boost", tb.boost); + builder.endObject(); + } + builder.endArray(); + } + if(excludes!=null) { + builder.startArray("exclude"); + for (String value : excludes) { + builder.value(value); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java new file mode 100644 index 00000000000..5d5dd0f5ef6 --- /dev/null +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Graph + * APIs. + */ +package org.elasticsearch.protocol.xpack.graph; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java new file mode 100644 index 00000000000..62353b093b5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + + +public class DeleteLicenseRequest extends AcknowledgedRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java new file mode 100644 index 00000000000..926ce1d1d70 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; + + +public class GetLicenseRequest extends MasterNodeReadRequest { + + public GetLicenseRequest() { + } + + public GetLicenseRequest(StreamInput in) throws IOException { + super(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java new file mode 100644 index 00000000000..6d5e1b5653f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionResponse; + +public class GetLicenseResponse extends ActionResponse { + + private String license; + + GetLicenseResponse() { + } + + public GetLicenseResponse(String license) { + this.license = license; + } + + public String getLicenseDefinition() { + return license; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java new file mode 100644 index 00000000000..5bc66ab745e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.io.IOException; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +/** + * Status of an X-Pack license. + */ +public enum LicenseStatus implements Writeable { + + ACTIVE("active"), + INVALID("invalid"), + EXPIRED("expired"); + + private final String label; + + LicenseStatus(String label) { + this.label = label; + } + + public String label() { + return label; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(label); + } + + public static LicenseStatus readFrom(StreamInput in) throws IOException { + return fromString(in.readString()); + } + + public static LicenseStatus fromString(String value) { + switch (value) { + case "active": + return ACTIVE; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown license status [" + value + "]"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java new file mode 100644 index 00000000000..18745653e76 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.license; + +import java.util.Locale; + +public enum LicensesStatus { + VALID((byte) 0), + INVALID((byte) 1), + EXPIRED((byte) 2); + + private final byte id; + + LicensesStatus(byte id) { + this.id = id; + } + + public int id() { + return id; + } + + public static LicensesStatus fromId(int id) { + if (id == 0) { + return VALID; + } else if (id == 1) { + return INVALID; + } else if (id == 2) { + return EXPIRED; + } else { + throw new IllegalStateException("no valid LicensesStatus for id=" + id); + } + } + + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + public static LicensesStatus fromString(String value) { + switch (value) { + case "valid": + return VALID; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown licenses status [" + value + "]"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java new file mode 100644 index 00000000000..342e6c296e7 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + +public class PutLicenseRequest extends AcknowledgedRequest { + + private String licenseDefinition; + private boolean acknowledge = false; + + public PutLicenseRequest() { + + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public void setLicenseDefinition(String licenseDefinition) { + this.licenseDefinition = licenseDefinition; + } + + public String getLicenseDefinition() { + return licenseDefinition; + } + + public void setAcknowledge(boolean acknowledge) { + this.acknowledge = acknowledge; + } + + public boolean isAcknowledge() { + return acknowledge; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java new file mode 100644 index 00000000000..206c5a3b383 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParseException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.common.ProtocolUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+public class PutLicenseResponse extends AcknowledgedResponse {
+
+    private static final ConstructingObjectParser<PutLicenseResponse, Void> PARSER = new ConstructingObjectParser<>(
+            "put_license_response", true, (a, v) -> {
+        boolean acknowledged = (Boolean) a[0];
+        LicensesStatus licensesStatus = LicensesStatus.fromString((String) a[1]);
+        @SuppressWarnings("unchecked") Tuple<String, Map<String, String[]>> acknowledgements =
+                (Tuple<String, Map<String, String[]>>) a[2];
+        if (acknowledgements == null) {
+            return new PutLicenseResponse(acknowledged, licensesStatus);
+        } else {
+            return new PutLicenseResponse(acknowledged, licensesStatus, acknowledgements.v1(), acknowledgements.v2());
+        }
+
+    });
+
+    static {
+        PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
+        PARSER.declareString(constructorArg(), new ParseField("license_status"));
+        PARSER.declareObject(optionalConstructorArg(), (parser, v) -> {
+                Map<String, String[]> acknowledgeMessages = new HashMap<>();
+                String message = null;
+                XContentParser.Token token;
+                String currentFieldName = null;
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    if (token == XContentParser.Token.FIELD_NAME) {
+                        currentFieldName = parser.currentName();
+                    } else {
+                        if (currentFieldName == null) {
+                            throw new XContentParseException(parser.getTokenLocation(), "expected message header or acknowledgement");
+                        }
+                        if ("message".equals(currentFieldName)) {
+                            if (token != XContentParser.Token.VALUE_STRING) {
+                                throw new XContentParseException(parser.getTokenLocation(), "unexpected message header type");
+                            }
+                            message = parser.text();
+                        } else {
+                            if (token != XContentParser.Token.START_ARRAY) {
+                                throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement type");
+                            }
+                            List<String> acknowledgeMessagesList = new ArrayList<>();
+                            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                                if (token != XContentParser.Token.VALUE_STRING) {
+                                    throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement text");
+                                }
+                                acknowledgeMessagesList.add(parser.text());
+                            }
+                            acknowledgeMessages.put(currentFieldName, acknowledgeMessagesList.toArray(new String[0]));
+                        }
+                    }
+                }
+                return new Tuple<>(message, acknowledgeMessages);
+            },
+            new ParseField("acknowledge"));
+    }
+
+    private LicensesStatus status;
+    private Map<String, String[]> acknowledgeMessages;
+    private String acknowledgeHeader;
+
+    public PutLicenseResponse() {
+    }
+
+    public PutLicenseResponse(boolean acknowledged, LicensesStatus status) {
+        this(acknowledged, status,
null, Collections.emptyMap()); + } + + public PutLicenseResponse(boolean acknowledged, LicensesStatus status, String acknowledgeHeader, + Map acknowledgeMessages) { + super(acknowledged); + this.status = status; + this.acknowledgeHeader = acknowledgeHeader; + this.acknowledgeMessages = acknowledgeMessages; + } + + public LicensesStatus status() { + return status; + } + + public Map acknowledgeMessages() { + return acknowledgeMessages; + } + + public String acknowledgeHeader() { + return acknowledgeHeader; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + status = LicensesStatus.fromId(in.readVInt()); + acknowledgeHeader = in.readOptionalString(); + int size = in.readVInt(); + Map acknowledgeMessages = new HashMap<>(size); + for (int i = 0; i < size; i++) { + String feature = in.readString(); + int nMessages = in.readVInt(); + String[] messages = new String[nMessages]; + for (int j = 0; j < nMessages; j++) { + messages[j] = in.readString(); + } + acknowledgeMessages.put(feature, messages); + } + this.acknowledgeMessages = acknowledgeMessages; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(status.id()); + out.writeOptionalString(acknowledgeHeader); + out.writeVInt(acknowledgeMessages.size()); + for (Map.Entry entry : acknowledgeMessages.entrySet()) { + out.writeString(entry.getKey()); + out.writeVInt(entry.getValue().length); + for (String message : entry.getValue()) { + out.writeString(message); + } + } + } + + @Override + protected void addCustomFields(XContentBuilder builder, Params params) throws IOException { + builder.field("license_status", status.toString()); + if (!acknowledgeMessages.isEmpty()) { + builder.startObject("acknowledge"); + builder.field("message", acknowledgeHeader); + for (Map.Entry entry : acknowledgeMessages.entrySet()) { + builder.startArray(entry.getKey()); + for (String message : entry.getValue()) { + builder.value(message); + } + builder.endArray(); + } + builder.endObject(); + } + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + public static PutLicenseResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + PutLicenseResponse that = (PutLicenseResponse) o; + + return status == that.status && + ProtocolUtils.equals(acknowledgeMessages, that.acknowledgeMessages) && + Objects.equals(acknowledgeHeader, that.acknowledgeHeader); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), status, ProtocolUtils.hashCode(acknowledgeMessages), acknowledgeHeader); + } + + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java new file mode 100644 index 00000000000..a0a80a9958b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+/**
+ * Request and Response objects for the default distribution's License
+ * APIs.
+ */
+package org.elasticsearch.protocol.xpack.license;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java
new file mode 100644
index 00000000000..17afee59fa1
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.master.MasterNodeReadRequest;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Objects;
+
+public class IndexUpgradeInfoRequest extends MasterNodeReadRequest<IndexUpgradeInfoRequest> implements IndicesRequest.Replaceable {
+
+    private String[] indices = Strings.EMPTY_ARRAY;
+    private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);
+
+    public IndexUpgradeInfoRequest(String... indices) {
+        indices(indices);
+    }
+
+    public IndexUpgradeInfoRequest(StreamInput in) throws IOException {
+        super(in);
+        indices = in.readStringArray();
+        indicesOptions = IndicesOptions.readIndicesOptions(in);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeStringArray(indices);
+        indicesOptions.writeIndicesOptions(out);
+    }
+
+    @Override
+    public String[] indices() {
+        return indices;
+    }
+
+    @Override
+    public IndexUpgradeInfoRequest indices(String... indices) {
+        this.indices = Objects.requireNonNull(indices, "indices cannot be null");
+        return this;
+    }
+
+    @Override
+    public IndicesOptions indicesOptions() {
+        return indicesOptions;
+    }
+
+    public void indicesOptions(IndicesOptions indicesOptions) {
+        this.indicesOptions = indicesOptions;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        IndexUpgradeInfoRequest request = (IndexUpgradeInfoRequest) o;
+        return Arrays.equals(indices, request.indices) &&
+            Objects.equals(indicesOptions.toString(), request.indicesOptions.toString());
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString());
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java
new file mode 100644
index 00000000000..17115ac9b17
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+public class IndexUpgradeInfoResponse extends ActionResponse implements ToXContentObject {
+
+    private static final ParseField INDICES = new ParseField("indices");
+    private static final ParseField ACTION_REQUIRED = new ParseField("action_required");
+
+    private static final ConstructingObjectParser<IndexUpgradeInfoResponse, Void> PARSER =
+        new ConstructingObjectParser<>("IndexUpgradeInfoResponse",
+            true,
+            (a, c) -> {
+                @SuppressWarnings("unchecked")
+                Map<String, Object> map = (Map<String, Object>)a[0];
+                Map<String, UpgradeActionRequired> actionsRequired = map.entrySet().stream()
+                    .filter(e -> {
+                        if (e.getValue() instanceof Map == false) {
+                            return false;
+                        }
+                        @SuppressWarnings("unchecked")
+                        Map<String, Object> value = (Map<String, Object>)e.getValue();
+                        return value.containsKey(ACTION_REQUIRED.getPreferredName());
+                    })
+                    .collect(Collectors.toMap(
+                        Map.Entry::getKey,
+                        e -> {
+                            @SuppressWarnings("unchecked")
+                            Map<String, Object> value = (Map<String, Object>) e.getValue();
+                            return UpgradeActionRequired.fromString((String)value.get(ACTION_REQUIRED.getPreferredName()));
+                        }
+                    ));
+                return new IndexUpgradeInfoResponse(actionsRequired);
+            });
+
+    static {
+        PARSER.declareObject(constructorArg(), (p, c) -> p.map(), INDICES);
+    }
+
+
+    private Map<String, UpgradeActionRequired> actions;
+
+    public IndexUpgradeInfoResponse() {
+
+    }
+
+    public IndexUpgradeInfoResponse(Map<String, UpgradeActionRequired> actions) {
+        this.actions = actions;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
+    }
+
+    public Map<String, UpgradeActionRequired> getActions() {
+        return actions;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        {
+            builder.startObject(INDICES.getPreferredName());
+            for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
+                builder.startObject(entry.getKey());
+                {
+                    builder.field(ACTION_REQUIRED.getPreferredName(), entry.getValue().toString());
+                }
+                builder.endObject();
+            }
+            builder.endObject();
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        IndexUpgradeInfoResponse response = (IndexUpgradeInfoResponse) o;
+        return Objects.equals(actions, response.actions);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(actions);
+    }
+
+    public static IndexUpgradeInfoResponse fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java
new file mode 100644
index 00000000000..dce1c7d18f5
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+
+import java.io.IOException;
+import java.util.Locale;
+
+/**
+ * Indicates the type of the upgrade required for the index
+ */
+public enum UpgradeActionRequired implements Writeable {
+    NOT_APPLICABLE, // Indicates that the check is not applicable to this index type, the next check will be performed
+    UP_TO_DATE, // Indicates that the check finds this index to be up to date - no additional checks are required
+    REINDEX, // The index should be reindexed
+    UPGRADE; // The index should go through the upgrade procedure
+
+    public static UpgradeActionRequired fromString(String value) {
+        return UpgradeActionRequired.valueOf(value.toUpperCase(Locale.ROOT));
+    }
+
+    public static UpgradeActionRequired readFromStream(StreamInput in) throws IOException {
+        return in.readEnum(UpgradeActionRequired.class);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeEnum(this);
+    }
+
+    @Override
+    public String toString() {
+        return name().toLowerCase(Locale.ROOT);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java
new file mode 100644
index 00000000000..7c52f6a8fd4
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for the default distribution's Migration
+ * APIs.
+ */
+package org.elasticsearch.protocol.xpack.migration;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java
new file mode 100644
index 00000000000..3ed877d08cc
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for miscellaneous X-Pack APIs.
+ */
+package org.elasticsearch.protocol.xpack;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java
new file mode 100644
index 00000000000..e5b116a3a7a
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.security;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+
+/**
+ * An authenticated user
+ */
+public class User implements ToXContentObject {
+
+    private final String username;
+    private final String[] roles;
+    private final User authenticatedUser;
+    private final Map<String, Object> metadata;
+    private final boolean enabled;
+
+    @Nullable private final String fullName;
+    @Nullable private final String email;
+
+    public User(String username, String... roles) {
+        this(username, roles, null, null, null, true);
+    }
+
+    public User(String username, String[] roles, User authenticatedUser) {
+        this(username, roles, null, null, null, true, authenticatedUser);
+    }
+
+    public User(User user, User authenticatedUser) {
+        this(user.principal(), user.roles(), user.fullName(), user.email(), user.metadata(), user.enabled(), authenticatedUser);
+    }
+
+    public User(String username, String[] roles, String fullName, String email, Map<String, Object> metadata, boolean enabled) {
+        this(username, roles, fullName, email, metadata, enabled, null);
+    }
+
+    private User(String username, String[] roles, String fullName, String email, Map<String, Object> metadata, boolean enabled,
+                 User authenticatedUser) {
+        this.username = username;
+        this.roles = roles == null ? Strings.EMPTY_ARRAY : roles;
+        this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap();
+        this.fullName = fullName;
+        this.email = email;
+        this.enabled = enabled;
+        assert (authenticatedUser == null || authenticatedUser.isRunAs() == false) : "the authenticated user should not be a run_as user";
+        this.authenticatedUser = authenticatedUser;
+    }
+
+    /**
+     * @return The principal of this user - effectively serving as the
+     *         unique identity of the user.
+     */
+    public String principal() {
+        return this.username;
+    }
+
+    /**
+     * @return The roles this user is associated with. The roles are
+     *         identified by their unique names and each represents a
+     *         set of permissions
+     */
+    public String[] roles() {
+        return this.roles;
+    }
+
+    /**
+     * @return The metadata that is associated with this user. Can never be {@code null}.
+     */
+    public Map<String, Object> metadata() {
+        return metadata;
+    }
+
+    /**
+     * @return The full name of this user. May be {@code null}.
+     */
+    public String fullName() {
+        return fullName;
+    }
+
+    /**
+     * @return The email of this user. May be {@code null}.
+     */
+    public String email() {
+        return email;
+    }
+
+    /**
+     * @return whether the user is enabled or not
+     */
+    public boolean enabled() {
+        return enabled;
+    }
+
+    /**
+     * @return The user that was originally authenticated.
+     * This may be the user itself, or a different user which used runAs.
+     */
+    public User authenticatedUser() {
+        return authenticatedUser == null ? this : authenticatedUser;
+    }
+
+    /** Return true if this user was not the originally authenticated user, false otherwise. */
+    public boolean isRunAs() {
+        return authenticatedUser != null;
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("User[username=").append(username);
+        sb.append(",roles=[").append(Strings.arrayToCommaDelimitedString(roles)).append("]");
+        sb.append(",fullName=").append(fullName);
+        sb.append(",email=").append(email);
+        sb.append(",metadata=");
+        sb.append(metadata);
+        if (authenticatedUser != null) {
+            sb.append(",authenticatedUser=[").append(authenticatedUser.toString()).append("]");
+        }
+        sb.append("]");
+        return sb.toString();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o instanceof User == false) return false;
+
+        User user = (User) o;
+
+        if (!username.equals(user.username)) return false;
+        if (!Arrays.equals(roles, user.roles)) return false;
+        if (authenticatedUser != null ? !authenticatedUser.equals(user.authenticatedUser) : user.authenticatedUser != null) return false;
+        if (!metadata.equals(user.metadata)) return false;
+        if (fullName != null ? !fullName.equals(user.fullName) : user.fullName != null) return false;
+        return !(email != null ? !email.equals(user.email) : user.email != null);
+
+    }
+
+    @Override
+    public int hashCode() {
+        int result = username.hashCode();
+        result = 31 * result + Arrays.hashCode(roles);
+        result = 31 * result + (authenticatedUser != null ? authenticatedUser.hashCode() : 0);
+        result = 31 * result + metadata.hashCode();
+        result = 31 * result + (fullName != null ? fullName.hashCode() : 0);
+        result = 31 * result + (email != null ? email.hashCode() : 0);
+        return result;
+    }
+
+    @Override
+    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Fields.USERNAME.getPreferredName(), principal());
+        builder.array(Fields.ROLES.getPreferredName(), roles());
+        builder.field(Fields.FULL_NAME.getPreferredName(), fullName());
+        builder.field(Fields.EMAIL.getPreferredName(), email());
+        builder.field(Fields.METADATA.getPreferredName(), metadata());
+        builder.field(Fields.ENABLED.getPreferredName(), enabled());
+        return builder.endObject();
+    }
+
+    public static User partialReadFrom(String username, StreamInput input) throws IOException {
+        String[] roles = input.readStringArray();
+        Map<String, Object> metadata = input.readMap();
+        String fullName = input.readOptionalString();
+        String email = input.readOptionalString();
+        boolean enabled = input.readBoolean();
+        User outerUser = new User(username, roles, fullName, email, metadata, enabled, null);
+        boolean hasInnerUser = input.readBoolean();
+        if (hasInnerUser) {
+            User innerUser = readFrom(input);
+            if (input.getVersion().onOrBefore(Version.V_5_4_0)) {
+                // backcompat: runas user was read first, so reverse outer and inner
+                return new User(innerUser, outerUser);
+            } else {
+                return new User(outerUser, innerUser);
+            }
+        } else {
+            return outerUser;
+        }
+    }
+
+    public static User readFrom(StreamInput input) throws IOException {
+        final boolean isInternalUser = input.readBoolean();
+        assert isInternalUser == false: "should always return false. 
Internal users should use the InternalUserSerializationHelper"; + final String username = input.readString(); + return partialReadFrom(username, input); + } + + public static void writeTo(User user, StreamOutput output) throws IOException { + if (user.authenticatedUser == null) { + // no backcompat necessary, since there is no inner user + writeUser(user, output); + } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { + // backcompat: write runas user as the "inner" user + writeUser(user.authenticatedUser, output); + output.writeBoolean(true); + writeUser(user, output); + } else { + writeUser(user, output); + output.writeBoolean(true); + writeUser(user.authenticatedUser, output); + } + output.writeBoolean(false); // last user written, regardless of bwc, does not have an inner user + } + + /** Write just the given {@link User}, but not the inner {@link #authenticatedUser}. */ + private static void writeUser(User user, StreamOutput output) throws IOException { + output.writeBoolean(false); // not a system user + output.writeString(user.username); + output.writeStringArray(user.roles); + output.writeMap(user.metadata); + output.writeOptionalString(user.fullName); + output.writeOptionalString(user.email); + output.writeBoolean(user.enabled); + } + + public interface Fields { + ParseField USERNAME = new ParseField("username"); + ParseField PASSWORD = new ParseField("password"); + ParseField PASSWORD_HASH = new ParseField("password_hash"); + ParseField ROLES = new ParseField("roles"); + ParseField FULL_NAME = new ParseField("full_name"); + ParseField EMAIL = new ParseField("email"); + ParseField METADATA = new ParseField("metadata"); + ParseField ENABLED = new ParseField("enabled"); + ParseField TYPE = new ParseField("type"); + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java new file mode 100644 index 00000000000..ce627b267f3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Security + * APIs. + */ +package org.elasticsearch.protocol.xpack.security; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java new file mode 100644 index 00000000000..4a458b69a75 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ValidateActions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.lucene.uid.Versions;
+
+import java.io.IOException;
+
+/**
+ * A delete watch request to delete a watch by name (id)
+ */
+public class DeleteWatchRequest extends ActionRequest {
+
+    private String id;
+    private long version = Versions.MATCH_ANY;
+
+    public DeleteWatchRequest() {
+        this(null);
+    }
+
+    public DeleteWatchRequest(String id) {
+        this.id = id;
+    }
+
+    /**
+     * @return The name of the watch to be deleted
+     */
+    public String getId() {
+        return id;
+    }
+
+    /**
+     * Sets the name of the watch to be deleted
+     */
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        ActionRequestValidationException validationException = null;
+        if (id == null){
+            validationException = ValidateActions.addValidationError("watch id is missing", validationException);
+        } else if (PutWatchRequest.isValidId(id) == false) {
+            validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException);
+        }
+        return validationException;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        id = in.readString();
+        version = in.readLong();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(id);
+        out.writeLong(version);
+    }
+
+    @Override
+    public String toString() {
+        return "delete [" + id + "]";
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java
new file mode 100644
index 00000000000..39cd5e966fa
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class DeleteWatchResponse extends ActionResponse implements ToXContentObject {
+
+    private static final ObjectParser<DeleteWatchResponse, Void> PARSER
+        = new ObjectParser<>("x_pack_delete_watch_response", DeleteWatchResponse::new);
+    static {
+        PARSER.declareString(DeleteWatchResponse::setId, new ParseField("_id"));
+        PARSER.declareLong(DeleteWatchResponse::setVersion, new ParseField("_version"));
+        PARSER.declareBoolean(DeleteWatchResponse::setFound, new ParseField("found"));
+    }
+
+    private String id;
+    private long version;
+    private boolean found;
+
+    public DeleteWatchResponse() {
+    }
+
+    public DeleteWatchResponse(String id, long version, boolean found) {
+        this.id = id;
+        this.version = version;
+        this.found = found;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public long getVersion() {
+        return version;
+    }
+
+    public boolean isFound() {
+        return found;
+    }
+
+    private void setId(String id) {
+        this.id = id;
+    }
+
+    private void setVersion(long version) {
+        this.version = version;
+    }
+
+    private void setFound(boolean found) {
+        this.found = found;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        DeleteWatchResponse that = (DeleteWatchResponse) o;
+
+        return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(found, that.found);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(id, version, found);
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        id = in.readString();
+        version = in.readVLong();
+        found = in.readBoolean();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(id);
+        out.writeVLong(version);
+        out.writeBoolean(found);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        return builder.startObject()
+            .field("_id", id)
+            .field("_version", version)
+            .field("found", found)
+            .endObject();
+    }
+
+    public static DeleteWatchResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java
new file mode 100644
index 00000000000..7997d853db3
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.regex.Pattern; + +/** + * This request class contains the data needed to create a watch along with the name of the watch. + * The name of the watch will become the ID of the indexed document. + */ +public final class PutWatchRequest extends ActionRequest { + + private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+"); + + private String id; + private BytesReference source; + private XContentType xContentType = XContentType.JSON; + private boolean active = true; + private long version = Versions.MATCH_ANY; + + public PutWatchRequest() {} + + public PutWatchRequest(StreamInput in) throws IOException { + readFrom(in); + } + + public PutWatchRequest(String id, BytesReference source, XContentType xContentType) { + this.id = id; + this.source = source; + this.xContentType = xContentType; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + source = in.readBytesReference(); + active = in.readBoolean(); + xContentType = in.readEnum(XContentType.class); + version = in.readZLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBytesReference(source); + out.writeBoolean(active); + out.writeEnum(xContentType); + out.writeZLong(version); + } + + /** + * @return The name that will be the ID of the indexed document + */ + public String getId() { + return id; + } + + /** + * Set the watch name + */ + public void setId(String id) { + this.id = id; + } + + /** + * @return The source of the watch + */ + public BytesReference getSource() { + return source; + } + + /** + * Set the source of the watch + */ + public void setSource(BytesReference source, XContentType xContentType) { + this.source = source; + this.xContentType = xContentType; + } + + /** + * @return The initial active state of the watch (defaults to {@code true}, e.g. 
"active") + */ + public boolean isActive() { + return active; + } + + /** + * Sets the initial active state of the watch + */ + public void setActive(boolean active) { + this.active = active; + } + + /** + * Get the content type for the source + */ + public XContentType xContentType() { + return xContentType; + } + + public long getVersion() { + return version; + } + + public void setVersion(long version) { + this.version = version; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (id == null) { + validationException = ValidateActions.addValidationError("watch id is missing", validationException); + } else if (isValidId(id) == false) { + validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); + } + if (source == null) { + validationException = ValidateActions.addValidationError("watch source is missing", validationException); + } + if (xContentType == null) { + validationException = ValidateActions.addValidationError("request body is missing", validationException); + } + return validationException; + } + + public static boolean isValidId(String id) { + return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java new file mode 100644 index 00000000000..f6e55ff5553 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class PutWatchResponse extends ActionResponse implements ToXContentObject {
+
+    private static final ObjectParser<PutWatchResponse, Void> PARSER
+        = new ObjectParser<>("x_pack_put_watch_response", PutWatchResponse::new);
+    static {
+        PARSER.declareString(PutWatchResponse::setId, new ParseField("_id"));
+        PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version"));
+        PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created"));
+    }
+
+    private String id;
+    private long version;
+    private boolean created;
+
+    public PutWatchResponse() {
+    }
+
+    public PutWatchResponse(String id, long version, boolean created) {
+        this.id = id;
+        this.version = version;
+        this.created = created;
+    }
+
+    private void setId(String id) {
+        this.id = id;
+    }
+
+    private void setVersion(long version) {
+        this.version = version;
+    }
+
+    private void setCreated(boolean created) {
+        this.created = created;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public long getVersion() {
+        return version;
+    }
+
+    public boolean isCreated() {
+        return created;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        PutWatchResponse that = (PutWatchResponse) o;
+
+        return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(created, that.created);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(id, version, created);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(id);
+        out.writeVLong(version);
+        out.writeBoolean(created);
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        id = in.readString();
+        version = in.readVLong();
+        created = in.readBoolean();
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        return builder.startObject()
+            .field("_id", id)
+            .field("_version", version)
+            .field("created", created)
+            .endObject();
+    }
+
+    public static PutWatchResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java
new file mode 100644
index 00000000000..0d9edf3b5c0
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for the default distribution's Watcher
+ * APIs.
+ */
+package org.elasticsearch.protocol.xpack.watcher;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java
new file mode 100644
index 00000000000..fac99959c53
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack;
+
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet;
+import org.elasticsearch.protocol.xpack.license.LicenseStatus;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.io.IOException;
+
+public class XPackInfoResponseTests extends AbstractStreamableXContentTestCase<XPackInfoResponse> {
+    @Override
+    protected XPackInfoResponse doParseInstance(XContentParser parser) throws IOException {
+        return XPackInfoResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected XPackInfoResponse createBlankInstance() {
+        return new XPackInfoResponse();
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return path -> path.equals("features")
+            || (path.startsWith("features") && path.endsWith("native_code_info"));
+    }
+
+    @Override
+    protected ToXContent.Params getToXContentParams() {
+        Map<String, String> params = new HashMap<>();
+        if (randomBoolean()) {
+            params.put("human", randomBoolean() ? "true" : "false");
+        }
+        if (randomBoolean()) {
+            params.put("categories", "_none");
+        }
+        return new ToXContent.MapParams(params);
+    }
+
+    @Override
+    protected XPackInfoResponse createTestInstance() {
+        return new XPackInfoResponse(
+            randomBoolean() ? null : randomBuildInfo(),
+            randomBoolean() ? null : randomLicenseInfo(),
+            randomBoolean() ? null : randomFeatureSetsInfo());
+    }
+
+    @Override
+    protected XPackInfoResponse mutateInstance(XPackInfoResponse response) {
+        @SuppressWarnings("unchecked")
+        Function<XPackInfoResponse, XPackInfoResponse> mutator = randomFrom(
+            r -> new XPackInfoResponse(
+                mutateBuildInfo(r.getBuildInfo()),
+                r.getLicenseInfo(),
+                r.getFeatureSetsInfo()),
+            r -> new XPackInfoResponse(
+                r.getBuildInfo(),
+                mutateLicenseInfo(r.getLicenseInfo()),
+                r.getFeatureSetsInfo()),
+            r -> new XPackInfoResponse(
+                r.getBuildInfo(),
+                r.getLicenseInfo(),
+                mutateFeatureSetsInfo(r.getFeatureSetsInfo())));
+        return mutator.apply(response);
+    }
+
+    private BuildInfo randomBuildInfo() {
+        return new BuildInfo(
+            randomAlphaOfLength(10),
+            randomAlphaOfLength(15));
+    }
+
+    private BuildInfo mutateBuildInfo(BuildInfo buildInfo) {
+        if (buildInfo == null) {
+            return randomBuildInfo();
+        }
+        return null;
+    }
+
+    private LicenseInfo randomLicenseInfo() {
+        return new LicenseInfo(
+            randomAlphaOfLength(10),
+            randomAlphaOfLength(4),
+            randomAlphaOfLength(5),
+            randomFrom(LicenseStatus.values()),
+            randomLong());
+    }
+
+    private LicenseInfo mutateLicenseInfo(LicenseInfo licenseInfo) {
+        if (licenseInfo == null) {
+            return randomLicenseInfo();
+        }
+        return null;
+    }
+
+    private FeatureSetsInfo randomFeatureSetsInfo() {
+        int size = between(0, 10);
+        Set<FeatureSet> featureSets = new HashSet<>(size);
+        while (featureSets.size() < size) {
+            featureSets.add(randomFeatureSet());
+        }
+        return new FeatureSetsInfo(featureSets);
+    }
+
+    private FeatureSetsInfo mutateFeatureSetsInfo(FeatureSetsInfo featureSetsInfo) {
+        if (featureSetsInfo == null) {
+            return randomFeatureSetsInfo();
+        }
+        return null;
+    }
+
+    private FeatureSet randomFeatureSet() {
+        return new FeatureSet(
+            randomAlphaOfLength(5),
+            randomBoolean() ? null : randomAlphaOfLength(20),
+            randomBoolean(),
+            randomBoolean(),
+            randomNativeCodeInfo());
+    }
+
+    private Map<String, Object> randomNativeCodeInfo() {
+        if (randomBoolean()) {
+            return null;
+        }
+        int size = between(0, 10);
+        Map<String, Object> nativeCodeInfo = new HashMap<>(size);
+        while (nativeCodeInfo.size() < size) {
+            nativeCodeInfo.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
+        }
+        return nativeCodeInfo;
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java
new file mode 100644
index 00000000000..c4e29d7c230
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.common;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ProtocolUtilsTests extends ESTestCase {
+
+    public void testMapStringEqualsAndHash() {
+        assertTrue(ProtocolUtils.equals(null, null));
+        assertFalse(ProtocolUtils.equals(null, new HashMap<>()));
+        assertFalse(ProtocolUtils.equals(new HashMap<>(), null));
+
+        Map<String, String[]> a = new HashMap<>();
+        a.put("foo", new String[] { "a", "b" });
+        a.put("bar", new String[] { "b", "c" });
+
+        Map<String, String[]> b = new HashMap<>();
+        b.put("foo", new String[] { "a", "b" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("bar", new String[] { "c", "b" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("bar", new String[] { "b", "c" });
+
+        assertTrue(ProtocolUtils.equals(a, b));
+        assertTrue(ProtocolUtils.equals(b, a));
+        assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b));
+
+        b.put("baz", new String[] { "b", "c" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        a.put("non", null);
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("non", null);
+        b.remove("baz");
+
+        assertTrue(ProtocolUtils.equals(a, b));
+        assertTrue(ProtocolUtils.equals(b, a));
+        assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
new file mode 100644
index 00000000000..4331bdd3780
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.graph;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ShardOperationFailedException;
+import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class GraphExploreResponseTests extends AbstractXContentTestCase< GraphExploreResponse> {
+
+    @Override
+    protected GraphExploreResponse createTestInstance() {
+        return createInstance(0);
+    }
+    private static GraphExploreResponse createInstance(int numFailures) {
+        int numItems = randomIntBetween(4, 128);
+        boolean timedOut = randomBoolean();
+        boolean showDetails = randomBoolean();
+        long overallTookInMillis = randomNonNegativeLong();
+        Map<Vertex.VertexId, Vertex> vertices = new HashMap<>();
+        Map<Connection.ConnectionId, Connection> connections = new HashMap<>();
+        ShardOperationFailedException [] failures = new ShardOperationFailedException [numFailures];
+        for (int i = 0; i < failures.length; i++) {
+            failures[i] = new ShardSearchFailure(new ElasticsearchException("an error"));
+        }
+
+        //Create random set of vertices
+        for (int i = 0; i < numItems; i++) {
+            Vertex v = new Vertex("field1", randomAlphaOfLength(5), randomDouble(), 0,
+                    showDetails?randomIntBetween(100, 200):0,
+                    showDetails?randomIntBetween(1, 100):0);
+            vertices.put(v.getId(), v);
+        }
+
+        //Wire up half the vertices randomly
+        Vertex[] vs = vertices.values().toArray(new Vertex[vertices.size()]);
+        for (int i = 0; i < numItems/2; i++) {
+            Vertex v1 = vs[randomIntBetween(0, vs.length-1)];
+            Vertex v2 = vs[randomIntBetween(0, vs.length-1)];
+            if(v1 != v2) {
+                Connection conn = new Connection(v1, v2, randomDouble(), randomLongBetween(1, 10));
+                connections.put(conn.getId(), conn);
+            }
+        }
+        return new GraphExploreResponse(overallTookInMillis, timedOut, failures, vertices, connections, showDetails);
+    }
+
+
+    private static GraphExploreResponse createTestInstanceWithFailures() {
+        return createInstance(randomIntBetween(1, 128));
+    }
+
+    @Override
+    protected GraphExploreResponse doParseInstance(XContentParser parser) throws IOException {
+        return GraphExploreResponse.fromXContext(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    protected Predicate<String> getRandomFieldsExcludeFilterWhenResultHasErrors() {
+        return field -> field.startsWith("responses");
+    }
+
+    @Override
+    protected void assertEqualInstances( GraphExploreResponse expectedInstance, GraphExploreResponse newInstance) {
+        assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook()));
+        assertThat(newInstance.isTimedOut(), equalTo(expectedInstance.isTimedOut()));
+
+        Connection[] newConns = newInstance.getConnections().toArray(new Connection[0]);
+        Connection[] expectedConns = expectedInstance.getConnections().toArray(new Connection[0]);
+        assertArrayEquals(expectedConns, newConns);
+
+        Vertex[] newVertices = newInstance.getVertices().toArray(new Vertex[0]);
+        Vertex[] expectedVertices = expectedInstance.getVertices().toArray(new Vertex[0]);
+        assertArrayEquals(expectedVertices, newVertices);
+
+        ShardOperationFailedException[] newFailures = newInstance.getShardFailures();
+        ShardOperationFailedException[] expectedFailures = 
expectedInstance.getShardFailures(); + assertEquals(expectedFailures.length, newFailures.length); + + } + + /** + * Test parsing {@link GraphExploreResponse} with inner failures as they don't support asserting on xcontent equivalence, given + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier< GraphExploreResponse> instanceSupplier = GraphExploreResponseTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. + boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, + getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java new file mode 100644 index 00000000000..7149477d007 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.io.IOException; + +import org.elasticsearch.test.ESTestCase; + +public class LicenseStatusTests extends ESTestCase { + public void testSerialization() throws IOException { + LicenseStatus status = randomFrom(LicenseStatus.values()); + assertSame(status, copyWriteable(status, writableRegistry(), LicenseStatus::readFrom)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java new file mode 100644 index 00000000000..a09fd6fb99b --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.function.Predicate;
+
+public class PutLicenseResponseTests extends AbstractStreamableXContentTestCase<PutLicenseResponse> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        // The structure of the response is such that unknown fields inside acknowledge cannot be supported since they
+        // are treated as messages from new services
+        return p -> p.startsWith("acknowledge");
+    }
+
+    @Override
+    protected PutLicenseResponse createTestInstance() {
+        boolean acknowledged = randomBoolean();
+        LicensesStatus status = randomFrom(LicensesStatus.VALID, LicensesStatus.INVALID, LicensesStatus.EXPIRED);
+        String messageHeader;
+        Map<String, String[]> ackMessages;
+        if (randomBoolean()) {
+            messageHeader = randomAlphaOfLength(10);
+            ackMessages = randomAckMessages();
+        } else {
+            messageHeader = null;
+            ackMessages = Collections.emptyMap();
+        }
+
+        return new PutLicenseResponse(acknowledged, status, messageHeader, ackMessages);
+    }
+
+    private static Map<String, String[]> randomAckMessages() {
+        int nFeatures = randomIntBetween(1, 5);
+
+        Map<String, String[]> ackMessages = new HashMap<>();
+
+        for (int i = 0; i < nFeatures; i++) {
+            String feature = randomAlphaOfLengthBetween(9, 15);
+            int nMessages = randomIntBetween(1, 5);
+            String[] messages = new String[nMessages];
+            for (int j = 0; j < nMessages; j++) {
+                messages[j] = randomAlphaOfLengthBetween(10, 30);
+            }
+            ackMessages.put(feature, messages);
+        }
+
+        return ackMessages;
+    }
+
+    @Override
+    protected PutLicenseResponse doParseInstance(XContentParser parser) throws IOException {
+        return PutLicenseResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected PutLicenseResponse createBlankInstance() {
+        return new PutLicenseResponse();
+    }
+
+    @Override
+    protected PutLicenseResponse mutateInstance(PutLicenseResponse response) {
+        @SuppressWarnings("unchecked")
+        Function<PutLicenseResponse, PutLicenseResponse> mutator = randomFrom(
+            r -> new PutLicenseResponse(
+                r.isAcknowledged() == false,
+                r.status(),
+                r.acknowledgeHeader(),
+                r.acknowledgeMessages()),
+            r -> new PutLicenseResponse(
+                r.isAcknowledged(),
+                mutateStatus(r.status()),
+                r.acknowledgeHeader(),
+                r.acknowledgeMessages()),
+            r -> {
+                if (r.acknowledgeMessages().isEmpty()) {
+                    return new PutLicenseResponse(
+                        r.isAcknowledged(),
+                        r.status(),
+                        randomAlphaOfLength(10),
+                        randomAckMessages()
+                    );
+                } else {
+                    return new PutLicenseResponse(r.isAcknowledged(), r.status());
+                }
+            }
+
+        );
+        return mutator.apply(response);
+    }
+
+    private LicensesStatus mutateStatus(LicensesStatus status) {
+        return randomValueOtherThan(status, () -> randomFrom(LicensesStatus.values()));
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java
new file mode 100644
index 00000000000..0e09a05fb96
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+public class IndexUpgradeInfoRequestTests extends AbstractWireSerializingTestCase<IndexUpgradeInfoRequest> {
+    @Override
+    protected IndexUpgradeInfoRequest createTestInstance() {
+        int indexCount = randomInt(4);
+        String[] indices = new String[indexCount];
+        for (int i = 0; i < indexCount; i++) {
+            indices[i] = randomAlphaOfLength(10);
+        }
+        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(indices);
+        if (randomBoolean()) {
+            request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
+        }
+        return request;
+    }
+
+    @Override
+    protected Writeable.Reader<IndexUpgradeInfoRequest> instanceReader() {
+        return IndexUpgradeInfoRequest::new;
+    }
+
+    public void testNullIndices() {
+        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest((String[])null));
+        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest().indices((String[])null));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java
new file mode 100644
index 00000000000..57f01a4454e
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+public class IndexUpgradeInfoResponseTests extends AbstractStreamableXContentTestCase<IndexUpgradeInfoResponse> {
+    @Override
+    protected IndexUpgradeInfoResponse doParseInstance(XContentParser parser) {
+        return IndexUpgradeInfoResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createBlankInstance() {
+        return new IndexUpgradeInfoResponse();
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createTestInstance() {
+        return randomIndexUpgradeInfoResponse(randomIntBetween(0, 10));
+    }
+
+    private static IndexUpgradeInfoResponse randomIndexUpgradeInfoResponse(int numIndices) {
+        Map<String, UpgradeActionRequired> actions = new HashMap<>();
+        for (int i = 0; i < numIndices; i++) {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse mutateInstance(IndexUpgradeInfoResponse instance) {
+        if (instance.getActions().size() == 0) {
+            return randomIndexUpgradeInfoResponse(1);
+        }
+        Map<String, UpgradeActionRequired> actions = new HashMap<>(instance.getActions());
+        if (randomBoolean()) {
+            Iterator<Map.Entry<String, UpgradeActionRequired>> iterator = actions.entrySet().iterator();
+            iterator.next();
+            iterator.remove();
+        } else {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java
new file mode 100644
index 00000000000..28a27e63998
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.security;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.is;
+
+public class UserTests extends ESTestCase {
+
+    public void testUserToString() {
+        User user = new User("u1", "r1");
+        assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={}]"));
+        user = new User("u1", new String[] { "r1", "r2" }, "user1", "user1@domain.com", Collections.singletonMap("key", "val"), true);
+        assertThat(user.toString(), is("User[username=u1,roles=[r1,r2],fullName=user1,email=user1@domain.com,metadata={key=val}]"));
+        user = new User("u1", new String[] {"r1"}, new User("u2", "r2", "r3"));
+        assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={},"
+            + "authenticatedUser=[User[username=u2,roles=[r2,r3],fullName=null,email=null,metadata={}]]]"));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java
new file mode 100644
index 00000000000..209bc790a8c
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class DeleteWatchResponseTests extends AbstractXContentTestCase<DeleteWatchResponse> {
+
+    @Override
+    protected DeleteWatchResponse createTestInstance() {
+        String id = randomAlphaOfLength(10);
+        long version = randomLongBetween(1, 10);
+        boolean found = randomBoolean();
+        return new DeleteWatchResponse(id, version, found);
+    }
+
+    @Override
+    protected DeleteWatchResponse doParseInstance(XContentParser parser) throws IOException {
+        return DeleteWatchResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java
new file mode 100644
index 00000000000..1fc2f61b684
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class PutWatchResponseTests extends AbstractXContentTestCase<PutWatchResponse> {
+
+    @Override
+    protected PutWatchResponse createTestInstance() {
+        String id = randomAlphaOfLength(10);
+        long version = randomLongBetween(1, 10);
+        boolean created = randomBoolean();
+        return new PutWatchResponse(id, version, created);
+    }
+
+    @Override
+    protected PutWatchResponse doParseInstance(XContentParser parser) throws IOException {
+        return PutWatchResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}

From 0da981a6a9d3c687dbff6de6f866997b5a8c30d2 Mon Sep 17 00:00:00 2001
From: Zachary Tong
Date: Thu, 23 Aug 2018 11:43:48 -0400
Subject: [PATCH 48/48] [TEST] Add some ACL yaml tests for Rollup (#33035)

These two tests complement the existing unit tests which check Rollup's
ACL/security integration.

The first test creates two indices, puts a document in each one, and
then assigns a role to the test user that can only access one of the
indices. A rollup job is created with a pattern that would match both
indices, and we verify that only the allowed document was rolled up
(i.e. the index the user has no permissions for stays hidden).

The second test creates a single index with two documents tagged by
the keyword "public"/"private". An attribute-based role is created
that only allows viewing "public" documents. We then verify the rollup
job only rolled up the "public" doc, and not the "private" one.
---
 .../test/rollup/security_tests.yml            | 343 ++++++++++++++++++
 1 file changed, 343 insertions(+)
 create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml

diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml
new file mode 100644
index 00000000000..57bfd821ea2
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml
@@ -0,0 +1,343 @@
+setup:
+  - skip:
+      features: headers
+
+  - do:
+      cluster.health:
+        wait_for_status: yellow
+
+
+---
+teardown:
+  - do:
+      xpack.security.delete_user:
+        username: "test_user"
+        ignore: 404
+
+  - do:
+      xpack.security.delete_role:
+        name: "foo_only_access"
+        ignore: 404
+
+---
+"Index-based access":
+
+  - do:
+      xpack.security.put_role:
+        name: "foo_only_access"
+        body: >
+          {
+            "cluster": [ "all" ],
+            "indices": [
+              { "names": ["foo"], "privileges": ["all"] },
+              { "names": ["rollup"], "privileges": ["all"] }
+            ]
+          }
+
+  - do:
+      xpack.security.put_user:
+        username: "test_user"
+        body: >
+          {
+            "password" : "x-pack-test-password",
+            "roles" : [ "foo_only_access" ],
+            "full_name" : "foo only"
+          }
+
+  - do:
+      indices.create:
+        index: foo
+        body:
+          mappings:
+            _doc:
+              properties:
+                timestamp:
+                  type: date
+                value_field:
+                  type: integer
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      index:
+        index: foo
+        type: _doc
+        body:
+          timestamp: 123
+          value_field: 1232
+
+  - do:
+      indices.create:
+        index: foobar
+        body:
+          mappings:
+            _doc:
+              properties:
+                timestamp:
+                  type: date
+                value_field:
+                  type: integer
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      index:
+        index: foobar
+        type: _doc
+        body:
+          timestamp: 123
+          value_field: 456
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      indices.refresh:
+        index: foo
+
+  # This index pattern will match both indices, but we only have permission to read one
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      xpack.rollup.put_job:
+        id: foo
+        body: >
+          {
+            "index_pattern": "foo*",
+            "rollup_index": "rollup",
+            "cron": "*/1 * * * * ?",
+            "page_size": 10,
+            "groups" : {
+              "date_histogram": {
+                "field": "timestamp",
+                "interval": "1s"
+              }
+            },
+            "metrics": [
+              {
+                "field": "value_field",
+                "metrics": ["min", "max", "sum"]
+              }
+            ]
+          }
+
+  - is_true: acknowledged
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      xpack.rollup.start_job:
+        id: foo
+  - is_true: started
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      indices.refresh:
+        index: rollup
+
+  # this is a hacky way to sleep for 5s, since we will never have 10 nodes
+  - do:
+      catch: request_timeout
+      cluster.health:
+        wait_for_nodes: 10
+        timeout: "5s"
+  - match:
+      timed_out: true
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      xpack.rollup.get_jobs:
+        id: foo
+  - match:
+      jobs.0.stats.documents_processed: 1
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      search:
+        index: foo
+        body:
+          query:
+            match_all: {}
+
+  - match:
+      hits.total: 1
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      search:
+        index: rollup
+        body:
+          query:
+            match_all: {}
+
+  - match:
+      hits.total: 1
+  - match:
+      hits.hits.0._id: "foo$VxMkzTqILshClbtbFi4-rQ"
+  - match:
+      hits.hits.0._source:
+        timestamp.date_histogram.time_zone: "UTC"
+        timestamp.date_histogram.timestamp: 0
+        value_field.max.value: 1232.0
+        _rollup.version: 2
+        timestamp.date_histogram.interval: "1s"
+        value_field.sum.value: 1232.0
+        value_field.min.value: 1232.0
+        timestamp.date_histogram._count: 1
+        _rollup.id: "foo"
+
+
+---
+"Attribute-based access":
+
+  - do:
+      xpack.security.put_role:
+        name: "foo_only_access"
+        body: >
+          {
+            "cluster": [ "all" ],
+            "indices": [
+              {
+                "names": ["foo"],
+                "privileges": ["all"],
+                "query": {
+                  "template": {
+                    "source": "{\"bool\":{\"filter\":[{\"term\":{\"visibility\":\"public\"}}]}}"
+                  }
+                }
+              },
+              { "names": ["rollup"], "privileges": ["all"] }
+            ]
+          }
+
+  - do:
+      xpack.security.put_user:
+        username: "test_user"
+        body: >
+          {
+            "password" : "x-pack-test-password",
+            "roles" : [ "foo_only_access" ],
+            "full_name" : "foo only"
+          }
+
+  - do:
+      indices.create:
+        index: foo
+        body:
+          mappings:
+            _doc:
+              properties:
+                timestamp:
+                  type: date
+                value_field:
+                  type: integer
+                visibility:
+                  type: keyword
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      index:
+        index: foo
+        type: _doc
+        body:
+          timestamp: 123
+          value_field: 1232
+          visibility: "public"
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      index:
+        index: foo
+        type: _doc
+        body:
+          timestamp: 123
+          value_field: 456
+          visibility: "private"
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      indices.refresh:
+        index: foo
+
+  # Index contains two docs, but we should only be able to see one of them
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      xpack.rollup.put_job:
+        id: foo
+        body: >
+          {
+            "index_pattern": "foo",
+            "rollup_index": "rollup",
+            "cron": "*/1 * * * * ?",
+            "page_size": 10,
+            "groups" : {
+              "date_histogram": {
+                "field": "timestamp",
+                "interval": "1s"
+              }
+            },
+            "metrics": [
+              {
+                "field": "value_field",
+                "metrics": ["min", "max", "sum"]
+              }
+            ]
+          }
+  - is_true: acknowledged
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      xpack.rollup.start_job:
+        id: foo
+  - is_true: started
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      indices.refresh:
+        index: rollup
+
+  # this is a hacky way to sleep for 5s, since we will never have 10 nodes
+  - do:
+      catch: request_timeout
+      cluster.health:
+        wait_for_nodes: 10
+        timeout: "5s"
+  - match:
+      timed_out: true
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      xpack.rollup.get_jobs:
+        id: foo
+  - match:
+      jobs.0.stats.documents_processed: 1
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      search:
+        index: foo
+        body:
+          query:
+            match_all: {}
+
+  - match:
+      hits.total: 1
+
+  - do:
+      headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
+      search:
+        index: rollup
+        body:
+          query:
+            match_all: {}
+
+  - match:
+      hits.total: 1
+  - match:
+      hits.hits.0._id: "foo$VxMkzTqILshClbtbFi4-rQ"
+  - match:
+      hits.hits.0._source:
+        timestamp.date_histogram.time_zone: "UTC"
+        timestamp.date_histogram.timestamp: 0
+        value_field.max.value: 1232.0
+        _rollup.version: 2
+        timestamp.date_histogram.interval: "1s"
+        value_field.sum.value: 1232.0
+        value_field.min.value: 1232.0
+        timestamp.date_histogram._count: 1
+        _rollup.id: "foo"
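
The opaque Authorization values used throughout the two tests above are ordinary HTTP Basic credentials: "dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" is the base64 encoding of "test_user:x-pack-test-password", and "eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" encodes "x_pack_rest_user:x-pack-test-password". Below is a minimal sketch of how such a header value is derived; the class and helper names are illustrative only and not part of the patch.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

// Illustrative sketch (not part of the patch): derives the Basic auth
// header values that the YAML tests above hard-code as base64 strings.
public class BasicAuthHeaderExample {

    // Builds an HTTP Basic Authorization header value for username:password.
    static String basicAuth(String username, String password) {
        String token = username + ":" + password;
        return "Basic " + Base64.getEncoder().encodeToString(token.getBytes(StandardCharsets.UTF_8));
    }

    public static void main(String[] args) {
        // Prints "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk", the "# test_user" header
        System.out.println(basicAuth("test_user", "x-pack-test-password"));
        // Prints "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==", the superuser header
        System.out.println(basicAuth("x_pack_rest_user", "x-pack-test-password"));
    }
}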