Merge pull request #17072 from s1monw/add_backwards_rest_tests
Add infrastructure to run REST tests on a multi-version cluster

This change adds the infrastructure to run the REST tests on a multi-node cluster that uses 2 different minor versions of elasticsearch. It doesn't implement any dedicated BWC tests but rather leverages the existing REST tests. Since we don't have a real version to test against, the tests use the current version until the first minor / RC is released, to ensure the infrastructure works. Given the amount of problems this change already found, I think it's worth having this run with our test suite by default. The structure of this infra will likely change over time, but for now it's a step in the right direction. We will likely want to split it up into integTests and integBwcTests etc. so each plugin can have its own bwc tests, but that's left for future refactoring.
commit 345e988bbc
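For reviewers who want the user-facing surface of the change first: a project opts into the mixed-version cluster through a few new ClusterConfiguration inputs in its build.gradle. A minimal sketch, mirroring the qa:backwards-5.0 configuration added further down in this diff (property names and values are taken from there, not invented):

// Sketch: opt a REST test project into a 2-node cluster where one node runs the BWC version.
integTest {
  includePackaged = true            // also run the packaged core REST tests against this cluster
  cluster {
    numNodes = 2                    // total nodes started for the test cluster
    numBwcNodes = 1                 // how many of those nodes run the backwards-compatibility version
    bwcVersion = "5.0.0-SNAPSHOT"   // the BWC distribution, resolved via the new elasticsearchBwcDistro configuration
  }
}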
@@ -307,6 +307,12 @@ class BuildPlugin implements Plugin<Project> {
     /** Adds repositores used by ES dependencies */
     static void configureRepositories(Project project) {
         RepositoryHandler repos = project.repositories
+        if (System.getProperty("repos.mavenlocal") != null) {
+            // with -Drepos.mavenlocal=true we can force checking the local .m2 repo which is
+            // useful for development ie. bwc tests where we install stuff in the local repository
+            // such that we don't have to pass hardcoded files to gradle
+            repos.mavenLocal()
+        }
         repos.mavenCentral()
         repos.maven {
             name 'sonatype-snapshots'
@@ -23,8 +23,6 @@ import org.gradle.api.Project
 import org.gradle.api.file.FileCollection
 import org.gradle.api.tasks.Input

-import java.time.LocalDateTime
-
 /** Configuration for an elasticsearch cluster, used for integration tests. */
 class ClusterConfiguration {

@@ -34,6 +32,12 @@ class ClusterConfiguration {
     @Input
     int numNodes = 1

+    @Input
+    int numBwcNodes = 0
+
+    @Input
+    String bwcVersion = null
+
     @Input
     int httpPort = 0

@@ -53,11 +53,50 @@ class ClusterFormationTasks {
             // no need to add cluster formation tasks if the task won't run!
             return
         }
-        configureDistributionDependency(project, config.distribution)
-        List<Task> startTasks = []
+        File sharedDir = new File(project.buildDir, "cluster/shared")
+        // first we remove everything in the shared cluster directory to ensure there are no leftovers in repos or anything
+        // in theory this should not be necessary but repositories are only deleted in the cluster-state and not on-disk
+        // such that snapshots survive failures / test runs and there is no simple way today to fix that.
+        Task cleanup = project.tasks.create(name: "${task.name}#prepareCluster.cleanShared", type: Delete, dependsOn: task.dependsOn.collect()) {
+            delete sharedDir
+            doLast {
+                sharedDir.mkdirs()
+            }
+        }
+        List<Task> startTasks = [cleanup]
         List<NodeInfo> nodes = []
+        if (config.numNodes < config.numBwcNodes) {
+            throw new GradleException("numNodes must be >= numBwcNodes [${config.numNodes} < ${config.numBwcNodes}]")
+        }
+        if (config.numBwcNodes > 0 && config.bwcVersion == null) {
+            throw new GradleException("bwcVersion must not be null if numBwcNodes is > 0")
+        }
+        // this is our current version distribution configuration we use for all kinds of REST tests etc.
+        project.configurations {
+            elasticsearchDistro
+        }
+        configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchDistro, VersionProperties.elasticsearch)
+        if (config.bwcVersion != null && config.numBwcNodes > 0) {
+            // if we have a cluster that has a BWC cluster we also need to configure a dependency on the BWC version
+            // this version uses the same distribution etc. and only differs in the version we depend on.
+            // from here on everything else works the same as if it's the current version, we fetch the BWC version
+            // from mirrors using gradles built-in mechanism etc.
+            project.configurations {
+                elasticsearchBwcDistro
+            }
+            configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchBwcDistro, config.bwcVersion)
+        }
+
         for (int i = 0; i < config.numNodes; ++i) {
-            NodeInfo node = new NodeInfo(config, i, project, task)
+            // we start N nodes and out of these N nodes there might be M bwc nodes.
+            // for each of those nodes we might have a different configuratioon
+            String elasticsearchVersion = VersionProperties.elasticsearch
+            Configuration configuration = project.configurations.elasticsearchDistro
+            if (i < config.numBwcNodes) {
+                elasticsearchVersion = config.bwcVersion
+                configuration = project.configurations.elasticsearchBwcDistro
+            }
+            NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion, sharedDir)
             if (i == 0) {
                 if (config.seedNodePortsFile != null) {
                     // we might allow this in the future to be set but for now we are the only authority to set this!
@@ -66,7 +105,7 @@ class ClusterFormationTasks {
                 config.seedNodePortsFile = node.transportPortsFile;
             }
             nodes.add(node)
-            startTasks.add(configureNode(project, task, node))
+            startTasks.add(configureNode(project, task, cleanup, node, configuration))
         }

         Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks)
@@ -77,20 +116,14 @@ class ClusterFormationTasks {
     }

     /** Adds a dependency on the given distribution */
-    static void configureDistributionDependency(Project project, String distro) {
-        String elasticsearchVersion = VersionProperties.elasticsearch
+    static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
         String packaging = distro
         if (distro == 'tar') {
             packaging = 'tar.gz'
         } else if (distro == 'integ-test-zip') {
             packaging = 'zip'
         }
-        project.configurations {
-            elasticsearchDistro
-        }
-        project.dependencies {
-            elasticsearchDistro "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}"
-        }
+        project.dependencies.add(configuration.name, "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}")
     }

     /**
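To make the refactoring easier to follow: the method is now invoked once per distribution configuration, so the same distro/packaging logic serves both the current and the BWC version. The following restates the two call sites added in ClusterFormationTasks above side by side (copied from this diff, not new behaviour):

// Current-version distribution, used for all regular REST tests.
configureDistributionDependency(project, config.distribution,
        project.configurations.elasticsearchDistro, VersionProperties.elasticsearch)
// BWC distribution, only wired up when numBwcNodes > 0 and bwcVersion is set.
configureDistributionDependency(project, config.distribution,
        project.configurations.elasticsearchBwcDistro, config.bwcVersion)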
@@ -110,10 +143,10 @@ class ClusterFormationTasks {
      *
      * @return a task which starts the node.
      */
-    static Task configureNode(Project project, Task task, NodeInfo node) {
+    static Task configureNode(Project project, Task task, Object dependsOn, NodeInfo node, Configuration configuration) {

         // tasks are chained so their execution order is maintained
-        Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: task.dependsOn.collect()) {
+        Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: dependsOn) {
             delete node.homeDir
             delete node.cwd
             doLast {
@@ -122,7 +155,7 @@ class ClusterFormationTasks {
         }
         setup = configureCheckPreviousTask(taskName(task, node, 'checkPrevious'), project, setup, node)
         setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node)
-        setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node)
+        setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node, configuration)
         setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node)
         setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
         setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
@@ -158,27 +191,28 @@ class ClusterFormationTasks {
     }

     /** Adds a task to extract the elasticsearch distribution */
-    static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node) {
-        List extractDependsOn = [project.configurations.elasticsearchDistro, setup]
-        /* project.configurations.elasticsearchDistro.singleFile will be an
-           external artifact if this is being run by a plugin not living in the
-           elasticsearch source tree. If this is a plugin built in the
-           elasticsearch source tree or this is a distro in the elasticsearch
-           source tree then this should be the version of elasticsearch built
-           by the source tree. If it isn't then Bad Things(TM) will happen. */
+    static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node, Configuration configuration) {
+        List extractDependsOn = [configuration, setup]
+        /* configuration.singleFile will be an external artifact if this is being run by a plugin not living in the
+           elasticsearch source tree. If this is a plugin built in the elasticsearch source tree or this is a distro in
+           the elasticsearch source tree then this should be the version of elasticsearch built by the source tree.
+           If it isn't then Bad Things(TM) will happen. */
         Task extract

         switch (node.config.distribution) {
             case 'integ-test-zip':
             case 'zip':
                 extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
-                    from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
+                    from {
+                        project.zipTree(configuration.singleFile)
+                    }
                     into node.baseDir
                 }
                 break;
             case 'tar':
                 extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
                     from {
-                        project.tarTree(project.resources.gzip(project.configurations.elasticsearchDistro.singleFile))
+                        project.tarTree(project.resources.gzip(configuration.singleFile))
                     }
                     into node.baseDir
                 }
@@ -187,7 +221,7 @@ class ClusterFormationTasks {
                 File rpmDatabase = new File(node.baseDir, 'rpm-database')
                 File rpmExtracted = new File(node.baseDir, 'rpm-extracted')
                 /* Delay reading the location of the rpm file until task execution */
-                Object rpm = "${ -> project.configurations.elasticsearchDistro.singleFile}"
+                Object rpm = "${ -> configuration.singleFile}"
                 extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) {
                     commandLine 'rpm', '--badreloc', '--nodeps', '--noscripts', '--notriggers',
                             '--dbpath', rpmDatabase,
@@ -202,7 +236,7 @@ class ClusterFormationTasks {
             case 'deb':
                 /* Delay reading the location of the deb file until task execution */
                 File debExtracted = new File(node.baseDir, 'deb-extracted')
-                Object deb = "${ -> project.configurations.elasticsearchDistro.singleFile}"
+                Object deb = "${ -> configuration.singleFile}"
                 extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) {
                     commandLine 'dpkg-deb', '-x', deb, debExtracted
                     doFirst {
@@ -221,8 +255,8 @@ class ClusterFormationTasks {
         Map esConfig = [
                 'cluster.name' : node.clusterName,
                 'pidfile' : node.pidFile,
-                'path.repo' : "${node.homeDir}/repo",
-                'path.shared_data' : "${node.homeDir}/../",
+                'path.repo' : "${node.sharedDir}/repo",
+                'path.shared_data' : "${node.sharedDir}/",
                 // Define a node attribute so we can test that it exists
                 'node.testattr' : 'test',
                 'repositories.url.allowed_urls': 'http://snapshot.test*'
@@ -40,6 +40,9 @@ class NodeInfo {
     /** root directory all node files and operations happen under */
     File baseDir

+    /** shared data directory all nodes share */
+    File sharedDir
+
     /** the pid file the node will use */
     File pidFile

@@ -89,14 +92,15 @@ class NodeInfo {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream()

     /** Creates a node to run as part of a cluster for the given task */
-    NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) {
+    NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task, String nodeVersion, File sharedDir) {
         this.config = config
         this.nodeNum = nodeNum
+        this.sharedDir = sharedDir
         clusterName = "${task.path.replace(':', '_').substring(1)}"
         baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}")
         pidFile = new File(baseDir, 'es.pid')
-        homeDir = homeDir(baseDir, config.distribution)
-        confDir = confDir(baseDir, config.distribution)
+        homeDir = homeDir(baseDir, config.distribution, nodeVersion)
+        confDir = confDir(baseDir, config.distribution, nodeVersion)
         configFile = new File(confDir, 'elasticsearch.yml')
         // even for rpm/deb, the logs are under home because we dont start with real services
         File logsDir = new File(homeDir, 'logs')
@@ -181,13 +185,13 @@ class NodeInfo {
     }

     /** Returns the directory elasticsearch home is contained in for the given distribution */
-    static File homeDir(File baseDir, String distro) {
+    static File homeDir(File baseDir, String distro, String nodeVersion) {
         String path
         switch (distro) {
             case 'integ-test-zip':
             case 'zip':
             case 'tar':
-                path = "elasticsearch-${VersionProperties.elasticsearch}"
+                path = "elasticsearch-${nodeVersion}"
                 break
             case 'rpm':
             case 'deb':
@@ -199,12 +203,12 @@ class NodeInfo {
         return new File(baseDir, path)
     }

-    static File confDir(File baseDir, String distro) {
+    static File confDir(File baseDir, String distro, String nodeVersion) {
         switch (distro) {
             case 'integ-test-zip':
             case 'zip':
             case 'tar':
-                return new File(homeDir(baseDir, distro), 'config')
+                return new File(homeDir(baseDir, distro, nodeVersion), 'config')
             case 'rpm':
             case 'deb':
                 return new File(baseDir, "${distro}-extracted/etc/elasticsearch")
@@ -231,8 +231,7 @@ public class NodeInfo extends BaseNodeResponse {
             plugins.readFrom(in);
         }
         if (in.readBoolean()) {
-            ingest = new IngestInfo();
-            ingest.readFrom(in);
+            ingest = new IngestInfo(in);
         }
     }

@@ -235,7 +235,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
         breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in);
         scriptStats = in.readOptionalStreamable(ScriptStats::new);
         discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null));
-        ingestStats = in.readOptionalWritable(IngestStats.PROTO::readFrom);
+        ingestStats = in.readOptionalWritable(IngestStats::new);
     }

     @Override
@@ -65,6 +65,7 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Function;
 import java.util.function.Supplier;

 import static org.elasticsearch.ElasticsearchException.readException;
@@ -31,12 +31,18 @@ import java.util.Map;
 import java.util.concurrent.TimeUnit;

 public class IngestStats implements Writeable<IngestStats>, ToXContent {

-    public final static IngestStats PROTO = new IngestStats(null, null);
-
     private final Stats totalStats;
     private final Map<String, Stats> statsPerPipeline;

+    public IngestStats(StreamInput in) throws IOException {
+        this.totalStats = new Stats(in);
+        int size = in.readVInt();
+        this.statsPerPipeline = new HashMap<>(size);
+        for (int i = 0; i < size; i++) {
+            statsPerPipeline.put(in.readString(), new Stats(in));
+        }
+    }

     public IngestStats(Stats totalStats, Map<String, Stats> statsPerPipeline) {
         this.totalStats = totalStats;
         this.statsPerPipeline = statsPerPipeline;
@@ -58,16 +64,7 @@ public class IngestStats implements Writeable<IngestStats>, ToXContent {

     @Override
     public IngestStats readFrom(StreamInput in) throws IOException {
-        Stats totalStats = Stats.PROTO.readFrom(in);
-        totalStats.readFrom(in);
-        int size = in.readVInt();
-        Map<String, Stats> statsPerPipeline = new HashMap<>(size);
-        for (int i = 0; i < size; i++) {
-            Stats stats = Stats.PROTO.readFrom(in);
-            statsPerPipeline.put(in.readString(), stats);
-            stats.readFrom(in);
-        }
-        return new IngestStats(totalStats, statsPerPipeline);
+        return new IngestStats(in);
     }

     @Override
@@ -99,13 +96,18 @@ public class IngestStats implements Writeable<IngestStats>, ToXContent {

     public static class Stats implements Writeable<Stats>, ToXContent {

-        private final static Stats PROTO = new Stats(0, 0, 0, 0);
-
         private final long ingestCount;
         private final long ingestTimeInMillis;
         private final long ingestCurrent;
         private final long ingestFailedCount;

+        public Stats(StreamInput in) throws IOException {
+            ingestCount = in.readVLong();
+            ingestTimeInMillis = in.readVLong();
+            ingestCurrent = in.readVLong();
+            ingestFailedCount = in.readVLong();
+        }
+
         public Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) {
             this.ingestCount = ingestCount;
             this.ingestTimeInMillis = ingestTimeInMillis;
@@ -144,11 +146,7 @@ public class IngestStats implements Writeable<IngestStats>, ToXContent {

         @Override
         public Stats readFrom(StreamInput in) throws IOException {
-            long ingestCount = in.readVLong();
-            long ingestTimeInMillis = in.readVLong();
-            long ingestCurrent = in.readVLong();
-            long ingestFailedCount = in.readVLong();
-            return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount);
+            return new Stats(in);
         }

         @Override
@@ -22,6 +22,7 @@ package org.elasticsearch.ingest.core;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -32,17 +33,22 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import java.util.TreeSet;

-public class IngestInfo implements Streamable, ToXContent {
+public class IngestInfo implements Writeable<IngestInfo>, ToXContent {

-    private Set<ProcessorInfo> processors;
+    private final Set<ProcessorInfo> processors;

-    public IngestInfo() {
-        processors = Collections.emptySet();
+    public IngestInfo(StreamInput in) throws IOException {
+        this(Collections.emptyList());
+        final int size = in.readVInt();
+        for (int i = 0; i < size; i++) {
+            processors.add(new ProcessorInfo(in));
+        }
     }

     public IngestInfo(List<ProcessorInfo> processors) {
-        this.processors = new LinkedHashSet<>(processors);
+        this.processors = new TreeSet<>(processors); // we use a treeset here to have a test-able / predictable order
     }

     public Iterable<ProcessorInfo> getProcessors() {
@@ -54,15 +60,8 @@ public class IngestInfo implements Streamable, ToXContent {
     }

     @Override
-    public void readFrom(StreamInput in) throws IOException {
-        int size = in.readVInt();
-        Set<ProcessorInfo> processors = new LinkedHashSet<>(size);
-        for (int i = 0; i < size; i++) {
-            ProcessorInfo info = new ProcessorInfo();
-            info.readFrom(in);
-            processors.add(info);
-        }
-        this.processors = processors;
+    public IngestInfo readFrom(StreamInput in) throws IOException {
+        return new IngestInfo(in);
     }

     @Override
@@ -22,16 +22,18 @@ package org.elasticsearch.ingest.core;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;

-public class ProcessorInfo implements Streamable, ToXContent {
+public class ProcessorInfo implements Writeable<ProcessorInfo>, ToXContent, Comparable<ProcessorInfo> {

-    private String type;
+    private final String type;

-    ProcessorInfo() {
+    public ProcessorInfo(StreamInput input) throws IOException {
+        type = input.readString();
     }

     public ProcessorInfo(String type) {
@@ -46,8 +48,8 @@ public class ProcessorInfo implements Streamable, ToXContent {
     }

     @Override
-    public void readFrom(StreamInput in) throws IOException {
-        this.type = in.readString();
+    public ProcessorInfo readFrom(StreamInput in) throws IOException {
+        return new ProcessorInfo(in);
     }

     @Override
@@ -78,4 +80,9 @@ public class ProcessorInfo implements Streamable, ToXContent {
     public int hashCode() {
         return type.hashCode();
     }
+
+    @Override
+    public int compareTo(ProcessorInfo o) {
+        return type.compareTo(o.type);
+    }
 }
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+
+public class IngestStatsTests extends ESTestCase {
+
+    public void testSerialization() throws IOException {
+        IngestStats.Stats total = new IngestStats.Stats(5, 10, 20, 30);
+        IngestStats.Stats foo = new IngestStats.Stats(50, 100, 200, 300);
+        IngestStats ingestStats = new IngestStats(total, Collections.singletonMap("foo", foo));
+        IngestStats serialize = serialize(ingestStats);
+        assertNotSame(serialize, ingestStats);
+        assertNotSame(serialize.getTotalStats(), total);
+        assertEquals(total.getIngestCount(), serialize.getTotalStats().getIngestCount());
+        assertEquals(total.getIngestFailedCount(), serialize.getTotalStats().getIngestFailedCount());
+        assertEquals(total.getIngestTimeInMillis(), serialize.getTotalStats().getIngestTimeInMillis());
+        assertEquals(total.getIngestCurrent(), serialize.getTotalStats().getIngestCurrent());
+
+        assertEquals(ingestStats.getStatsPerPipeline().size(), 1);
+        assertTrue(ingestStats.getStatsPerPipeline().containsKey("foo"));
+
+        Map<String, IngestStats.Stats> left = ingestStats.getStatsPerPipeline();
+        Map<String, IngestStats.Stats> right = serialize.getStatsPerPipeline();
+
+        assertEquals(right.size(), 1);
+        assertTrue(right.containsKey("foo"));
+        assertEquals(left.size(), 1);
+        assertTrue(left.containsKey("foo"));
+        IngestStats.Stats leftStats = left.get("foo");
+        IngestStats.Stats rightStats = right.get("foo");
+        assertEquals(leftStats.getIngestCount(), rightStats.getIngestCount());
+        assertEquals(leftStats.getIngestFailedCount(), rightStats.getIngestFailedCount());
+        assertEquals(leftStats.getIngestTimeInMillis(), rightStats.getIngestTimeInMillis());
+        assertEquals(leftStats.getIngestCurrent(), rightStats.getIngestCurrent());
+    }
+
+    private <T> T serialize(Writeable<T> writeable) throws IOException {
+        BytesStreamOutput out = new BytesStreamOutput();
+        writeable.writeTo(out);
+        StreamInput in = StreamInput.wrap(out.bytes());
+        return writeable.readFrom(in);
+    }
+}
@@ -47,6 +47,7 @@ import org.elasticsearch.transport.TransportInfo;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -137,7 +138,7 @@ public class NodeInfoStreamingTests extends ESTestCase {
         PluginsAndModules plugins = new PluginsAndModules();
         plugins.addModule(DummyPluginInfo.INSTANCE);
         plugins.addPlugin(DummyPluginInfo.INSTANCE);
-        IngestInfo ingestInfo = new IngestInfo();
+        IngestInfo ingestInfo = new IngestInfo(Collections.emptyList());
         return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo);
     }
 }
@@ -9,3 +9,18 @@
           nodes.info: {}

   - match: { nodes.$master.modules.0.name: ingest-grok }
+  - match: { nodes.$master.ingest.processors.0.type: append }
+  - match: { nodes.$master.ingest.processors.1.type: convert }
+  - match: { nodes.$master.ingest.processors.2.type: date }
+  - match: { nodes.$master.ingest.processors.3.type: fail }
+  - match: { nodes.$master.ingest.processors.4.type: foreach }
+  - match: { nodes.$master.ingest.processors.5.type: grok }
+  - match: { nodes.$master.ingest.processors.6.type: gsub }
+  - match: { nodes.$master.ingest.processors.7.type: join }
+  - match: { nodes.$master.ingest.processors.8.type: lowercase }
+  - match: { nodes.$master.ingest.processors.9.type: remove }
+  - match: { nodes.$master.ingest.processors.10.type: rename }
+  - match: { nodes.$master.ingest.processors.11.type: set }
+  - match: { nodes.$master.ingest.processors.12.type: split }
+  - match: { nodes.$master.ingest.processors.13.type: trim }
+  - match: { nodes.$master.ingest.processors.14.type: uppercase }
@@ -8,4 +8,19 @@
           nodes.info: {}

   - match: { nodes.$master.plugins.0.name: ingest-attachment }
-  - match: { nodes.$master.ingest.processors.11.type: attachment }
+  - match: { nodes.$master.ingest.processors.0.type: append }
+  - match: { nodes.$master.ingest.processors.1.type: attachment }
+  - match: { nodes.$master.ingest.processors.2.type: convert }
+  - match: { nodes.$master.ingest.processors.3.type: date }
+  - match: { nodes.$master.ingest.processors.4.type: fail }
+  - match: { nodes.$master.ingest.processors.5.type: foreach }
+  - match: { nodes.$master.ingest.processors.6.type: gsub }
+  - match: { nodes.$master.ingest.processors.7.type: join }
+  - match: { nodes.$master.ingest.processors.8.type: lowercase }
+  - match: { nodes.$master.ingest.processors.9.type: remove }
+  - match: { nodes.$master.ingest.processors.10.type: rename }
+  - match: { nodes.$master.ingest.processors.11.type: set }
+  - match: { nodes.$master.ingest.processors.12.type: split }
+  - match: { nodes.$master.ingest.processors.13.type: trim }
+  - match: { nodes.$master.ingest.processors.14.type: uppercase }
@@ -8,4 +8,18 @@
           nodes.info: {}

   - match: { nodes.$master.plugins.0.name: ingest-geoip }
-  - match: { nodes.$master.ingest.processors.3.type: geoip }
+  - match: { nodes.$master.ingest.processors.0.type: append }
+  - match: { nodes.$master.ingest.processors.1.type: convert }
+  - match: { nodes.$master.ingest.processors.2.type: date }
+  - match: { nodes.$master.ingest.processors.3.type: fail }
+  - match: { nodes.$master.ingest.processors.4.type: foreach }
+  - match: { nodes.$master.ingest.processors.5.type: geoip }
+  - match: { nodes.$master.ingest.processors.6.type: gsub }
+  - match: { nodes.$master.ingest.processors.7.type: join }
+  - match: { nodes.$master.ingest.processors.8.type: lowercase }
+  - match: { nodes.$master.ingest.processors.9.type: remove }
+  - match: { nodes.$master.ingest.processors.10.type: rename }
+  - match: { nodes.$master.ingest.processors.11.type: set }
+  - match: { nodes.$master.ingest.processors.12.type: split }
+  - match: { nodes.$master.ingest.processors.13.type: trim }
+  - match: { nodes.$master.ingest.processors.14.type: uppercase }
@@ -0,0 +1,23 @@
+apply plugin: 'elasticsearch.rest-test'
+
+/* This project runs the core REST tests against a 2 node cluster where one of the nodes has a different minor.
+ * Since we don't have a version to test against we currently use the hardcoded snapshot for to bascially run
+ * against ourself. To test that useing a different version works got into distribution/zip and execute:
+ *   gradle clean install -Dbuild.snapshot=false
+ *
+ * This installs the release-build into a local .m2 repository, then change this version here to:
+ *   bwcVersion = "5.0.0"
+ *
+ * now you can run the bwc tests with:
+ *   gradle check -Drepos.mavenlocal=true
+ *
+ * (-Drepos.mavenlocal=true will force gradle to look for the zip distribuiton in the local .m2 repository)
+ */
+integTest {
+  includePackaged = true
+  cluster {
+    numNodes = 2
+    numBwcNodes = 1
+    bwcVersion = "5.0.0-SNAPSHOT" // this is the same as the current version until we released the first RC
+  }
+}
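The comment block in this new build.gradle describes the manual workflow for exercising a genuinely different version: install a release build into the local .m2 repository (gradle clean install -Dbuild.snapshot=false under distribution/zip), then run gradle check -Drepos.mavenlocal=true. A sketch of the edit the comment describes, not part of the diff itself:

// Hypothetical edit after installing a release build locally: point the BWC node at it.
cluster {
  numNodes = 2
  numBwcNodes = 1
  bwcVersion = "5.0.0" // resolved from the local .m2 when running with -Drepos.mavenlocal=true
}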
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.smoketest;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.test.rest.RestTestCandidate;
+import org.elasticsearch.test.rest.parser.RestTestParseException;
+
+import java.io.IOException;
+
+public class MultiNodeBackwardsIT extends ESRestTestCase {
+
+    public MultiNodeBackwardsIT(RestTestCandidate testCandidate) {
+        super(testCandidate);
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
+        return createParameters(0, 1);
+    }
+}
@@ -1,28 +1,3 @@
----
-"Check availability of default processors":
-  - do:
-      cluster.state: {}
-
-  - set: {master_node: master}
-
-  - do:
-      nodes.info: {}
-
-  - match: { nodes.$master.ingest.processors.0.type: date }
-  - match: { nodes.$master.ingest.processors.1.type: uppercase }
-  - match: { nodes.$master.ingest.processors.2.type: set }
-  - match: { nodes.$master.ingest.processors.3.type: lowercase }
-  - match: { nodes.$master.ingest.processors.4.type: gsub }
-  - match: { nodes.$master.ingest.processors.5.type: convert }
-  - match: { nodes.$master.ingest.processors.6.type: remove }
-  - match: { nodes.$master.ingest.processors.7.type: fail }
-  - match: { nodes.$master.ingest.processors.8.type: foreach }
-  - match: { nodes.$master.ingest.processors.9.type: split }
-  - match: { nodes.$master.ingest.processors.10.type: trim }
-  - match: { nodes.$master.ingest.processors.11.type: rename }
-  - match: { nodes.$master.ingest.processors.12.type: join }
-  - match: { nodes.$master.ingest.processors.13.type: append }
-
 ---
 "Test basic pipeline crud":
   - do:
@@ -76,11 +76,12 @@ setup:
   - do:
       nodes.stats:
        metric: [ ingest ]
-  - gte: {nodes.$master.ingest.total.count: 1}
+  #we can't assert anything here since we might have more than one node in the cluster
+  - gte: {nodes.$master.ingest.total.count: 0}
   - gte: {nodes.$master.ingest.total.failed: 0}
   - gte: {nodes.$master.ingest.total.time_in_millis: 0}
   - match: {nodes.$master.ingest.total.current: 0}
-  - gte: {nodes.$master.ingest.pipelines.pipeline1.count: 1}
+  - gte: {nodes.$master.ingest.pipelines.pipeline1.count: 0}
   - match: {nodes.$master.ingest.pipelines.pipeline1.failed: 0}
   - gte: {nodes.$master.ingest.pipelines.pipeline1.time_in_millis: 0}
   - match: {nodes.$master.ingest.pipelines.pipeline1.current: 0}
@@ -113,11 +114,12 @@ setup:
   - do:
       nodes.stats:
        metric: [ ingest ]
-  - gte: {nodes.$master.ingest.total.count: 1}
+  #we can't assert anything here since we might have more than one node in the cluster
+  - gte: {nodes.$master.ingest.total.count: 0}
  - gte: {nodes.$master.ingest.total.failed: 0}
   - gte: {nodes.$master.ingest.total.time_in_millis: 0}
   - match: {nodes.$master.ingest.total.current: 0}
-  - match: {nodes.$master.ingest.pipelines.pipeline2.count: 1}
+  - gte: {nodes.$master.ingest.pipelines.pipeline2.count: 0}
   - match: {nodes.$master.ingest.pipelines.pipeline2.failed: 0}
   - gte: {nodes.$master.ingest.pipelines.pipeline2.time_in_millis: 0}
   - match: {nodes.$master.ingest.pipelines.pipeline2.current: 0}
@@ -39,6 +39,7 @@ List projects = [
   'plugins:repository-s3',
   'plugins:jvm-example',
   'plugins:store-smb',
+  'qa:backwards-5.0',
   'qa:evil-tests',
   'qa:smoke-test-client',
   'qa:smoke-test-multinode',