From 44ed5f6306038c7f80cbd6ffe403104248145d1e Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 31 Aug 2018 09:31:55 +0300 Subject: [PATCH 01/23] Enable forbiddenapis server java9 (#33245) --- .../groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 3 ++- .../gradle/precommit/PrecommitTasks.groovy | 2 +- libs/core/build.gradle | 9 +++++---- server/build.gradle | 10 +++++----- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 4c4a8cbe881..6a9d4076eef 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -38,7 +38,6 @@ import org.gradle.api.artifacts.ModuleDependency import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact -import org.gradle.api.artifacts.SelfResolvingDependency import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.execution.TaskExecutionGraph import org.gradle.api.plugins.JavaPlugin @@ -212,6 +211,7 @@ class BuildPlugin implements Plugin { project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion project.rootProject.ext.inFipsJvm = inFipsJvm project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion) + project.rootProject.ext.java9Home = findJavaHome("9") } project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion @@ -225,6 +225,7 @@ class BuildPlugin implements Plugin { project.ext.javaVersions = project.rootProject.ext.javaVersions project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm project.ext.gradleJavaVersion = project.rootProject.ext.gradleJavaVersion + project.ext.java9Home = project.rootProject.ext.java9Home } private static String getPaddedMajorVersion(JavaVersion compilerJavaVersionEnum) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index be7561853bb..06557d4ccfd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -100,7 +100,7 @@ class PrecommitTasks { private static Task configureForbiddenApisCli(Project project) { Task forbiddenApisCli = project.tasks.create('forbiddenApis') - project.sourceSets.forEach { sourceSet -> + project.sourceSets.all { sourceSet -> forbiddenApisCli.dependsOn( project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) { ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') diff --git a/libs/core/build.gradle b/libs/core/build.gradle index cc5c1e20fc1..9c90837bd80 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -46,12 +46,13 @@ if (!isEclipse && !isIdea) { targetCompatibility = 9 } - /* Enable this when forbiddenapis was updated to 2.6. 
- * See: https://github.com/elastic/elasticsearch/issues/29292 forbiddenApisJava9 { - targetCompatibility = 9 + if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { + targetCompatibility = JavaVersion.VERSION_1_9 + javaHome = project.java9Home + } + replaceSignatureFiles 'jdk-signatures' } - */ jar { metaInf { diff --git a/server/build.gradle b/server/build.gradle index edc3f427dfd..c01fb92b050 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -58,13 +58,13 @@ if (!isEclipse && !isIdea) { sourceCompatibility = 9 targetCompatibility = 9 } - - /* Enable this when forbiddenapis was updated to 2.6. - * See: https://github.com/elastic/elasticsearch/issues/29292 + forbiddenApisJava9 { - targetCompatibility = 9 + if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { + targetCompatibility = JavaVersion.VERSION_1_9 + javaHome = project.java9Home + } } - */ jar { metaInf { From c6cfa08a615f5b433cf64bef63220f4a22727115 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 31 Aug 2018 08:40:27 +0200 Subject: [PATCH 02/23] MINOR: Remove Dead Code from PathTrie (#33280) * The array size checks are redundant since the array sizes are checked earlier in those methods too * The removed methods are just not used anywhere --- .../elasticsearch/common/path/PathTrie.java | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/path/PathTrie.java b/server/src/main/java/org/elasticsearch/common/path/PathTrie.java index 5243809c64a..08787cea9df 100644 --- a/server/src/main/java/org/elasticsearch/common/path/PathTrie.java +++ b/server/src/main/java/org/elasticsearch/common/path/PathTrie.java @@ -104,24 +104,12 @@ public class PathTrie { namedWildcard = key.substring(key.indexOf('{') + 1, key.indexOf('}')); } - public boolean isWildcard() { - return isWildcard; - } - - public synchronized void addChild(TrieNode child) { - addInnerChild(child.key, child); - } - private void addInnerChild(String key, TrieNode child) { Map newChildren = new HashMap<>(children); newChildren.put(key, child); children = unmodifiableMap(newChildren); } - public TrieNode getChild(String key) { - return children.get(key); - } - public synchronized void insert(String[] path, int index, T value) { if (index >= path.length) return; @@ -302,7 +290,7 @@ public class PathTrie { } int index = 0; // Supports initial delimiter. - if (strings.length > 0 && strings[0].isEmpty()) { + if (strings[0].isEmpty()) { index = 1; } root.insert(strings, index, value); @@ -327,7 +315,7 @@ public class PathTrie { } int index = 0; // Supports initial delimiter. - if (strings.length > 0 && strings[0].isEmpty()) { + if (strings[0].isEmpty()) { index = 1; } root.insertOrUpdate(strings, index, value, updater); @@ -352,7 +340,7 @@ public class PathTrie { int index = 0; // Supports initial delimiter. 
- if (strings.length > 0 && strings[0].isEmpty()) { + if (strings[0].isEmpty()) { index = 1; } From 73eb4cbbbe1c38508f6fc303ca300c508952b507 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Fri, 31 Aug 2018 10:45:25 +0300 Subject: [PATCH 03/23] SQL: Support multi-index format as table identifier (#33278) Extend tableIdentifier to support multi-index format; not just * but also enumeration and exclusion Fix #33162 --- .../sql/analysis/index/IndexResolver.java | 3 +- .../xpack/sql/execution/search/Querier.java | 3 +- .../xpack/sql/parser/IdentifierBuilder.java | 14 ------- .../sql/parser/IdentifierBuilderTests.java | 38 ------------------- .../sql/src/main/resources/command.csv-spec | 24 ++++++++++++ 5 files changed, 28 insertions(+), 54 deletions(-) delete mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 10586c991b1..b11542d40ed 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -19,6 +19,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xpack.sql.type.EsField; @@ -300,7 +301,7 @@ public class IndexResolver { private static GetIndexRequest createGetIndexRequest(String index) { return new GetIndexRequest() .local(true) - .indices(index) + .indices(Strings.commaDelimitedListToStringArray(index)) .features(Feature.MAPPINGS) //lenient because we throw our own errors looking at the response e.g. 
if something was not resolved //also because this way security doesn't throw authorization exceptions but rather honours ignore_unavailable diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 055e34758cc..d0bff77a648 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; @@ -92,7 +93,7 @@ public class Querier { log.trace("About to execute query {} on {}", StringUtils.toString(sourceBuilder), index); } - SearchRequest search = prepareRequest(client, sourceBuilder, timeout, index); + SearchRequest search = prepareRequest(client, sourceBuilder, timeout, Strings.commaDelimitedListToStringArray(index)); ActionListener l; if (query.isAggsOnly()) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java index 8c79ae1ef05..f09f543c6ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java @@ -21,23 +21,9 @@ abstract class IdentifierBuilder extends AbstractBuilder { ParseTree tree = ctx.name != null ? ctx.name : ctx.TABLE_IDENTIFIER(); String index = tree.getText(); - validateIndex(index, source); return new TableIdentifier(source, visitIdentifier(ctx.catalog), index); } - // see https://github.com/elastic/elasticsearch/issues/6736 - static void validateIndex(String index, Location source) { - for (int i = 0; i < index.length(); i++) { - char c = index.charAt(i); - if (Character.isUpperCase(c)) { - throw new ParsingException(source, "Invalid index name (needs to be lowercase) {}", index); - } - if (c == '\\' || c == '/' || c == '<' || c == '>' || c == '|' || c == ',' || c == ' ') { - throw new ParsingException(source, "Invalid index name (illegal character {}) {}", c, index); - } - } - } - @Override public String visitIdentifier(IdentifierContext ctx) { return ctx == null ? null : ctx.getText(); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java deleted file mode 100644 index ec8b8abc51f..00000000000 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.sql.parser; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.tree.Location; - -import static org.hamcrest.Matchers.is; - -public class IdentifierBuilderTests extends ESTestCase { - - private static Location L = new Location(1, 10); - - public void testTypicalIndex() throws Exception { - IdentifierBuilder.validateIndex("some-index", L); - } - - public void testInternalIndex() throws Exception { - IdentifierBuilder.validateIndex(".some-internal-index-2020-02-02", L); - } - - public void testIndexPattern() throws Exception { - IdentifierBuilder.validateIndex(".some-*", L); - } - - public void testInvalidIndex() throws Exception { - ParsingException pe = expectThrows(ParsingException.class, () -> IdentifierBuilder.validateIndex("some,index", L)); - assertThat(pe.getMessage(), is("line 1:12: Invalid index name (illegal character ,) some,index")); - } - - public void testUpperCasedIndex() throws Exception { - ParsingException pe = expectThrows(ParsingException.class, () -> IdentifierBuilder.validateIndex("thisIsAnIndex", L)); - assertThat(pe.getMessage(), is("line 1:12: Invalid index name (needs to be lowercase) thisIsAnIndex")); - } -} diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index 89e86e887e1..a8f23e27ffa 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -162,3 +162,27 @@ last_name | VARCHAR last_name.keyword | VARCHAR salary | INTEGER ; + + +describeIncludeExclude +DESCRIBE "test_emp*,-test_alias*"; + +column:s | type:s +birth_date | TIMESTAMP +dep | STRUCT +dep.dep_id | VARCHAR +dep.dep_name | VARCHAR +dep.dep_name.keyword | VARCHAR +dep.from_date | TIMESTAMP +dep.to_date | TIMESTAMP +emp_no | INTEGER +first_name | VARCHAR +first_name.keyword | VARCHAR +gender | VARCHAR +hire_date | TIMESTAMP +languages | TINYINT +last_name | VARCHAR +last_name.keyword | VARCHAR +salary | INTEGER +; + From 7345878d33a00ce95eae929f79b2da0b326d182d Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 31 Aug 2018 08:48:45 +0100 Subject: [PATCH 04/23] [ML] Refactor delimited file structure detection (#33233) 1. Use the term "delimited" rather than "separated values" 2. Use a single factory class with arguments to specify the delimiter and identification constraints This change makes it easier to add support for other delimiter characters. --- .../CsvLogStructureFinderFactory.java | 35 ----- ....java => DelimitedLogStructureFinder.java} | 16 +-- .../DelimitedLogStructureFinderFactory.java | 57 ++++++++ .../ml/logstructurefinder/LogStructure.java | 124 ++++-------------- .../LogStructureFinderManager.java | 8 +- .../logstructurefinder/LogStructureUtils.java | 10 +- ...aratedValuesLogStructureFinderFactory.java | 38 ------ ...aratedValuesLogStructureFinderFactory.java | 37 ------ .../TsvLogStructureFinderFactory.java | 4 +- .../CsvLogStructureFinderFactoryTests.java | 38 ------ ...limitedLogStructureFinderFactoryTests.java | 93 +++++++++++++ ... 
=> DelimitedLogStructureFinderTests.java} | 122 ++++++++--------- .../JsonLogStructureFinderFactoryTests.java | 8 +- .../JsonLogStructureFinderTests.java | 2 +- .../LogStructureFinderManagerTests.java | 2 +- .../LogStructureTestCase.java | 4 +- .../logstructurefinder/LogStructureTests.java | 8 +- ...dValuesLogStructureFinderFactoryTests.java | 23 ---- ...dValuesLogStructureFinderFactoryTests.java | 28 ---- .../TextLogStructureFinderFactoryTests.java | 4 +- .../TextLogStructureFinderTests.java | 2 +- .../TsvLogStructureFinderFactoryTests.java | 33 ----- .../XmlLogStructureFinderFactoryTests.java | 8 +- .../XmlLogStructureFinderTests.java | 2 +- 24 files changed, 277 insertions(+), 429 deletions(-) delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactory.java rename x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/{SeparatedValuesLogStructureFinder.java => DelimitedLogStructureFinder.java} (97%) create mode 100644 x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactory.java delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactory.java delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactoryTests.java create mode 100644 x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java rename x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/{SeparatedValuesLogStructureFinderTests.java => DelimitedLogStructureFinderTests.java} (65%) delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactoryTests.java delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactoryTests.java delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactoryTests.java diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactory.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactory.java deleted file mode 100644 index cb9e6537252..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactory.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -import org.supercsv.prefs.CsvPreference; - -import java.io.IOException; -import java.util.List; - -public class CsvLogStructureFinderFactory implements LogStructureFinderFactory { - - /** - * Rules are: - * - The file must be valid CSV - * - It must contain at least two complete records - * - There must be at least two fields per record (otherwise files with no commas could be treated as CSV!) - * - Every CSV record except the last must have the same number of fields - * The reason the last record is allowed to have fewer fields than the others is that - * it could have been truncated when the file was sampled. - */ - @Override - public boolean canCreateFromSample(List explanation, String sample) { - return SeparatedValuesLogStructureFinder.canCreateFromSample(explanation, sample, 2, CsvPreference.EXCEL_PREFERENCE, "CSV"); - } - - @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) - throws IOException { - return SeparatedValuesLogStructureFinder.makeSeparatedValuesLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, - CsvPreference.EXCEL_PREFERENCE, false); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SeparatedValuesLogStructureFinder.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java similarity index 97% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SeparatedValuesLogStructureFinder.java rename to x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java index fd9d34096b2..2f7bb41d0ba 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SeparatedValuesLogStructureFinder.java +++ b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java @@ -29,17 +29,16 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; -public class SeparatedValuesLogStructureFinder implements LogStructureFinder { +public class DelimitedLogStructureFinder implements LogStructureFinder { private static final int MAX_LEVENSHTEIN_COMPARISONS = 100; private final List sampleMessages; private final LogStructure structure; - static SeparatedValuesLogStructureFinder makeSeparatedValuesLogStructureFinder(List explanation, String sample, - String charsetName, Boolean hasByteOrderMarker, - CsvPreference csvPreference, boolean trimFields) - throws IOException { + static DelimitedLogStructureFinder makeDelimitedLogStructureFinder(List explanation, String sample, String charsetName, + Boolean hasByteOrderMarker, CsvPreference csvPreference, + boolean trimFields) throws IOException { Tuple>, List> parsed = readRows(sample, csvPreference); List> rows = parsed.v1(); @@ -73,13 +72,14 @@ public class SeparatedValuesLogStructureFinder implements LogStructureFinder { String preamble = Pattern.compile("\n").splitAsStream(sample).limit(lineNumbers.get(1)).collect(Collectors.joining("\n", "", "\n")); char delimiter = (char) csvPreference.getDelimiterChar(); - LogStructure.Builder structureBuilder = new LogStructure.Builder(LogStructure.Format.fromSeparator(delimiter)) + LogStructure.Builder 
structureBuilder = new LogStructure.Builder(LogStructure.Format.DELIMITED) .setCharset(charsetName) .setHasByteOrderMarker(hasByteOrderMarker) .setSampleStart(preamble) .setNumLinesAnalyzed(lineNumbers.get(lineNumbers.size() - 1)) .setNumMessagesAnalyzed(sampleRecords.size()) .setHasHeaderRow(isHeaderInFile) + .setDelimiter(delimiter) .setInputFields(Arrays.stream(headerWithNamedBlanks).collect(Collectors.toList())); if (trimFields) { @@ -131,10 +131,10 @@ public class SeparatedValuesLogStructureFinder implements LogStructureFinder { .setExplanation(explanation) .build(); - return new SeparatedValuesLogStructureFinder(sampleMessages, structure); + return new DelimitedLogStructureFinder(sampleMessages, structure); } - private SeparatedValuesLogStructureFinder(List sampleMessages, LogStructure structure) { + private DelimitedLogStructureFinder(List sampleMessages, LogStructure structure) { this.sampleMessages = Collections.unmodifiableList(sampleMessages); this.structure = structure; } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java new file mode 100644 index 00000000000..3e4c3ea225c --- /dev/null +++ b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.logstructurefinder; + +import org.supercsv.prefs.CsvPreference; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; + +public class DelimitedLogStructureFinderFactory implements LogStructureFinderFactory { + + private final CsvPreference csvPreference; + private final int minFieldsPerRow; + private final boolean trimFields; + + DelimitedLogStructureFinderFactory(char delimiter, int minFieldsPerRow, boolean trimFields) { + csvPreference = new CsvPreference.Builder('"', delimiter, "\n").build(); + this.minFieldsPerRow = minFieldsPerRow; + this.trimFields = trimFields; + } + + /** + * Rules are: + * - It must contain at least two complete records + * - There must be a minimum number of fields per record (otherwise files with no commas could be treated as CSV!) + * - Every record except the last must have the same number of fields + * The reason the last record is allowed to have fewer fields than the others is that + * it could have been truncated when the file was sampled. 
+ */ + @Override + public boolean canCreateFromSample(List explanation, String sample) { + String formatName; + switch ((char) csvPreference.getDelimiterChar()) { + case ',': + formatName = "CSV"; + break; + case '\t': + formatName = "TSV"; + break; + default: + formatName = Character.getName(csvPreference.getDelimiterChar()).toLowerCase(Locale.ROOT) + " delimited values"; + break; + } + return DelimitedLogStructureFinder.canCreateFromSample(explanation, sample, minFieldsPerRow, csvPreference, formatName); + } + + @Override + public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) + throws IOException { + return DelimitedLogStructureFinder.makeDelimitedLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, + csvPreference, trimFields); + } +} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java index 64a00d20899..ea8fe37e62f 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java +++ b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java @@ -27,37 +27,14 @@ public class LogStructure implements ToXContentObject { public enum Format { - JSON, XML, CSV, TSV, SEMI_COLON_SEPARATED_VALUES, PIPE_SEPARATED_VALUES, SEMI_STRUCTURED_TEXT; - - public Character separator() { - switch (this) { - case JSON: - case XML: - return null; - case CSV: - return ','; - case TSV: - return '\t'; - case SEMI_COLON_SEPARATED_VALUES: - return ';'; - case PIPE_SEPARATED_VALUES: - return '|'; - case SEMI_STRUCTURED_TEXT: - return null; - default: - throw new IllegalStateException("enum value [" + this + "] missing from switch."); - } - } + JSON, XML, DELIMITED, SEMI_STRUCTURED_TEXT; public boolean supportsNesting() { switch (this) { case JSON: case XML: return true; - case CSV: - case TSV: - case SEMI_COLON_SEPARATED_VALUES: - case PIPE_SEPARATED_VALUES: + case DELIMITED: case SEMI_STRUCTURED_TEXT: return false; default: @@ -69,10 +46,7 @@ public class LogStructure implements ToXContentObject { switch (this) { case JSON: case XML: - case CSV: - case TSV: - case SEMI_COLON_SEPARATED_VALUES: - case PIPE_SEPARATED_VALUES: + case DELIMITED: return true; case SEMI_STRUCTURED_TEXT: return false; @@ -85,10 +59,7 @@ public class LogStructure implements ToXContentObject { switch (this) { case JSON: case XML: - case CSV: - case TSV: - case SEMI_COLON_SEPARATED_VALUES: - case PIPE_SEPARATED_VALUES: + case DELIMITED: return false; case SEMI_STRUCTURED_TEXT: return true; @@ -97,38 +68,6 @@ public class LogStructure implements ToXContentObject { } } - public boolean isSeparatedValues() { - switch (this) { - case JSON: - case XML: - return false; - case CSV: - case TSV: - case SEMI_COLON_SEPARATED_VALUES: - case PIPE_SEPARATED_VALUES: - return true; - case SEMI_STRUCTURED_TEXT: - return false; - default: - throw new IllegalStateException("enum value [" + this + "] missing from switch."); - } - } - - public static Format fromSeparator(char separator) { - switch (separator) { - case ',': - return CSV; - case '\t': - return TSV; - case ';': - return SEMI_COLON_SEPARATED_VALUES; - case '|': - return PIPE_SEPARATED_VALUES; - default: - throw new IllegalArgumentException("No known format has separator [" + separator + "]"); - } - } 
- public static Format fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } @@ -149,7 +88,7 @@ public class LogStructure implements ToXContentObject { static final ParseField EXCLUDE_LINES_PATTERN = new ParseField("exclude_lines_pattern"); static final ParseField INPUT_FIELDS = new ParseField("input_fields"); static final ParseField HAS_HEADER_ROW = new ParseField("has_header_row"); - static final ParseField SEPARATOR = new ParseField("separator"); + static final ParseField DELIMITER = new ParseField("delimiter"); static final ParseField SHOULD_TRIM_FIELDS = new ParseField("should_trim_fields"); static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp_field"); @@ -171,7 +110,7 @@ public class LogStructure implements ToXContentObject { PARSER.declareString(Builder::setExcludeLinesPattern, EXCLUDE_LINES_PATTERN); PARSER.declareStringArray(Builder::setInputFields, INPUT_FIELDS); PARSER.declareBoolean(Builder::setHasHeaderRow, HAS_HEADER_ROW); - PARSER.declareString((p, c) -> p.setSeparator(c.charAt(0)), SEPARATOR); + PARSER.declareString((p, c) -> p.setDelimiter(c.charAt(0)), DELIMITER); PARSER.declareBoolean(Builder::setShouldTrimFields, SHOULD_TRIM_FIELDS); PARSER.declareString(Builder::setGrokPattern, GROK_PATTERN); PARSER.declareString(Builder::setTimestampField, TIMESTAMP_FIELD); @@ -191,7 +130,7 @@ public class LogStructure implements ToXContentObject { private final String excludeLinesPattern; private final List inputFields; private final Boolean hasHeaderRow; - private final Character separator; + private final Character delimiter; private final Boolean shouldTrimFields; private final String grokPattern; private final List timestampFormats; @@ -202,7 +141,7 @@ public class LogStructure implements ToXContentObject { public LogStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampleStart, String charset, Boolean hasByteOrderMarker, Format format, String multilineStartPattern, String excludeLinesPattern, List inputFields, - Boolean hasHeaderRow, Character separator, Boolean shouldTrimFields, String grokPattern, String timestampField, + Boolean hasHeaderRow, Character delimiter, Boolean shouldTrimFields, String grokPattern, String timestampField, List timestampFormats, boolean needClientTimezone, Map mappings, List explanation) { @@ -216,7 +155,7 @@ public class LogStructure implements ToXContentObject { this.excludeLinesPattern = excludeLinesPattern; this.inputFields = (inputFields == null) ? 
null : Collections.unmodifiableList(new ArrayList<>(inputFields)); this.hasHeaderRow = hasHeaderRow; - this.separator = separator; + this.delimiter = delimiter; this.shouldTrimFields = shouldTrimFields; this.grokPattern = grokPattern; this.timestampField = timestampField; @@ -266,8 +205,8 @@ public class LogStructure implements ToXContentObject { return hasHeaderRow; } - public Character getSeparator() { - return separator; + public Character getDelimiter() { + return delimiter; } public Boolean getShouldTrimFields() { @@ -322,8 +261,8 @@ public class LogStructure implements ToXContentObject { if (hasHeaderRow != null) { builder.field(HAS_HEADER_ROW.getPreferredName(), hasHeaderRow.booleanValue()); } - if (separator != null) { - builder.field(SEPARATOR.getPreferredName(), String.valueOf(separator)); + if (delimiter != null) { + builder.field(DELIMITER.getPreferredName(), String.valueOf(delimiter)); } if (shouldTrimFields != null) { builder.field(SHOULD_TRIM_FIELDS.getPreferredName(), shouldTrimFields.booleanValue()); @@ -349,7 +288,7 @@ public class LogStructure implements ToXContentObject { public int hashCode() { return Objects.hash(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, - multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, separator, shouldTrimFields, grokPattern, timestampField, + multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, timestampField, timestampFormats, needClientTimezone, mappings, explanation); } @@ -376,7 +315,7 @@ public class LogStructure implements ToXContentObject { Objects.equals(this.excludeLinesPattern, that.excludeLinesPattern) && Objects.equals(this.inputFields, that.inputFields) && Objects.equals(this.hasHeaderRow, that.hasHeaderRow) && - Objects.equals(this.separator, that.separator) && + Objects.equals(this.delimiter, that.delimiter) && Objects.equals(this.shouldTrimFields, that.shouldTrimFields) && Objects.equals(this.grokPattern, that.grokPattern) && Objects.equals(this.timestampField, that.timestampField) && @@ -397,7 +336,7 @@ public class LogStructure implements ToXContentObject { private String excludeLinesPattern; private List inputFields; private Boolean hasHeaderRow; - private Character separator; + private Character delimiter; private Boolean shouldTrimFields; private String grokPattern; private String timestampField; @@ -441,7 +380,6 @@ public class LogStructure implements ToXContentObject { public Builder setFormat(Format format) { this.format = Objects.requireNonNull(format); - this.separator = format.separator(); return this; } @@ -465,13 +403,13 @@ public class LogStructure implements ToXContentObject { return this; } - public Builder setShouldTrimFields(Boolean shouldTrimFields) { - this.shouldTrimFields = shouldTrimFields; + public Builder setDelimiter(Character delimiter) { + this.delimiter = delimiter; return this; } - public Builder setSeparator(Character separator) { - this.separator = separator; + public Builder setShouldTrimFields(Boolean shouldTrimFields) { + this.shouldTrimFields = shouldTrimFields; return this; } @@ -542,28 +480,22 @@ public class LogStructure implements ToXContentObject { if (hasHeaderRow != null) { throw new IllegalArgumentException("Has header row may not be specified for [" + format + "] structures."); } - if (separator != null) { - throw new IllegalArgumentException("Separator may not be specified for [" + format + "] structures."); + if (delimiter != null) { + throw new 
IllegalArgumentException("Delimiter may not be specified for [" + format + "] structures."); } if (grokPattern != null) { throw new IllegalArgumentException("Grok pattern may not be specified for [" + format + "] structures."); } break; - case CSV: - case TSV: - case SEMI_COLON_SEPARATED_VALUES: - case PIPE_SEPARATED_VALUES: + case DELIMITED: if (inputFields == null || inputFields.isEmpty()) { throw new IllegalArgumentException("Input fields must be specified for [" + format + "] structures."); } if (hasHeaderRow == null) { throw new IllegalArgumentException("Has header row must be specified for [" + format + "] structures."); } - Character expectedSeparator = format.separator(); - assert expectedSeparator != null; - if (expectedSeparator.equals(separator) == false) { - throw new IllegalArgumentException("Separator must be [" + expectedSeparator + "] for [" + format + - "] structures."); + if (delimiter == null) { + throw new IllegalArgumentException("Delimiter must be specified for [" + format + "] structures."); } if (grokPattern != null) { throw new IllegalArgumentException("Grok pattern may not be specified for [" + format + "] structures."); @@ -576,8 +508,8 @@ public class LogStructure implements ToXContentObject { if (hasHeaderRow != null) { throw new IllegalArgumentException("Has header row may not be specified for [" + format + "] structures."); } - if (separator != null) { - throw new IllegalArgumentException("Separator may not be specified for [" + format + "] structures."); + if (delimiter != null) { + throw new IllegalArgumentException("Delimiter may not be specified for [" + format + "] structures."); } if (shouldTrimFields != null) { throw new IllegalArgumentException("Should trim fields may not be specified for [" + format + "] structures."); @@ -607,7 +539,7 @@ public class LogStructure implements ToXContentObject { } return new LogStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, - multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, separator, shouldTrimFields, grokPattern, + multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, timestampField, timestampFormats, needClientTimezone, mappings, explanation); } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java index a8fd9d7eb89..e747a588dfd 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java +++ b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java @@ -69,10 +69,10 @@ public final class LogStructureFinderManager { new JsonLogStructureFinderFactory(), new XmlLogStructureFinderFactory(), // ND-JSON will often also be valid (although utterly weird) CSV, so JSON must come before CSV - new CsvLogStructureFinderFactory(), - new TsvLogStructureFinderFactory(), - new SemiColonSeparatedValuesLogStructureFinderFactory(), - new PipeSeparatedValuesLogStructureFinderFactory(), + new DelimitedLogStructureFinderFactory(',', 2, false), + new DelimitedLogStructureFinderFactory('\t', 2, false), + new DelimitedLogStructureFinderFactory(';', 4, false), + new DelimitedLogStructureFinderFactory('|', 5, true), new 
TextLogStructureFinderFactory() )); diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java index b1dfee22ee6..71a68c39991 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java +++ b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java @@ -21,12 +21,12 @@ import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.Stream; -final class LogStructureUtils { +public final class LogStructureUtils { - static final String DEFAULT_TIMESTAMP_FIELD = "@timestamp"; - static final String MAPPING_TYPE_SETTING = "type"; - static final String MAPPING_FORMAT_SETTING = "format"; - static final String MAPPING_PROPERTIES_SETTING = "properties"; + public static final String DEFAULT_TIMESTAMP_FIELD = "@timestamp"; + public static final String MAPPING_TYPE_SETTING = "type"; + public static final String MAPPING_FORMAT_SETTING = "format"; + public static final String MAPPING_PROPERTIES_SETTING = "properties"; // NUMBER Grok pattern doesn't support scientific notation, so we extend it private static final Grok NUMBER_GROK = new Grok(Grok.getBuiltinPatterns(), "^%{NUMBER}(?:[eE][+-]?[0-3]?[0-9]{1,2})?$"); diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactory.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactory.java deleted file mode 100644 index 085599de847..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactory.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -import org.supercsv.prefs.CsvPreference; - -import java.io.IOException; -import java.util.List; - -public class PipeSeparatedValuesLogStructureFinderFactory implements LogStructureFinderFactory { - - private static final CsvPreference PIPE_PREFERENCE = new CsvPreference.Builder('"', '|', "\n").build(); - - /** - * Rules are: - * - The file must be valid pipe (|) separated values - * - It must contain at least two complete records - * - There must be at least five fields per record (otherwise files with coincidental - * or no pipe characters could be treated as pipe separated) - * - Every pipe separated value record except the last must have the same number of fields - * The reason the last record is allowed to have fewer fields than the others is that - * it could have been truncated when the file was sampled. 
- */ - @Override - public boolean canCreateFromSample(List explanation, String sample) { - return SeparatedValuesLogStructureFinder.canCreateFromSample(explanation, sample, 5, PIPE_PREFERENCE, "pipe separated values"); - } - - @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) - throws IOException { - return SeparatedValuesLogStructureFinder.makeSeparatedValuesLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, - PIPE_PREFERENCE, true); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactory.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactory.java deleted file mode 100644 index e0e80fa7465..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactory.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -import org.supercsv.prefs.CsvPreference; - -import java.io.IOException; -import java.util.List; - -public class SemiColonSeparatedValuesLogStructureFinderFactory implements LogStructureFinderFactory { - - /** - * Rules are: - * - The file must be valid semi-colon separated values - * - It must contain at least two complete records - * - There must be at least four fields per record (otherwise files with coincidental - * or no semi-colons could be treated as semi-colon separated) - * - Every semi-colon separated value record except the last must have the same number of fields - * The reason the last record is allowed to have fewer fields than the others is that - * it could have been truncated when the file was sampled. 
- */ - @Override - public boolean canCreateFromSample(List explanation, String sample) { - return SeparatedValuesLogStructureFinder.canCreateFromSample(explanation, sample, 4, - CsvPreference.EXCEL_NORTH_EUROPE_PREFERENCE, "semi-colon separated values"); - } - - @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) - throws IOException { - return SeparatedValuesLogStructureFinder.makeSeparatedValuesLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, - CsvPreference.EXCEL_NORTH_EUROPE_PREFERENCE, false); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java index 733b32346fb..1b53a33f31e 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java +++ b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java @@ -23,13 +23,13 @@ public class TsvLogStructureFinderFactory implements LogStructureFinderFactory { */ @Override public boolean canCreateFromSample(List explanation, String sample) { - return SeparatedValuesLogStructureFinder.canCreateFromSample(explanation, sample, 2, CsvPreference.TAB_PREFERENCE, "TSV"); + return DelimitedLogStructureFinder.canCreateFromSample(explanation, sample, 2, CsvPreference.TAB_PREFERENCE, "TSV"); } @Override public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) throws IOException { - return SeparatedValuesLogStructureFinder.makeSeparatedValuesLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, + return DelimitedLogStructureFinder.makeDelimitedLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, CsvPreference.TAB_PREFERENCE, false); } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactoryTests.java deleted file mode 100644 index f53ee008d69..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/CsvLogStructureFinderFactoryTests.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -public class CsvLogStructureFinderFactoryTests extends LogStructureTestCase { - - private LogStructureFinderFactory factory = new CsvLogStructureFinderFactory(); - - // No need to check JSON or XML because they come earlier in the order we check formats - - public void testCanCreateFromSampleGivenCsv() { - - assertTrue(factory.canCreateFromSample(explanation, CSV_SAMPLE)); - } - - public void testCanCreateFromSampleGivenTsv() { - - assertFalse(factory.canCreateFromSample(explanation, TSV_SAMPLE)); - } - - public void testCanCreateFromSampleGivenSemiColonSeparatedValues() { - - assertFalse(factory.canCreateFromSample(explanation, SEMI_COLON_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenPipeSeparatedValues() { - - assertFalse(factory.canCreateFromSample(explanation, PIPE_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenText() { - - assertFalse(factory.canCreateFromSample(explanation, TEXT_SAMPLE)); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java new file mode 100644 index 00000000000..d9eadbc8f0f --- /dev/null +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.logstructurefinder; + +public class DelimitedLogStructureFinderFactoryTests extends LogStructureTestCase { + + private LogStructureFinderFactory csvFactory = new DelimitedLogStructureFinderFactory(',', 2, false); + private LogStructureFinderFactory tsvFactory = new DelimitedLogStructureFinderFactory('\t', 2, false); + private LogStructureFinderFactory semiColonDelimitedfactory = new DelimitedLogStructureFinderFactory(';', 4, false); + private LogStructureFinderFactory pipeDelimitedFactory = new DelimitedLogStructureFinderFactory('|', 5, true); + + // CSV - no need to check JSON or XML because they come earlier in the order we check formats + + public void testCanCreateCsvFromSampleGivenCsv() { + + assertTrue(csvFactory.canCreateFromSample(explanation, CSV_SAMPLE)); + } + + public void testCanCreateCsvFromSampleGivenTsv() { + + assertFalse(csvFactory.canCreateFromSample(explanation, TSV_SAMPLE)); + } + + public void testCanCreateCsvFromSampleGivenSemiColonDelimited() { + + assertFalse(csvFactory.canCreateFromSample(explanation, SEMI_COLON_DELIMITED_SAMPLE)); + } + + public void testCanCreateCsvFromSampleGivenPipeDelimited() { + + assertFalse(csvFactory.canCreateFromSample(explanation, PIPE_DELIMITED_SAMPLE)); + } + + public void testCanCreateCsvFromSampleGivenText() { + + assertFalse(csvFactory.canCreateFromSample(explanation, TEXT_SAMPLE)); + } + + // TSV - no need to check JSON, XML or CSV because they come earlier in the order we check formats + + public void testCanCreateTsvFromSampleGivenTsv() { + + assertTrue(tsvFactory.canCreateFromSample(explanation, TSV_SAMPLE)); + } + + public void testCanCreateTsvFromSampleGivenSemiColonDelimited() { + + assertFalse(tsvFactory.canCreateFromSample(explanation, SEMI_COLON_DELIMITED_SAMPLE)); + } + + public void testCanCreateTsvFromSampleGivenPipeDelimited() { + + assertFalse(tsvFactory.canCreateFromSample(explanation, PIPE_DELIMITED_SAMPLE)); + } + + public void testCanCreateTsvFromSampleGivenText() { + + assertFalse(tsvFactory.canCreateFromSample(explanation, TEXT_SAMPLE)); + } + + // Semi-colon delimited - no need to check JSON, XML, CSV or TSV because they come earlier in the order we check formats + + public void testCanCreateSemiColonDelimitedFromSampleGivenSemiColonDelimited() { + + assertTrue(semiColonDelimitedfactory.canCreateFromSample(explanation, SEMI_COLON_DELIMITED_SAMPLE)); + } + + public void testCanCreateSemiColonDelimitedFromSampleGivenPipeDelimited() { + + assertFalse(semiColonDelimitedfactory.canCreateFromSample(explanation, PIPE_DELIMITED_SAMPLE)); + } + + public void testCanCreateSemiColonDelimitedFromSampleGivenText() { + + assertFalse(semiColonDelimitedfactory.canCreateFromSample(explanation, TEXT_SAMPLE)); + } + + // Pipe delimited - no need to check JSON, XML, CSV, TSV or semi-colon delimited + // values because they come earlier in the order we check formats + + public void testCanCreatePipeDelimitedFromSampleGivenPipeDelimited() { + + assertTrue(pipeDelimitedFactory.canCreateFromSample(explanation, PIPE_DELIMITED_SAMPLE)); + } + + public void testCanCreatePipeDelimitedFromSampleGivenText() { + + assertFalse(pipeDelimitedFactory.canCreateFromSample(explanation, TEXT_SAMPLE)); + } +} diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SeparatedValuesLogStructureFinderTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java similarity 
index 65% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SeparatedValuesLogStructureFinderTests.java rename to x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java index b62832a0a19..57c297cf8d5 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SeparatedValuesLogStructureFinderTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java @@ -12,27 +12,27 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import static org.elasticsearch.xpack.ml.logstructurefinder.SeparatedValuesLogStructureFinder.levenshteinFieldwiseCompareRows; -import static org.elasticsearch.xpack.ml.logstructurefinder.SeparatedValuesLogStructureFinder.levenshteinDistance; +import static org.elasticsearch.xpack.ml.logstructurefinder.DelimitedLogStructureFinder.levenshteinFieldwiseCompareRows; +import static org.elasticsearch.xpack.ml.logstructurefinder.DelimitedLogStructureFinder.levenshteinDistance; import static org.hamcrest.Matchers.arrayContaining; -public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase { +public class DelimitedLogStructureFinderTests extends LogStructureTestCase { - private LogStructureFinderFactory factory = new CsvLogStructureFinderFactory(); + private LogStructureFinderFactory csvFactory = new DelimitedLogStructureFinderFactory(',', 2, false); public void testCreateConfigsGivenCompleteCsv() throws Exception { String sample = "time,message\n" + "2018-05-17T13:41:23,hello\n" + "2018-05-17T13:41:32,hello again\n"; - assertTrue(factory.canCreateFromSample(explanation, sample)); + assertTrue(csvFactory.canCreateFromSample(explanation, sample)); String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); LogStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.CSV, structure.getFormat()); + assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -41,7 +41,7 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase } assertEquals("^\"?time\"?,\"?message\"?", structure.getExcludeLinesPattern()); assertEquals("^\"?\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); - assertEquals(Character.valueOf(','), structure.getSeparator()); + assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("time", "message"), structure.getInputFields()); @@ -55,15 +55,15 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "\"hello\n" + "world\",2018-05-17T13:41:23,1\n" + "\"hello again\n"; // note that this last record is truncated - assertTrue(factory.canCreateFromSample(explanation, sample)); + assertTrue(csvFactory.canCreateFromSample(explanation, sample)); String charset = randomFrom(POSSIBLE_CHARSETS); 
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); LogStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.CSV, structure.getFormat()); + assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -72,7 +72,7 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase } assertEquals("^\"?message\"?,\"?time\"?,\"?count\"?", structure.getExcludeLinesPattern()); assertEquals("^.*?,\"?\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); - assertEquals(Character.valueOf(','), structure.getSeparator()); + assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("message", "time", "count"), structure.getInputFields()); @@ -88,15 +88,15 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" + "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" + "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n"; - assertTrue(factory.canCreateFromSample(explanation, sample)); + assertTrue(csvFactory.canCreateFromSample(explanation, sample)); String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); LogStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.CSV, structure.getFormat()); + assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -108,7 +108,7 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?,\"?\"?,\"?\"?", structure.getExcludeLinesPattern()); assertEquals("^.*?,\"?\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); - assertEquals(Character.valueOf(','), structure.getSeparator()); + assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance", @@ -126,15 +126,15 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" + "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" + "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n"; - assertTrue(factory.canCreateFromSample(explanation, sample)); + 
assertTrue(csvFactory.canCreateFromSample(explanation, sample)); String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); LogStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.CSV, structure.getFormat()); + assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -146,7 +146,7 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?", structure.getExcludeLinesPattern()); assertEquals("^.*?,\"?\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); - assertEquals(Character.valueOf(','), structure.getSeparator()); + assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance", @@ -161,15 +161,15 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase String sample = "\"pos_id\",\"trip_id\",\"latitude\",\"longitude\",\"altitude\",\"timestamp\"\n" + "\"1\",\"3\",\"4703.7815\",\"1527.4713\",\"359.9\",\"2017-01-19 16:19:04.742113\"\n" + "\"2\",\"3\",\"4703.7815\",\"1527.4714\",\"359.9\",\"2017-01-19 16:19:05.741890\"\n"; - assertTrue(factory.canCreateFromSample(explanation, sample)); + assertTrue(csvFactory.canCreateFromSample(explanation, sample)); String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = factory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); LogStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.CSV, structure.getFormat()); + assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -179,7 +179,7 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase assertEquals("^\"?pos_id\"?,\"?trip_id\"?,\"?latitude\"?,\"?longitude\"?,\"?altitude\"?,\"?timestamp\"?", structure.getExcludeLinesPattern()); assertNull(structure.getMultilineStartPattern()); - assertEquals(Character.valueOf(','), structure.getSeparator()); + assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), structure.getInputFields()); @@ -195,8 +195,8 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" + "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n"; - Tuple header = SeparatedValuesLogStructureFinder.findHeaderFromSample(explanation, - 
SeparatedValuesLogStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE).v1()); + Tuple header = DelimitedLogStructureFinder.findHeaderFromSample(explanation, + DelimitedLogStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE).v1()); assertTrue(header.v1()); assertThat(header.v2(), arrayContaining("time", "airline", "responsetime", "sourcetype")); @@ -208,8 +208,8 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" + "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n"; - Tuple header = SeparatedValuesLogStructureFinder.findHeaderFromSample(explanation, - SeparatedValuesLogStructureFinder.readRows(withoutHeader, CsvPreference.EXCEL_PREFERENCE).v1()); + Tuple header = DelimitedLogStructureFinder.findHeaderFromSample(explanation, + DelimitedLogStructureFinder.readRows(withoutHeader, CsvPreference.EXCEL_PREFERENCE).v1()); assertFalse(header.v1()); assertThat(header.v2(), arrayContaining("column1", "column2", "column3", "column4")); @@ -251,43 +251,43 @@ public class SeparatedValuesLogStructureFinderTests extends LogStructureTestCase public void testLineHasUnescapedQuote() { - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a,b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a,b\",c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a,b,c\"", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a,\"b\",c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a,b,\"c\"", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a,\"b\"\"\",c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a,b,\"c\"\"\"", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"\"\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\"\"\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a,\"\"b\",c", CsvPreference.EXCEL_PREFERENCE)); - assertTrue(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("between\"words,b,c", CsvPreference.EXCEL_PREFERENCE)); - assertTrue(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("x and \"y\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a,b\",c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a,b,c\"", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,\"b\",c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,b,\"c\"", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,\"b\"\"\",c", CsvPreference.EXCEL_PREFERENCE)); + 
assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,b,\"c\"\"\"", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"\"\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\"\"\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a,\"\"b\",c", CsvPreference.EXCEL_PREFERENCE)); + assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("between\"words,b,c", CsvPreference.EXCEL_PREFERENCE)); + assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("x and \"y\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\tb\"\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\tb\tc\"", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a\t\"b\"\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a\t\"b\"\"\"\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"\"\"", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"\"\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\"\"\"\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("\"a\t\"\"b\"\tc", CsvPreference.TAB_PREFERENCE)); - assertTrue(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("between\"words\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertTrue(SeparatedValuesLogStructureFinder.lineHasUnescapedQuote("x and \"y\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\tb\"\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\tb\tc\"", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\t\"b\"\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\t\"b\"\"\"\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"\"\"", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"\"\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\"\"\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\t\"\"b\"\tc", CsvPreference.TAB_PREFERENCE)); + 
assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("between\"words\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("x and \"y\"\tb\tc", CsvPreference.TAB_PREFERENCE)); } public void testRowContainsDuplicateNonEmptyValues() { - assertFalse(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList("a"))); - assertFalse(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList(""))); - assertFalse(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "c"))); - assertTrue(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "a"))); - assertTrue(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "b"))); - assertFalse(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "", ""))); - assertFalse(SeparatedValuesLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("", "a", ""))); + assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList("a"))); + assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList(""))); + assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "c"))); + assertTrue(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "a"))); + assertTrue(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "b"))); + assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "", ""))); + assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("", "a", ""))); } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java index 39ef3b9eedb..cdbffa8259e 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java @@ -29,14 +29,14 @@ public class JsonLogStructureFinderFactoryTests extends LogStructureTestCase { assertFalse(factory.canCreateFromSample(explanation, TSV_SAMPLE)); } - public void testCanCreateFromSampleGivenSemiColonSeparatedValues() { + public void testCanCreateFromSampleGivenSemiColonDelimited() { - assertFalse(factory.canCreateFromSample(explanation, SEMI_COLON_SEPARATED_VALUES_SAMPLE)); + assertFalse(factory.canCreateFromSample(explanation, SEMI_COLON_DELIMITED_SAMPLE)); } - public void testCanCreateFromSampleGivenPipeSeparatedValues() { + public void testCanCreateFromSampleGivenPipeDelimited() { - assertFalse(factory.canCreateFromSample(explanation, PIPE_SEPARATED_VALUES_SAMPLE)); + assertFalse(factory.canCreateFromSample(explanation, PIPE_DELIMITED_SAMPLE)); } public void testCanCreateFromSampleGivenText() { diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java 
b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java index 2f727747bbf..917054919dd 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java @@ -29,7 +29,7 @@ public class JsonLogStructureFinderTests extends LogStructureTestCase { } assertNull(structure.getExcludeLinesPattern()); assertNull(structure.getMultilineStartPattern()); - assertNull(structure.getSeparator()); + assertNull(structure.getDelimiter()); assertNull(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertNull(structure.getGrokPattern()); diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java index 1f8691de8cf..520a55510c7 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java @@ -61,7 +61,7 @@ public class LogStructureFinderManagerTests extends LogStructureTestCase { public void testMakeBestStructureGivenCsv() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, "time,message\n" + "2018-05-17T13:41:23,hello\n", StandardCharsets.UTF_8.name(), randomBoolean()), - instanceOf(SeparatedValuesLogStructureFinder.class)); + instanceOf(DelimitedLogStructureFinder.class)); } public void testMakeBestStructureGivenText() throws Exception { diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java index 5f9a87ef2a7..6b718fef6c7 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java @@ -34,14 +34,14 @@ public abstract class LogStructureTestCase extends ESTestCase { "\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\"," + "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n"; - protected static final String PIPE_SEPARATED_VALUES_SAMPLE = "2018-01-06 16:56:14.295748|INFO |VirtualServer |1 |" + + protected static final String PIPE_DELIMITED_SAMPLE = "2018-01-06 16:56:14.295748|INFO |VirtualServer |1 |" + "listening on 0.0.0.0:9987, :::9987\n" + "2018-01-06 17:19:44.465252|INFO |VirtualServer |1 |client " + "'User1'(id:2) changed default admin channelgroup to 'Guest'(id:8)\n" + "2018-01-06 17:21:25.764368|INFO |VirtualServer |1 |client " + "'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel 'Default Channel'(id:1)"; - protected static final String SEMI_COLON_SEPARATED_VALUES_SAMPLE = "\"pos_id\";\"trip_id\";\"latitude\";\"longitude\";\"altitude\";" + + protected static final String 
SEMI_COLON_DELIMITED_SAMPLE = "\"pos_id\";\"trip_id\";\"latitude\";\"longitude\";\"altitude\";" + "\"timestamp\"\n" + "\"1\";\"3\";\"4703.7815\";\"1527.4713\";\"359.9\";\"2017-01-19 16:19:04.742113\"\n" + "\"2\";\"3\";\"4703.7815\";\"1527.4714\";\"359.9\";\"2017-01-19 16:19:05.741890\"\n" + diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java index 738928ed28a..302946dcaa8 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java @@ -43,14 +43,12 @@ public class LogStructureTests extends AbstractXContentTestCase { builder.setExcludeLinesPattern(randomAlphaOfLength(100)); } - if (format.isSeparatedValues() || (format.supportsNesting() && randomBoolean())) { + if (format == LogStructure.Format.DELIMITED || (format.supportsNesting() && randomBoolean())) { builder.setInputFields(Arrays.asList(generateRandomStringArray(10, 10, false, false))); } - if (format.isSeparatedValues()) { + if (format == LogStructure.Format.DELIMITED) { builder.setHasHeaderRow(randomBoolean()); - if (rarely()) { - builder.setSeparator(format.separator()); - } + builder.setDelimiter(randomFrom(',', '\t', ';', '|')); } if (format.isSemiStructured()) { builder.setGrokPattern(randomAlphaOfLength(100)); diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactoryTests.java deleted file mode 100644 index 3fd2fb7840a..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/PipeSeparatedValuesLogStructureFinderFactoryTests.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -public class PipeSeparatedValuesLogStructureFinderFactoryTests extends LogStructureTestCase { - - private LogStructureFinderFactory factory = new PipeSeparatedValuesLogStructureFinderFactory(); - - // No need to check JSON, XML, CSV, TSV or semi-colon separated values because they come earlier in the order we check formats - - public void testCanCreateFromSampleGivenPipeSeparatedValues() { - - assertTrue(factory.canCreateFromSample(explanation, PIPE_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenText() { - - assertFalse(factory.canCreateFromSample(explanation, TEXT_SAMPLE)); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactoryTests.java deleted file mode 100644 index 64dad7e078c..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/SemiColonSeparatedValuesLogStructureFinderFactoryTests.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -public class SemiColonSeparatedValuesLogStructureFinderFactoryTests extends LogStructureTestCase { - - private LogStructureFinderFactory factory = new SemiColonSeparatedValuesLogStructureFinderFactory(); - - // No need to check JSON, XML, CSV or TSV because they come earlier in the order we check formats - - public void testCanCreateFromSampleGivenSemiColonSeparatedValues() { - - assertTrue(factory.canCreateFromSample(explanation, SEMI_COLON_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenPipeSeparatedValues() { - - assertFalse(factory.canCreateFromSample(explanation, PIPE_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenText() { - - assertFalse(factory.canCreateFromSample(explanation, TEXT_SAMPLE)); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java index 267ce375d6e..c1b30cc7496 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java @@ -9,8 +9,8 @@ public class TextLogStructureFinderFactoryTests extends LogStructureTestCase { private LogStructureFinderFactory factory = new TextLogStructureFinderFactory(); - // No need to check JSON, XML, CSV, TSV, semi-colon separated values or pipe - // separated values because they come earlier in the order we check formats + // No need to check JSON, XML, CSV, TSV, semi-colon delimited values or pipe + // delimited values because they come earlier in the order we check formats public void testCanCreateFromSampleGivenText() { diff --git 
a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java index 7c6a58bb683..c9e153a82c4 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java @@ -34,7 +34,7 @@ public class TextLogStructureFinderTests extends LogStructureTestCase { } assertNull(structure.getExcludeLinesPattern()); assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); - assertNull(structure.getSeparator()); + assertNull(structure.getDelimiter()); assertNull(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals("\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} \\]\\[.*", structure.getGrokPattern()); diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactoryTests.java deleted file mode 100644 index 1c8acc14d32..00000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactoryTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -public class TsvLogStructureFinderFactoryTests extends LogStructureTestCase { - - private LogStructureFinderFactory factory = new TsvLogStructureFinderFactory(); - - // No need to check JSON, XML or CSV because they come earlier in the order we check formats - - public void testCanCreateFromSampleGivenTsv() { - - assertTrue(factory.canCreateFromSample(explanation, TSV_SAMPLE)); - } - - public void testCanCreateFromSampleGivenSemiColonSeparatedValues() { - - assertFalse(factory.canCreateFromSample(explanation, SEMI_COLON_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenPipeSeparatedValues() { - - assertFalse(factory.canCreateFromSample(explanation, PIPE_SEPARATED_VALUES_SAMPLE)); - } - - public void testCanCreateFromSampleGivenText() { - - assertFalse(factory.canCreateFromSample(explanation, TEXT_SAMPLE)); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java index 27eb4ede040..b6dc3a56f1d 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java @@ -26,14 +26,14 @@ public class XmlLogStructureFinderFactoryTests extends LogStructureTestCase { assertFalse(factory.canCreateFromSample(explanation, TSV_SAMPLE)); } - public void testCanCreateFromSampleGivenSemiColonSeparatedValues() { + public void testCanCreateFromSampleGivenSemiColonDelimited() { - assertFalse(factory.canCreateFromSample(explanation, SEMI_COLON_SEPARATED_VALUES_SAMPLE)); + assertFalse(factory.canCreateFromSample(explanation, SEMI_COLON_DELIMITED_SAMPLE)); } - public void testCanCreateFromSampleGivenPipeSeparatedValues() { + public void testCanCreateFromSampleGivenPipeDelimited() { - assertFalse(factory.canCreateFromSample(explanation, PIPE_SEPARATED_VALUES_SAMPLE)); + assertFalse(factory.canCreateFromSample(explanation, PIPE_DELIMITED_SAMPLE)); } public void testCanCreateFromSampleGivenText() { diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java index 0d04df152ef..de653d7bcd0 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java +++ b/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java @@ -29,7 +29,7 @@ public class XmlLogStructureFinderTests extends LogStructureTestCase { } assertNull(structure.getExcludeLinesPattern()); assertEquals("^\\s* Date: Fri, 31 Aug 2018 13:08:32 +0300 Subject: [PATCH 05/23] Different handling for security specific errors in the CLI. 
Fix for https://github.com/elastic/elasticsearch/issues/33230 (#33255) --- docs/reference/sql/endpoints/cli.asciidoc | 9 +++++++++ .../main/java/org/elasticsearch/xpack/sql/cli/Cli.java | 5 +++++ 2 files changed, 14 insertions(+) diff --git a/docs/reference/sql/endpoints/cli.asciidoc b/docs/reference/sql/endpoints/cli.asciidoc index 0908c2344bb..eef2fbfbf59 100644 --- a/docs/reference/sql/endpoints/cli.asciidoc +++ b/docs/reference/sql/endpoints/cli.asciidoc @@ -22,6 +22,15 @@ the first parameter: $ ./bin/elasticsearch-sql-cli https://some.server:9200 -------------------------------------------------- +If security is enabled on your cluster, you can pass the username +and password in the form `username:password@host_name:port` +to the SQL CLI: + +[source,bash] +-------------------------------------------------- +$ ./bin/elasticsearch-sql-cli https://sql_user:strongpassword@some.server:9200 +-------------------------------------------------- + Once the CLI is running you can use any <> that Elasticsearch supports: diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java index 357a4bcb5a7..6431f10a492 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.sql.client.Version; import org.jline.terminal.TerminalBuilder; import java.io.IOException; import java.net.ConnectException; +import java.sql.SQLInvalidAuthorizationSpecException; import java.util.Arrays; import java.util.List; import java.util.logging.LogManager; @@ -139,6 +140,10 @@ public class Cli extends LoggingAwareCommand { // Most likely Elasticsearch is not running throw new UserException(ExitCodes.IO_ERROR, "Cannot connect to the server " + con.connectionString() + " - " + ex.getCause().getMessage()); + } else if (ex.getCause() != null && ex.getCause() instanceof SQLInvalidAuthorizationSpecException) { + throw new UserException(ExitCodes.NOPERM, + "Cannot establish a secure connection to the server " + + con.connectionString() + " - " + ex.getCause().getMessage()); } else { // Most likely we connected to something other than Elasticsearch throw new UserException(ExitCodes.DATA_ERROR, From a88f8789a0a9260de0774ee7b76446c2a792294f Mon Sep 17 00:00:00 2001 From: Pablo Musa Date: Fri, 31 Aug 2018 14:48:55 +0200 Subject: [PATCH 06/23] Highlight that index_phrases only works if no slop is used (#33303) Highlight that `index_phrases` only works if no slop is used at query time. --- docs/reference/mapping/types/text.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/mapping/types/text.asciidoc b/docs/reference/mapping/types/text.asciidoc index e2336bd5cb0..db64e87412e 100644 --- a/docs/reference/mapping/types/text.asciidoc +++ b/docs/reference/mapping/types/text.asciidoc @@ -99,7 +99,7 @@ The following parameters are accepted by `text` fields: `index_phrases`:: If enabled, two-term word combinations ('shingles') are indexed into a separate - field. This allows exact phrase queries to run more efficiently, at the expense + field. This allows exact phrase queries (no slop) to run more efficiently, at the expense of a larger index. Note that this works best when stopwords are not removed, as phrases containing stopwords will not use the subsidiary field and will fall back to a standard phrase query. 
Accepts `true` or `false` (default). @@ -171,4 +171,4 @@ PUT my_index -------------------------------- // CONSOLE <1> `min_chars` must be greater than zero, defaults to 2 -<2> `max_chars` must be greater than or equal to `min_chars` and less than 20, defaults to 5 \ No newline at end of file +<2> `max_chars` must be greater than or equal to `min_chars` and less than 20, defaults to 5 From f6a570880c5c44adea4ad1f35e950c51335d8ac7 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 31 Aug 2018 16:01:54 +0300 Subject: [PATCH 07/23] Work around to be able to generate eclipse projects (#33295) * Work around to be able to generate eclipse projects https://github.com/gradle/gradle/issues/6582 --- build.gradle | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/build.gradle b/build.gradle index 36d3a543d89..f8282ca5ae8 100644 --- a/build.gradle +++ b/build.gradle @@ -16,7 +16,9 @@ * specific language governing permissions and limitations * under the License. */ + import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin +import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version @@ -24,14 +26,9 @@ import org.elasticsearch.gradle.VersionCollection import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.gradle.plugins.ide.eclipse.model.SourceFolder -import org.gradle.util.GradleVersion -import org.gradle.util.DistributionLocator -import org.apache.tools.ant.taskdefs.condition.Os -import org.apache.tools.ant.filters.ReplaceTokens import java.nio.file.Files import java.nio.file.Path -import java.security.MessageDigest plugins { id 'com.gradle.build-scan' version '1.13.2' @@ -512,6 +509,16 @@ allprojects { tasks.cleanEclipse.dependsOn(wipeEclipseSettings) // otherwise the eclipse merging is *super confusing* tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) + + // work around https://github.com/gradle/gradle/issues/6582 + tasks.eclipseProject.mustRunAfter tasks.cleanEclipseProject + tasks.matching { it.name == 'eclipseClasspath' }.all { + it.mustRunAfter { tasks.cleanEclipseClasspath } + } + tasks.matching { it.name == 'eclipseJdt' }.all { + it.mustRunAfter { tasks.cleanEclipseJdt } + } + tasks.copyEclipseSettings.mustRunAfter tasks.wipeEclipseSettings } allprojects { From 0c4b3162be7c2a6bd03a44ab15821ef49a776738 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Fri, 31 Aug 2018 16:12:01 +0300 Subject: [PATCH 08/23] SQL: test coverage for JdbcResultSet (#32813) * Tests for JdbcResultSet * Added VARCHAR conversion for different types * Made error messages consistent: they now contain both the type that fails to be converted and the value itself --- .../xpack/sql/jdbc/jdbc/JdbcResultSet.java | 65 +- .../xpack/sql/jdbc/jdbc/TypeConverter.java | 87 +- .../jdbc/jdbc/JdbcPreparedStatementTests.java | 42 +- .../qa/sql/nosecurity/JdbcResultSetIT.java | 16 + .../xpack/qa/sql/jdbc/ResultSetTestCase.java | 1516 ++++++++++++++++- .../qa/sql/jdbc/SimpleExampleTestCase.java | 3 +- 6 files changed, 1621 insertions(+), 108 deletions(-) create mode 100644 x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcResultSetIT.java diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java index 201ae251ca0..ebdeaef15ca 100644
--- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java @@ -133,72 +133,37 @@ class JdbcResultSet implements ResultSet, JdbcWrapper { @Override public boolean getBoolean(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? (Boolean) val : false; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a boolean", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Boolean.class) : false; } @Override public byte getByte(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? ((Number) val).byteValue() : 0; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a byte", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Byte.class) : 0; } @Override public short getShort(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? ((Number) val).shortValue() : 0; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a short", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Short.class) : 0; } @Override public int getInt(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? ((Number) val).intValue() : 0; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to an int", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Integer.class) : 0; } @Override public long getLong(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? ((Number) val).longValue() : 0; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a long", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Long.class) : 0; } @Override public float getFloat(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? ((Number) val).floatValue() : 0; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a float", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Float.class) : 0; } @Override public double getDouble(int columnIndex) throws SQLException { - Object val = column(columnIndex); - try { - return val != null ? ((Number) val).doubleValue() : 0; - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a double", cce); - } + return column(columnIndex) != null ? getObject(columnIndex, Double.class) : 0; } @Override @@ -272,15 +237,29 @@ class JdbcResultSet implements ResultSet, JdbcWrapper { @Override public Date getDate(String columnLabel) throws SQLException { + // TODO: the error message in case the value in the column cannot be converted to a Date refers to a column index + // (for example - "unable to convert column 4 to a long") and not to the column name, which is a bit confusing. + // Should we reconsider this? Maybe by catching the exception here and rethrowing it with the columnLabel instead. 
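+        // A minimal, hypothetical sketch of that catch-and-rethrow idea (illustrative only, not part of
+        // this change; the exact message wording is an assumption):
+        //   try {
+        //       return getDate(column(columnLabel));
+        //   } catch (SQLException sqle) {
+        //       throw new SQLException("unable to convert column [" + columnLabel + "] to a Date", sqle);
+        //   }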
return getDate(column(columnLabel)); } private Long dateTime(int columnIndex) throws SQLException { Object val = column(columnIndex); + JDBCType type = cursor.columns().get(columnIndex - 1).type; try { + // TODO: the B6 appendix of the jdbc spec does mention CHAR, VARCHAR, LONGVARCHAR, DATE, TIMESTAMP as supported + // jdbc types that should be handled by getDate and getTime methods. From all of those we support VARCHAR and + // TIMESTAMP. Should we consider the VARCHAR conversion as a later enhancement? + if (JDBCType.TIMESTAMP.equals(type)) { + // the cursor can return an Integer if the date-since-epoch is small enough, XContentParser (Jackson) will + // return the "smallest" data type for numbers when parsing + // TODO: this should probably be handled server side + return val == null ? null : ((Number) val).longValue(); + }; return val == null ? null : (Long) val; } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to a long", cce); + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", val, type.getName()), cce); } } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java index 3b5180b71f7..7b638d8bd09 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.sql.type.DataType; import java.sql.Date; import java.sql.JDBCType; -import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Time; @@ -56,9 +55,10 @@ final class TypeConverter { } - private static final long DAY_IN_MILLIS = 60 * 60 * 24; + private static final long DAY_IN_MILLIS = 60 * 60 * 24 * 1000; private static final Map, JDBCType> javaToJDBC; + static { Map, JDBCType> aMap = Arrays.stream(DataType.values()) .filter(dataType -> dataType.javaClass() != null @@ -120,6 +120,7 @@ final class TypeConverter { } } + static long convertFromCalendarToUTC(long value, Calendar cal) { if (cal == null) { return value; @@ -143,11 +144,15 @@ final class TypeConverter { return (T) convert(val, columnType); } - if (type.isInstance(val)) { + // converting a Long to a Timestamp shouldn't be possible according to the spec, + // it feels a little brittle to check this scenario here and I don't particularly like it + // TODO: can we do any better or should we go over the spec and allow getLong(date) to be valid? 
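+        // Note: for a TIMESTAMP column the cursor value arrives as a Number of millis-since-epoch, so
+        // without the guard below getObject(columnIndex, Long.class) would pass the type.isInstance(val)
+        // check and silently return the raw epoch millis instead of rejecting the conversion.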
+ if (!(type == Long.class && columnType == JDBCType.TIMESTAMP) && type.isInstance(val)) { try { return type.cast(val); } catch (ClassCastException cce) { - throw new SQLDataException("Unable to convert " + val.getClass().getName() + " to " + columnType, cce); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a %s", val, + columnType.getName(), type.getName()), cce); } } @@ -205,7 +210,8 @@ final class TypeConverter { if (type == OffsetDateTime.class) { return (T) asOffsetDateTime(val, columnType); } - throw new SQLException("Conversion from type [" + columnType + "] to [" + type.getName() + "] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a %s", val, + columnType.getName(), type.getName())); } /** @@ -336,8 +342,11 @@ final class TypeConverter { case FLOAT: case DOUBLE: return Boolean.valueOf(Integer.signum(((Number) val).intValue()) != 0); + case VARCHAR: + return Boolean.valueOf((String) val); default: - throw new SQLException("Conversion from type [" + columnType + "] to [Boolean] not supported"); + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Boolean", val, columnType.getName())); } } @@ -355,10 +364,16 @@ final class TypeConverter { case FLOAT: case DOUBLE: return safeToByte(safeToLong(((Number) val).doubleValue())); + case VARCHAR: + try { + return Byte.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", val), e); + } default: } - throw new SQLException("Conversion from type [" + columnType + "] to [Byte] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Byte", val, columnType.getName())); } private static Short asShort(Object val, JDBCType columnType) throws SQLException { @@ -374,10 +389,16 @@ final class TypeConverter { case FLOAT: case DOUBLE: return safeToShort(safeToLong(((Number) val).doubleValue())); + case VARCHAR: + try { + return Short.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Short", val), e); + } default: } - throw new SQLException("Conversion from type [" + columnType + "] to [Short] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Short", val, columnType.getName())); } private static Integer asInteger(Object val, JDBCType columnType) throws SQLException { @@ -393,10 +414,18 @@ final class TypeConverter { case FLOAT: case DOUBLE: return safeToInt(safeToLong(((Number) val).doubleValue())); + case VARCHAR: + try { + return Integer.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to an Integer", val), e); + } default: } - throw new SQLException("Conversion from type [" + columnType + "] to [Integer] not supported"); + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to an Integer", val, columnType.getName())); } private static Long asLong(Object val, JDBCType columnType) throws SQLException { @@ -412,12 +441,21 @@ final class TypeConverter { case FLOAT: case DOUBLE: return safeToLong(((Number) val).doubleValue()); - case TIMESTAMP: - return ((Number) val).longValue(); + //TODO: should we support 
conversion to TIMESTAMP? + //The spec says that getLong() should support the following types conversions: + //TINYINT, SMALLINT, INTEGER, BIGINT, REAL, FLOAT, DOUBLE, DECIMAL, NUMERIC, BIT, BOOLEAN, CHAR, VARCHAR, LONGVARCHAR + //case TIMESTAMP: + // return ((Number) val).longValue(); + case VARCHAR: + try { + return Long.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Long", val), e); + } default: } - throw new SQLException("Conversion from type [" + columnType + "] to [Long] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", val, columnType.getName())); } private static Float asFloat(Object val, JDBCType columnType) throws SQLException { @@ -433,10 +471,16 @@ final class TypeConverter { case FLOAT: case DOUBLE: return Float.valueOf((((float) ((Number) val).doubleValue()))); + case VARCHAR: + try { + return Float.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Float", val), e); + } default: } - throw new SQLException("Conversion from type [" + columnType + "] to [Float] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Float", val, columnType.getName())); } private static Double asDouble(Object val, JDBCType columnType) throws SQLException { @@ -451,32 +495,41 @@ final class TypeConverter { case REAL: case FLOAT: case DOUBLE: + return Double.valueOf(((Number) val).doubleValue()); + case VARCHAR: + try { + return Double.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Double", val), e); + } default: } - throw new SQLException("Conversion from type [" + columnType + "] to [Double] not supported"); + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Double", val, columnType.getName())); } private static Date asDate(Object val, JDBCType columnType) throws SQLException { if (columnType == JDBCType.TIMESTAMP) { return new Date(utcMillisRemoveTime(((Number) val).longValue())); } - throw new SQLException("Conversion from type [" + columnType + "] to [Date] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Date", val, columnType.getName())); } private static Time asTime(Object val, JDBCType columnType) throws SQLException { if (columnType == JDBCType.TIMESTAMP) { return new Time(utcMillisRemoveDate(((Number) val).longValue())); } - throw new SQLException("Conversion from type [" + columnType + "] to [Time] not supported"); + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Time", val, columnType.getName())); } private static Timestamp asTimestamp(Object val, JDBCType columnType) throws SQLException { if (columnType == JDBCType.TIMESTAMP) { return new Timestamp(((Number) val).longValue()); } - throw new SQLException("Conversion from type [" + columnType + "] to [Timestamp] not supported"); + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Timestamp", val, columnType.getName())); } private static byte[] asByteArray(Object val, JDBCType columnType) { diff --git 
a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java index 9da06f6537c..35a3ec57487 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java @@ -25,6 +25,7 @@ import java.util.Date; import java.util.Locale; import java.util.Map; +import static java.lang.String.format; import static java.sql.JDBCType.BIGINT; import static java.sql.JDBCType.BOOLEAN; import static java.sql.JDBCType.DOUBLE; @@ -68,7 +69,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { JdbcPreparedStatement jps = createJdbcPreparedStatement(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, true, Types.TIMESTAMP)); - assertEquals("Conversion from type [BOOLEAN] to [Timestamp] not supported", sqle.getMessage()); + assertEquals("Unable to convert value [true] of type [BOOLEAN] to a Timestamp", sqle.getMessage()); } public void testSettingStringValues() throws SQLException { @@ -92,7 +93,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { JdbcPreparedStatement jps = createJdbcPreparedStatement(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, "foo bar", Types.INTEGER)); - assertEquals("Conversion from type [VARCHAR] to [Integer] not supported", sqle.getMessage()); + assertEquals("Unable to convert value [foo bar] of type [VARCHAR] to an Integer", sqle.getMessage()); } public void testSettingByteTypeValues() throws SQLException { @@ -128,7 +129,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { JdbcPreparedStatement jps = createJdbcPreparedStatement(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, (byte) 6, Types.TIMESTAMP)); - assertEquals("Conversion from type [TINYINT] to [Timestamp] not supported", sqle.getMessage()); + assertEquals("Unable to convert value [6] of type [TINYINT] to a Timestamp", sqle.getMessage()); } public void testSettingShortTypeValues() throws SQLException { @@ -161,7 +162,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { JdbcPreparedStatement jps = createJdbcPreparedStatement(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, (short) 6, Types.TIMESTAMP)); - assertEquals("Conversion from type [SMALLINT] to [Timestamp] not supported", sqle.getMessage()); + assertEquals("Unable to convert value [6] of type [SMALLINT] to a Timestamp", sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> jps.setObject(1, 256, Types.TINYINT)); assertEquals("Numeric " + 256 + " out of range", sqle.getMessage()); @@ -195,7 +196,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { int someInt = randomInt(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someInt, Types.TIMESTAMP)); - assertEquals("Conversion from type [INTEGER] to [Timestamp] not supported", sqle.getMessage()); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [INTEGER] to a Timestamp", someInt), sqle.getMessage()); Integer randomIntNotShort = randomIntBetween(32768, Integer.MAX_VALUE); sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomIntNotShort, Types.SMALLINT)); @@ -236,7 +237,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { long someLong 
= randomLong(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someLong, Types.TIMESTAMP)); - assertEquals("Conversion from type [BIGINT] to [Timestamp] not supported", sqle.getMessage()); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [BIGINT] to a Timestamp", someLong), sqle.getMessage()); Long randomLongNotShort = randomLongBetween(Integer.MAX_VALUE + 1, Long.MAX_VALUE); sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomLongNotShort, Types.INTEGER)); @@ -277,7 +278,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { float someFloat = randomFloat(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someFloat, Types.TIMESTAMP)); - assertEquals("Conversion from type [REAL] to [Timestamp] not supported", sqle.getMessage()); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [REAL] to a Timestamp", someFloat), sqle.getMessage()); Float floatNotInt = 5_155_000_000f; sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.INTEGER)); @@ -316,7 +317,8 @@ public class JdbcPreparedStatementTests extends ESTestCase { double someDouble = randomDouble(); SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someDouble, Types.TIMESTAMP)); - assertEquals("Conversion from type [DOUBLE] to [Timestamp] not supported", sqle.getMessage()); + assertEquals( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [DOUBLE] to a Timestamp", someDouble), sqle.getMessage()); Double doubleNotInt = 5_155_000_000d; sqle = expectThrows(SQLException.class, () -> jps.setObject(1, doubleNotInt, Types.INTEGER)); @@ -361,7 +363,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { public void testSettingTimestampValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); - Timestamp someTimestamp = new Timestamp(randomMillisSinceEpoch()); + Timestamp someTimestamp = new Timestamp(randomLong()); jps.setTimestamp(1, someTimestamp); assertEquals(someTimestamp.getTime(), ((Date)value(jps)).getTime()); assertEquals(TIMESTAMP, jdbcType(jps)); @@ -372,7 +374,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { assertEquals(1456708675000L, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); assertEquals(TIMESTAMP, jdbcType(jps)); - long beforeEpochTime = -randomMillisSinceEpoch(); + long beforeEpochTime = randomLongBetween(Long.MIN_VALUE, 0); jps.setTimestamp(1, new Timestamp(beforeEpochTime), nonDefaultCal); assertEquals(beforeEpochTime, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); assertTrue(value(jps) instanceof java.util.Date); @@ -384,7 +386,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { public void testThrownExceptionsWhenSettingTimestampValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); - Timestamp someTimestamp = new Timestamp(randomMillisSinceEpoch()); + Timestamp someTimestamp = new Timestamp(randomLong()); SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someTimestamp, Types.INTEGER)); assertEquals("Conversion from type java.sql.Timestamp to INTEGER not supported", sqle.getMessage()); @@ -416,12 +418,12 @@ public class JdbcPreparedStatementTests extends ESTestCase { public void testSettingSqlDateValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); - java.sql.Date someSqlDate = new 
java.sql.Date(randomMillisSinceEpoch()); + java.sql.Date someSqlDate = new java.sql.Date(randomLong()); jps.setDate(1, someSqlDate); assertEquals(someSqlDate.getTime(), ((Date)value(jps)).getTime()); assertEquals(TIMESTAMP, jdbcType(jps)); - someSqlDate = new java.sql.Date(randomMillisSinceEpoch()); + someSqlDate = new java.sql.Date(randomLong()); Calendar nonDefaultCal = randomCalendar(); jps.setDate(1, someSqlDate, nonDefaultCal); assertEquals(someSqlDate.getTime(), convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); @@ -435,17 +437,17 @@ public class JdbcPreparedStatementTests extends ESTestCase { public void testThrownExceptionsWhenSettingSqlDateValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); - java.sql.Date someSqlDate = new java.sql.Date(randomMillisSinceEpoch()); + java.sql.Date someSqlDate = new java.sql.Date(randomLong()); SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, - () -> jps.setObject(1, new java.sql.Date(randomMillisSinceEpoch()), Types.DOUBLE)); + () -> jps.setObject(1, new java.sql.Date(randomLong()), Types.DOUBLE)); assertEquals("Conversion from type " + someSqlDate.getClass().getName() + " to DOUBLE not supported", sqle.getMessage()); } public void testSettingCalendarValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); Calendar someCalendar = randomCalendar(); - someCalendar.setTimeInMillis(randomMillisSinceEpoch()); + someCalendar.setTimeInMillis(randomLong()); jps.setObject(1, someCalendar); assertEquals(someCalendar.getTime(), (Date) value(jps)); @@ -472,7 +474,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { public void testSettingDateValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); - Date someDate = new Date(randomMillisSinceEpoch()); + Date someDate = new Date(randomLong()); jps.setObject(1, someDate); assertEquals(someDate, (Date) value(jps)); @@ -486,7 +488,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { public void testThrownExceptionsWhenSettingDateValues() throws SQLException { JdbcPreparedStatement jps = createJdbcPreparedStatement(); - Date someDate = new Date(randomMillisSinceEpoch()); + Date someDate = new Date(randomLong()); SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someDate, Types.BIGINT)); assertEquals("Conversion from type " + someDate.getClass().getName() + " to BIGINT not supported", sqle.getMessage()); @@ -549,10 +551,6 @@ public class JdbcPreparedStatementTests extends ESTestCase { assertEquals("Conversion from type byte[] to DOUBLE not supported", sqle.getMessage()); } - private long randomMillisSinceEpoch() { - return randomLongBetween(0, System.currentTimeMillis()); - } - private JdbcPreparedStatement createJdbcPreparedStatement() throws SQLException { return new JdbcPreparedStatement(null, JdbcConfiguration.create("jdbc:es://l:1", null, 0), "?"); } diff --git a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcResultSetIT.java b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcResultSetIT.java new file mode 100644 index 00000000000..30756a11f62 --- /dev/null +++ b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcResultSetIT.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.qa.sql.nosecurity;
+
+import org.elasticsearch.xpack.qa.sql.jdbc.ResultSetTestCase;
+
+/*
+ * Integration testing class for "no security" (cluster running without the Security plugin,
+ * or with Security disabled) scenario. Runs all tests in the base class.
+ */
+public class JdbcResultSetIT extends ResultSetTestCase {
+}
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java
index 861a6dccaba..447fc4f17e1 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java
@@ -5,55 +5,1067 @@
  */
 package org.elasticsearch.xpack.qa.sql.jdbc;
 
+import org.elasticsearch.client.Request;
+import org.elasticsearch.common.CheckedBiFunction;
+import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
+import org.elasticsearch.xpack.sql.jdbc.jdbcx.JdbcDataSource;
+import org.elasticsearch.xpack.sql.type.DataType;
+
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.sql.Blob;
+import java.sql.Clob;
 import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.JDBCType;
+import java.sql.NClob;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
 import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.Arrays;
+import java.util.Calendar;
 import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.lang.String.format;
+import static java.util.Calendar.DAY_OF_MONTH;
+import static java.util.Calendar.ERA;
+import static java.util.Calendar.HOUR_OF_DAY;
+import static java.util.Calendar.MILLISECOND;
+import static java.util.Calendar.MINUTE;
+import static java.util.Calendar.MONTH;
+import static java.util.Calendar.SECOND;
+import static java.util.Calendar.YEAR;
 
 public class ResultSetTestCase extends JdbcIntegrationTestCase {
-    public void testGettingTimestamp() throws Exception {
-        long randomMillis = randomLongBetween(0, System.currentTimeMillis());
+
+    static final Set<String> fieldsNames = Stream.of("test_byte", "test_integer", "test_long", "test_short", "test_double",
+            "test_float", "test_keyword")
+            .collect(Collectors.toCollection(HashSet::new));
+    static final Map<Tuple<String, Object>, JDBCType> dateTimeTestingFields = new HashMap<Tuple<String, Object>, JDBCType>();
+    static final String SELECT_ALL_FIELDS = "SELECT test_boolean, test_byte, test_integer," +
+            "test_long, test_short, test_double, test_float, test_keyword, test_date FROM test";
+    static final String SELECT_WILDCARD = "SELECT * FROM test";
+    static {
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_boolean", true), DataType.BOOLEAN.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_byte", 1), DataType.BYTE.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_integer", 1), DataType.INTEGER.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_long", 1L), DataType.LONG.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_short", 1), DataType.SHORT.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_double", 1d), DataType.DOUBLE.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_float", 1f), DataType.FLOAT.jdbcType);
+        dateTimeTestingFields.put(new Tuple<String, Object>("test_keyword", "true"), DataType.KEYWORD.jdbcType);
+    }
+
+    // Byte values testing
+    public void testGettingValidByteWithoutCasting() throws Exception {
+        byte random1 = randomByte();
+        byte random2 = randomValueOtherThan(random1, () -> randomByte());
+        byte random3 = randomValueOtherThanMany(Arrays.asList(random1, random2)::contains, () -> randomByte());
+
+        createTestDataForByteValueTests(random1, random2, random3);
+
+        doWithQuery("SELECT test_byte, test_null_byte, test_keyword FROM test", (results) -> {
+            ResultSetMetaData resultSetMetaData = results.getMetaData();
+
+            results.next();
+            assertEquals(3, resultSetMetaData.getColumnCount());
+            assertEquals(Types.TINYINT, resultSetMetaData.getColumnType(1));
+            assertEquals(Types.TINYINT, resultSetMetaData.getColumnType(2));
+            assertEquals(random1, results.getByte(1));
+            assertEquals(random1, results.getByte("test_byte"));
+            assertEquals(random1, (byte) results.getObject("test_byte", Byte.class));
+            assertTrue(results.getObject(1) instanceof Byte);
+
+            assertEquals(0, results.getByte(2));
+            assertTrue(results.wasNull());
+            assertEquals(null, results.getObject("test_null_byte"));
+            assertTrue(results.wasNull());
+
+            assertTrue(results.next());
+            assertEquals(random2, results.getByte(1));
+            assertEquals(random2, results.getByte("test_byte"));
+            assertTrue(results.getObject(1) instanceof Byte);
+            assertEquals(random3, results.getByte("test_keyword"));
+
+            assertFalse(results.next());
+        });
+    }
+
+    public void testGettingValidByteWithCasting() throws Exception {
+        Map<String, Number> map = createTestDataForNumericValueTypes(() -> randomByte());
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+            for(Entry<String, Number> e : map.entrySet()) {
+                byte actual = results.getObject(e.getKey(), Byte.class);
+                if (e.getValue() instanceof Double) {
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), results.getByte(e.getKey()));
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), actual);
+                } else if (e.getValue() instanceof Float) {
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().floatValue()), results.getByte(e.getKey()));
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().floatValue()), actual);
+                } else {
+                    assertEquals("For field " + e.getKey(), e.getValue().byteValue(), results.getByte(e.getKey()));
+                    assertEquals("For field " + e.getKey(), e.getValue().byteValue(), actual);
+                }
+            }
+        });
+    }
+
+    public void testGettingInvalidByte() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_keyword").field("type", "keyword").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+
+        int intNotByte = randomIntBetween(Byte.MAX_VALUE + 1, Integer.MAX_VALUE);
+        long longNotByte = randomLongBetween(Byte.MAX_VALUE + 1, Long.MAX_VALUE);
+        short shortNotByte = (short) randomIntBetween(Byte.MAX_VALUE + 1, Short.MAX_VALUE);
+        double doubleNotByte = randomDoubleBetween(Byte.MAX_VALUE + 1, Double.MAX_VALUE, true);
+        float floatNotByte = randomFloatBetween(Byte.MAX_VALUE + 1, Float.MAX_VALUE);
+        String randomString = randomUnicodeOfCodepointLengthBetween(128, 256);
+        long randomDate = randomLong();
+
+        String doubleErrorMessage = (doubleNotByte > Long.MAX_VALUE || doubleNotByte < Long.MIN_VALUE) ?
+                Double.toString(doubleNotByte) : Long.toString(Math.round(doubleNotByte));
+
+        index("test", "1", builder -> {
+            builder.field("test_integer", intNotByte);
+            builder.field("test_long", longNotByte);
+            builder.field("test_short", shortNotByte);
+            builder.field("test_double", doubleNotByte);
+            builder.field("test_float", floatNotByte);
+            builder.field("test_keyword", randomString);
+            builder.field("test_date", randomDate);
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+
+            SQLException sqle = expectThrows(SQLException.class, () -> results.getByte("test_integer"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", intNotByte), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_integer", Byte.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", intNotByte), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getByte("test_short"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", shortNotByte), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_short", Byte.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", shortNotByte), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getByte("test_long"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Long.toString(longNotByte)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_long", Byte.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Long.toString(longNotByte)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getByte("test_double"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", doubleErrorMessage), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_double", Byte.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", doubleErrorMessage), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getByte("test_float"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotByte)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_float", Byte.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotByte)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getByte("test_keyword"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", randomString),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Byte.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", randomString),
+                    sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getByte("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Byte", randomDate),
sqle.getMessage()); + sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Byte.class)); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Byte", randomDate), + sqle.getMessage()); + }); + } + + // Short values testing + public void testGettingValidShortWithoutCasting() throws Exception { + short random1 = randomShort(); + short random2 = randomValueOtherThan(random1, () -> randomShort()); + short random3 = randomValueOtherThanMany(Arrays.asList(random1, random2)::contains, () -> randomShort()); + + createTestDataForShortValueTests(random1, random2, random3); + + doWithQuery("SELECT test_short, test_null_short, test_keyword FROM test", (results) -> { + ResultSetMetaData resultSetMetaData = results.getMetaData(); + + results.next(); + assertEquals(3, resultSetMetaData.getColumnCount()); + assertEquals(Types.SMALLINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.SMALLINT, resultSetMetaData.getColumnType(2)); + assertEquals(random1, results.getShort(1)); + assertEquals(random1, results.getShort("test_short")); + assertEquals(random1, results.getObject("test_short")); + assertTrue(results.getObject(1) instanceof Short); + + assertEquals(0, results.getShort(2)); + assertTrue(results.wasNull()); + assertEquals(null, results.getObject("test_null_short")); + assertTrue(results.wasNull()); + + assertTrue(results.next()); + assertEquals(random2, results.getShort(1)); + assertEquals(random2, results.getShort("test_short")); + assertTrue(results.getObject(1) instanceof Short); + assertEquals(random3, results.getShort("test_keyword")); + + assertFalse(results.next()); + }); + } + + public void testGettingValidShortWithCasting() throws Exception { + Map map = createTestDataForNumericValueTypes(() -> randomShort()); + + doWithQuery(SELECT_WILDCARD, (results) -> { + results.next(); + for(Entry e : map.entrySet()) { + short actual = (short) results.getObject(e.getKey(), Short.class); + if (e.getValue() instanceof Double) { + assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), results.getShort(e.getKey())); + assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), actual); + } else if (e.getValue() instanceof Float) { + assertEquals("For field " + e.getKey(), Math.round(e.getValue().floatValue()), results.getShort(e.getKey())); + assertEquals("For field " + e.getKey(), Math.round(e.getValue().floatValue()), actual); + } else { + assertEquals("For field " + e.getKey(), + e.getValue().shortValue(), results.getShort(e.getKey())); + assertEquals("For field " + e.getKey(), e.getValue().shortValue(), actual); + } + } + }); + } + + public void testGettingInvalidShort() throws Exception { + createIndex("test"); + updateMappingForNumericValuesTests("test"); + updateMapping("test", builder -> { + builder.startObject("test_keyword").field("type", "keyword").endObject(); + builder.startObject("test_date").field("type", "date").endObject(); + }); + + int intNotShort = randomIntBetween(Short.MAX_VALUE + 1, Integer.MAX_VALUE); + long longNotShort = randomLongBetween(Short.MAX_VALUE + 1, Long.MAX_VALUE); + double doubleNotShort = randomDoubleBetween(Short.MAX_VALUE + 1, Double.MAX_VALUE, true); + float floatNotShort = randomFloatBetween(Short.MAX_VALUE + 1, Float.MAX_VALUE); + String randomString = randomUnicodeOfCodepointLengthBetween(128, 256); + long randomDate = randomLong(); + + String doubleErrorMessage = (doubleNotShort > Long.MAX_VALUE || doubleNotShort < Long.MIN_VALUE) ? 
+                Double.toString(doubleNotShort) : Long.toString(Math.round(doubleNotShort));
+
+        index("test", "1", builder -> {
+            builder.field("test_integer", intNotShort);
+            builder.field("test_long", longNotShort);
+            builder.field("test_double", doubleNotShort);
+            builder.field("test_float", floatNotShort);
+            builder.field("test_keyword", randomString);
+            builder.field("test_date", randomDate);
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+
+            SQLException sqle = expectThrows(SQLException.class, () -> results.getShort("test_integer"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", intNotShort), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_integer", Short.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", intNotShort), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getShort("test_long"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Long.toString(longNotShort)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_long", Short.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Long.toString(longNotShort)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getShort("test_double"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", doubleErrorMessage), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_double", Short.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", doubleErrorMessage), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getShort("test_float"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotShort)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_float", Short.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotShort)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getShort("test_keyword"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Short", randomString),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Short.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Short", randomString),
+                    sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getShort("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Short", randomDate),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Short.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Short", randomDate),
+                    sqle.getMessage());
+        });
+    }
+
+    // Integer values testing
+    public void testGettingValidIntegerWithoutCasting() throws Exception {
+        int random1 = randomInt();
+        int random2 = randomValueOtherThan(random1, () -> randomInt());
+        int random3 = randomValueOtherThanMany(Arrays.asList(random1, random2)::contains, () -> randomInt());
+
+        createTestDataForIntegerValueTests(random1, random2, random3);
+
+        doWithQuery("SELECT test_integer,test_null_integer,test_keyword FROM test", (results) -> {
+            ResultSetMetaData resultSetMetaData = results.getMetaData();
+
+            results.next();
+            assertEquals(3, resultSetMetaData.getColumnCount());
+            assertEquals(Types.INTEGER, resultSetMetaData.getColumnType(1));
+            assertEquals(Types.INTEGER, resultSetMetaData.getColumnType(2));
+            assertEquals(random1, results.getInt(1));
+            assertEquals(random1, results.getInt("test_integer"));
+            assertEquals(random1, (int) results.getObject("test_integer", Integer.class));
+            assertTrue(results.getObject(1) instanceof Integer);
+
+            assertEquals(0, results.getInt(2));
+            assertTrue(results.wasNull());
+            assertEquals(null, results.getObject("test_null_integer"));
+            assertTrue(results.wasNull());
+
+            assertTrue(results.next());
+            assertEquals(random2, results.getInt(1));
+            assertEquals(random2, results.getInt("test_integer"));
+            assertTrue(results.getObject(1) instanceof Integer);
+            assertEquals(random3, results.getInt("test_keyword"));
+
+            assertFalse(results.next());
+        });
+    }
+
+    public void testGettingValidIntegerWithCasting() throws Exception {
+        Map<String, Number> map = createTestDataForNumericValueTypes(() -> randomInt());
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+            for(Entry<String, Number> e : map.entrySet()) {
+                int actual = results.getObject(e.getKey(), Integer.class);
+                if (e.getValue() instanceof Double) {
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), results.getInt(e.getKey()));
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), actual);
+                } else if (e.getValue() instanceof Float) {
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().floatValue()), results.getInt(e.getKey()));
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().floatValue()), actual);
+                } else {
+                    assertEquals("For field " + e.getKey(), e.getValue().intValue(), results.getInt(e.getKey()));
+                    assertEquals("For field " + e.getKey(), e.getValue().intValue(), actual);
+                }
+            }
+        });
+    }
+
+    public void testGettingInvalidInteger() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_keyword").field("type", "keyword").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+
+        long longNotInt = randomLongBetween(getMaxIntPlusOne(), Long.MAX_VALUE);
+        double doubleNotInt = randomDoubleBetween(getMaxIntPlusOne().doubleValue(), Double.MAX_VALUE, true);
+        float floatNotInt = randomFloatBetween(getMaxIntPlusOne().floatValue(), Float.MAX_VALUE);
+        String randomString = randomUnicodeOfCodepointLengthBetween(128, 256);
+        long randomDate = randomLong();
+
+        String doubleErrorMessage = (doubleNotInt > Long.MAX_VALUE || doubleNotInt < Long.MIN_VALUE) ?
+                Double.toString(doubleNotInt) : Long.toString(Math.round(doubleNotInt));
+
+        index("test", "1", builder -> {
+            builder.field("test_long", longNotInt);
+            builder.field("test_double", doubleNotInt);
+            builder.field("test_float", floatNotInt);
+            builder.field("test_keyword", randomString);
+            builder.field("test_date", randomDate);
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+
+            SQLException sqle = expectThrows(SQLException.class, () -> results.getInt("test_long"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Long.toString(longNotInt)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_long", Integer.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Long.toString(longNotInt)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getInt("test_double"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", doubleErrorMessage), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_double", Integer.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", doubleErrorMessage), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getInt("test_float"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotInt)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_float", Integer.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotInt)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getInt("test_keyword"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to an Integer", randomString),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Integer.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to an Integer", randomString),
+                    sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getInt("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to an Integer", randomDate),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Integer.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to an Integer", randomDate),
+                    sqle.getMessage());
+        });
+    }
+
+    // Long values testing
+    public void testGettingValidLongWithoutCasting() throws Exception {
+        long random1 = randomLong();
+        long random2 = randomValueOtherThan(random1, () -> randomLong());
+        long random3 = randomValueOtherThanMany(Arrays.asList(random1, random2)::contains, () -> randomLong());
+
+        createTestDataForLongValueTests(random1, random2, random3);
+
+        doWithQuery("SELECT test_long, test_null_long, test_keyword FROM test", (results) -> {
+            ResultSetMetaData resultSetMetaData = results.getMetaData();
+
+            results.next();
+            assertEquals(3, resultSetMetaData.getColumnCount());
+            assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1));
+            assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(2));
+            assertEquals(random1, results.getLong(1));
+            assertEquals(random1, results.getLong("test_long"));
+            assertEquals(random1, (long) results.getObject("test_long", Long.class));
+            assertTrue(results.getObject(1) instanceof Long);
+
+            assertEquals(0, results.getLong(2));
+            assertTrue(results.wasNull());
+            assertEquals(null, results.getObject("test_null_long"));
+            assertTrue(results.wasNull());
+
+            assertTrue(results.next());
+            assertEquals(random2, results.getLong(1));
+            assertEquals(random2, results.getLong("test_long"));
+            assertTrue(results.getObject(1) instanceof Long);
+            assertEquals(random3, results.getLong("test_keyword"));
+
+            assertFalse(results.next());
+        });
+    }
+
+    public void testGettingValidLongWithCasting() throws Exception {
+        Map<String, Number> map = createTestDataForNumericValueTypes(() -> randomLong());
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+            for(Entry<String, Number> e : map.entrySet()) {
+                long actual = results.getObject(e.getKey(), Long.class);
+                if (e.getValue() instanceof Double || e.getValue() instanceof Float) {
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), results.getLong(e.getKey()));
+                    assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), actual);
+                } else {
+                    assertEquals("For field " + e.getKey(), e.getValue().longValue(), results.getLong(e.getKey()));
+                    assertEquals("For field " + e.getKey(), e.getValue().longValue(), actual);
+                }
+            }
+        });
+    }
+
+    public void testGettingInvalidLong() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_keyword").field("type", "keyword").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+
+        double doubleNotLong = randomDoubleBetween(getMaxLongPlusOne().doubleValue(), Double.MAX_VALUE, true);
+        float floatNotLong = randomFloatBetween(getMaxLongPlusOne().floatValue(), Float.MAX_VALUE);
+        String randomString = randomUnicodeOfCodepointLengthBetween(128, 256);
+        long randomDate = randomLong();
+
+        index("test", "1", builder -> {
+            builder.field("test_double", doubleNotLong);
+            builder.field("test_float", floatNotLong);
+            builder.field("test_keyword", randomString);
+            builder.field("test_date", randomDate);
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+
+            SQLException sqle = expectThrows(SQLException.class, () -> results.getLong("test_double"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(doubleNotLong)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_double", Long.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(doubleNotLong)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getLong("test_float"));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotLong)), sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_float", Long.class));
+            assertEquals(format(Locale.ROOT, "Numeric %s out of range", Double.toString(floatNotLong)), sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getLong("test_keyword"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Long", randomString),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Long.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Long", randomString),
+                    sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getLong("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Long", randomDate),
Long", randomDate), + sqle.getMessage()); + sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Long.class)); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Long", randomDate), + sqle.getMessage()); + }); + } + + // Double values testing + public void testGettingValidDoubleWithoutCasting() throws Exception { + double random1 = randomDouble(); + double random2 = randomValueOtherThan(random1, () -> randomDouble()); + double random3 = randomValueOtherThanMany(Arrays.asList(random1, random2)::contains, () -> randomDouble()); + + createTestDataForDoubleValueTests(random1, random2, random3); + + doWithQuery("SELECT test_double, test_null_double, test_keyword FROM test", (results) -> { + ResultSetMetaData resultSetMetaData = results.getMetaData(); + + results.next(); + assertEquals(3, resultSetMetaData.getColumnCount()); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(1)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(2)); + assertEquals(random1, results.getDouble(1), 0.0d); + assertEquals(random1, results.getDouble("test_double"), 0.0d); + assertEquals(random1, results.getObject("test_double", Double.class), 0.0d); + assertTrue(results.getObject(1) instanceof Double); + + assertEquals(0, results.getDouble(2), 0.0d); + assertTrue(results.wasNull()); + assertEquals(null, results.getObject("test_null_double")); + assertTrue(results.wasNull()); + + assertTrue(results.next()); + assertEquals(random2, results.getDouble(1), 0.0d); + assertEquals(random2, results.getDouble("test_double"), 0.0d); + assertTrue(results.getObject(1) instanceof Double); + assertEquals(random3, results.getDouble("test_keyword"), 0.0d); + + assertFalse(results.next()); + }); + } + + public void testGettingValidDoubleWithCasting() throws Exception { + Map map = createTestDataForNumericValueTypes(() -> randomDouble()); + + doWithQuery(SELECT_WILDCARD, (results) -> { + results.next(); + for(Entry e : map.entrySet()) { + assertEquals("For field " + e.getKey(), e.getValue().doubleValue(), results.getDouble(e.getKey()), 0.0d); + assertEquals("For field " + e.getKey(), + e.getValue().doubleValue(), results.getObject(e.getKey(), Double.class), 0.0d); + } + }); + } + + public void testGettingInvalidDouble() throws Exception { + createIndex("test"); + updateMappingForNumericValuesTests("test"); + updateMapping("test", builder -> { + builder.startObject("test_keyword").field("type", "keyword").endObject(); + builder.startObject("test_date").field("type", "date").endObject(); + }); + + String randomString = randomUnicodeOfCodepointLengthBetween(128, 256); + long randomDate = randomLong(); + + index("test", "1", builder -> { + builder.field("test_keyword", randomString); + builder.field("test_date", randomDate); + }); + + doWithQuery(SELECT_WILDCARD, (results) -> { + results.next(); + + SQLException sqle = expectThrows(SQLException.class, () -> results.getDouble("test_keyword")); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Double", randomString), + sqle.getMessage()); + sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Double.class)); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Double", randomString), + sqle.getMessage()); + + sqle = expectThrows(SQLException.class, () -> results.getDouble("test_date")); + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Double", randomDate), 
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Double.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Double", randomDate),
+                    sqle.getMessage());
+        });
+    }
+
+    // Float values testing
+    public void testGettingValidFloatWithoutCasting() throws Exception {
+        float random1 = randomFloat();
+        float random2 = randomValueOtherThan(random1, () -> randomFloat());
+        float random3 = randomValueOtherThanMany(Arrays.asList(random1, random2)::contains, () -> randomFloat());
+
+        createTestDataForFloatValueTests(random1, random2, random3);
+
+        doWithQuery("SELECT test_float, test_null_float, test_keyword FROM test", (results) -> {
+            ResultSetMetaData resultSetMetaData = results.getMetaData();
+
+            results.next();
+            assertEquals(3, resultSetMetaData.getColumnCount());
+            assertEquals(Types.REAL, resultSetMetaData.getColumnType(1));
+            assertEquals(Types.REAL, resultSetMetaData.getColumnType(2));
+            assertEquals(random1, results.getFloat(1), 0.0f);
+            assertEquals(random1, results.getFloat("test_float"), 0.0f);
+            assertEquals(random1, results.getObject("test_float", Float.class), 0.0f);
+            assertTrue(results.getObject(1) instanceof Float);
+
+            assertEquals(0, results.getFloat(2), 0.0d);
+            assertTrue(results.wasNull());
+            assertEquals(null, results.getObject("test_null_float"));
+            assertTrue(results.wasNull());
+
+            assertTrue(results.next());
+            assertEquals(random2, results.getFloat(1), 0.0d);
+            assertEquals(random2, results.getFloat("test_float"), 0.0d);
+            assertTrue(results.getObject(1) instanceof Float);
+            assertEquals(random3, results.getFloat("test_keyword"), 0.0d);
+
+            assertFalse(results.next());
+        });
+    }
+
+    public void testGettingValidFloatWithCasting() throws Exception {
+        Map<String, Number> map = createTestDataForNumericValueTypes(() -> randomFloat());
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+            for(Entry<String, Number> e : map.entrySet()) {
+                assertEquals("For field " + e.getKey(), e.getValue().floatValue(), results.getFloat(e.getKey()), 0.0f);
+                assertEquals("For field " + e.getKey(),
+                        e.getValue().floatValue(), results.getObject(e.getKey(), Float.class), 0.0f);
+            }
+        });
+    }
+
+    public void testGettingInvalidFloat() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_keyword").field("type", "keyword").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+
+        String randomString = randomUnicodeOfCodepointLengthBetween(128, 256);
+        long randomDate = randomLong();
+
+        index("test", "1", builder -> {
+            builder.field("test_keyword", randomString);
+            builder.field("test_date", randomDate);
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+
+            SQLException sqle = expectThrows(SQLException.class, () -> results.getFloat("test_keyword"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Float", randomString),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Float.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Float", randomString),
+                    sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getFloat("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Float", randomDate),
+                    sqle.getMessage());
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Float.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Float", randomDate),
+                    sqle.getMessage());
+        });
+    }
+
+    public void testGettingBooleanValues() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+        long randomDate1 = randomLong();
+        long randomDate2 = randomLong();
+
+        // true values
+        indexSimpleDocumentWithTrueValues(randomDate1);
+
+        // false values
+        index("test", "2", builder -> {
+            builder.field("test_boolean", false);
+            builder.field("test_byte", 0);
+            builder.field("test_integer", 0);
+            builder.field("test_long", 0L);
+            builder.field("test_short", 0);
+            builder.field("test_double", 0d);
+            builder.field("test_float", 0f);
+            builder.field("test_keyword", "false");
+            builder.field("test_date", randomDate2);
+        });
+
+        // other (non 0 = true) values
+        index("test", "3", builder -> {
+            builder.field("test_byte", randomValueOtherThan((byte) 0, () -> randomByte()));
+            builder.field("test_integer", randomValueOtherThan(0, () -> randomInt()));
+            builder.field("test_long", randomValueOtherThan(0L, () -> randomLong()));
+            builder.field("test_short", randomValueOtherThan((short) 0, () -> randomShort()));
+            builder.field("test_double", randomValueOtherThanMany(i -> i < 1.0d && i > -1.0d && i < Double.MAX_VALUE
+                    && i > Double.MIN_VALUE,
+                    () -> randomDouble() * randomInt()));
+            builder.field("test_float", randomValueOtherThanMany(i -> i < 1.0f && i > -1.0f && i < Float.MAX_VALUE && i > Float.MIN_VALUE,
+                    () -> randomFloat() * randomInt()));
+            builder.field("test_keyword", "1");
+        });
+
+        // other false values
+        index("test", "4", builder -> {
+            builder.field("test_keyword", "0");
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+            assertEquals(true, results.getBoolean("test_boolean"));
+            for(String fld : fieldsNames) {
+                assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getBoolean(fld));
+                assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getObject(fld, Boolean.class));
+            }
+            SQLException sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Boolean", randomDate1),
+                    sqle.getMessage());
+
+            results.next();
+            assertEquals(false, results.getBoolean("test_boolean"));
+            for(String fld : fieldsNames) {
+                assertEquals("Expected: <false> but was: <true> for field " + fld, false, results.getBoolean(fld));
+                assertEquals("Expected: <false> but was: <true> for field " + fld, false, results.getObject(fld, Boolean.class));
+            }
+            sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Boolean", randomDate2),
+                    sqle.getMessage());
+
+            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Boolean.class));
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Boolean", randomDate2),
+                    sqle.getMessage());
+
+            results.next();
+            for(String fld : fieldsNames.stream()
+                    .filter((f) -> !f.equals("test_keyword")).collect(Collectors.toCollection(HashSet::new))) {
+                assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getBoolean(fld));
+                assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getObject(fld, Boolean.class));
+            }
+
+            results.next();
+            assertEquals(false, results.getBoolean("test_keyword"));
+            assertEquals(false, results.getObject("test_keyword", Boolean.class));
+        });
+    }
+
+    public void testGettingDateWithoutCalendar() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+        Long randomLongDate = randomLong();
+        indexSimpleDocumentWithTrueValues(randomLongDate);
+
+        String timeZoneId = randomKnownTimeZone();
+        Calendar connCalendar = Calendar.getInstance(TimeZone.getTimeZone(timeZoneId), Locale.ROOT);
+
+        doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+            results.next();
+            connCalendar.setTimeInMillis(randomLongDate);
+            connCalendar.set(HOUR_OF_DAY, 0);
+            connCalendar.set(MINUTE, 0);
+            connCalendar.set(SECOND, 0);
+            connCalendar.set(MILLISECOND, 0);
+
+            assertEquals(results.getDate("test_date"), new java.sql.Date(connCalendar.getTimeInMillis()));
+            assertEquals(results.getDate(9), new java.sql.Date(connCalendar.getTimeInMillis()));
+            assertEquals(results.getObject("test_date", java.sql.Date.class),
+                    new java.sql.Date(randomLongDate - (randomLongDate % 86400000L)));
+            assertEquals(results.getObject(9, java.sql.Date.class),
+                    new java.sql.Date(randomLongDate - (randomLongDate % 86400000L)));
+
+            // bulk validation for all fields which are not of type date
+            validateErrorsForDateTimeTestsWithoutCalendar(results::getDate);
+        });
+    }
+
+    public void testGettingDateWithCalendar() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+        Long randomLongDate = randomLong();
+        indexSimpleDocumentWithTrueValues(randomLongDate);
+        index("test", "2", builder -> {
+            builder.timeField("test_date", null);
+        });
+
+        String timeZoneId = randomKnownTimeZone();
+        String anotherTZId = randomValueOtherThan(timeZoneId, () -> randomKnownTimeZone());
+        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(anotherTZId), Locale.ROOT);
+
+        doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+            results.next();
+            c.setTimeInMillis(randomLongDate);
+            c.set(HOUR_OF_DAY, 0);
+            c.set(MINUTE, 0);
+            c.set(SECOND, 0);
+            c.set(MILLISECOND, 0);
+
+            assertEquals(results.getDate("test_date", c), new java.sql.Date(c.getTimeInMillis()));
+            assertEquals(results.getDate(9, c), new java.sql.Date(c.getTimeInMillis()));
+
+            // bulk validation for all fields which are not of type date
+            validateErrorsForDateTimeTestsWithCalendar(c, results::getDate);
+
+            results.next();
+            assertNull(results.getDate("test_date"));
+        });
+    }
+
+    public void testGettingTimeWithoutCalendar() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+        Long randomLongDate = randomLong();
+        indexSimpleDocumentWithTrueValues(randomLongDate);
+
+        String timeZoneId = randomKnownTimeZone();
+        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(timeZoneId), Locale.ROOT);
+
+        doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+            results.next();
+            c.setTimeInMillis(randomLongDate);
+            c.set(ERA, GregorianCalendar.AD);
+            c.set(YEAR, 1970);
+            c.set(MONTH, 0);
+            c.set(DAY_OF_MONTH, 1);
+
+            assertEquals(results.getTime("test_date"), new java.sql.Time(c.getTimeInMillis()));
+            assertEquals(results.getTime(9), new java.sql.Time(c.getTimeInMillis()));
+            assertEquals(results.getObject("test_date", java.sql.Time.class),
+                    new java.sql.Time(randomLongDate % 86400000L));
+            assertEquals(results.getObject(9, java.sql.Time.class),
+                    new java.sql.Time(randomLongDate % 86400000L));
+
+            validateErrorsForDateTimeTestsWithoutCalendar(results::getTime);
+        });
+    }
+
+    public void testGettingTimeWithCalendar() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+        Long randomLongDate = randomLong();
+        indexSimpleDocumentWithTrueValues(randomLongDate);
+        index("test", "2", builder -> {
+            builder.timeField("test_date", null);
+        });
+
+        String timeZoneId = randomKnownTimeZone();
+        String anotherTZId = randomValueOtherThan(timeZoneId, () -> randomKnownTimeZone());
+        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(anotherTZId), Locale.ROOT);
+
+        doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+            results.next();
+            c.setTimeInMillis(randomLongDate);
+            c.set(ERA, GregorianCalendar.AD);
+            c.set(YEAR, 1970);
+            c.set(MONTH, 0);
+            c.set(DAY_OF_MONTH, 1);
+
+            assertEquals(results.getTime("test_date", c), new java.sql.Time(c.getTimeInMillis()));
+            assertEquals(results.getTime(9, c), new java.sql.Time(c.getTimeInMillis()));
+
+            validateErrorsForDateTimeTestsWithCalendar(c, results::getTime);
+
+            results.next();
+            assertNull(results.getTime("test_date"));
+        });
+    }
+
+    public void testGettingTimestampWithoutCalendar() throws Exception {
+        createIndex("library");
+        updateMapping("library", builder -> {
+            builder.startObject("release_date").field("type", "date").endObject();
+            builder.startObject("republish_date").field("type", "date").endObject();
+        });
+        long randomMillis = randomLong();
         index("library", "1", builder -> {
             builder.field("name", "Don Quixote");
             builder.field("page_count", 1072);
-            builder.timeField("release_date", new Date(randomMillis));
+            builder.field("release_date", randomMillis);
             builder.timeField("republish_date", null);
         });
         index("library", "2", builder -> {
             builder.field("name", "1984");
             builder.field("page_count", 328);
-            builder.timeField("release_date", new Date(-649036800000L));
-            builder.timeField("republish_date", new Date(599616000000L));
+            builder.field("release_date", -649036800000L);
+            builder.field("republish_date", 599616000000L);
         });
 
-        try (Connection connection = esJdbc()) {
-            try (PreparedStatement statement = connection.prepareStatement("SELECT name, release_date, republish_date FROM library")) {
-                try (ResultSet results = statement.executeQuery()) {
-                    ResultSetMetaData resultSetMetaData = results.getMetaData();
+        doWithQuery("SELECT name, release_date, republish_date FROM library", (results) -> {
+            ResultSetMetaData resultSetMetaData = results.getMetaData();
 
-                    results.next();
-                    assertEquals(3, resultSetMetaData.getColumnCount());
-                    assertEquals(randomMillis, results.getTimestamp("release_date").getTime());
-                    assertEquals(randomMillis, results.getTimestamp(2).getTime());
-                    assertTrue(results.getObject(2) instanceof Timestamp);
-                    assertEquals(randomMillis, ((Timestamp) results.getObject("release_date")).getTime());
-
-                    assertNull(results.getTimestamp(3));
-                    assertNull(results.getObject("republish_date"));
+            results.next();
+            assertEquals(3, resultSetMetaData.getColumnCount());
+            assertEquals(randomMillis, results.getTimestamp("release_date").getTime());
+            assertEquals(randomMillis, results.getTimestamp(2).getTime());
+            assertTrue(results.getObject(2) instanceof Timestamp);
+            assertEquals(randomMillis, ((Timestamp) results.getObject("release_date")).getTime());
+
+            assertNull(results.getTimestamp(3));
+            assertNull(results.getObject("republish_date"));
 
-                    assertTrue(results.next());
-                    assertEquals(599616000000L, results.getTimestamp("republish_date").getTime());
-                    assertEquals(-649036800000L, ((Timestamp) results.getObject(2)).getTime());
+            assertTrue(results.next());
+            assertEquals(599616000000L, results.getTimestamp("republish_date").getTime());
+            assertEquals(-649036800000L, ((Timestamp) results.getObject(2)).getTime());
 
-                    assertFalse(results.next());
-                }
-            }
-        }
+            assertFalse(results.next());
+        });
+    }
+
+    public void testGettingTimestampWithCalendar() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+        Long randomLongDate = randomLong();
+        indexSimpleDocumentWithTrueValues(randomLongDate);
+        index("test", "2", builder -> {
+            builder.timeField("test_date", null);
+        });
+
+        String timeZoneId = randomKnownTimeZone();
+        String anotherTZId = randomValueOtherThan(timeZoneId, () -> randomKnownTimeZone());
+        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(anotherTZId), Locale.ROOT);
+
+        doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+            results.next();
+            c.setTimeInMillis(randomLongDate);
+
+            assertEquals(results.getTimestamp("test_date", c), new java.sql.Timestamp(c.getTimeInMillis()));
+            assertEquals(results.getTimestamp(9, c), new java.sql.Timestamp(c.getTimeInMillis()));
+
+            validateErrorsForDateTimeTestsWithCalendar(c, results::getTimestamp);
+
+            results.next();
+            assertNull(results.getTimestamp("test_date"));
+        });
+    }
+
+    public void testValidGetObjectCalls() throws Exception {
+        createIndex("test");
+        updateMappingForNumericValuesTests("test");
+        updateMapping("test", builder -> {
+            builder.startObject("test_boolean").field("type", "boolean").endObject();
+            builder.startObject("test_date").field("type", "date").endObject();
+        });
+
+        byte b = randomByte();
+        int i = randomInt();
+        long l = randomLong();
+        short s = (short) randomIntBetween(Short.MIN_VALUE, Short.MAX_VALUE);
+        double d = randomDouble();
+        float f = randomFloat();
+        boolean randomBool = randomBoolean();
+        Long randomLongDate = randomLong();
+        String randomString = randomUnicodeOfCodepointLengthBetween(128, 256);
+
+        index("test", "1", builder -> {
+            builder.field("test_byte", b);
+            builder.field("test_integer", i);
+            builder.field("test_long", l);
+            builder.field("test_short", s);
+            builder.field("test_double", d);
+            builder.field("test_float", f);
+            builder.field("test_keyword", randomString);
+            builder.field("test_date", randomLongDate);
+            builder.field("test_boolean", randomBool);
+        });
+
+        doWithQuery(SELECT_WILDCARD, (results) -> {
+            results.next();
+
+            assertEquals(b, results.getObject("test_byte"));
+            assertTrue(results.getObject("test_byte") instanceof Byte);
+
+            assertEquals(i, results.getObject("test_integer"));
+            assertTrue(results.getObject("test_integer") instanceof Integer);
+
+            assertEquals(l, results.getObject("test_long"));
+            assertTrue(results.getObject("test_long") instanceof Long);
+
+            assertEquals(s, results.getObject("test_short"));
+            assertTrue(results.getObject("test_short") instanceof Short);
+
+            assertEquals(d, results.getObject("test_double"));
+            assertTrue(results.getObject("test_double") instanceof Double);
+
+            assertEquals(f, results.getObject("test_float"));
+            assertTrue(results.getObject("test_float") instanceof Float);
+
+            assertEquals(randomString, results.getObject("test_keyword"));
+            assertTrue(results.getObject("test_keyword") instanceof String);
+
+            assertEquals(new Date(randomLongDate), results.getObject("test_date"));
+            assertTrue(results.getObject("test_date") instanceof Timestamp);
+
+            assertEquals(randomBool, results.getObject("test_boolean"));
+            assertTrue(results.getObject("test_boolean") instanceof Boolean);
+        });
     }
 
     /*
@@ -79,4 +1091,458 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
             fail("Infinite recursive call on getObject() method");
         }
     }
+
+    public void testUnsupportedGetMethods() throws IOException, SQLException {
+        index("test", "1", builder -> {
+            builder.field("test", "test");
+        });
+        Connection conn = esJdbc();
+        PreparedStatement statement = conn.prepareStatement("SELECT * FROM test");
+        ResultSet r = statement.executeQuery();
+
+        r.next();
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getAsciiStream("test"), "AsciiStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getAsciiStream(1), "AsciiStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getArray("test"), "Array not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getArray(1), "Array not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getBigDecimal("test"), "BigDecimal not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getBigDecimal(1), "BigDecimal not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getBinaryStream("test"), "BinaryStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getBinaryStream(1), "BinaryStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getBlob("test"), "Blob not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getBlob(1), "Blob not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getCharacterStream("test"), "CharacterStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getCharacterStream(1), "CharacterStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getClob("test"), "Clob not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getClob(1), "Clob not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getNCharacterStream("test"), "NCharacterStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getNCharacterStream(1), "NCharacterStream not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getNClob("test"), "NClob not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getNClob(1), "NClob not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getNString("test"), "NString not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getNString(1), "NString not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getRef("test"), "Ref not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getRef(1), "Ref not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getRowId("test"), "RowId not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getRowId(1), "RowId not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getSQLXML("test"), "SQLXML not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getSQLXML(1), "SQLXML not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getURL("test"), "URL not supported");
+        assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getURL(1), "URL not supported");
+    }
+
+    public void testUnsupportedUpdateMethods() throws IOException, SQLException {
+        index("test", "1", builder -> {
+            builder.field("test", "test");
+        });
+        Connection conn = esJdbc();
+        PreparedStatement statement = conn.prepareStatement("SELECT * FROM test");
+        ResultSet r = statement.executeQuery();
+
+        r.next();
+        Blob b = null;
+        InputStream i = null;
+        Clob c = null;
+        NClob nc = null;
+        Reader rd = null;
+
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBytes(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBytes("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateArray(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateArray("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateAsciiStream(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateAsciiStream("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateAsciiStream(1, null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateAsciiStream(1, null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateAsciiStream("", null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateAsciiStream("", null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBigDecimal(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBigDecimal("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBinaryStream(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBinaryStream("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBinaryStream(1, null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBinaryStream(1, null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBinaryStream("", null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBinaryStream("", null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBlob(1, b));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBlob(1, i));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBlob("", b));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBlob("", i));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBlob(1, null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBlob("", null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBoolean(1, false));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateBoolean("", false));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateByte(1, (byte) 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateByte("", (byte) 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateCharacterStream(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateCharacterStream("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateCharacterStream(1, null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateCharacterStream(1, null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateCharacterStream("", null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateCharacterStream("", null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateClob(1, c));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateClob(1, rd));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateClob("", c));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateClob("", rd));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateClob(1, null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateClob("", null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateDate(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateDate("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateDouble(1, 0d));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateDouble("", 0d));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateFloat(1, 0f));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateFloat("", 0f));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateInt(1, 0));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateInt("", 0));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateLong(1, 0L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateLong("", 0L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNCharacterStream(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNCharacterStream("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNCharacterStream(1, null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNCharacterStream("", null, 1L));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNClob(1, nc));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNClob(1, rd));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNClob("", nc));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNClob("", rd));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNClob(1, null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNClob("", null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNString(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNString("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNull(1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateNull(""));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateObject(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateObject("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateObject(1, null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateObject("", null, 1));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateRef(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateRef("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateRow());
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateRowId(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateRowId("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateSQLXML(1, null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateSQLXML("", null));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateShort(1, (short) 0));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateShort("", (short) 0));
+        assertThrowsWritesUnsupportedForUpdate(() -> r.updateString(1, null));
assertThrowsWritesUnsupportedForUpdate(() -> r.updateString("", null)); + assertThrowsWritesUnsupportedForUpdate(() -> r.updateTime(1, null)); + assertThrowsWritesUnsupportedForUpdate(() -> r.updateTime("", null)); + assertThrowsWritesUnsupportedForUpdate(() -> r.updateTimestamp(1, null)); + assertThrowsWritesUnsupportedForUpdate(() -> r.updateTimestamp("", null)); + assertThrowsWritesUnsupportedForUpdate(() -> r.insertRow()); + assertThrowsWritesUnsupportedForUpdate(() -> r.updateRow()); + assertThrowsWritesUnsupportedForUpdate(() -> r.deleteRow()); + assertThrowsWritesUnsupportedForUpdate(() -> r.cancelRowUpdates()); + assertThrowsWritesUnsupportedForUpdate(() -> r.moveToInsertRow()); + assertThrowsWritesUnsupportedForUpdate(() -> r.refreshRow()); + assertThrowsWritesUnsupportedForUpdate(() -> r.moveToCurrentRow()); + assertThrowsWritesUnsupportedForUpdate(() -> r.rowUpdated()); + assertThrowsWritesUnsupportedForUpdate(() -> r.rowInserted()); + assertThrowsWritesUnsupportedForUpdate(() -> r.rowDeleted()); + } + + private void doWithQuery(String query, CheckedConsumer<ResultSet, SQLException> consumer) throws SQLException { + try (Connection connection = esJdbc()) { + try (PreparedStatement statement = connection.prepareStatement(query)) { + try (ResultSet results = statement.executeQuery()) { + consumer.accept(results); + } + } + } + } + + private void doWithQueryAndTimezone(String query, String tz, CheckedConsumer<ResultSet, SQLException> consumer) throws SQLException { + try (Connection connection = esJdbc(tz)) { + try (PreparedStatement statement = connection.prepareStatement(query)) { + try (ResultSet results = statement.executeQuery()) { + consumer.accept(results); + } + } + } + } + + private void createIndex(String index) throws Exception { + Request request = new Request("PUT", "/" + index); + XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); + createIndex.startObject("settings"); + { + createIndex.field("number_of_shards", 1); + createIndex.field("number_of_replicas", 1); + } + createIndex.endObject(); + createIndex.startObject("mappings"); + { + createIndex.startObject("doc"); + { + createIndex.startObject("properties"); + {} + createIndex.endObject(); + } + createIndex.endObject(); + } + createIndex.endObject().endObject(); + request.setJsonEntity(Strings.toString(createIndex)); + client().performRequest(request); + } + + private void updateMapping(String index, CheckedConsumer<XContentBuilder, IOException> body) throws Exception { + Request request = new Request("PUT", "/" + index + "/_mapping/doc"); + XContentBuilder updateMapping = JsonXContent.contentBuilder().startObject(); + updateMapping.startObject("properties"); + { + body.accept(updateMapping); + } + updateMapping.endObject().endObject(); + + request.setJsonEntity(Strings.toString(updateMapping)); + client().performRequest(request); + } + + private void createTestDataForByteValueTests(byte random1, byte random2, byte random3) throws Exception, IOException { + createIndex("test"); + updateMapping("test", builder -> { + builder.startObject("test_byte").field("type", "byte").endObject(); + builder.startObject("test_null_byte").field("type", "byte").endObject(); + builder.startObject("test_keyword").field("type", "keyword").endObject(); + }); + + index("test", "1", builder -> { + builder.field("test_byte", random1); + builder.field("test_null_byte", (Byte) null); + }); + index("test", "2", builder -> { + builder.field("test_byte", random2); + builder.field("test_keyword", random3); + }); + } + + private void createTestDataForShortValueTests(short random1, short random2, short
random3) throws Exception, IOException { + createIndex("test"); + updateMapping("test", builder -> { + builder.startObject("test_short").field("type", "short").endObject(); + builder.startObject("test_null_short").field("type", "short").endObject(); + builder.startObject("test_keyword").field("type", "keyword").endObject(); + }); + + index("test", "1", builder -> { + builder.field("test_short", random1); + builder.field("test_null_short", (Short) null); + }); + index("test", "2", builder -> { + builder.field("test_short", random2); + builder.field("test_keyword", random3); + }); + } + + private void createTestDataForIntegerValueTests(int random1, int random2, int random3) throws Exception, IOException { + createIndex("test"); + updateMapping("test", builder -> { + builder.startObject("test_integer").field("type", "integer").endObject(); + builder.startObject("test_null_integer").field("type", "integer").endObject(); + builder.startObject("test_keyword").field("type", "keyword").endObject(); + }); + + index("test", "1", builder -> { + builder.field("test_integer", random1); + builder.field("test_null_integer", (Integer) null); + }); + index("test", "2", builder -> { + builder.field("test_integer", random2); + builder.field("test_keyword", random3); + }); + } + + private void createTestDataForLongValueTests(long random1, long random2, long random3) throws Exception, IOException { + createIndex("test"); + updateMapping("test", builder -> { + builder.startObject("test_long").field("type", "long").endObject(); + builder.startObject("test_null_long").field("type", "long").endObject(); + builder.startObject("test_keyword").field("type", "keyword").endObject(); + }); + + index("test", "1", builder -> { + builder.field("test_long", random1); + builder.field("test_null_long", (Long) null); + }); + index("test", "2", builder -> { + builder.field("test_long", random2); + builder.field("test_keyword", random3); + }); + } + + private void createTestDataForDoubleValueTests(double random1, double random2, double random3) throws Exception, IOException { + createIndex("test"); + updateMapping("test", builder -> { + builder.startObject("test_double").field("type", "double").endObject(); + builder.startObject("test_null_double").field("type", "double").endObject(); + builder.startObject("test_keyword").field("type", "keyword").endObject(); + }); + + index("test", "1", builder -> { + builder.field("test_double", random1); + builder.field("test_null_double", (Double) null); + }); + index("test", "2", builder -> { + builder.field("test_double", random2); + builder.field("test_keyword", random3); + }); + } + + private void createTestDataForFloatValueTests(float random1, float random2, float random3) throws Exception, IOException { + createIndex("test"); + updateMapping("test", builder -> { + builder.startObject("test_float").field("type", "float").endObject(); + builder.startObject("test_null_float").field("type", "float").endObject(); + builder.startObject("test_keyword").field("type", "keyword").endObject(); + }); + + index("test", "1", builder -> { + builder.field("test_float", random1); + builder.field("test_null_float", (Double) null); + }); + index("test", "2", builder -> { + builder.field("test_float", random2); + builder.field("test_keyword", random3); + }); + } + + private void indexSimpleDocumentWithTrueValues(Long randomLongDate) throws IOException { + index("test", "1", builder -> { + builder.field("test_boolean", true); + builder.field("test_byte", 1); + builder.field("test_integer", 1); + 
builder.field("test_long", 1L); + builder.field("test_short", 1); + builder.field("test_double", 1d); + builder.field("test_float", 1f); + builder.field("test_keyword", "true"); + builder.field("test_date", randomLongDate); + }); + } + + /** + * Creates test data for all numeric get* methods. All values random and different from the other numeric fields already generated. + * It returns a map containing the field name and its randomly generated value to be later used in checking the returned values. + */ + private Map createTestDataForNumericValueTypes(Supplier randomGenerator) throws Exception, IOException { + Map map = new HashMap(); + createIndex("test"); + updateMappingForNumericValuesTests("test"); + + index("test", "1", builder -> { + // random Byte + byte test_byte = randomValueOtherThanMany(map::containsValue, randomGenerator).byteValue(); + builder.field("test_byte", test_byte); + map.put("test_byte", test_byte); + + // random Integer + int test_integer = randomValueOtherThanMany(map::containsValue, randomGenerator).intValue(); + builder.field("test_integer", test_integer); + map.put("test_integer", test_integer); + + // random Short + int test_short = randomValueOtherThanMany(map::containsValue, randomGenerator).shortValue(); + builder.field("test_short", test_short); + map.put("test_short", test_short); + + // random Long + long test_long = randomValueOtherThanMany(map::containsValue, randomGenerator).longValue(); + builder.field("test_long", test_long); + map.put("test_long", test_long); + + // random Double + double test_double = randomValueOtherThanMany(map::containsValue, randomGenerator).doubleValue(); + builder.field("test_double", test_double); + map.put("test_double", test_double); + + // random Float + float test_float = randomValueOtherThanMany(map::containsValue, randomGenerator).floatValue(); + builder.field("test_float", test_float); + map.put("test_float", test_float); + }); + return map; + } + + private void updateMappingForNumericValuesTests(String indexName) throws Exception { + updateMapping(indexName, builder -> { + for(String field : fieldsNames) { + builder.startObject(field).field("type", field.substring(5)).endObject(); + } + }); + } + + private void assertThrowsUnsupportedAndExpectErrorMessage(ThrowingRunnable runnable, String message) { + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, runnable); + assertEquals(message, sqle.getMessage()); + } + + private void assertThrowsWritesUnsupportedForUpdate(ThrowingRunnable r) { + assertThrowsUnsupportedAndExpectErrorMessage(r, "Writes not supported"); + } + + private void validateErrorsForDateTimeTestsWithoutCalendar(CheckedFunction method) { + SQLException sqle; + for(Entry,JDBCType> field : dateTimeTestingFields.entrySet()) { + sqle = expectThrows(SQLException.class, () -> method.apply(field.getKey().v1())); + assertEquals( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", + field.getKey().v2(), field.getValue()), sqle.getMessage()); + } + } + + private void validateErrorsForDateTimeTestsWithCalendar(Calendar c, CheckedBiFunction method) { + SQLException sqle; + for(Entry,JDBCType> field : dateTimeTestingFields.entrySet()) { + sqle = expectThrows(SQLException.class, () -> method.apply(field.getKey().v1(), c)); + assertEquals( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", + field.getKey().v2(), field.getValue()), sqle.getMessage()); + } + } + + private float randomFloatBetween(float start, float end) { + float result = 
0.0f; + while (result < start || result > end || Float.isNaN(result)) { + result = start + randomFloat() * (end - start); + } + + return result; + } + + private Long getMaxIntPlusOne() { + return Long.valueOf(Integer.MAX_VALUE) + 1L; + } + + private Double getMaxLongPlusOne() { + return Double.valueOf(Long.MAX_VALUE) + 1d; + } + + private Connection esJdbc(String timeZoneId) throws SQLException { + return randomBoolean() ? useDriverManager(timeZoneId) : useDataSource(timeZoneId); + } + + private Connection useDriverManager(String timeZoneId) throws SQLException { + String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress(); + String address = "jdbc:es://" + elasticsearchAddress; + Properties connectionProperties = connectionProperties(); + connectionProperties.put(JdbcConfiguration.TIME_ZONE, timeZoneId); + Connection connection = DriverManager.getConnection(address, connectionProperties); + + assertNotNull("The timezone should be specified", connectionProperties.getProperty(JdbcConfiguration.TIME_ZONE)); + return connection; + } + + private Connection useDataSource(String timeZoneId) throws SQLException { + String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress(); + JdbcDataSource dataSource = new JdbcDataSource(); + String address = "jdbc:es://" + elasticsearchAddress; + dataSource.setUrl(address); + Properties connectionProperties = connectionProperties(); + connectionProperties.put(JdbcConfiguration.TIME_ZONE, timeZoneId); + dataSource.setProperties(connectionProperties); + Connection connection = dataSource.getConnection(); + + assertNotNull("The timezone should be specified", connectionProperties.getProperty(JdbcConfiguration.TIME_ZONE)); + return connection; + } } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java index 7621743481a..f5d559d9bf0 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java @@ -25,7 +25,8 @@ public class SimpleExampleTestCase extends JdbcIntegrationTestCase { assertEquals("Don Quixote", results.getString(1)); assertEquals(1072, results.getInt(2)); SQLException e = expectThrows(SQLException.class, () -> results.getInt(1)); - assertTrue(e.getMessage(), e.getMessage().contains("unable to convert column 1 to an int")); + assertTrue(e.getMessage(), + e.getMessage().contains("Unable to convert value [Don Quixote] of type [VARCHAR] to an Integer")); assertFalse(results.next()); } // end::simple_example From a84a20844be0bbb39c7e063e33691f8d3e0a8380 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 31 Aug 2018 16:36:57 +0300 Subject: [PATCH 09/23] Lazy evaluate java9home (#33301) --- .../groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 2 +- .../gradle/precommit/ForbiddenApisCliTask.java | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 6a9d4076eef..75b5676cc34 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -211,7 +211,7 @@ class BuildPlugin implements Plugin<Project> { project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
project.rootProject.ext.inFipsJvm = inFipsJvm project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion) - project.rootProject.ext.java9Home = findJavaHome("9") + project.rootProject.ext.java9Home = "${-> findJavaHome("9")}" } project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java index 46e5d84a2f2..aaa9564b0dc 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java @@ -51,7 +51,8 @@ public class ForbiddenApisCliTask extends DefaultTask { private JavaVersion targetCompatibility; private FileCollection classesDirs; private SourceSet sourceSet; - private String javaHome; + // This needs to be an object so it can hold Groovy GStrings + private Object javaHome; @Input public JavaVersion getTargetCompatibility() { @@ -142,11 +143,11 @@ public class ForbiddenApisCliTask extends DefaultTask { } @Input - public String getJavaHome() { + public Object getJavaHome() { return javaHome; } - public void setJavaHome(String javaHome) { + public void setJavaHome(Object javaHome) { this.javaHome = javaHome; } From db3d32ce91b02d0de76085a3c47d57e4d21f0e92 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 31 Aug 2018 16:48:00 +0300 Subject: [PATCH 10/23] Fix pom for build-tools (#33300) Looks like `java-gradle-plugin` reconfigures the pom. Stop using it since we don't publish to the Gradle plugin portal. --- buildSrc/build.gradle | 9 --------- .../elasticsearch.clusterformation.properties | 1 + 2 files changed, 1 insertion(+), 9 deletions(-) create mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index dce14b10fcb..da8ad788164 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -24,15 +24,6 @@ plugins { id 'groovy' } -gradlePlugin { - plugins { - simplePlugin { - id = 'elasticsearch.clusterformation' - implementationClass = 'org.elasticsearch.gradle.clusterformation.ClusterformationPlugin' - } - } -} - group = 'org.elasticsearch.gradle' String minimumGradleVersion = file('src/main/resources/minimumGradleVersion').text.trim() diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties new file mode 100644 index 00000000000..dfd6cd9956a --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.clusterformation.ClusterformationPlugin From 436d5c4eeec6185e8c745095a68227ea6e46d4b6 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Fri, 31 Aug 2018 17:47:05 +0100 Subject: [PATCH 11/23] Fixes SecurityIntegTestCase so it always adds at least one alias (#33296) * Fixes SecurityIntegTestCase so it always adds at least one alias `SecurityIntegTestCase.createIndicesWithRandomAliases` could randomly fail because it's not guaranteed that the randomness of which aliases to add to the `IndicesAliasesRequestBuilder` would always select at least one alias to add.
This change fixes the problem by keeping track of whether we have added an alias to the request and forcing the last alias to be added if no other aliases have been added so far. Closes #30098 Closes #33123 * Addresses review comments --- .../org/elasticsearch/test/SecurityIntegTestCase.java | 8 ++++++-- .../xpack/security/authz/ReadActionsTests.java | 1 - 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 9bb0e44eb66..7143182c162 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -7,6 +7,7 @@ package org.elasticsearch.test; import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; + import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; @@ -44,7 +45,6 @@ import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.client.SecurityClient; import org.elasticsearch.xpack.security.LocalStateSecurity; - import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.AfterClass; import org.junit.Before; @@ -420,14 +420,18 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { createIndex(indices); if (frequently()) { + boolean aliasAdded = false; IndicesAliasesRequestBuilder builder = client().admin().indices().prepareAliases(); for (String index : indices) { if (frequently()) { //one alias per index with prefix "alias-" builder.addAlias(index, "alias-" + index); + aliasAdded = true; } } - if (randomBoolean()) { + // If we get to this point and we haven't added an alias to the request we need to add one + // or the request will fail so use noAliasAdded to force adding the alias in this case + if (aliasAdded == false || randomBoolean()) { //one alias pointing to all indices for (String index : indices) { builder.addAlias(index, "alias"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index a88dafece32..76568d3d48b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -102,7 +102,6 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertNoSearchHits(client().prepareSearch().get()); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33123") public void testEmptyAuthorizedIndicesSearchForAllDisallowNoIndices() { createIndicesWithRandomAliases("index1", "index2"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch() From 8703d875c0d14c7a723e9a02467bff65714b2f8c Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 31 Aug 2018 12:54:12 -0400 Subject: [PATCH 12/23] TEST: Disable soft-deletes in ParentChildTestCase Tracked at #33318 --- .../java/org/elasticsearch/join/query/ParentChildTestCase.java | 3 +++ 1 file changed, 3 insertions(+) diff
--git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java index 87b16bc448e..5ea0d8312ad 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -58,6 +59,8 @@ public abstract class ParentChildTestCase extends ESIntegTestCase { @Override public Settings indexSettings() { Settings.Builder builder = Settings.builder().put(super.indexSettings()) + // AwaitsFix: https://github.com/elastic/elasticsearch/issues/33318 + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) // aggressive filter caching so that we can assert on the filter cache size .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), true) .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true); From 08d0527e25b94eabf402c6f39c587b00ce76a1d1 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 31 Aug 2018 10:11:58 -0700 Subject: [PATCH 13/23] [DOCS] Rename X-Pack Commands section (#33005) --- docs/reference/commands/index.asciidoc | 8 ++++---- docs/reference/redirects.asciidoc | 5 +++++ 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/reference/commands/index.asciidoc b/docs/reference/commands/index.asciidoc index 164d2fc0e84..134ac1edbd0 100644 --- a/docs/reference/commands/index.asciidoc +++ b/docs/reference/commands/index.asciidoc @@ -1,11 +1,11 @@ -[role="xpack"] -[[xpack-commands]] -= {xpack} Commands +[[commands]] += Command line tools [partintro] -- -{xpack} includes commands that help you configure security: +{es} provides the following tools for configuring security and performing other +tasks from the command line: * <> * <> diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index b88c7bf4547..f0e05538053 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -544,3 +544,8 @@ You can use the following APIs to add, remove, and retrieve role mappings: === Privilege APIs See <>. + +[role="exclude",id="xpack-commands"] +=== X-Pack commands + +See <>. 
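The SecurityIntegTestCase change in patch 11 above boils down to a general pattern for randomized tests: when each candidate is included by an independent coin flip, track whether anything was selected and force a selection before the result is used. A minimal self-contained sketch of that pattern follows; the class and method names are hypothetical, not part of the Elasticsearch test framework:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Random;

    public class AtLeastOneSelectionSketch {
        // Keep each candidate with 50% probability, but force the last candidate
        // into the result when the coin flips would otherwise select nothing --
        // the same guarantee createIndicesWithRandomAliases now makes for aliases.
        static List<String> selectAtLeastOne(List<String> candidates, Random random) {
            List<String> selected = new ArrayList<>();
            for (String candidate : candidates) {
                if (random.nextBoolean()) {
                    selected.add(candidate);
                }
            }
            if (selected.isEmpty()) {
                selected.add(candidates.get(candidates.size() - 1));
            }
            return selected;
        }

        public static void main(String[] args) {
            List<String> indices = Arrays.asList("index1", "index2", "index3");
            // Never prints an empty list, no matter how the randomness falls.
            System.out.println(selectAtLeastOne(indices, new Random()));
        }
    }

The patch applies the same idea inline rather than through a helper: it remembers whether any per-index alias made it into the IndicesAliasesRequestBuilder and, if none did, unconditionally adds the shared "alias" entry so the request can never be empty.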
From cdeadfc585fbbe9c9c01da590eae89869b823291 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 31 Aug 2018 10:50:43 -0700 Subject: [PATCH 14/23] [DOCS] Move rollup APIs to docs (#31450) --- docs/build.gradle | 267 ++++++++++++++++++ docs/reference/index.asciidoc | 2 +- docs/reference/rest-api/index.asciidoc | 2 +- .../reference}/rollup/api-quickref.asciidoc | 2 + .../rollup/apis}/delete-job.asciidoc | 1 + .../reference/rollup/apis}/get-job.asciidoc | 1 + .../reference/rollup/apis}/put-job.asciidoc | 1 + .../rollup/apis}/rollup-caps.asciidoc | 1 + .../rollup/apis}/rollup-index-caps.asciidoc | 0 .../rollup/apis}/rollup-job-config.asciidoc | 1 + .../rollup/apis}/rollup-search.asciidoc | 1 + .../reference/rollup/apis}/start-job.asciidoc | 1 + .../reference/rollup/apis}/stop-job.asciidoc | 1 + .../reference}/rollup/index.asciidoc | 2 + .../reference}/rollup/overview.asciidoc | 2 + .../rollup/rollup-agg-limitations.asciidoc | 2 + .../reference/rollup}/rollup-api.asciidoc | 19 +- .../rollup/rollup-getting-started.asciidoc | 2 + .../rollup/rollup-search-limitations.asciidoc | 2 + .../rollup/understanding-groups.asciidoc | 2 + 20 files changed, 301 insertions(+), 11 deletions(-) rename {x-pack/docs/en => docs/reference}/rollup/api-quickref.asciidoc (97%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/delete-job.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/get-job.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/put-job.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/rollup-caps.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/rollup-index-caps.asciidoc (100%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/rollup-job-config.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/rollup-search.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/start-job.asciidoc (99%) rename {x-pack/docs/en/rest-api/rollup => docs/reference/rollup/apis}/stop-job.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/rollup/index.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/rollup/overview.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/rollup/rollup-agg-limitations.asciidoc (94%) rename {x-pack/docs/en/rest-api => docs/reference/rollup}/rollup-api.asciidoc (61%) rename {x-pack/docs/en => docs/reference}/rollup/rollup-getting-started.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/rollup/rollup-search-limitations.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/rollup/understanding-groups.asciidoc (99%) diff --git a/docs/build.gradle b/docs/build.gradle index 980c99baf83..8a23654f2e2 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -74,6 +74,17 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'build' // Just syntax examples exclude 'README.asciidoc' + // Broken code snippet tests + exclude 'reference/rollup/rollup-getting-started.asciidoc' + exclude 'reference/rollup/apis/rollup-job-config.asciidoc' + exclude 'reference/rollup/apis/rollup-index-caps.asciidoc' + exclude 'reference/rollup/apis/put-job.asciidoc' + exclude 'reference/rollup/apis/stop-job.asciidoc' + exclude 'reference/rollup/apis/start-job.asciidoc' + exclude 'reference/rollup/apis/rollup-search.asciidoc' + exclude 'reference/rollup/apis/delete-job.asciidoc' + exclude 'reference/rollup/apis/get-job.asciidoc' + exclude 
'reference/rollup/apis/rollup-caps.asciidoc' } listSnippets.docs = buildRestTests.docs @@ -594,3 +605,259 @@ buildRestTests.setups['library'] = ''' {"name": "The Moon is a Harsh Mistress", "author": "Robert A. Heinlein", "release_date": "1966-04-01", "page_count": 288} ''' +buildRestTests.setups['sensor_rollup_job'] = ''' + - do: + indices.create: + index: sensor-1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + timestamp: + type: date + temperature: + type: long + voltage: + type: float + node: + type: keyword + - do: + xpack.rollup.put_job: + id: "sensor" + body: > + { + "index_pattern": "sensor-*", + "rollup_index": "sensor_rollup", + "cron": "*/30 * * * * ?", + "page_size" :1000, + "groups" : { + "date_histogram": { + "field": "timestamp", + "interval": "1h", + "delay": "7d" + }, + "terms": { + "fields": ["node"] + } + }, + "metrics": [ + { + "field": "temperature", + "metrics": ["min", "max", "sum"] + }, + { + "field": "voltage", + "metrics": ["avg"] + } + ] + } +''' +buildRestTests.setups['sensor_started_rollup_job'] = ''' + - do: + indices.create: + index: sensor-1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + timestamp: + type: date + temperature: + type: long + voltage: + type: float + node: + type: keyword + + - do: + bulk: + index: sensor-1 + type: _doc + refresh: true + body: | + {"index":{}} + {"timestamp": 1516729294000, "temperature": 200, "voltage": 5.2, "node": "a"} + {"index":{}} + {"timestamp": 1516642894000, "temperature": 201, "voltage": 5.8, "node": "b"} + {"index":{}} + {"timestamp": 1516556494000, "temperature": 202, "voltage": 5.1, "node": "a"} + {"index":{}} + {"timestamp": 1516470094000, "temperature": 198, "voltage": 5.6, "node": "b"} + {"index":{}} + {"timestamp": 1516383694000, "temperature": 200, "voltage": 4.2, "node": "c"} + {"index":{}} + {"timestamp": 1516297294000, "temperature": 202, "voltage": 4.0, "node": "c"} + + - do: + xpack.rollup.put_job: + id: "sensor" + body: > + { + "index_pattern": "sensor-*", + "rollup_index": "sensor_rollup", + "cron": "* * * * * ?", + "page_size" :1000, + "groups" : { + "date_histogram": { + "field": "timestamp", + "interval": "1h", + "delay": "7d" + }, + "terms": { + "fields": ["node"] + } + }, + "metrics": [ + { + "field": "temperature", + "metrics": ["min", "max", "sum"] + }, + { + "field": "voltage", + "metrics": ["avg"] + } + ] + } + - do: + xpack.rollup.start_job: + id: "sensor" +''' + +buildRestTests.setups['sensor_index'] = ''' + - do: + indices.create: + index: sensor-1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + timestamp: + type: date + temperature: + type: long + voltage: + type: float + node: + type: keyword + load: + type: double + net_in: + type: long + net_out: + type: long + hostname: + type: keyword + datacenter: + type: keyword +''' + +buildRestTests.setups['sensor_prefab_data'] = ''' + - do: + indices.create: + index: sensor-1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + timestamp: + type: date + temperature: + type: long + voltage: + type: float + node: + type: keyword + - do: + indices.create: + index: sensor_rollup + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + node.terms.value: + type: keyword + temperature.sum.value: + type: double + temperature.max.value: + type: double + temperature.min.value: + type: double + 
timestamp.date_histogram.time_zone: + type: keyword + timestamp.date_histogram.interval: + type: keyword + timestamp.date_histogram.timestamp: + type: date + timestamp.date_histogram._count: + type: long + voltage.avg.value: + type: double + voltage.avg._count: + type: long + _rollup.id: + type: keyword + _rollup.version: + type: long + _meta: + _rollup: + sensor: + cron: "* * * * * ?" + rollup_index: "sensor_rollup" + index_pattern: "sensor-*" + timeout: "20s" + page_size: 1000 + groups: + date_histogram: + delay: "7d" + field: "timestamp" + interval: "1h" + time_zone: "UTC" + terms: + fields: + - "node" + id: sensor + metrics: + - field: "temperature" + metrics: + - min + - max + - sum + - field: "voltage" + metrics: + - avg + + - do: + bulk: + index: sensor_rollup + type: _doc + refresh: true + body: | + {"index":{}} + {"node.terms.value":"b","temperature.sum.value":201.0,"temperature.max.value":201.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":201.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.800000190734863,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516640400000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} + {"index":{}} + {"node.terms.value":"c","temperature.sum.value":200.0,"temperature.max.value":200.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":200.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.199999809265137,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516381200000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} + {"index":{}} + {"node.terms.value":"a","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.099999904632568,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516554000000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} + {"index":{}} + {"node.terms.value":"a","temperature.sum.value":200.0,"temperature.max.value":200.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":200.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.199999809265137,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516726800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} + {"index":{}} + 
{"node.terms.value":"b","temperature.sum.value":198.0,"temperature.max.value":198.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":198.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.599999904632568,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516467600000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} + {"index":{}} + {"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} + +''' diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 7d51e4aa512..4df3ad48fb6 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -61,7 +61,7 @@ include::sql/index.asciidoc[] include::monitoring/index.asciidoc[] -include::{xes-repo-dir}/rollup/index.asciidoc[] +include::rollup/index.asciidoc[] include::rest-api/index.asciidoc[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 9ec57940dd2..e1d607948e1 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -23,7 +23,7 @@ include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{es-repo-dir}/migration/migration.asciidoc[] include::{xes-repo-dir}/rest-api/ml-api.asciidoc[] -include::{xes-repo-dir}/rest-api/rollup-api.asciidoc[] +include::{es-repo-dir}/rollup/rollup-api.asciidoc[] include::{xes-repo-dir}/rest-api/security.asciidoc[] include::{xes-repo-dir}/rest-api/watcher.asciidoc[] include::{xes-repo-dir}/rest-api/defs.asciidoc[] diff --git a/x-pack/docs/en/rollup/api-quickref.asciidoc b/docs/reference/rollup/api-quickref.asciidoc similarity index 97% rename from x-pack/docs/en/rollup/api-quickref.asciidoc rename to docs/reference/rollup/api-quickref.asciidoc index 5e99f1c6984..1d372a03ddc 100644 --- a/x-pack/docs/en/rollup/api-quickref.asciidoc +++ b/docs/reference/rollup/api-quickref.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[rollup-api-quickref]] == API Quick Reference diff --git a/x-pack/docs/en/rest-api/rollup/delete-job.asciidoc b/docs/reference/rollup/apis/delete-job.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/delete-job.asciidoc rename to docs/reference/rollup/apis/delete-job.asciidoc index b795e0b28c7..37774560848 100644 --- a/x-pack/docs/en/rest-api/rollup/delete-job.asciidoc +++ b/docs/reference/rollup/apis/delete-job.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-delete-job]] === Delete Job API ++++ diff --git a/x-pack/docs/en/rest-api/rollup/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/get-job.asciidoc rename to docs/reference/rollup/apis/get-job.asciidoc index 96053dbfea6..794d7248012 100644 --- a/x-pack/docs/en/rest-api/rollup/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ 
-1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-get-job]] === Get Rollup Jobs API ++++ diff --git a/x-pack/docs/en/rest-api/rollup/put-job.asciidoc b/docs/reference/rollup/apis/put-job.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/put-job.asciidoc rename to docs/reference/rollup/apis/put-job.asciidoc index 27889d985b8..79e30ae8dc9 100644 --- a/x-pack/docs/en/rest-api/rollup/put-job.asciidoc +++ b/docs/reference/rollup/apis/put-job.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-put-job]] === Create Job API ++++ diff --git a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc rename to docs/reference/rollup/apis/rollup-caps.asciidoc index 1f233f195a0..907efb94c17 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-get-rollup-caps]] === Get Rollup Job Capabilities ++++ diff --git a/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc b/docs/reference/rollup/apis/rollup-index-caps.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc rename to docs/reference/rollup/apis/rollup-index-caps.asciidoc diff --git a/x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc b/docs/reference/rollup/apis/rollup-job-config.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc rename to docs/reference/rollup/apis/rollup-job-config.asciidoc index f937f28601a..3a917fb59f2 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-job-config.asciidoc +++ b/docs/reference/rollup/apis/rollup-job-config.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-job-config]] === Rollup Job Configuration diff --git a/x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc b/docs/reference/rollup/apis/rollup-search.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc rename to docs/reference/rollup/apis/rollup-search.asciidoc index 115ef8fb043..8e7fc69a00a 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-search.asciidoc +++ b/docs/reference/rollup/apis/rollup-search.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-search]] === Rollup Search ++++ diff --git a/x-pack/docs/en/rest-api/rollup/start-job.asciidoc b/docs/reference/rollup/apis/start-job.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/start-job.asciidoc rename to docs/reference/rollup/apis/start-job.asciidoc index 9a0a0a7e4f0..cf44883895c 100644 --- a/x-pack/docs/en/rest-api/rollup/start-job.asciidoc +++ b/docs/reference/rollup/apis/start-job.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-start-job]] === Start Job API ++++ diff --git a/x-pack/docs/en/rest-api/rollup/stop-job.asciidoc b/docs/reference/rollup/apis/stop-job.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/rollup/stop-job.asciidoc rename to docs/reference/rollup/apis/stop-job.asciidoc index 60507402705..5912b2d688b 100644 --- a/x-pack/docs/en/rest-api/rollup/stop-job.asciidoc +++ b/docs/reference/rollup/apis/stop-job.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-stop-job]] === Stop Job API ++++ diff --git a/x-pack/docs/en/rollup/index.asciidoc b/docs/reference/rollup/index.asciidoc similarity index 97% rename from 
x-pack/docs/en/rollup/index.asciidoc rename to docs/reference/rollup/index.asciidoc index 9ac89341bfe..64dc233f82f 100644 --- a/x-pack/docs/en/rollup/index.asciidoc +++ b/docs/reference/rollup/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[xpack-rollup]] = Rolling up historical data diff --git a/x-pack/docs/en/rollup/overview.asciidoc b/docs/reference/rollup/overview.asciidoc similarity index 99% rename from x-pack/docs/en/rollup/overview.asciidoc rename to docs/reference/rollup/overview.asciidoc index a9a983fbecc..b2570f647e7 100644 --- a/x-pack/docs/en/rollup/overview.asciidoc +++ b/docs/reference/rollup/overview.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[rollup-overview]] == Overview diff --git a/x-pack/docs/en/rollup/rollup-agg-limitations.asciidoc b/docs/reference/rollup/rollup-agg-limitations.asciidoc similarity index 94% rename from x-pack/docs/en/rollup/rollup-agg-limitations.asciidoc rename to docs/reference/rollup/rollup-agg-limitations.asciidoc index cd20622d93c..9f8b6f66ade 100644 --- a/x-pack/docs/en/rollup/rollup-agg-limitations.asciidoc +++ b/docs/reference/rollup/rollup-agg-limitations.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[rollup-agg-limitations]] == Rollup Aggregation Limitations diff --git a/x-pack/docs/en/rest-api/rollup-api.asciidoc b/docs/reference/rollup/rollup-api.asciidoc similarity index 61% rename from x-pack/docs/en/rest-api/rollup-api.asciidoc rename to docs/reference/rollup/rollup-api.asciidoc index 9a8ec00d77a..099686fb432 100644 --- a/x-pack/docs/en/rest-api/rollup-api.asciidoc +++ b/docs/reference/rollup/rollup-api.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[rollup-apis]] == Rollup APIs @@ -26,12 +27,12 @@ -include::rollup/delete-job.asciidoc[] -include::rollup/get-job.asciidoc[] -include::rollup/put-job.asciidoc[] -include::rollup/start-job.asciidoc[] -include::rollup/stop-job.asciidoc[] -include::rollup/rollup-caps.asciidoc[] -include::rollup/rollup-index-caps.asciidoc[] -include::rollup/rollup-search.asciidoc[] -include::rollup/rollup-job-config.asciidoc[] \ No newline at end of file +include::apis/delete-job.asciidoc[] +include::apis/get-job.asciidoc[] +include::apis/put-job.asciidoc[] +include::apis/start-job.asciidoc[] +include::apis/stop-job.asciidoc[] +include::apis/rollup-caps.asciidoc[] +include::apis/rollup-index-caps.asciidoc[] +include::apis/rollup-search.asciidoc[] +include::apis/rollup-job-config.asciidoc[] diff --git a/x-pack/docs/en/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc similarity index 99% rename from x-pack/docs/en/rollup/rollup-getting-started.asciidoc rename to docs/reference/rollup/rollup-getting-started.asciidoc index b6c913d7d34..8f99bc2c010 100644 --- a/x-pack/docs/en/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[rollup-getting-started]] == Getting Started diff --git a/x-pack/docs/en/rollup/rollup-search-limitations.asciidoc b/docs/reference/rollup/rollup-search-limitations.asciidoc similarity index 99% rename from x-pack/docs/en/rollup/rollup-search-limitations.asciidoc rename to docs/reference/rollup/rollup-search-limitations.asciidoc index 99f19a179ed..43feeab9a2e 100644 --- a/x-pack/docs/en/rollup/rollup-search-limitations.asciidoc +++ b/docs/reference/rollup/rollup-search-limitations.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[rollup-search-limitations]] == Rollup Search 
Limitations diff --git a/x-pack/docs/en/rollup/understanding-groups.asciidoc b/docs/reference/rollup/understanding-groups.asciidoc similarity index 99% rename from x-pack/docs/en/rollup/understanding-groups.asciidoc rename to docs/reference/rollup/understanding-groups.asciidoc index 803555b2d73..6321ab9b00f 100644 --- a/x-pack/docs/en/rollup/understanding-groups.asciidoc +++ b/docs/reference/rollup/understanding-groups.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[rollup-understanding-groups]] == Understanding Groups From 874ebcb6d48b86dbcf539a5f34b91b4d1b38b6ea Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 31 Aug 2018 11:56:26 -0700 Subject: [PATCH 15/23] [DOCS] Moves ml folder from x-pack/docs to docs (#33248) --- docs/build.gradle | 6 ++ .../reference}/ml/aggregations.asciidoc | 4 +- .../reference}/ml/categories.asciidoc | 3 + .../reference}/ml/configuring.asciidoc | 12 +-- .../reference}/ml/customurl.asciidoc | 2 +- .../ml/detector-custom-rules.asciidoc | 7 +- .../reference}/ml/functions.asciidoc | 0 .../reference}/ml/functions/count.asciidoc | 7 ++ .../reference}/ml/functions/geo.asciidoc | 3 +- .../reference}/ml/functions/info.asciidoc | 0 .../reference}/ml/functions/metric.asciidoc | 0 .../reference}/ml/functions/rare.asciidoc | 0 .../reference}/ml/functions/sum.asciidoc | 0 .../reference}/ml/functions/time.asciidoc | 0 .../ml/images/ml-category-advanced.jpg | Bin .../ml/images/ml-category-anomalies.jpg | Bin .../reference}/ml/images/ml-categoryterms.jpg | Bin .../reference}/ml/images/ml-create-job.jpg | Bin .../reference}/ml/images/ml-create-jobs.jpg | Bin .../ml/images/ml-customurl-detail.jpg | Bin .../ml/images/ml-customurl-discover.jpg | Bin .../ml/images/ml-customurl-edit.jpg | Bin .../reference}/ml/images/ml-customurl.jpg | Bin .../reference}/ml/images/ml-data-dates.jpg | Bin .../reference}/ml/images/ml-data-keywords.jpg | Bin .../reference}/ml/images/ml-data-metrics.jpg | Bin .../ml/images/ml-data-topmetrics.jpg | Bin .../ml/images/ml-data-visualizer.jpg | Bin .../reference}/ml/images/ml-edit-job.jpg | Bin .../ml/images/ml-population-anomaly.jpg | Bin .../ml/images/ml-population-job.jpg | Bin .../ml/images/ml-population-results.jpg | Bin .../reference}/ml/images/ml-scriptfields.jpg | Bin .../reference}/ml/images/ml-start-feed.jpg | Bin .../reference}/ml/images/ml-stop-feed.jpg | Bin .../en => docs/reference}/ml/images/ml.jpg | Bin .../reference}/ml/populations.asciidoc | 5 +- .../reference}/ml/stopping-ml.asciidoc | 6 +- .../reference}/ml/transforms.asciidoc | 33 +++--- x-pack/docs/en/ml/api-quickref.asciidoc | 102 ------------------ 40 files changed, 50 insertions(+), 140 deletions(-) rename {x-pack/docs/en => docs/reference}/ml/aggregations.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/ml/categories.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/ml/configuring.asciidoc (88%) rename {x-pack/docs/en => docs/reference}/ml/customurl.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/ml/detector-custom-rules.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/ml/functions.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/ml/functions/count.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/ml/functions/geo.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/ml/functions/info.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/ml/functions/metric.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/ml/functions/rare.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/ml/functions/sum.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/ml/functions/time.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-category-advanced.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-category-anomalies.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-categoryterms.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-create-job.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-create-jobs.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-customurl-detail.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-customurl-discover.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-customurl-edit.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-customurl.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-data-dates.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-data-keywords.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-data-metrics.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-data-topmetrics.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-data-visualizer.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-edit-job.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-population-anomaly.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-population-job.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-population-results.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-scriptfields.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-start-feed.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml-stop-feed.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/images/ml.jpg (100%) rename {x-pack/docs/en => docs/reference}/ml/populations.asciidoc (94%) rename {x-pack/docs/en => docs/reference}/ml/stopping-ml.asciidoc (94%) rename {x-pack/docs/en => docs/reference}/ml/transforms.asciidoc (97%) delete mode 100644 x-pack/docs/en/ml/api-quickref.asciidoc diff --git a/docs/build.gradle b/docs/build.gradle index 8a23654f2e2..88bccfef4a3 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -19,6 +19,12 @@ apply plugin: 'elasticsearch.docs-test' +/* List of files that have snippets that require a gold or platinum licence +and therefore cannot be tested yet... */ +buildRestTests.expectedUnconvertedCandidates = [ + 'reference/ml/transforms.asciidoc', +] + integTestCluster { /* Enable regexes in painless so our tests don't complain about example * snippets that use them. */ diff --git a/x-pack/docs/en/ml/aggregations.asciidoc b/docs/reference/ml/aggregations.asciidoc similarity index 99% rename from x-pack/docs/en/ml/aggregations.asciidoc rename to docs/reference/ml/aggregations.asciidoc index 07f46501569..4b873ea790b 100644 --- a/x-pack/docs/en/ml/aggregations.asciidoc +++ b/docs/reference/ml/aggregations.asciidoc @@ -41,7 +41,7 @@ PUT _xpack/ml/anomaly_detectors/farequote } ---------------------------------- // CONSOLE -// TEST[setup:farequote_data] +// TEST[skip:setup:farequote_data] In this example, the `airline`, `responsetime`, and `time` fields are aggregations. @@ -90,7 +90,7 @@ PUT _xpack/ml/datafeeds/datafeed-farequote } ---------------------------------- // CONSOLE -// TEST[setup:farequote_job] +// TEST[skip:setup:farequote_job] In this example, the aggregations have names that match the fields that they operate on. 
That is to say, the `max` aggregation is named `time` and its diff --git a/x-pack/docs/en/ml/categories.asciidoc b/docs/reference/ml/categories.asciidoc similarity index 99% rename from x-pack/docs/en/ml/categories.asciidoc rename to docs/reference/ml/categories.asciidoc index 21f71b871cb..03ebc8af76e 100644 --- a/x-pack/docs/en/ml/categories.asciidoc +++ b/docs/reference/ml/categories.asciidoc @@ -44,6 +44,7 @@ PUT _xpack/ml/anomaly_detectors/it_ops_new_logs } ---------------------------------- //CONSOLE +// TEST[skip:needs-licence] <1> The `categorization_field_name` property indicates which field will be categorized. <2> The resulting categories are used in a detector by setting `by_field_name`, @@ -127,6 +128,7 @@ PUT _xpack/ml/anomaly_detectors/it_ops_new_logs2 } ---------------------------------- //CONSOLE +// TEST[skip:needs-licence] <1> The {ref}/analysis-pattern-replace-charfilter.html[`pattern_replace` character filter] here achieves exactly the same as the `categorization_filters` in the first @@ -193,6 +195,7 @@ PUT _xpack/ml/anomaly_detectors/it_ops_new_logs3 } ---------------------------------- //CONSOLE +// TEST[skip:needs-licence] <1> Tokens basically consist of hyphens, digits, letters, underscores and dots. <2> By default, categorization ignores tokens that begin with a digit. <3> By default, categorization also ignores tokens that are hexadecimal numbers. diff --git a/x-pack/docs/en/ml/configuring.asciidoc b/docs/reference/ml/configuring.asciidoc similarity index 88% rename from x-pack/docs/en/ml/configuring.asciidoc rename to docs/reference/ml/configuring.asciidoc index e35f046a82b..9b6149d662a 100644 --- a/x-pack/docs/en/ml/configuring.asciidoc +++ b/docs/reference/ml/configuring.asciidoc @@ -36,20 +36,20 @@ The scenarios in this section describe some best practices for generating useful * <> * <> -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/customurl.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/customurl.asciidoc include::customurl.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/aggregations.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/aggregations.asciidoc include::aggregations.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/categories.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/categories.asciidoc include::categories.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/populations.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/populations.asciidoc include::populations.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/transforms.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/transforms.asciidoc include::transforms.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/detector-custom-rules.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/detector-custom-rules.asciidoc include::detector-custom-rules.asciidoc[] \ No newline at end of file diff --git a/x-pack/docs/en/ml/customurl.asciidoc b/docs/reference/ml/customurl.asciidoc similarity index 99% rename from x-pack/docs/en/ml/customurl.asciidoc rename to 
docs/reference/ml/customurl.asciidoc index 7c197084c0e..95f4f5f938f 100644 --- a/x-pack/docs/en/ml/customurl.asciidoc +++ b/docs/reference/ml/customurl.asciidoc @@ -106,7 +106,7 @@ POST _xpack/ml/anomaly_detectors/sample_job/_update } ---------------------------------- //CONSOLE -//TEST[setup:sample_job] +//TEST[skip:setup:sample_job] When you click this custom URL in the anomalies table in {kib}, it opens up the *Discover* page and displays source data for the period one hour before and diff --git a/x-pack/docs/en/ml/detector-custom-rules.asciidoc b/docs/reference/ml/detector-custom-rules.asciidoc similarity index 97% rename from x-pack/docs/en/ml/detector-custom-rules.asciidoc rename to docs/reference/ml/detector-custom-rules.asciidoc index 8513c7e4d25..02881f4cc43 100644 --- a/x-pack/docs/en/ml/detector-custom-rules.asciidoc +++ b/docs/reference/ml/detector-custom-rules.asciidoc @@ -39,6 +39,7 @@ PUT _xpack/ml/filters/safe_domains } ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] Now, we can create our job specifying a scope that uses the `safe_domains` filter for the `highest_registered_domain` field: @@ -70,6 +71,7 @@ PUT _xpack/ml/anomaly_detectors/dns_exfiltration_with_rule } ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] As time advances and we see more data and more results, we might encounter new domains that we want to add in the filter. We can do that by using the @@ -83,7 +85,7 @@ POST _xpack/ml/filters/safe_domains/_update } ---------------------------------- // CONSOLE -// TEST[setup:ml_filter_safe_domains] +// TEST[skip:setup:ml_filter_safe_domains] Note that we can use any of the `partition_field_name`, `over_field_name`, or `by_field_name` fields in the `scope`. @@ -123,6 +125,7 @@ PUT _xpack/ml/anomaly_detectors/scoping_multiple_fields } ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] Such a detector will skip results when the values of all 3 scoped fields are included in the referenced filters. @@ -166,6 +169,7 @@ PUT _xpack/ml/anomaly_detectors/cpu_with_rule } ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] When there are multiple conditions they are combined with a logical `and`. This is useful when we want the rule to apply to a range. We simply create @@ -205,6 +209,7 @@ PUT _xpack/ml/anomaly_detectors/rule_with_range } ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] ==== Custom rules in the life-cycle of a job diff --git a/x-pack/docs/en/ml/functions.asciidoc b/docs/reference/ml/functions.asciidoc similarity index 100% rename from x-pack/docs/en/ml/functions.asciidoc rename to docs/reference/ml/functions.asciidoc diff --git a/x-pack/docs/en/ml/functions/count.asciidoc b/docs/reference/ml/functions/count.asciidoc similarity index 97% rename from x-pack/docs/en/ml/functions/count.asciidoc rename to docs/reference/ml/functions/count.asciidoc index a2dc5645b61..abbbd118ffe 100644 --- a/x-pack/docs/en/ml/functions/count.asciidoc +++ b/docs/reference/ml/functions/count.asciidoc @@ -59,6 +59,7 @@ PUT _xpack/ml/anomaly_detectors/example1 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] This example is probably the simplest possible analysis. 
It identifies time buckets during which the overall count of events is higher or lower than @@ -86,6 +87,7 @@ PUT _xpack/ml/anomaly_detectors/example2 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] If you use this `high_count` function in a detector in your job, it models the event rate for each error code. It detects users that generate an @@ -110,6 +112,7 @@ PUT _xpack/ml/anomaly_detectors/example3 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] In this example, the function detects when the count of events for a status code is lower than usual. @@ -136,6 +139,7 @@ PUT _xpack/ml/anomaly_detectors/example4 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] If you are analyzing an aggregated `events_per_min` field, do not use a sum function (for example, `sum(events_per_min)`). Instead, use the count function @@ -200,6 +204,7 @@ PUT _xpack/ml/anomaly_detectors/example5 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] If you use this `high_non_zero_count` function in a detector in your job, it models the count of events for the `signaturename` field. It ignores any buckets @@ -253,6 +258,7 @@ PUT _xpack/ml/anomaly_detectors/example6 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] This `distinct_count` function detects when a system has an unusual number of logged in users. When you use this function in a detector in your job, it @@ -278,6 +284,7 @@ PUT _xpack/ml/anomaly_detectors/example7 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] This example detects instances of port scanning. When you use this function in a detector in your job, it models the distinct count of ports. It also detects the diff --git a/x-pack/docs/en/ml/functions/geo.asciidoc b/docs/reference/ml/functions/geo.asciidoc similarity index 98% rename from x-pack/docs/en/ml/functions/geo.asciidoc rename to docs/reference/ml/functions/geo.asciidoc index 5bcf6c33945..461ab825ff5 100644 --- a/x-pack/docs/en/ml/functions/geo.asciidoc +++ b/docs/reference/ml/functions/geo.asciidoc @@ -47,6 +47,7 @@ PUT _xpack/ml/anomaly_detectors/example1 } -------------------------------------------------- // CONSOLE +// TEST[skip:needs-licence] If you use this `lat_long` function in a detector in your job, it detects anomalies where the geographic location of a credit card transaction is @@ -98,6 +99,6 @@ PUT _xpack/ml/datafeeds/datafeed-test2 } -------------------------------------------------- // CONSOLE -// TEST[setup:farequote_job] +// TEST[skip:setup:farequote_job] For more information, see <>. 
diff --git a/x-pack/docs/en/ml/functions/info.asciidoc b/docs/reference/ml/functions/info.asciidoc similarity index 100% rename from x-pack/docs/en/ml/functions/info.asciidoc rename to docs/reference/ml/functions/info.asciidoc diff --git a/x-pack/docs/en/ml/functions/metric.asciidoc b/docs/reference/ml/functions/metric.asciidoc similarity index 100% rename from x-pack/docs/en/ml/functions/metric.asciidoc rename to docs/reference/ml/functions/metric.asciidoc diff --git a/x-pack/docs/en/ml/functions/rare.asciidoc b/docs/reference/ml/functions/rare.asciidoc similarity index 100% rename from x-pack/docs/en/ml/functions/rare.asciidoc rename to docs/reference/ml/functions/rare.asciidoc diff --git a/x-pack/docs/en/ml/functions/sum.asciidoc b/docs/reference/ml/functions/sum.asciidoc similarity index 100% rename from x-pack/docs/en/ml/functions/sum.asciidoc rename to docs/reference/ml/functions/sum.asciidoc diff --git a/x-pack/docs/en/ml/functions/time.asciidoc b/docs/reference/ml/functions/time.asciidoc similarity index 100% rename from x-pack/docs/en/ml/functions/time.asciidoc rename to docs/reference/ml/functions/time.asciidoc diff --git a/x-pack/docs/en/ml/images/ml-category-advanced.jpg b/docs/reference/ml/images/ml-category-advanced.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-category-advanced.jpg rename to docs/reference/ml/images/ml-category-advanced.jpg diff --git a/x-pack/docs/en/ml/images/ml-category-anomalies.jpg b/docs/reference/ml/images/ml-category-anomalies.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-category-anomalies.jpg rename to docs/reference/ml/images/ml-category-anomalies.jpg diff --git a/x-pack/docs/en/ml/images/ml-categoryterms.jpg b/docs/reference/ml/images/ml-categoryterms.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-categoryterms.jpg rename to docs/reference/ml/images/ml-categoryterms.jpg diff --git a/x-pack/docs/en/ml/images/ml-create-job.jpg b/docs/reference/ml/images/ml-create-job.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-create-job.jpg rename to docs/reference/ml/images/ml-create-job.jpg diff --git a/x-pack/docs/en/ml/images/ml-create-jobs.jpg b/docs/reference/ml/images/ml-create-jobs.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-create-jobs.jpg rename to docs/reference/ml/images/ml-create-jobs.jpg diff --git a/x-pack/docs/en/ml/images/ml-customurl-detail.jpg b/docs/reference/ml/images/ml-customurl-detail.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-customurl-detail.jpg rename to docs/reference/ml/images/ml-customurl-detail.jpg diff --git a/x-pack/docs/en/ml/images/ml-customurl-discover.jpg b/docs/reference/ml/images/ml-customurl-discover.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-customurl-discover.jpg rename to docs/reference/ml/images/ml-customurl-discover.jpg diff --git a/x-pack/docs/en/ml/images/ml-customurl-edit.jpg b/docs/reference/ml/images/ml-customurl-edit.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-customurl-edit.jpg rename to docs/reference/ml/images/ml-customurl-edit.jpg diff --git a/x-pack/docs/en/ml/images/ml-customurl.jpg b/docs/reference/ml/images/ml-customurl.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-customurl.jpg rename to docs/reference/ml/images/ml-customurl.jpg diff --git a/x-pack/docs/en/ml/images/ml-data-dates.jpg b/docs/reference/ml/images/ml-data-dates.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-data-dates.jpg rename to 
docs/reference/ml/images/ml-data-dates.jpg diff --git a/x-pack/docs/en/ml/images/ml-data-keywords.jpg b/docs/reference/ml/images/ml-data-keywords.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-data-keywords.jpg rename to docs/reference/ml/images/ml-data-keywords.jpg diff --git a/x-pack/docs/en/ml/images/ml-data-metrics.jpg b/docs/reference/ml/images/ml-data-metrics.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-data-metrics.jpg rename to docs/reference/ml/images/ml-data-metrics.jpg diff --git a/x-pack/docs/en/ml/images/ml-data-topmetrics.jpg b/docs/reference/ml/images/ml-data-topmetrics.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-data-topmetrics.jpg rename to docs/reference/ml/images/ml-data-topmetrics.jpg diff --git a/x-pack/docs/en/ml/images/ml-data-visualizer.jpg b/docs/reference/ml/images/ml-data-visualizer.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-data-visualizer.jpg rename to docs/reference/ml/images/ml-data-visualizer.jpg diff --git a/x-pack/docs/en/ml/images/ml-edit-job.jpg b/docs/reference/ml/images/ml-edit-job.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-edit-job.jpg rename to docs/reference/ml/images/ml-edit-job.jpg diff --git a/x-pack/docs/en/ml/images/ml-population-anomaly.jpg b/docs/reference/ml/images/ml-population-anomaly.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-population-anomaly.jpg rename to docs/reference/ml/images/ml-population-anomaly.jpg diff --git a/x-pack/docs/en/ml/images/ml-population-job.jpg b/docs/reference/ml/images/ml-population-job.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-population-job.jpg rename to docs/reference/ml/images/ml-population-job.jpg diff --git a/x-pack/docs/en/ml/images/ml-population-results.jpg b/docs/reference/ml/images/ml-population-results.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-population-results.jpg rename to docs/reference/ml/images/ml-population-results.jpg diff --git a/x-pack/docs/en/ml/images/ml-scriptfields.jpg b/docs/reference/ml/images/ml-scriptfields.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-scriptfields.jpg rename to docs/reference/ml/images/ml-scriptfields.jpg diff --git a/x-pack/docs/en/ml/images/ml-start-feed.jpg b/docs/reference/ml/images/ml-start-feed.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-start-feed.jpg rename to docs/reference/ml/images/ml-start-feed.jpg diff --git a/x-pack/docs/en/ml/images/ml-stop-feed.jpg b/docs/reference/ml/images/ml-stop-feed.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml-stop-feed.jpg rename to docs/reference/ml/images/ml-stop-feed.jpg diff --git a/x-pack/docs/en/ml/images/ml.jpg b/docs/reference/ml/images/ml.jpg similarity index 100% rename from x-pack/docs/en/ml/images/ml.jpg rename to docs/reference/ml/images/ml.jpg diff --git a/x-pack/docs/en/ml/populations.asciidoc b/docs/reference/ml/populations.asciidoc similarity index 94% rename from x-pack/docs/en/ml/populations.asciidoc rename to docs/reference/ml/populations.asciidoc index bf0dd2ad7d7..ed58c117f17 100644 --- a/x-pack/docs/en/ml/populations.asciidoc +++ b/docs/reference/ml/populations.asciidoc @@ -51,14 +51,11 @@ PUT _xpack/ml/anomaly_detectors/population } ---------------------------------- //CONSOLE +// TEST[skip:needs-licence] <1> This `over_field_name` property indicates that the metrics for each user ( as identified by their `username` value) are analyzed relative to other users in each 
bucket. -//TO-DO: Per sophiec20 "Perhaps add the datafeed config and add a query filter to -//include only workstations as servers and printers would behave differently -//from the population - If your data is stored in {es}, you can use the population job wizard in {kib} to create a job with these same properties. For example, the population job wizard provides the following job settings: diff --git a/x-pack/docs/en/ml/stopping-ml.asciidoc b/docs/reference/ml/stopping-ml.asciidoc similarity index 94% rename from x-pack/docs/en/ml/stopping-ml.asciidoc rename to docs/reference/ml/stopping-ml.asciidoc index c0be2d947cd..17505a02d15 100644 --- a/x-pack/docs/en/ml/stopping-ml.asciidoc +++ b/docs/reference/ml/stopping-ml.asciidoc @@ -28,7 +28,7 @@ request stops the `feed1` {dfeed}: POST _xpack/ml/datafeeds/datafeed-total-requests/_stop -------------------------------------------------- // CONSOLE -// TEST[setup:server_metrics_startdf] +// TEST[skip:setup:server_metrics_startdf] NOTE: You must have `manage_ml`, or `manage` cluster privileges to stop {dfeeds}. For more information, see <>. @@ -49,6 +49,7 @@ If you are upgrading your cluster, you can use the following request to stop all POST _xpack/ml/datafeeds/_all/_stop ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] [float] [[closing-ml-jobs]] @@ -67,7 +68,7 @@ example, the following request closes the `job1` job: POST _xpack/ml/anomaly_detectors/total-requests/_close -------------------------------------------------- // CONSOLE -// TEST[setup:server_metrics_openjob] +// TEST[skip:setup:server_metrics_openjob] NOTE: You must have `manage_ml`, or `manage` cluster privileges to stop {dfeeds}. For more information, see <>. @@ -86,3 +87,4 @@ all open jobs on the cluster: POST _xpack/ml/anomaly_detectors/_all/_close ---------------------------------- // CONSOLE +// TEST[skip:needs-licence] diff --git a/x-pack/docs/en/ml/transforms.asciidoc b/docs/reference/ml/transforms.asciidoc similarity index 97% rename from x-pack/docs/en/ml/transforms.asciidoc rename to docs/reference/ml/transforms.asciidoc index c4b4d560297..a2276895fc9 100644 --- a/x-pack/docs/en/ml/transforms.asciidoc +++ b/docs/reference/ml/transforms.asciidoc @@ -95,7 +95,7 @@ PUT /my_index/my_type/1 } ---------------------------------- // CONSOLE -// TESTSETUP +// TEST[skip:SETUP] <1> In this example, string fields are mapped as `keyword` fields to support aggregation. If you want both a full text (`text`) and a keyword (`keyword`) version of the same field, use multi-fields. For more information, see @@ -144,7 +144,7 @@ PUT _xpack/ml/datafeeds/datafeed-test1 } ---------------------------------- // CONSOLE -// TEST[skip:broken] +// TEST[skip:needs-licence] <1> A script field named `total_error_count` is referenced in the detector within the job. <2> The script field is defined in the {dfeed}. @@ -163,7 +163,7 @@ You can preview the contents of the {dfeed} by using the following API: GET _xpack/ml/datafeeds/datafeed-test1/_preview ---------------------------------- // CONSOLE -// TEST[continued] +// TEST[skip:continued] In this example, the API returns the following results, which contain a sum of the `error_count` and `aborted_count` values: @@ -177,8 +177,6 @@ the `error_count` and `aborted_count` values: } ] ---------------------------------- -// TESTRESPONSE - NOTE: This example demonstrates how to use script fields, but it contains insufficient data to generate meaningful results. 
For a full demonstration of @@ -254,7 +252,7 @@ PUT _xpack/ml/datafeeds/datafeed-test2 GET _xpack/ml/datafeeds/datafeed-test2/_preview -------------------------------------------------- // CONSOLE -// TEST[skip:broken] +// TEST[skip:needs-licence] <1> The script field has a rather generic name in this case, since it will be used for various tests in the subsequent examples. <2> The script field uses the plus (+) operator to concatenate strings. @@ -271,7 +269,6 @@ and "SMITH " have been concatenated and an underscore was added: } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform3]] .Example 3: Trimming strings @@ -292,7 +289,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update GET _xpack/ml/datafeeds/datafeed-test2/_preview -------------------------------------------------- // CONSOLE -// TEST[continued] +// TEST[skip:continued] <1> This script field uses the `trim()` function to trim extra white space from a string. @@ -308,7 +305,6 @@ has been trimmed to "SMITH": } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform4]] .Example 4: Converting strings to lowercase @@ -329,7 +325,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update GET _xpack/ml/datafeeds/datafeed-test2/_preview -------------------------------------------------- // CONSOLE -// TEST[continued] +// TEST[skip:continued] <1> This script field uses the `toLowerCase` function to convert a string to all lowercase letters. Likewise, you can use the `toUpperCase{}` function to convert a string to uppercase letters. @@ -346,7 +342,6 @@ has been converted to "joe": } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform5]] .Example 5: Converting strings to mixed case formats @@ -367,7 +362,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update GET _xpack/ml/datafeeds/datafeed-test2/_preview -------------------------------------------------- // CONSOLE -// TEST[continued] +// TEST[skip:continued] <1> This script field is a more complicated example of case manipulation. It uses the `subString()` function to capitalize the first letter of a string and converts the remaining characters to lowercase. @@ -384,7 +379,6 @@ has been converted to "Joe": } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform6]] .Example 6: Replacing tokens @@ -405,7 +399,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update GET _xpack/ml/datafeeds/datafeed-test2/_preview -------------------------------------------------- // CONSOLE -// TEST[continued] +// TEST[skip:continued] <1> This script field uses regular expressions to replace white space with underscores. @@ -421,7 +415,6 @@ The preview {dfeed} API returns the following results, which show that } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform7]] .Example 7: Regular expression matching and concatenation @@ -442,7 +435,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update GET _xpack/ml/datafeeds/datafeed-test2/_preview -------------------------------------------------- // CONSOLE -// TEST[continued] +// TEST[skip:continued] <1> This script field looks for a specific regular expression pattern and emits the matched groups as a concatenated string. If no match is found, it emits an empty string. 
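The string transforms in Examples 3 through 7 all rely on Painless, whose string methods mirror Java's. As a plain-Java sketch of those same operations (class and variable names are ours, and nothing here is taken from the datafeed configurations themselves), the following shows what each transform does to a sample value:

[source,java]
----------------------------------
// Plain-Java analogue of the Painless string transforms in Examples 3-6.
// Painless string methods behave like their Java counterparts, so these
// calls produce the same before/after values the datafeed previews report.
public class ScriptFieldAnalogues {
    public static void main(String[] args) {
        String raw = " SMITH ";
        String trimmed = raw.trim();                          // Example 3: "SMITH"
        String lower = trimmed.toLowerCase();                 // Example 4: "smith"
        String mixed = trimmed.substring(0, 1).toUpperCase()
                + trimmed.substring(1).toLowerCase();         // Example 5: "Smith"
        String joined = "Joe Smith".replaceAll("\\s+", "_");  // Example 6: "Joe_Smith"
        System.out.println(trimmed + " " + lower + " " + mixed + " " + joined);
    }
}
----------------------------------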
@@ -459,7 +452,6 @@ The preview {dfeed} API returns the following results, which show that } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform8]] .Example 8: Splitting strings by domain name @@ -509,7 +501,7 @@ PUT _xpack/ml/datafeeds/datafeed-test3 GET _xpack/ml/datafeeds/datafeed-test3/_preview -------------------------------------------------- // CONSOLE -// TEST[skip:broken] +// TEST[skip:needs-licence] If you have a single field that contains a well-formed DNS domain name, you can use the `domainSplit()` function to split the string into its highest registered @@ -537,7 +529,6 @@ The preview {dfeed} API returns the following results, which show that } ] ---------------------------------- -// TESTRESPONSE [[ml-configuring-transform9]] .Example 9: Transforming geo_point data @@ -583,7 +574,7 @@ PUT _xpack/ml/datafeeds/datafeed-test4 GET _xpack/ml/datafeeds/datafeed-test4/_preview -------------------------------------------------- // CONSOLE -// TEST[skip:broken] +// TEST[skip:needs-licence] In {es}, location data can be stored in `geo_point` fields but this data type is not supported natively in {xpackml} analytics. This example of a script field @@ -602,4 +593,4 @@ The preview {dfeed} API returns the following results, which show that } ] ---------------------------------- -// TESTRESPONSE + diff --git a/x-pack/docs/en/ml/api-quickref.asciidoc b/x-pack/docs/en/ml/api-quickref.asciidoc deleted file mode 100644 index be74167862e..00000000000 --- a/x-pack/docs/en/ml/api-quickref.asciidoc +++ /dev/null @@ -1,102 +0,0 @@ -[role="xpack"] -[[ml-api-quickref]] -== API quick reference - -All {ml} endpoints have the following base: - -[source,js] ----- -/_xpack/ml/ ----- -// NOTCONSOLE - -The main {ml} resources can be accessed with a variety of endpoints: - -* <>: Create and manage {ml} jobs -* <>: Select data from {es} to be analyzed -* <>: Access the results of a {ml} job -* <>: Manage model snapshots -//* <>: Validate subsections of job configurations - -[float] -[[ml-api-jobs]] -=== /anomaly_detectors/ - -* {ref}/ml-put-job.html[PUT /anomaly_detectors/+++]: Create a job -* {ref}/ml-open-job.html[POST /anomaly_detectors//_open]: Open a job -* {ref}/ml-post-data.html[POST /anomaly_detectors//_data]: Send data to a job -* {ref}/ml-get-job.html[GET /anomaly_detectors]: List jobs -* {ref}/ml-get-job.html[GET /anomaly_detectors/+++]: Get job details -* {ref}/ml-get-job-stats.html[GET /anomaly_detectors//_stats]: Get job statistics -* {ref}/ml-update-job.html[POST /anomaly_detectors//_update]: Update certain properties of the job configuration -* {ref}/ml-flush-job.html[POST anomaly_detectors//_flush]: Force a job to analyze buffered data -* {ref}/ml-forecast.html[POST anomaly_detectors//_forecast]: Forecast future job behavior -* {ref}/ml-close-job.html[POST /anomaly_detectors//_close]: Close a job -* {ref}/ml-delete-job.html[DELETE /anomaly_detectors/+++]: Delete a job - -[float] -[[ml-api-calendars]] -=== /calendars/ - -* {ref}/ml-put-calendar.html[PUT /calendars/+++]: Create a calendar -* {ref}/ml-post-calendar-event.html[POST /calendars/+++/events]: Add a scheduled event to a calendar -* {ref}/ml-put-calendar-job.html[PUT /calendars/+++/jobs/+++]: Associate a job with a calendar -* {ref}/ml-get-calendar.html[GET /calendars/+++]: Get calendar details -* {ref}/ml-get-calendar-event.html[GET /calendars/+++/events]: Get scheduled event details -* {ref}/ml-delete-calendar-event.html[DELETE /calendars/+++/events/+++]: Remove a scheduled event from a calendar -* 
{ref}/ml-delete-calendar-job.html[DELETE /calendars/+++/jobs/+++]: Disassociate a job from a calendar -* {ref}/ml-delete-calendar.html[DELETE /calendars/+++]: Delete a calendar - -[float] -[[ml-api-filters]] -=== /filters/ - -* {ref}/ml-put-filter.html[PUT /filters/+++]: Create a filter -* {ref}/ml-update-filter.html[POST /filters/+++/_update]: Update a filter -* {ref}/ml-get-filter.html[GET /filters/+++]: List filters -* {ref}/ml-delete-filter.html[DELETE /filter/+++]: Delete a filter - -[float] -[[ml-api-datafeeds]] -=== /datafeeds/ - -* {ref}/ml-put-datafeed.html[PUT /datafeeds/+++]: Create a {dfeed} -* {ref}/ml-start-datafeed.html[POST /datafeeds//_start]: Start a {dfeed} -* {ref}/ml-get-datafeed.html[GET /datafeeds]: List {dfeeds} -* {ref}/ml-get-datafeed.html[GET /datafeeds/+++]: Get {dfeed} details -* {ref}/ml-get-datafeed-stats.html[GET /datafeeds//_stats]: Get statistical information for {dfeeds} -* {ref}/ml-preview-datafeed.html[GET /datafeeds//_preview]: Get a preview of a {dfeed} -* {ref}/ml-update-datafeed.html[POST /datafeeds//_update]: Update certain settings for a {dfeed} -* {ref}/ml-stop-datafeed.html[POST /datafeeds//_stop]: Stop a {dfeed} -* {ref}/ml-delete-datafeed.html[DELETE /datafeeds/+++]: Delete {dfeed} - -[float] -[[ml-api-results]] -=== /results/ - -* {ref}/ml-get-bucket.html[GET /results/buckets]: List the buckets in the results -* {ref}/ml-get-bucket.html[GET /results/buckets/+++]: Get bucket details -* {ref}/ml-get-overall-buckets.html[GET /results/overall_buckets]: Get overall bucket results for multiple jobs -* {ref}/ml-get-category.html[GET /results/categories]: List the categories in the results -* {ref}/ml-get-category.html[GET /results/categories/+++]: Get category details -* {ref}/ml-get-influencer.html[GET /results/influencers]: Get influencer details -* {ref}/ml-get-record.html[GET /results/records]: Get records from the results - -[float] -[[ml-api-snapshots]] -=== /model_snapshots/ - -* {ref}/ml-get-snapshot.html[GET /model_snapshots]: List model snapshots -* {ref}/ml-get-snapshot.html[GET /model_snapshots/+++]: Get model snapshot details -* {ref}/ml-revert-snapshot.html[POST /model_snapshots//_revert]: Revert a model snapshot -* {ref}/ml-update-snapshot.html[POST /model_snapshots//_update]: Update certain settings for a model snapshot -* {ref}/ml-delete-snapshot.html[DELETE /model_snapshots/+++]: Delete a model snapshot - -//// -[float] -[[ml-api-validate]] -=== /validate/ - -* {ref}/ml-valid-detector.html[POST /anomaly_detectors/_validate/detector]: Validate a detector -* {ref}/ml-valid-job.html[POST /anomaly_detectors/_validate]: Validate a job -//// From d4f2b5be7de735ff4a5fb360c00ffc1741f3ddc4 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Fri, 31 Aug 2018 12:03:49 -0700 Subject: [PATCH 16/23] tracked at https://github.com/elastic/elasticsearch/issues/33320 and https://github.com/elastic/elasticsearch/issues/30777 --- .../elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index 538d54416bf..71dd17f0684 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ 
b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -135,6 +135,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320") public void testSearchInputHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -242,6 +243,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(response.getStatusLine().getStatusCode(), is(404)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30777") public void testIndexActionHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); From 3d82a30fadd035228f29ccc00d6c5bab71e9adf6 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 31 Aug 2018 21:29:06 +0200 Subject: [PATCH 17/23] drop `index.shard.check_on_startup: fix` (#32279) drop `index.shard.check_on_startup: fix` Relates #31389 --- docs/reference/index-modules.asciidoc | 4 +- .../elasticsearch/index/shard/IndexShard.java | 22 +-- .../org/elasticsearch/index/store/Store.java | 15 +- .../index/shard/IndexShardTests.java | 157 +++++++++++++++++- .../index/shard/IndexShardTestCase.java | 38 ++++- 5 files changed, 197 insertions(+), 39 deletions(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 54c0c1c1b15..214d7754177 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -65,9 +65,7 @@ corruption is detected, it will prevent the shard from being opened. Accepts: `fix`:: - Check for both physical and logical corruption. Segments that were reported - as corrupted will be automatically removed. This option *may result in data loss*. - Use with extreme caution! + The same as `false`. This option is deprecated and will be completely removed in 7.0. WARNING: Expert only. Checking shards may take a lot of time on large indices. 
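Concretely, after this patch the setting accepts the same values but `fix` degenerates into a warning. A condensed, illustrative sketch of the resulting semantics follows; the field and method names are ours rather than the actual IndexShard code, and a later patch in this series removes `fix` as an accepted value entirely:

[source,java]
----------------------------------
// Condensed, illustrative sketch of the post-patch semantics of
// index.shard.check_on_startup; names are ours, not IndexShard's.
final class CheckOnStartupSketch {
    private final String checkIndexOnStartup;

    CheckOnStartupSketch(String value) {
        if ("fix".equals(value)) {
            // "fix" is now a deprecated alias for "false": warn and carry on.
            System.err.println("deprecation: [index.shard.check_on_startup] "
                    + "value [fix] has no effect and will not be accepted in future");
        }
        this.checkIndexOnStartup = value;
    }

    boolean checkIndexBeforeRecovery() {
        // Only "true" and "checksum" run the check before applying the translog.
        return "true".equals(checkIndexOnStartup) || "checksum".equals(checkIndexOnStartup);
    }
}
----------------------------------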
-- diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ef5f9ab0ef3..24c78c72033 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -301,6 +301,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl logger.debug("state: [CREATED]"); this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); + if ("fix".equals(checkIndexOnStartup)) { + deprecationLogger.deprecated("Setting [index.shard.check_on_startup] is set to deprecated value [fix], " + + "which has no effect and will not be accepted in future"); + } this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, bigArrays); final String aId = shardRouting.allocationId().getId(); this.globalCheckpointListeners = new GlobalCheckpointListeners(shardId, threadPool.executor(ThreadPool.Names.LISTENER), logger); @@ -1325,7 +1329,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } recoveryState.setStage(RecoveryState.Stage.VERIFY_INDEX); // also check here, before we apply the translog - if (Booleans.isTrue(checkIndexOnStartup)) { + if (Booleans.isTrue(checkIndexOnStartup) || "checksum".equals(checkIndexOnStartup)) { try { checkIndex(); } catch (IOException ex) { @@ -1933,6 +1937,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl if (store.tryIncRef()) { try { doCheckIndex(); + } catch (IOException e) { + store.markStoreCorrupted(e); + throw e; } finally { store.decRef(); } @@ -1976,18 +1983,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return; } logger.warn("check index [failure]\n{}", os.bytes().utf8ToString()); - if ("fix".equals(checkIndexOnStartup)) { - if (logger.isDebugEnabled()) { - logger.debug("fixing index, writing new segments file ..."); - } - store.exorciseIndex(status); - if (logger.isDebugEnabled()) { - logger.debug("index fixed, wrote new segments file \"{}\"", status.segmentsFileName); - } - } else { - // only throw a failure if we are not going to fix the index - throw new IllegalStateException("index check failure but can't fix it"); - } + throw new IOException("index check failure"); } } diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 85975bc68c8..a00d779ca77 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -134,7 +134,8 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0 static final int VERSION_START = 0; static final int VERSION = VERSION_WRITE_THROWABLE; - static final String CORRUPTED = "corrupted_"; + // public is for test purposes + public static final String CORRUPTED = "corrupted_"; public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), Property.IndexScope); @@ -360,18 +361,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } } - /** - * Repairs the index using the previous returned status from {@link #checkIndex(PrintStream)}. 
- */ - public void exorciseIndex(CheckIndex.Status status) throws IOException { - metadataLock.writeLock().lock(); - try (CheckIndex checkIndex = new CheckIndex(directory)) { - checkIndex.exorciseIndex(status); - } finally { - metadataLock.writeLock().unlock(); - } - } - public StoreStats stats() throws IOException { ensureOpen(); return new StoreStats(directory.estimateSize()); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 50f95bf4d47..fdf7f5438d0 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; @@ -118,6 +119,7 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; +import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.VersionUtils; @@ -126,7 +128,11 @@ import org.elasticsearch.ElasticsearchException; import java.io.IOException; import java.nio.charset.Charset; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1239,7 +1245,7 @@ public class IndexShardTests extends IndexShardTestCase { }; try (Store store = createStore(shardId, new IndexSettings(metaData, Settings.EMPTY), directory)) { - IndexShard shard = newShard(shardRouting, shardPath, metaData, store, + IndexShard shard = newShard(shardRouting, shardPath, metaData, i -> store, null, new InternalEngineFactory(), () -> { }, EMPTY_EVENT_LISTENER); AtomicBoolean failureCallbackTriggered = new AtomicBoolean(false); @@ -2590,6 +2596,143 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(newShard); } + public void testIndexCheckOnStartup() throws Exception { + final IndexShard indexShard = newStartedShard(true); + + final long numDocs = between(10, 100); + for (long i = 0; i < numDocs; i++) { + indexDoc(indexShard, "_doc", Long.toString(i), "{}"); + } + indexShard.flush(new FlushRequest()); + closeShards(indexShard); + + final ShardPath shardPath = indexShard.shardPath(); + + final Path indexPath = corruptIndexFile(shardPath); + + final AtomicInteger corruptedMarkerCount = new AtomicInteger(); + final SimpleFileVisitor corruptedVisitor = new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (Files.isRegularFile(file) && file.getFileName().toString().startsWith(Store.CORRUPTED)) { + corruptedMarkerCount.incrementAndGet(); + } + return FileVisitResult.CONTINUE; + } + }; + Files.walkFileTree(indexPath, corruptedVisitor); + + assertThat("corruption marker should not be there", corruptedMarkerCount.get(), equalTo(0)); + + final ShardRouting shardRouting = 
ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), + RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE + ); + // start shard and perform index check on startup. It enforce shard to fail due to corrupted index files + final IndexMetaData indexMetaData = IndexMetaData.builder(indexShard.indexSettings().getIndexMetaData()) + .settings(Settings.builder() + .put(indexShard.indexSettings.getSettings()) + .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("true", "checksum"))) + .build(); + + IndexShard corruptedShard = newShard(shardRouting, shardPath, indexMetaData, + null, null, indexShard.engineFactory, + indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); + + final IndexShardRecoveryException indexShardRecoveryException = + expectThrows(IndexShardRecoveryException.class, () -> newStartedShard(p -> corruptedShard, true)); + assertThat(indexShardRecoveryException.getMessage(), equalTo("failed recovery")); + + // check that corrupt marker is there + Files.walkFileTree(indexPath, corruptedVisitor); + assertThat("store has to be marked as corrupted", corruptedMarkerCount.get(), equalTo(1)); + + try { + closeShards(corruptedShard); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo("CheckIndex failed")); + } + } + + public void testShardDoesNotStartIfCorruptedMarkerIsPresent() throws Exception { + final IndexShard indexShard = newStartedShard(true); + + final long numDocs = between(10, 100); + for (long i = 0; i < numDocs; i++) { + indexDoc(indexShard, "_doc", Long.toString(i), "{}"); + } + indexShard.flush(new FlushRequest()); + closeShards(indexShard); + + final ShardPath shardPath = indexShard.shardPath(); + + final ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), + RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE + ); + final IndexMetaData indexMetaData = indexShard.indexSettings().getIndexMetaData(); + + final Path indexPath = shardPath.getDataPath().resolve(ShardPath.INDEX_FOLDER_NAME); + + // create corrupted marker + final String corruptionMessage = "fake ioexception"; + try(Store store = createStore(indexShard.indexSettings(), shardPath)) { + store.markStoreCorrupted(new IOException(corruptionMessage)); + } + + // try to start shard on corrupted files + final IndexShard corruptedShard = newShard(shardRouting, shardPath, indexMetaData, + null, null, indexShard.engineFactory, + indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); + + final IndexShardRecoveryException exception1 = expectThrows(IndexShardRecoveryException.class, + () -> newStartedShard(p -> corruptedShard, true)); + assertThat(exception1.getCause().getMessage(), equalTo(corruptionMessage + " (resource=preexisting_corruption)")); + closeShards(corruptedShard); + + final AtomicInteger corruptedMarkerCount = new AtomicInteger(); + final SimpleFileVisitor corruptedVisitor = new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (Files.isRegularFile(file) && file.getFileName().toString().startsWith(Store.CORRUPTED)) { + corruptedMarkerCount.incrementAndGet(); + } + return FileVisitResult.CONTINUE; + } + }; + Files.walkFileTree(indexPath, corruptedVisitor); + assertThat("store has to be marked as corrupted", corruptedMarkerCount.get(), equalTo(1)); + + // try to start another time shard on corrupted files + final IndexShard corruptedShard2 = newShard(shardRouting, shardPath, indexMetaData, + null, null, indexShard.engineFactory, + 
indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); + + final IndexShardRecoveryException exception2 = expectThrows(IndexShardRecoveryException.class, + () -> newStartedShard(p -> corruptedShard2, true)); + assertThat(exception2.getCause().getMessage(), equalTo(corruptionMessage + " (resource=preexisting_corruption)")); + closeShards(corruptedShard2); + + // check that corrupt marker is there + corruptedMarkerCount.set(0); + Files.walkFileTree(indexPath, corruptedVisitor); + assertThat("store still has a single corrupt marker", corruptedMarkerCount.get(), equalTo(1)); + } + + private Path corruptIndexFile(ShardPath shardPath) throws IOException { + final Path indexPath = shardPath.getDataPath().resolve(ShardPath.INDEX_FOLDER_NAME); + final Path[] filesToCorrupt = + Files.walk(indexPath) + .filter(p -> { + final String name = p.getFileName().toString(); + return Files.isRegularFile(p) + && name.startsWith("extra") == false // Skip files added by Lucene's ExtrasFS + && IndexWriter.WRITE_LOCK_NAME.equals(name) == false + && name.startsWith("segments_") == false && name.endsWith(".si") == false; + }) + .toArray(Path[]::new); + CorruptionUtils.corruptFile(random(), filesToCorrupt); + return indexPath; + } + /** * Simulates a scenario that happens when we are async fetching snapshot metadata from GatewayService * and checking index concurrently. This should always be possible without any exception. @@ -2613,7 +2756,7 @@ public class IndexShardTests extends IndexShardTestCase { final IndexMetaData indexMetaData = IndexMetaData.builder(indexShard.indexSettings().getIndexMetaData()) .settings(Settings.builder() .put(indexShard.indexSettings.getSettings()) - .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum", "fix"))) + .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum"))) .build(); final IndexShard newShard = newShard(shardRouting, indexShard.shardPath(), indexMetaData, null, null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); @@ -2655,6 +2798,16 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(newShard); } + public void testCheckOnStartupDeprecatedValue() throws Exception { + final Settings settings = Settings.builder().put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), "fix").build(); + + final IndexShard newShard = newShard(true, settings); + closeShards(newShard); + + assertWarnings("Setting [index.shard.check_on_startup] is set to deprecated value [fix], " + + "which has no effect and will not be accepted in future"); + } + class Result { private final int localCheckpoint; private final int maxSeqNo; diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 2f4a3dfd6c1..375e74f6cca 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.uid.Versions; @@ -156,7 
+157,6 @@ public abstract class IndexShardTestCase extends ESTestCase { return Settings.EMPTY; } - protected Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException { return createStore(shardPath.getShardId(), indexSettings, newFSDirectory(shardPath.resolveIndex())); } @@ -169,7 +169,6 @@ public abstract class IndexShardTestCase extends ESTestCase { } }; return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); - } /** @@ -179,7 +178,17 @@ public abstract class IndexShardTestCase extends ESTestCase { * another shard) */ protected IndexShard newShard(boolean primary) throws IOException { - return newShard(primary, Settings.EMPTY, new InternalEngineFactory()); + return newShard(primary, Settings.EMPTY); + } + + /** + * Creates a new initializing shard. The shard will have its own unique data path. + * + * @param primary indicates whether to a primary shard (ready to recover from an empty store) or a replica (ready to recover from + * another shard) + */ + protected IndexShard newShard(final boolean primary, final Settings settings) throws IOException { + return newShard(primary, settings, new InternalEngineFactory()); } /** @@ -318,23 +327,25 @@ public abstract class IndexShardTestCase extends ESTestCase { * @param routing shard routing to use * @param shardPath path to use for shard data * @param indexMetaData indexMetaData for the shard, including any mapping - * @param store an optional custom store to use. If null a default file based store will be created + * @param storeProvider an optional custom store provider to use. If null a default file based store will be created * @param indexSearcherWrapper an optional wrapper to be used during searchers * @param globalCheckpointSyncer callback for syncing global checkpoints * @param indexEventListener index event listener * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData, - @Nullable Store store, @Nullable IndexSearcherWrapper indexSearcherWrapper, + @Nullable CheckedFunction storeProvider, + @Nullable IndexSearcherWrapper indexSearcherWrapper, @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, IndexEventListener indexEventListener, IndexingOperationListener... listeners) throws IOException { final Settings nodeSettings = Settings.builder().put("node.name", routing.currentNodeId()).build(); final IndexSettings indexSettings = new IndexSettings(indexMetaData, nodeSettings); final IndexShard indexShard; - if (store == null) { - store = createStore(indexSettings, shardPath); + if (storeProvider == null) { + storeProvider = is -> createStore(is, shardPath); } + final Store store = storeProvider.apply(indexSettings); boolean success = false; try { IndexCache indexCache = new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null); @@ -424,7 +435,18 @@ public abstract class IndexShardTestCase extends ESTestCase { */ protected IndexShard newStartedShard( final boolean primary, final Settings settings, final EngineFactory engineFactory) throws IOException { - IndexShard shard = newShard(primary, settings, engineFactory); + return newStartedShard(p -> newShard(p, settings, engineFactory), primary); + } + + /** + * creates a new empty shard and starts it. + * + * @param shardFunction shard factory function + * @param primary controls whether the shard will be a primary or a replica. 
+ */ + protected IndexShard newStartedShard(CheckedFunction<Boolean, IndexShard, IOException> shardFunction, + boolean primary) throws IOException { + IndexShard shard = shardFunction.apply(primary); if (primary) { recoverShardFromStore(shard); } else { From 4f1ffb5cb1b5163be02aba9bb54a5f872507a17c Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 31 Aug 2018 13:33:14 -0600 Subject: [PATCH 18/23] Mute SmokeTestWatcherWithSecurityIT tests Tests from the SmokeTestWatcherWithSecurityIT suite have been failing occasionally. This commit mutes all the tests. This is tracked in --- .../elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index 71dd17f0684..b4d60d3708e 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -186,6 +186,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(conditionMet, is(false)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29893") public void testSearchTransformHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -268,6 +269,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(spam, is("eggs")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30777") public void testIndexActionInsufficientPrivileges() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); From 6a77cb4211498006b681f46b20feb6b957f8e87d Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 31 Aug 2018 13:37:22 -0600 Subject: [PATCH 19/23] Fix AwaitsFix issue number In the previous commit where SmokeTestWatcherWithSecurityIT tests were muted, I added the incorrect issue numbers. This commit fixes this. The issue for the tests is #33320.
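Both muting commits use the same mechanism: the `@AwaitsFix` annotation from Lucene's test framework, which Elasticsearch test classes inherit. The runner skips annotated tests while keeping the tracking URL next to the affected code. A minimal sketch of the pattern, with placeholder class and test names and assuming the usual `LuceneTestCase.AwaitsFix` import:

[source,java]
----------------------------------
import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
import org.elasticsearch.test.ESTestCase;

// Minimal sketch of the muting pattern: the test stays in the tree but the
// runner skips it until the linked issue is resolved. The class and test
// names here are placeholders, not code from the patch.
public class MutedTestSketch extends ESTestCase {

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320")
    public void testFlakyBehaviour() {
        fail("never runs while the @AwaitsFix annotation is present");
    }
}
----------------------------------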
--- .../smoketest/SmokeTestWatcherWithSecurityIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index b4d60d3708e..17fbf0769fd 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -186,7 +186,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(conditionMet, is(false)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29893") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320") public void testSearchTransformHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -269,7 +269,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(spam, is("eggs")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30777") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320") public void testIndexActionInsufficientPrivileges() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); From 00b272af327b7bfff63b5b8305a040b8b6dbfdb2 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 31 Aug 2018 22:05:40 +0200 Subject: [PATCH 20/23] completely drop `index.shard.check_on_startup: fix` for 7.0 (#33194) Relates to #32279 --- docs/reference/index-modules.asciidoc | 4 ---- docs/reference/migration/migrate_7_0/indices.asciidoc | 4 ++++ .../java/org/elasticsearch/index/IndexSettings.java | 3 +-- .../java/org/elasticsearch/index/shard/IndexShard.java | 4 ---- .../put/MetaDataIndexTemplateServiceTests.java | 2 +- .../org/elasticsearch/index/shard/IndexShardTests.java | 10 ---------- 6 files changed, 6 insertions(+), 21 deletions(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 214d7754177..53de67e55fd 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -63,10 +63,6 @@ corruption is detected, it will prevent the shard from being opened. Accepts: Check for both physical and logical corruption. This is much more expensive in terms of CPU and memory usage. -`fix`:: - - The same as `false`. This option is deprecated and will be completely removed in 7.0. - WARNING: Expert only. Checking shards may take a lot of time on large indices. -- diff --git a/docs/reference/migration/migrate_7_0/indices.asciidoc b/docs/reference/migration/migrate_7_0/indices.asciidoc index bab7b602220..a47cc6f4324 100644 --- a/docs/reference/migration/migrate_7_0/indices.asciidoc +++ b/docs/reference/migration/migrate_7_0/indices.asciidoc @@ -78,3 +78,7 @@ The parent circuit breaker defines a new setting `indices.breaker.total.use_real heap memory instead of only considering the reserved memory by child circuit breakers. When this setting is `true`, the default parent breaker limit also changes from 70% to 95% of the JVM heap size. The previous behavior can be restored by setting `indices.breaker.total.use_real_memory` to `false`. 
+ +==== `fix` value for `index.shard.check_on_startup` is removed + +Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported. \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 3ea022bbebd..2bfd5ed0707 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -75,11 +75,10 @@ public final class IndexSettings { switch(s) { case "false": case "true": - case "fix": case "checksum": return s; default: - throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, fix, checksum] but was: " + s); + throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, checksum] but was: " + s); } }, Property.IndexScope); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 24c78c72033..3cdb76db927 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -301,10 +301,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl logger.debug("state: [CREATED]"); this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); - if ("fix".equals(checkIndexOnStartup)) { - deprecationLogger.deprecated("Setting [index.shard.check_on_startup] is set to deprecated value [fix], " - + "which has no effect and will not be accepted in future"); - } this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, bigArrays); final String aId = shardRouting.allocationId().getId(); this.globalCheckpointListeners = new GlobalCheckpointListeners(shardId, threadPool.executor(ThreadPool.Names.LISTENER), logger); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index f0e9a57f7f3..39f04c6b7b0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -69,7 +69,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { containsString("Failed to parse value [0] for setting [index.number_of_shards] must be >= 1")); assertThat(throwables.get(0).getMessage(), containsString("unknown value for [index.shard.check_on_startup] " + - "must be one of [true, false, fix, checksum] but was: blargh")); + "must be one of [true, false, checksum] but was: blargh")); } public void testIndexTemplateValidationAccumulatesValidationErrors() { diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index fdf7f5438d0..584ed7085a8 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2798,16 +2798,6 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(newShard); } - public void 
testCheckOnStartupDeprecatedValue() throws Exception { - final Settings settings = Settings.builder().put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), "fix").build(); - - final IndexShard newShard = newShard(true, settings); - closeShards(newShard); - - assertWarnings("Setting [index.shard.check_on_startup] is set to deprecated value [fix], " - + "which has no effect and will not be accepted in future"); - } - class Result { private final int localCheckpoint; private final int maxSeqNo; From 08b9247ce226bfc47ebe4f859d97b5ed87940e69 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 31 Aug 2018 16:50:08 -0400 Subject: [PATCH 21/23] Adjust soft-deletes version after backport into 6.5 Relates #33222 --- server/src/main/java/org/elasticsearch/index/IndexSettings.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 2bfd5ed0707..6a612091b97 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -416,7 +416,7 @@ public final class IndexSettings { generationThresholdSize = scopedSettings.get(INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING); mergeSchedulerConfig = new MergeSchedulerConfig(this); gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); - softDeleteEnabled = version.onOrAfter(Version.V_7_0_0_alpha1) && scopedSettings.get(INDEX_SOFT_DELETES_SETTING); + softDeleteEnabled = version.onOrAfter(Version.V_6_5_0) && scopedSettings.get(INDEX_SOFT_DELETES_SETTING); softDeleteRetentionOperations = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING); warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); maxResultWindow = scopedSettings.get(MAX_RESULT_WINDOW_SETTING); From ebed8f26187fc315b095106a39edbe46d0ed5778 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 31 Aug 2018 17:18:22 -0400 Subject: [PATCH 22/23] [Rollup] Fix FullClusterRestart test We need to wait for the job to fully initialize and start before we can attempt to stop it. If we don't, it's possible for the stop API to be called before the persistent task is fully loaded and it'll throw an exception. 
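A generic form of the guard this fix introduces would poll the job until its persistent task reports a started state before issuing the stop request. The sketch below is an assumption-laden illustration, not the patch's `assertRollUpJob` helper: the GET endpoint and the crude state check stand in for whatever response parsing the real test performs.

[source,java]
----------------------------------
import java.util.Map;

import org.elasticsearch.client.Request;
import org.elasticsearch.test.rest.ESRestTestCase;

// Illustrative guard for the race described above: poll until the rollup
// job's persistent task reports a started state, then stop it. The GET
// endpoint and state extraction are simplified assumptions, not the
// patch's assertRollUpJob helper.
public abstract class RollupStopGuardSketch extends ESRestTestCase {

    protected void stopJobOnceStarted(String jobId) throws Exception {
        assertBusy(() -> {
            Map<String, Object> job = entityAsMap(client().performRequest(
                    new Request("GET", "_xpack/rollup/job/" + jobId)));
            // Crude stand-in: real responses nest the state under
            // jobs[].status; assertBusy retries until the job has started.
            assertTrue(job.toString().contains("started"));
        });
        client().performRequest(new Request("POST", "_xpack/rollup/job/" + jobId + "/_stop"));
    }
}
----------------------------------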
Closes #32773 --- .../org/elasticsearch/xpack/restart/FullClusterRestartIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 6ead87aba61..7c4eda37d2f 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -325,7 +325,6 @@ public class FullClusterRestartIT extends ESRestTestCase { } } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/32773") public void testRollupIDSchemeAfterRestart() throws Exception { assumeTrue("Rollup can be tested with 6.3.0 and onwards", oldClusterVersion.onOrAfter(Version.V_6_3_0)); assumeTrue("Rollup ID scheme changed in 6.4", oldClusterVersion.before(Version.V_6_4_0)); @@ -393,6 +392,8 @@ public class FullClusterRestartIT extends ESRestTestCase { indexRequest.setJsonEntity("{\"timestamp\":\"2018-01-02T00:00:01\",\"value\":345}"); client().performRequest(indexRequest); + assertRollUpJob("rollup-id-test"); + // stop the rollup job to force a state save, which will upgrade the ID final Request stopRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-id-test/_stop"); Map stopRollupJobResponse = entityAsMap(client().performRequest(stopRollupJobRequest)); From ca94d052b85aa03c4ac610b8d3423aaf9598e0f0 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 31 Aug 2018 18:48:19 -0400 Subject: [PATCH 23/23] Mute test watcher usage stats output Tracked at #33326 --- .../resources/rest-api-spec/test/watcher/usage/10_basic.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml index a33fcdb5297..7a22ad322bf 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml @@ -1,6 +1,8 @@ --- "Test watcher usage stats output": - + - skip: + version: "all" + reason: AwaitsFix at https://github.com/elastic/elasticsearch/issues/33326 - do: catch: missing xpack.watcher.delete_watch: