From 006c2c9ab028c06c5199679bba7df448119e362f Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Tue, 10 Jul 2018 08:21:20 -0700 Subject: [PATCH 01/10] [test] port archive distribution packaging tests (#31314) Recreates the rest of the bats packaging tests for the tar distribution in the java packaging test project, with support for both tar and zip packaging, both oss and default flavors, and on Linux and Windows. Most tests are followed fairly closely, some have either been dropped if unnecessary or folded into others if convenient. --- .../gradle/vagrant/VagrantTestPlugin.groovy | 10 +- qa/vagrant/build.gradle | 20 ++ .../packaging/test/ArchiveTestCase.java | 252 ++++++++++++++++++ .../packaging/util/Archives.java | 205 +++++++++----- .../elasticsearch/packaging/util/Cleanup.java | 39 ++- .../packaging/util/FileUtils.java | 31 ++- .../packaging/util/Installation.java | 32 +++ .../packaging/util/Platforms.java | 32 ++- .../packaging/util/ServerUtils.java | 123 +++++++++ .../elasticsearch/packaging/util/Shell.java | 56 ++-- .../packaging/tests/20_tar_package.bats | 178 ------------- 11 files changed, 677 insertions(+), 301 deletions(-) create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java delete mode 100644 qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index 455d30f95db..d4d1d857e90 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -546,9 +546,15 @@ class VagrantTestPlugin implements Plugin { javaPackagingTest.command = 'ssh' javaPackagingTest.args = ['--command', 'sudo bash "$PACKAGING_TESTS/run-tests.sh"'] } else { + // powershell sessions run over winrm always run as administrator, whether 
--elevated is passed or not. however + // remote sessions have some restrictions on what they can do, such as impersonating another user (or the same user + // without administrator elevation), which we need to do for these tests. passing --elevated runs the session + // as a scheduled job locally on the vm as a true administrator to get around this limitation + // + // https://github.com/hashicorp/vagrant/blob/9c299a2a357fcf87f356bb9d56e18a037a53d138/plugins/communicators/winrm/communicator.rb#L195-L225 + // https://devops-collective-inc.gitbooks.io/secrets-of-powershell-remoting/content/manuscript/accessing-remote-computers.html javaPackagingTest.command = 'winrm' - // winrm commands run as administrator - javaPackagingTest.args = ['--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"'] + javaPackagingTest.args = ['--elevated', '--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"'] } TaskExecutionAdapter javaPackagingReproListener = createReproListener(project, javaPackagingTest.path) diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 23d171f3125..704136eb4cf 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -31,6 +31,12 @@ dependencies { compile "org.hamcrest:hamcrest-core:${versions.hamcrest}" compile "org.hamcrest:hamcrest-library:${versions.hamcrest}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:fluent-hc:${versions.httpclient}" + compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "commons-logging:commons-logging:${versions.commonslogging}" + compile project(':libs:core') // pulls in the jar built by this project and its dependencies @@ -73,3 +79,17 @@ tasks.test.enabled = false // this project doesn't get published tasks.dependencyLicenses.enabled = false tasks.dependenciesInfo.enabled = false + +tasks.thirdPartyAudit.excludes = [ + 
//commons-logging optional dependencies + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.apache.log4j.Category', + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + 'org.apache.log4j.Priority', + //commons-logging provided dependencies + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener' +] diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index ab4a11922cc..df5e8cf995d 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -19,6 +19,12 @@ package org.elasticsearch.packaging.test; +import org.apache.http.client.fluent.Request; +import org.elasticsearch.packaging.util.Archives; +import org.elasticsearch.packaging.util.Platforms; +import org.elasticsearch.packaging.util.ServerUtils; +import org.elasticsearch.packaging.util.Shell; +import org.elasticsearch.packaging.util.Shell.Result; import org.junit.Before; import org.junit.BeforeClass; import org.junit.FixMethodOrder; @@ -28,9 +34,33 @@ import org.junit.runners.MethodSorters; import org.elasticsearch.packaging.util.Distribution; import org.elasticsearch.packaging.util.Installation; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.joining; +import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER; import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; import static org.elasticsearch.packaging.util.Archives.installArchive; import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation; +import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; +import static 
org.elasticsearch.packaging.util.FileMatcher.file; +import static org.elasticsearch.packaging.util.FileMatcher.p660; +import static org.elasticsearch.packaging.util.FileUtils.append; +import static org.elasticsearch.packaging.util.FileUtils.cp; +import static org.elasticsearch.packaging.util.FileUtils.getTempDir; +import static org.elasticsearch.packaging.util.FileUtils.mkdir; +import static org.elasticsearch.packaging.util.FileUtils.rm; +import static org.elasticsearch.packaging.util.ServerUtils.makeRequest; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.isEmptyString; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeThat; import static org.junit.Assume.assumeTrue; /** @@ -61,4 +91,226 @@ public abstract class ArchiveTestCase { installation = installArchive(distribution()); verifyArchiveInstallation(installation, distribution()); } + + @Test + public void test20PluginsListWithNoPlugins() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + final Result r = sh.run(bin.elasticsearchPlugin + " list"); + + assertThat(r.stdout, isEmptyString()); + } + + @Test + public void test30AbortWhenJavaMissing() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + Platforms.onWindows(() -> { + // on windows, removing java from PATH and removing JAVA_HOME is less involved than changing the permissions of the java + // executable. 
we also don't check permissions in the windows scripts anyway + final String originalPath = sh.run("$Env:PATH").stdout.trim(); + final String newPath = Arrays.stream(originalPath.split(";")) + .filter(path -> path.contains("Java") == false) + .collect(joining(";")); + + // note the lack of a $ when clearing the JAVA_HOME env variable - with a $ it deletes the java home directory + // https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/providers/environment-provider?view=powershell-6 + // + // this won't persist to another session so we don't have to reset anything + final Result runResult = sh.runIgnoreExitCode( + "$Env:PATH = '" + newPath + "'; " + + "Remove-Item Env:JAVA_HOME; " + + bin.elasticsearch + ); + + assertThat(runResult.exitCode, is(1)); + assertThat(runResult.stderr, containsString("could not find java; set JAVA_HOME or ensure java is in PATH")); + }); + + Platforms.onLinux(() -> { + final String javaPath = sh.run("which java").stdout.trim(); + + try { + sh.run("chmod -x '" + javaPath + "'"); + final Result runResult = sh.runIgnoreExitCode(bin.elasticsearch.toString()); + assertThat(runResult.exitCode, is(1)); + assertThat(runResult.stdout, containsString("could not find java; set JAVA_HOME or ensure java is in PATH")); + } finally { + sh.run("chmod +x '" + javaPath + "'"); + } + }); + } + + @Test + public void test40CreateKeystoreManually() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + Platforms.onLinux(() -> sh.run("sudo -u " + ARCHIVE_OWNER + " " + bin.elasticsearchKeystore + " create")); + + // this is a hack around the fact that we can't run a command in the same session as the same user but not as administrator. + // the keystore ends up being owned by the Administrators group, so we manually set it to be owned by the vagrant user here. 
+ // from the server's perspective the permissions aren't really different, this is just to reflect what we'd expect in the tests. + // when we run these commands as a role user we won't have to do this + Platforms.onWindows(() -> sh.run( + bin.elasticsearchKeystore + " create; " + + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$acl = Get-Acl '" + installation.config("elasticsearch.keystore") + "'; " + + "$acl.SetOwner($account); " + + "Set-Acl '" + installation.config("elasticsearch.keystore") + "' $acl" + )); + + assertThat(installation.config("elasticsearch.keystore"), file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); + + Platforms.onLinux(() -> { + final Result r = sh.run("sudo -u " + ARCHIVE_OWNER + " " + bin.elasticsearchKeystore + " list"); + assertThat(r.stdout, containsString("keystore.seed")); + }); + + Platforms.onWindows(() -> { + final Result r = sh.run(bin.elasticsearchKeystore + " list"); + assertThat(r.stdout, containsString("keystore.seed")); + }); + } + + @Test + public void test50StartAndStop() throws IOException { + assumeThat(installation, is(notNullValue())); + + // cleanup from previous test + rm(installation.config("elasticsearch.keystore")); + + Archives.runElasticsearch(installation); + + final String gcLogName = Platforms.LINUX + ? 
"gc.log.0.current" + : "gc.log"; + assertTrue("gc logs exist", Files.exists(installation.logs.resolve(gcLogName))); + ServerUtils.runElasticsearchTests(); + + Archives.stopElasticsearch(installation); + } + + @Test + public void test60AutoCreateKeystore() { + assumeThat(installation, is(notNullValue())); + + assertThat(installation.config("elasticsearch.keystore"), file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + Platforms.onLinux(() -> { + final Result result = sh.run("sudo -u " + ARCHIVE_OWNER + " " + bin.elasticsearchKeystore + " list"); + assertThat(result.stdout, containsString("keystore.seed")); + }); + + Platforms.onWindows(() -> { + final Result result = sh.run(bin.elasticsearchKeystore + " list"); + assertThat(result.stdout, containsString("keystore.seed")); + }); + } + + @Test + public void test70CustomPathConfAndJvmOptions() throws IOException { + assumeThat(installation, is(notNullValue())); + + final Path tempConf = getTempDir().resolve("esconf-alternate"); + + try { + mkdir(tempConf); + cp(installation.config("elasticsearch.yml"), tempConf.resolve("elasticsearch.yml")); + cp(installation.config("log4j2.properties"), tempConf.resolve("log4j2.properties")); + + // we have to disable Log4j from using JMX lest it will hit a security + // manager exception before we have configured logging; this will fail + // startup since we detect usages of logging before it is configured + final String jvmOptions = + "-Xms512m\n" + + "-Xmx512m\n" + + "-Dlog4j2.disable.jmx=true\n"; + append(tempConf.resolve("jvm.options"), jvmOptions); + + final Shell sh = new Shell(); + Platforms.onLinux(() -> sh.run("chown -R elasticsearch:elasticsearch " + tempConf)); + Platforms.onWindows(() -> sh.run( + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$tempConf = Get-ChildItem '" + tempConf + "' -Recurse; " + + "$tempConf += Get-Item '" + tempConf + 
"'; " + + "$tempConf | ForEach-Object { " + + "$acl = Get-Acl $_.FullName; " + + "$acl.SetOwner($account); " + + "Set-Acl $_.FullName $acl " + + "}" + )); + + final Shell serverShell = new Shell(); + serverShell.getEnv().put("ES_PATH_CONF", tempConf.toString()); + serverShell.getEnv().put("ES_JAVA_OPTS", "-XX:-UseCompressedOops"); + + Archives.runElasticsearch(installation, serverShell); + + final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes")); + assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912")); + assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\"")); + + Archives.stopElasticsearch(installation); + + } finally { + rm(tempConf); + } + } + + @Test + public void test80RelativePathConf() throws IOException { + assumeThat(installation, is(notNullValue())); + + final Path temp = getTempDir().resolve("esconf-alternate"); + final Path tempConf = temp.resolve("config"); + + try { + mkdir(tempConf); + Stream.of( + "elasticsearch.yml", + "log4j2.properties", + "jvm.options" + ).forEach(file -> cp(installation.config(file), tempConf.resolve(file))); + + append(tempConf.resolve("elasticsearch.yml"), "node.name: relative"); + + final Shell sh = new Shell(); + Platforms.onLinux(() -> sh.run("chown -R elasticsearch:elasticsearch " + temp)); + Platforms.onWindows(() -> sh.run( + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$tempConf = Get-ChildItem '" + temp + "' -Recurse; " + + "$tempConf += Get-Item '" + temp + "'; " + + "$tempConf | ForEach-Object { " + + "$acl = Get-Acl $_.FullName; " + + "$acl.SetOwner($account); " + + "Set-Acl $_.FullName $acl " + + "}" + )); + + final Shell serverShell = new Shell(temp); + serverShell.getEnv().put("ES_PATH_CONF", "config"); + Archives.runElasticsearch(installation, serverShell); + + final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes")); + assertThat(nodesResponse, 
containsString("\"name\":\"relative\"")); + + Archives.stopElasticsearch(installation); + + } finally { + rm(tempConf); + } + } + + } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index c4d3655a553..6ffec813eb0 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -19,11 +19,14 @@ package org.elasticsearch.packaging.util; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.List; import java.util.stream.Stream; +import static java.util.stream.Collectors.joining; import static org.elasticsearch.packaging.util.FileMatcher.Fileness.Directory; import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; import static org.elasticsearch.packaging.util.FileMatcher.file; @@ -36,17 +39,26 @@ import static org.elasticsearch.packaging.util.FileUtils.getPackagingArchivesDir import static org.elasticsearch.packaging.util.FileUtils.lsGlob; import static org.elasticsearch.packaging.util.FileUtils.mv; +import static org.elasticsearch.packaging.util.FileUtils.slurp; import static org.elasticsearch.packaging.util.Platforms.isDPKG; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.isEmptyOrNullString; import static org.hamcrest.core.Is.is; import static org.hamcrest.collection.IsEmptyCollection.empty; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.IsNot.not; +import static org.junit.Assert.assertTrue; /** * Installation and verification logic for archive distributions */ public class Archives { + // in the future we'll run as a role user on Windows + public static final String ARCHIVE_OWNER = Platforms.WINDOWS + ? 
"vagrant" + : "elasticsearch"; + public static Installation installArchive(Distribution distribution) { return installArchive(distribution, getDefaultArchiveInstallPath(), getCurrentVersion()); } @@ -63,22 +75,20 @@ public class Archives { if (distribution.packaging == Distribution.Packaging.TAR) { - if (Platforms.LINUX) { - sh.bash("tar -C " + baseInstallPath + " -xzpf " + distributionFile); - } else { + Platforms.onLinux(() -> sh.run("tar -C " + baseInstallPath + " -xzpf " + distributionFile)); + + if (Platforms.WINDOWS) { throw new RuntimeException("Distribution " + distribution + " is not supported on windows"); } } else if (distribution.packaging == Distribution.Packaging.ZIP) { - if (Platforms.LINUX) { - sh.bash("unzip " + distributionFile + " -d " + baseInstallPath); - } else { - sh.powershell( - "Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; " + - "[IO.Compression.ZipFile]::ExtractToDirectory('" + distributionFile + "', '" + baseInstallPath + "')" - ); - } + Platforms.onLinux(() -> sh.run("unzip " + distributionFile + " -d " + baseInstallPath)); + + Platforms.onWindows(() -> sh.run( + "Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; " + + "[IO.Compression.ZipFile]::ExtractToDirectory('" + distributionFile + "', '" + baseInstallPath + "')" + )); } else { throw new RuntimeException("Distribution " + distribution + " is not a known archive type"); @@ -93,9 +103,8 @@ public class Archives { assertThat("only the intended installation exists", installations, hasSize(1)); assertThat("only the intended installation exists", installations.get(0), is(fullInstallPath)); - if (Platforms.LINUX) { - setupArchiveUsersLinux(fullInstallPath); - } + Platforms.onLinux(() -> setupArchiveUsersLinux(fullInstallPath)); + Platforms.onWindows(() -> setupArchiveUsersWindows(fullInstallPath)); return new Installation(fullInstallPath); } @@ -103,17 +112,17 @@ public class Archives { private static void setupArchiveUsersLinux(Path installPath) { final Shell sh = 
new Shell(); - if (sh.bashIgnoreExitCode("getent group elasticsearch").isSuccess() == false) { + if (sh.runIgnoreExitCode("getent group elasticsearch").isSuccess() == false) { if (isDPKG()) { - sh.bash("addgroup --system elasticsearch"); + sh.run("addgroup --system elasticsearch"); } else { - sh.bash("groupadd -r elasticsearch"); + sh.run("groupadd -r elasticsearch"); } } - if (sh.bashIgnoreExitCode("id elasticsearch").isSuccess() == false) { + if (sh.runIgnoreExitCode("id elasticsearch").isSuccess() == false) { if (isDPKG()) { - sh.bash("adduser " + + sh.run("adduser " + "--quiet " + "--system " + "--no-create-home " + @@ -122,7 +131,7 @@ public class Archives { "--shell /bin/false " + "elasticsearch"); } else { - sh.bash("useradd " + + sh.run("useradd " + "--system " + "-M " + "--gid elasticsearch " + @@ -131,20 +140,29 @@ public class Archives { "elasticsearch"); } } - sh.bash("chown -R elasticsearch:elasticsearch " + installPath); + sh.run("chown -R elasticsearch:elasticsearch " + installPath); + } + + private static void setupArchiveUsersWindows(Path installPath) { + // we want the installation to be owned as the vagrant user rather than the Administrators group + + final Shell sh = new Shell(); + sh.run( + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$install = Get-ChildItem -Path '" + installPath + "' -Recurse; " + + "$install += Get-Item -Path '" + installPath + "'; " + + "$install | ForEach-Object { " + + "$acl = Get-Acl $_.FullName; " + + "$acl.SetOwner($account); " + + "Set-Acl $_.FullName $acl " + + "}" + ); } public static void verifyArchiveInstallation(Installation installation, Distribution distribution) { - // on Windows for now we leave the installation owned by the vagrant user that the tests run as. Since the vagrant account - // is a local administrator, the files really end up being owned by the local administrators group. 
In the future we'll - // install and run elasticesearch with a role user on Windows - final String owner = Platforms.WINDOWS - ? "BUILTIN\\Administrators" - : "elasticsearch"; - - verifyOssInstallation(installation, distribution, owner); + verifyOssInstallation(installation, distribution, ARCHIVE_OWNER); if (distribution.flavor == Distribution.Flavor.DEFAULT) { - verifyDefaultInstallation(installation, distribution, owner); + verifyDefaultInstallation(installation, distribution, ARCHIVE_OWNER); } } @@ -160,38 +178,38 @@ public class Archives { assertThat(Files.exists(es.data), is(false)); assertThat(Files.exists(es.scripts), is(false)); - assertThat(es.home.resolve("bin"), file(Directory, owner, owner, p755)); - assertThat(es.home.resolve("lib"), file(Directory, owner, owner, p755)); - assertThat(Files.exists(es.config.resolve("elasticsearch.keystore")), is(false)); + assertThat(es.bin, file(Directory, owner, owner, p755)); + assertThat(es.lib, file(Directory, owner, owner, p755)); + assertThat(Files.exists(es.config("elasticsearch.keystore")), is(false)); Stream.of( - "bin/elasticsearch", - "bin/elasticsearch-env", - "bin/elasticsearch-keystore", - "bin/elasticsearch-plugin", - "bin/elasticsearch-translog" + "elasticsearch", + "elasticsearch-env", + "elasticsearch-keystore", + "elasticsearch-plugin", + "elasticsearch-translog" ).forEach(executable -> { - assertThat(es.home.resolve(executable), file(File, owner, owner, p755)); + assertThat(es.bin(executable), file(File, owner, owner, p755)); if (distribution.packaging == Distribution.Packaging.ZIP) { - assertThat(es.home.resolve(executable + ".bat"), file(File, owner)); + assertThat(es.bin(executable + ".bat"), file(File, owner)); } }); if (distribution.packaging == Distribution.Packaging.ZIP) { Stream.of( - "bin/elasticsearch-service.bat", - "bin/elasticsearch-service-mgr.exe", - "bin/elasticsearch-service-x64.exe" - ).forEach(executable -> assertThat(es.home.resolve(executable), file(File, owner))); + 
"elasticsearch-service.bat", + "elasticsearch-service-mgr.exe", + "elasticsearch-service-x64.exe" + ).forEach(executable -> assertThat(es.bin(executable), file(File, owner))); } Stream.of( "elasticsearch.yml", "jvm.options", "log4j2.properties" - ).forEach(config -> assertThat(es.config.resolve(config), file(File, owner, owner, p660))); + ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); Stream.of( "NOTICE.txt", @@ -203,30 +221,30 @@ public class Archives { private static void verifyDefaultInstallation(Installation es, Distribution distribution, String owner) { Stream.of( - "bin/elasticsearch-certgen", - "bin/elasticsearch-certutil", - "bin/elasticsearch-croneval", - "bin/elasticsearch-migrate", - "bin/elasticsearch-saml-metadata", - "bin/elasticsearch-setup-passwords", - "bin/elasticsearch-sql-cli", - "bin/elasticsearch-syskeygen", - "bin/elasticsearch-users", - "bin/x-pack-env", - "bin/x-pack-security-env", - "bin/x-pack-watcher-env" + "elasticsearch-certgen", + "elasticsearch-certutil", + "elasticsearch-croneval", + "elasticsearch-migrate", + "elasticsearch-saml-metadata", + "elasticsearch-setup-passwords", + "elasticsearch-sql-cli", + "elasticsearch-syskeygen", + "elasticsearch-users", + "x-pack-env", + "x-pack-security-env", + "x-pack-watcher-env" ).forEach(executable -> { - assertThat(es.home.resolve(executable), file(File, owner, owner, p755)); + assertThat(es.bin(executable), file(File, owner, owner, p755)); if (distribution.packaging == Distribution.Packaging.ZIP) { - assertThat(es.home.resolve(executable + ".bat"), file(File, owner)); + assertThat(es.bin(executable + ".bat"), file(File, owner)); } }); // at this time we only install the current version of archive distributions, but if that changes we'll need to pass // the version through here - assertThat(es.home.resolve("bin/elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, owner, owner, p755)); + assertThat(es.bin("elasticsearch-sql-cli-" + 
getCurrentVersion() + ".jar"), file(File, owner, owner, p755)); Stream.of( "users", @@ -234,7 +252,72 @@ public class Archives { "roles.yml", "role_mapping.yml", "log4j2.properties" - ).forEach(config -> assertThat(es.config.resolve(config), file(File, owner, owner, p660))); + ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); + } + + public static void runElasticsearch(Installation installation) throws IOException { + runElasticsearch(installation, new Shell()); + } + + public static void runElasticsearch(Installation installation, Shell sh) throws IOException { + final Path pidFile = installation.home.resolve("elasticsearch.pid"); + + final Installation.Executables bin = installation.executables(); + + Platforms.onLinux(() -> { + // If jayatana is installed then we try to use it. Elasticsearch should ignore it even when we try. + // If it doesn't ignore it then Elasticsearch will fail to start because of security errors. + // This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf + if (Files.exists(Paths.get("/usr/share/java/jayatanaag.jar"))) { + sh.getEnv().put("JAVA_TOOL_OPTIONS", "-javaagent:/usr/share/java/jayatanaag.jar"); + } + sh.run("sudo -E -u " + ARCHIVE_OWNER + " " + + bin.elasticsearch + " -d -p " + installation.home.resolve("elasticsearch.pid")); + }); + + Platforms.onWindows(() -> { + // this starts the server in the background. the -d flag is unsupported on windows + // these tests run as Administrator. we don't want to run the server as Administrator, so we provide the current user's + // username and password to the process which has the effect of starting it not as Administrator. 
+ sh.run( + "$password = ConvertTo-SecureString 'vagrant' -AsPlainText -Force; " + + "$processInfo = New-Object System.Diagnostics.ProcessStartInfo; " + + "$processInfo.FileName = '" + bin.elasticsearch + "'; " + + "$processInfo.Arguments = '-p " + installation.home.resolve("elasticsearch.pid") + "'; " + + "$processInfo.Username = 'vagrant'; " + + "$processInfo.Password = $password; " + + "$processInfo.RedirectStandardOutput = $true; " + + "$processInfo.RedirectStandardError = $true; " + + sh.env.entrySet().stream() + .map(entry -> "$processInfo.Environment.Add('" + entry.getKey() + "', '" + entry.getValue() + "'); ") + .collect(joining()) + + "$processInfo.UseShellExecute = $false; " + + "$process = New-Object System.Diagnostics.Process; " + + "$process.StartInfo = $processInfo; " + + "$process.Start() | Out-Null; " + + "$process.Id;" + ); + }); + + ServerUtils.waitForElasticsearch(); + + assertTrue(Files.exists(pidFile)); + String pid = slurp(pidFile).trim(); + assertThat(pid, not(isEmptyOrNullString())); + + Platforms.onLinux(() -> sh.run("ps " + pid)); + Platforms.onWindows(() -> sh.run("Get-Process -Id " + pid)); + } + + public static void stopElasticsearch(Installation installation) { + Path pidFile = installation.home.resolve("elasticsearch.pid"); + assertTrue(Files.exists(pidFile)); + String pid = slurp(pidFile).trim(); + assertThat(pid, not(isEmptyOrNullString())); + + final Shell sh = new Shell(); + Platforms.onLinux(() -> sh.run("kill -SIGTERM " + pid)); + Platforms.onWindows(() -> sh.run("Get-Process -Id " + pid + " | Stop-Process -Force")); } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java index a775a23a194..4ff2998988c 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java @@ -56,31 +56,28 @@ public class Cleanup { final Shell sh = new 
Shell(); // kill elasticsearch processes - if (Platforms.WINDOWS) { + Platforms.onLinux(() -> { + sh.runIgnoreExitCode("pkill -u elasticsearch"); + sh.runIgnoreExitCode("ps aux | grep -i 'org.elasticsearch.bootstrap.Elasticsearch' | awk {'print $2'} | xargs kill -9"); + }); + Platforms.onWindows(() -> { // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here - sh.powershellIgnoreExitCode( + sh.runIgnoreExitCode( "Get-WmiObject Win32_Process | " + "Where-Object { $_.CommandLine -Match 'org.elasticsearch.bootstrap.Elasticsearch' } | " + "ForEach-Object { $_.Terminate() }" ); + }); - } else { - - sh.bashIgnoreExitCode("pkill -u elasticsearch"); - sh.bashIgnoreExitCode("ps aux | grep -i 'org.elasticsearch.bootstrap.Elasticsearch' | awk {'print $2'} | xargs kill -9"); - - } - - if (Platforms.LINUX) { - purgePackagesLinux(); - } + Platforms.onLinux(Cleanup::purgePackagesLinux); // remove elasticsearch users - if (Platforms.LINUX) { - sh.bashIgnoreExitCode("userdel elasticsearch"); - sh.bashIgnoreExitCode("groupdel elasticsearch"); - } + Platforms.onLinux(() -> { + sh.runIgnoreExitCode("userdel elasticsearch"); + sh.runIgnoreExitCode("groupdel elasticsearch"); + }); + // when we run es as a role user on windows, add the equivalent here // delete files that may still exist lsGlob(getTempDir(), "elasticsearch*").forEach(FileUtils::rm); @@ -95,7 +92,7 @@ public class Cleanup { // disable elasticsearch service // todo add this for windows when adding tests for service intallation if (Platforms.LINUX && isSystemd()) { - sh.bash("systemctl unmask systemd-sysctl.service"); + sh.run("systemctl unmask systemd-sysctl.service"); } } @@ -103,19 +100,19 @@ public class Cleanup { final Shell sh = new Shell(); if (isRPM()) { - sh.bashIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss"); + sh.runIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss"); } if (isYUM()) { - sh.bashIgnoreExitCode("yum remove -y 
elasticsearch elasticsearch-oss"); + sh.runIgnoreExitCode("yum remove -y elasticsearch elasticsearch-oss"); } if (isDPKG()) { - sh.bashIgnoreExitCode("dpkg --purge elasticsearch elasticsearch-oss"); + sh.runIgnoreExitCode("dpkg --purge elasticsearch elasticsearch-oss"); } if (isAptGet()) { - sh.bashIgnoreExitCode("apt-get --quiet --yes purge elasticsearch elasticsearch-oss"); + sh.runIgnoreExitCode("apt-get --quiet --yes purge elasticsearch elasticsearch-oss"); } } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java index ad826675244..315dc6ffee1 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java @@ -21,11 +21,14 @@ package org.elasticsearch.packaging.util; import org.elasticsearch.core.internal.io.IOUtils; +import java.io.BufferedWriter; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileOwnerAttributeView; import java.nio.file.attribute.PosixFileAttributes; @@ -63,6 +66,22 @@ public class FileUtils { } } + public static Path mkdir(Path path) { + try { + return Files.createDirectories(path); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static Path cp(Path source, Path target) { + try { + return Files.copy(source, target); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + public static Path mv(Path source, Path target) { try { return Files.move(source, target); @@ -71,9 +90,19 @@ public class FileUtils { } } + public static void append(Path file, String text) { + try (BufferedWriter writer = Files.newBufferedWriter(file, 
StandardCharsets.UTF_8, + StandardOpenOption.CREATE, StandardOpenOption.APPEND)) { + + writer.write(text); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + public static String slurp(Path file) { try { - return String.join("\n", Files.readAllLines(file)); + return String.join("\n", Files.readAllLines(file, StandardCharsets.UTF_8)); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index d231762d062..68da440400a 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -27,6 +27,8 @@ import java.nio.file.Path; public class Installation { public final Path home; + public final Path bin; // this isn't a first-class installation feature but we include it for convenience + public final Path lib; // same public final Path config; public final Path data; public final Path logs; @@ -36,6 +38,9 @@ public class Installation { public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path scripts) { this.home = home; + this.bin = home.resolve("bin"); + this.lib = home.resolve("lib"); + this.config = config; this.data = data; this.logs = logs; @@ -55,4 +60,31 @@ public class Installation { home.resolve("scripts") ); } + + public Path bin(String executableName) { + return bin.resolve(executableName); + } + + public Path config(String configFileName) { + return config.resolve(configFileName); + } + + public Executables executables() { + return new Executables(); + } + + public class Executables { + + public final Path elasticsearch = platformExecutable("elasticsearch"); + public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin"); + public final Path elasticsearchKeystore = 
platformExecutable("elasticsearch-keystore"); + public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog"); + + private Path platformExecutable(String name) { + final String platformExecutableName = Platforms.WINDOWS + ? name + ".bat" + : name; + return bin(platformExecutableName); + } + } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java index 202c025ae8a..5ffbc318200 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java @@ -28,41 +28,61 @@ public class Platforms { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which dpkg").isSuccess(); + return new Shell().runIgnoreExitCode("which dpkg").isSuccess(); } public static boolean isAptGet() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which apt-get").isSuccess(); + return new Shell().runIgnoreExitCode("which apt-get").isSuccess(); } public static boolean isRPM() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which rpm").isSuccess(); + return new Shell().runIgnoreExitCode("which rpm").isSuccess(); } public static boolean isYUM() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which yum").isSuccess(); + return new Shell().runIgnoreExitCode("which yum").isSuccess(); } public static boolean isSystemd() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which systemctl").isSuccess(); + return new Shell().runIgnoreExitCode("which systemctl").isSuccess(); } public static boolean isSysVInit() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which service").isSuccess(); + return new Shell().runIgnoreExitCode("which service").isSuccess(); + } + + public static void onWindows(PlatformAction action) { + if (WINDOWS) { + 
action.run(); + } + } + + public static void onLinux(PlatformAction action) { + if (LINUX) { + action.run(); + } + } + + /** + * Essentially a Runnable, but we make the distinction so it's more clear that these are synchronous + */ + @FunctionalInterface + public interface PlatformAction { + void run(); } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java new file mode 100644 index 00000000000..ff006a34e68 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.util; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.http.HttpResponse; +import org.apache.http.client.fluent.Request; +import org.apache.http.conn.HttpHostConnectException; +import org.apache.http.entity.ContentType; +import org.apache.http.util.EntityUtils; + +import java.io.IOException; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; + +public class ServerUtils { + + private static final Log LOG = LogFactory.getLog(ServerUtils.class); + + private static final long waitTime = TimeUnit.SECONDS.toMillis(60); + private static final long timeoutLength = TimeUnit.SECONDS.toMillis(10); + + public static void waitForElasticsearch() throws IOException { + waitForElasticsearch("green", null); + } + + public static void waitForElasticsearch(String status, String index) throws IOException { + + Objects.requireNonNull(status); + + // we loop here rather than letting httpclient handle retries so we can measure the entire waiting time + final long startTime = System.currentTimeMillis(); + long timeElapsed = 0; + boolean started = false; + while (started == false && timeElapsed < waitTime) { + try { + + final HttpResponse response = Request.Get("http://localhost:9200/_cluster/health") + .connectTimeout((int) timeoutLength) + .socketTimeout((int) timeoutLength) + .execute() + .returnResponse(); + + if (response.getStatusLine().getStatusCode() >= 300) { + final String statusLine = response.getStatusLine().toString(); + final String body = EntityUtils.toString(response.getEntity()); + throw new RuntimeException("Connecting to elasticsearch cluster health API failed:\n" + statusLine+ "\n" + body); + } + + started = true; + + } catch (HttpHostConnectException e) { + // we want to retry if the connection is refused + LOG.info("Got connection refused when 
waiting for cluster health", e); + } + + timeElapsed = System.currentTimeMillis() - startTime; + } + + if (started == false) { + throw new RuntimeException("Elasticsearch did not start"); + } + + final String url; + if (index == null) { + url = "http://localhost:9200/_cluster/health?wait_for_status=" + status + "&timeout=60s&pretty"; + } else { + url = "http://localhost:9200/_cluster/health/" + index + "?wait_for_status=" + status + "&timeout=60s&pretty"; + + } + + final String body = makeRequest(Request.Get(url)); + assertThat("cluster health response must contain desired status", body, containsString(status)); + } + + public static void runElasticsearchTests() throws IOException { + makeRequest( + Request.Post("http://localhost:9200/library/book/1?refresh=true&pretty") + .bodyString("{ \"title\": \"Book #1\", \"pages\": 123 }", ContentType.APPLICATION_JSON)); + + makeRequest( + Request.Post("http://localhost:9200/library/book/2?refresh=true&pretty") + .bodyString("{ \"title\": \"Book #2\", \"pages\": 456 }", ContentType.APPLICATION_JSON)); + + String count = makeRequest(Request.Get("http://localhost:9200/_count?pretty")); + assertThat(count, containsString("\"count\" : 2")); + + makeRequest(Request.Delete("http://localhost:9200/_all")); + } + + public static String makeRequest(Request request) throws IOException { + final HttpResponse response = request.execute().returnResponse(); + final String body = EntityUtils.toString(response.getEntity()); + + if (response.getStatusLine().getStatusCode() >= 300) { + throw new RuntimeException("Request failed:\n" + response.getStatusLine().toString() + "\n" + body); + } + + return body; + + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java index 9a908e2d680..5853bc2daa1 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java +++ 
b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java @@ -58,58 +58,50 @@ public class Shell { this.workingDirectory = workingDirectory; } - /** - * Runs a script in a bash shell, throwing an exception if its exit code is nonzero - */ - public Result bash(String script) { - return run(bashCommand(script)); + public Map getEnv() { + return env; } /** - * Runs a script in a bash shell + * Run the provided string as a shell script. On Linux the {@code bash -c [script]} syntax will be used, and on Windows + * the {@code powershell.exe -Command [script]} syntax will be used. Throws an exception if the exit code of the script is nonzero */ - public Result bashIgnoreExitCode(String script) { - return runIgnoreExitCode(bashCommand(script)); + public Result run(String script) { + return runScript(getScriptCommand(script)); + } + + /** + * Same as {@link #run(String)}, but does not throw an exception if the exit code of the script is nonzero + */ + public Result runIgnoreExitCode(String script) { + return runScriptIgnoreExitCode(getScriptCommand(script)); + } + + private String[] getScriptCommand(String script) { + if (Platforms.WINDOWS) { + return powershellCommand(script); + } else { + return bashCommand(script); + } } private static String[] bashCommand(String script) { return Stream.concat(Stream.of("bash", "-c"), Stream.of(script)).toArray(String[]::new); } - /** - * Runs a script in a powershell shell, throwing an exception if its exit code is nonzero - */ - public Result powershell(String script) { - return run(powershellCommand(script)); - } - - /** - * Runs a script in a powershell shell - */ - public Result powershellIgnoreExitCode(String script) { - return runIgnoreExitCode(powershellCommand(script)); - } - private static String[] powershellCommand(String script) { return Stream.concat(Stream.of("powershell.exe", "-Command"), Stream.of(script)).toArray(String[]::new); } - /** - * Runs an executable file, passing all elements of {@code command} 
after the first as arguments. Throws an exception if the process' - * exit code is nonzero - */ - private Result run(String[] command) { - Result result = runIgnoreExitCode(command); + private Result runScript(String[] command) { + Result result = runScriptIgnoreExitCode(command); if (result.isSuccess() == false) { throw new RuntimeException("Command was not successful: [" + String.join(" ", command) + "] result: " + result.toString()); } return result; } - /** - * Runs an executable file, passing all elements of {@code command} after the first as arguments - */ - private Result runIgnoreExitCode(String[] command) { + private Result runScriptIgnoreExitCode(String[] command) { ProcessBuilder builder = new ProcessBuilder(); builder.command(command); diff --git a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats deleted file mode 100644 index 3607e4ab45a..00000000000 --- a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env bats - -# This file is used to test the tar gz package. - -# WARNING: This testing file must be executed as root and can -# dramatically change your system. It should only be executed -# in a throw-away VM like those made by the Vagrantfile at -# the root of the Elasticsearch source code. This should -# cause the script to fail if it is executed any other way: -[ -f /etc/is_vagrant_vm ] || { - >&2 echo "must be run on a vagrant VM" - exit 1 -} - -# The test case can be executed with the Bash Automated -# Testing System tool available at https://github.com/sstephenson/bats -# Thanks to Sam Stephenson! - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. 
Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Load test utilities -load $BATS_UTILS/utils.bash -load $BATS_UTILS/tar.bash -load $BATS_UTILS/plugins.bash - -setup() { - skip_not_tar_gz - export ESHOME=/tmp/elasticsearch - export_elasticsearch_paths -} - -################################## -# Install TAR GZ package -################################## -@test "[TAR] tar command is available" { - # Cleans everything for the 1st execution - clean_before_test - run tar --version - [ "$status" -eq 0 ] -} - -@test "[TAR] archive is available" { - local version=$(cat version) - count=$(find . -type f -name "${PACKAGE_NAME}-${version}.tar.gz" | wc -l) - [ "$count" -eq 1 ] -} - -@test "[TAR] archive is not installed" { - count=$(find /tmp -type d -name 'elasticsearch*' | wc -l) - [ "$count" -eq 0 ] -} - -@test "[TAR] install archive" { - # Install the archive - install_archive - set_debug_logging - - count=$(find /tmp -type d -name 'elasticsearch*' | wc -l) - [ "$count" -eq 1 ] - - # Its simpler to check that the install was correct in this test rather - # than in another test because install_archive sets a number of path - # variables that verify_archive_installation reads. To separate this into - # another test you'd have to recreate the variables. 
- verify_archive_installation -} - -@test "[TAR] verify elasticsearch-plugin list runs without any plugins installed" { - # previously this would fail because the archive installations did - # not create an empty plugins directory - local plugins_list=`$ESHOME/bin/elasticsearch-plugin list` - [[ -z $plugins_list ]] -} - -@test "[TAR] elasticsearch fails if java executable is not found" { - local JAVA=$(which java) - - sudo chmod -x $JAVA - run "$ESHOME/bin/elasticsearch" - sudo chmod +x $JAVA - - [ "$status" -eq 1 ] - local expected="could not find java; set JAVA_HOME or ensure java is in PATH" - [[ "$output" == *"$expected"* ]] || { - echo "Expected error message [$expected] but found: $output" - false - } -} - -@test "[TAR] test creating elasticearch.keystore" { - sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" create - assert_file "$ESCONFIG/elasticsearch.keystore" f elasticsearch elasticsearch 660 - sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" - # cleanup for the next test - rm -rf "$ESCONFIG/elasticsearch.keystore" -} - -################################## -# Check that Elasticsearch is working -################################## -@test "[TAR] test elasticsearch" { - start_elasticsearch_service - run_elasticsearch_tests - stop_elasticsearch_service -} - -@test "[TAR] test auto-creating elasticearch.keystore" { - # a keystore should automatically be created after the service is started - assert_file "$ESCONFIG/elasticsearch.keystore" f elasticsearch elasticsearch 660 - # the keystore should be seeded - sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" -} - -@test "[TAR] start Elasticsearch with custom JVM options" { - local es_java_opts=$ES_JAVA_OPTS - local es_path_conf=$ES_PATH_CONF - local temp=`mktemp -d` - cp "$ESCONFIG"/elasticsearch.yml "$temp" - cp "$ESCONFIG"/log4j2.properties "$temp" - touch "$temp/jvm.options" - chown -R elasticsearch:elasticsearch 
"$temp" - echo "-Xms512m" >> "$temp/jvm.options" - echo "-Xmx512m" >> "$temp/jvm.options" - # we have to disable Log4j from using JMX lest it will hit a security - # manager exception before we have configured logging; this will fail - # startup since we detect usages of logging before it is configured - echo "-Dlog4j2.disable.jmx=true" >> "$temp/jvm.options" - export ES_PATH_CONF="$temp" - export ES_JAVA_OPTS="-XX:-UseCompressedOops" - start_elasticsearch_service - curl -s -XGET localhost:9200/_nodes | fgrep '"heap_init_in_bytes":536870912' - curl -s -XGET localhost:9200/_nodes | fgrep '"using_compressed_ordinary_object_pointers":"false"' - stop_elasticsearch_service - export ES_PATH_CONF=$es_path_conf - export ES_JAVA_OPTS=$es_java_opts -} - -@test "[TAR] GC logs exist" { - start_elasticsearch_service - assert_file_exist $ESHOME/logs/gc.log.0.current - stop_elasticsearch_service -} - -@test "[TAR] relative ES_PATH_CONF" { - local es_path_conf=$ES_PATH_CONF - local temp=`mktemp -d` - mkdir "$temp"/config - cp "$ESCONFIG"/elasticsearch.yml "$temp"/config - cp "$ESCONFIG"/log4j2.properties "$temp"/config - cp "$ESCONFIG/jvm.options" "$temp/config" - chown -R elasticsearch:elasticsearch "$temp" - echo "node.name: relative" >> "$temp"/config/elasticsearch.yml - cd "$temp" - export ES_PATH_CONF=config - start_elasticsearch_service - curl -s -XGET localhost:9200/_nodes | fgrep '"name":"relative"' - stop_elasticsearch_service - export ES_PATH_CONF=$es_path_conf -} - -@test "[TAR] remove tar" { - rm -rf "/tmp/elasticsearch" -} From e85bb734cfc6b4b6d52f9b4e954976866d8c04cc Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Tue, 10 Jul 2018 11:17:21 -0500 Subject: [PATCH 02/10] Docs: add security delete role to api call table (#31907) --- x-pack/docs/en/rest-api/security/roles.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/docs/en/rest-api/security/roles.asciidoc b/x-pack/docs/en/rest-api/security/roles.asciidoc index d82c2600062..b7b2260a0e4 100644 
--- a/x-pack/docs/en/rest-api/security/roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/roles.asciidoc @@ -11,6 +11,8 @@ realm. `GET /_xpack/security/role/` + +`DELETE /_xpack/security/role/` + + `POST /_xpack/security/role//_clear_cache` + `POST /_xpack/security/role/` + From dcbb1154bf6586910721d2befb4e362e7b5f8f90 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 10 Jul 2018 13:01:28 -0400 Subject: [PATCH 03/10] HLRest: Move xPackInfo() to xPack().info() (#31905) Originally I put the X-Pack info object into the top level rest client object. I did that because we thought we'd like to squash `xpack` from the name of the X-Pack APIs now that it is part of the default distribution. We still kind of want to do that, but at least for now we feel like it is better to keep the high level rest client aligned with the other language clients like C# and Python. This shifts the X-Pack info API to align with its json spec file. Relates to #31870 --- .../client/RestHighLevelClient.java | 44 ++++------- .../org/elasticsearch/client/XPackClient.java | 73 +++++++++++++++++++ .../elasticsearch/client/PingAndInfoIT.java | 6 +- .../MiscellaneousDocumentationIT.java | 4 +- .../org/elasticsearch/license/License.java | 2 +- .../action/TransportXPackInfoActionTests.java | 2 +- .../ml/datafeed/MlRemoteLicenseChecker.java | 2 +- .../datafeed/MlRemoteLicenseCheckerTests.java | 2 +- .../protocol/xpack/XPackInfoResponse.java | 2 +- .../{ => xpack}/license/LicenseStatus.java | 2 +- .../{ => xpack}/license/package-info.java | 2 +- .../{ => xpack}/security/package-info.java | 2 +- .../{ => xpack}/watcher/package-info.java | 2 +- .../xpack/XPackInfoResponseTests.java | 2 +- .../license/LicenseStatusTests.java | 2 +- 15 files changed, 103 insertions(+), 46 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java rename x-pack/protocol/src/main/java/org/elasticsearch/protocol/{ => xpack}/license/LicenseStatus.java (97%) rename 
x-pack/protocol/src/main/java/org/elasticsearch/protocol/{ => xpack}/license/package-info.java (94%) rename x-pack/protocol/src/main/java/org/elasticsearch/protocol/{ => xpack}/security/package-info.java (94%) rename x-pack/protocol/src/main/java/org/elasticsearch/protocol/{ => xpack}/watcher/package-info.java (94%) rename x-pack/protocol/src/test/java/org/elasticsearch/protocol/{ => xpack}/license/LicenseStatusTests.java (95%) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index e4237795a89..df674ea898e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -66,8 +66,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.index.rankeval.RankEvalResponse; import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.protocol.xpack.XPackInfoRequest; -import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; @@ -204,6 +202,7 @@ public class RestHighLevelClient implements Closeable { private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); private final TasksClient tasksClient = new TasksClient(this); + private final XPackClient xPackClient = new XPackClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -294,6 +293,19 @@ public class RestHighLevelClient implements Closeable { return tasksClient; } + /** + * A wrapper for the {@link RestHighLevelClient} that provides 
methods for + * accessing the Elastic Licensed X-Pack APIs that are shipped with the + * default distribution of Elasticsearch. All of these APIs will 404 if run + * against the OSS distribution of Elasticsearch. + *

+ * See the + * X-Pack APIs on elastic.co for more information. + */ + public final XPackClient xpack() { + return xPackClient; + } + /** * Executes a bulk request using the Bulk API. * See Bulk API on elastic.co @@ -794,34 +806,6 @@ public class RestHighLevelClient implements Closeable { FieldCapabilitiesResponse::fromXContent, listener, emptySet()); } - /** - * Fetch information about X-Pack from the cluster if it is installed. - * See - * the docs for more. - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public XPackInfoResponse xPackInfo(XPackInfoRequest request, RequestOptions options) throws IOException { - return performRequestAndParseEntity(request, RequestConverters::xPackInfo, options, - XPackInfoResponse::fromXContent, emptySet()); - } - - /** - * Fetch information about X-Pack from the cluster if it is installed. - * See - * the docs for more. - * @param request the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - */ - public void xPackInfoAsync(XPackInfoRequest request, RequestOptions options, - ActionListener listener) { - performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, - XPackInfoResponse::fromXContent, listener, emptySet()); - } - protected final Resp performRequestAndParseEntity(Req request, CheckedFunction requestConverter, RequestOptions options, diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java new file mode 100644 index 00000000000..5942bfa35a4 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; + +import java.io.IOException; + +import static java.util.Collections.emptySet; + +/** + * A wrapper for the {@link RestHighLevelClient} that provides methods for + * accessing the Elastic Licensed X-Pack APIs that are shipped with the + * default distribution of Elasticsearch. All of these APIs will 404 if run + * against the OSS distribution of Elasticsearch. + *

+ * See the + * X-Pack APIs on elastic.co for more information. + */ +public final class XPackClient { + private final RestHighLevelClient restHighLevelClient; + + XPackClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Fetch information about X-Pack from the cluster. + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackInfo, options, + XPackInfoResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously fetch information about X-Pack from the cluster. + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void infoAsync(XPackInfoRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, + XPackInfoResponse::fromXContent, listener, emptySet()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java index 04bac628f49..1a50187f5df 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java @@ -21,10 +21,10 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpGet; import org.elasticsearch.action.main.MainResponse; -import org.elasticsearch.protocol.license.LicenseStatus; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import java.io.IOException; import java.util.EnumSet; @@ -60,7 +60,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase { XPackInfoRequest request = new XPackInfoRequest(); request.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class)); request.setVerbose(true); - XPackInfoResponse info = highLevelClient().xPackInfo(request, RequestOptions.DEFAULT); + XPackInfoResponse info = highLevelClient().xpack().info(request, RequestOptions.DEFAULT); MainResponse mainResponse = highLevelClient().info(RequestOptions.DEFAULT); @@ -89,7 +89,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase { } public void testXPackInfoEmptyRequest() throws IOException { - 
XPackInfoResponse info = highLevelClient().xPackInfo(new XPackInfoRequest(), RequestOptions.DEFAULT); + XPackInfoResponse info = highLevelClient().xpack().info(new XPackInfoRequest(), RequestOptions.DEFAULT); /* * The default in the transport client is non-verbose and returning diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index f760426c514..75c14097c45 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -86,7 +86,7 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase XPackInfoRequest.Category.BUILD, XPackInfoRequest.Category.LICENSE, XPackInfoRequest.Category.FEATURES)); - XPackInfoResponse response = client.xPackInfo(request, RequestOptions.DEFAULT); + XPackInfoResponse response = client.xpack().info(request, RequestOptions.DEFAULT); //end::x-pack-info-execute //tag::x-pack-info-response @@ -122,7 +122,7 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase listener = new LatchedActionListener<>(listener, latch); // tag::x-pack-info-execute-async - client.xPackInfoAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.xpack().infoAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::x-pack-info-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index fba5f06ba90..b2130ac9f4b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -29,7 
+29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.protocol.license.LicenseStatus; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; /** * Data structure for license. Use {@link Builder} to build a license. diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java index 45d1d7cc749..60050bd9311 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java @@ -10,10 +10,10 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.protocol.license.LicenseStatus; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java index 791be9b7cb4..b0eeed2c800 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java @@ -13,9 +13,9 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.License; -import org.elasticsearch.protocol.license.LicenseStatus; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.action.XPackInfoAction; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java index c10bf1081dd..81e4c75cfad 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.protocol.license.LicenseStatus; import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.action.XPackInfoAction; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index d38174e03f3..3b9032f0921 100644 --- 
a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.protocol.license.LicenseStatus; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/license/LicenseStatus.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java similarity index 97% rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/license/LicenseStatus.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java index f0f49351ab1..ea3e4f8a896 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/license/LicenseStatus.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.protocol.license; +package org.elasticsearch.protocol.xpack.license; import java.io.IOException; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/license/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java similarity index 94% rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/license/package-info.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java index f671b280d84..ca859f29e44 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/license/package-info.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java @@ -21,4 +21,4 @@ * Request and Response objects for the default distribution's License * APIs. */ -package org.elasticsearch.protocol.license; +package org.elasticsearch.protocol.xpack.license; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/security/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java similarity index 94% rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/security/package-info.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java index 2a0ed5bc52b..216990d9f0e 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/security/package-info.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java @@ -21,4 +21,4 @@ * Request and Response objects for the default distribution's Security * APIs. 
*/ -package org.elasticsearch.protocol.security; +package org.elasticsearch.protocol.xpack.security; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/watcher/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java similarity index 94% rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/watcher/package-info.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java index 43d7dd29ec9..d34fd598ab1 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/watcher/package-info.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java @@ -21,4 +21,4 @@ * Request and Response objects for the default distribution's Watcher * APIs. */ -package org.elasticsearch.protocol.watcher; +package org.elasticsearch.protocol.xpack.watcher; diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java index 61a936b0df8..820b8200ed9 100644 --- a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java @@ -21,11 +21,11 @@ package org.elasticsearch.protocol.xpack; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.protocol.license.LicenseStatus; import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import 
org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.util.HashMap; diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/license/LicenseStatusTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java similarity index 95% rename from x-pack/protocol/src/test/java/org/elasticsearch/protocol/license/LicenseStatusTests.java rename to x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java index 2c87645157c..c256e7562f7 100644 --- a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/license/LicenseStatusTests.java +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.protocol.license; +package org.elasticsearch.protocol.xpack.license; import java.io.IOException; From 3189ef49a5d4de87ec906764e7d703445df0c0dc Mon Sep 17 00:00:00 2001 From: Shaunak Kashyap Date: Tue, 10 Jul 2018 10:06:41 -0700 Subject: [PATCH 04/10] [X-Pack] Beats centralized management: security role + licensing (#30520) * Adding Beats x-pack plugin + index templates * Adding built-in roles for Beats central management * Fixing typo * Refactoring: extract common code into method * More refactoring for more code reuse * Use a single index for Beats management * Rename "fragment" to "block" * Adding configuration block type * Expand kibana_system role to include Beats management index privileges * Fixing syntax * Adding test * Adding asserting for reserved role * Fixing privileges * Updating template * Removing beats plugin * Fixing tests * Fixing role variable name * Fixing assertions * Switching to preferred syntax for boolean false checks * Making class final * Making variables final * Updating Basic license message to be more accurate --- .../license/XPackLicenseState.java | 48 ++++++++++++------- 
.../xpack/core/XPackClientPlugin.java | 3 ++ .../elasticsearch/xpack/core/XPackField.java | 2 + .../xpack/core/XPackSettings.java | 4 ++ .../core/beats/BeatsFeatureSetUsage.java | 24 ++++++++++ .../authz/store/ReservedRolesStore.java | 10 +++- .../authz/store/ReservedRolesStoreTests.java | 48 +++++++++++++++++++ 7 files changed, 120 insertions(+), 19 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index e58c5eda063..ea30e30ae3c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -52,6 +52,9 @@ public class XPackLicenseState { messages.put(XPackField.LOGSTASH, new String[] { "Logstash will continue to poll centrally-managed pipelines" }); + messages.put(XPackField.BEATS, new String[] { + "Beats will continue to poll centrally-managed configuration" + }); messages.put(XPackField.DEPRECATION, new String[] { "Deprecation APIs are disabled" }); @@ -81,6 +84,7 @@ public class XPackLicenseState { messages.put(XPackField.GRAPH, XPackLicenseState::graphAcknowledgementMessages); messages.put(XPackField.MACHINE_LEARNING, XPackLicenseState::machineLearningAcknowledgementMessages); messages.put(XPackField.LOGSTASH, XPackLicenseState::logstashAcknowledgementMessages); + messages.put(XPackField.BEATS, XPackLicenseState::beatsAcknowledgementMessages); messages.put(XPackField.SQL, XPackLicenseState::sqlAcknowledgementMessages); ACKNOWLEDGMENT_MESSAGES = Collections.unmodifiableMap(messages); } @@ -205,12 +209,19 @@ public class XPackLicenseState { private static String[] logstashAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { switch (newMode) { case BASIC: - switch 
(currentMode) { - case TRIAL: - case STANDARD: - case GOLD: - case PLATINUM: - return new String[] { "Logstash will no longer poll for centrally-managed pipelines" }; + if (isBasic(currentMode) == false) { + return new String[] { "Logstash will no longer poll for centrally-managed pipelines" }; + } + break; + } + return Strings.EMPTY_ARRAY; + } + + private static String[] beatsAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { + switch (newMode) { + case BASIC: + if (isBasic(currentMode) == false) { + return new String[] { "Beats will no longer be able to use centrally-managed configuration" }; } break; } @@ -232,6 +243,10 @@ public class XPackLicenseState { return Strings.EMPTY_ARRAY; } + private static boolean isBasic(OperationMode mode) { + return mode == OperationMode.BASIC; + } + /** A wrapper for the license mode and state, to allow atomically swapping. */ private static class Status { @@ -500,20 +515,17 @@ public class XPackLicenseState { */ public boolean isLogstashAllowed() { Status localStatus = status; + return localStatus.active && (isBasic(localStatus.mode) == false); + } - if (localStatus.active == false) { - return false; - } + /** + * Beats is allowed as long as there is an active license of type TRIAL, STANDARD, GOLD or PLATINUM + * @return {@code true} as long as there is a valid license + */ + public boolean isBeatsAllowed() { + Status localStatus = status; + return localStatus.active && (isBasic(localStatus.mode) == false); - switch (localStatus.mode) { - case TRIAL: - case GOLD: - case PLATINUM: - case STANDARD: - return true; - default: - return false; - } } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index d3ddac32899..aa60456d805 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; import org.elasticsearch.xpack.core.graph.GraphFeatureSetUsage; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.logstash.LogstashFeatureSetUsage; +import org.elasticsearch.xpack.core.beats.BeatsFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; @@ -320,6 +321,8 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.GRAPH, GraphFeatureSetUsage::new), // logstash new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.LOGSTASH, LogstashFeatureSetUsage::new), + // beats + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.BEATS, BeatsFeatureSetUsage::new), // ML - Custom metadata new NamedWriteableRegistry.Entry(MetaData.Custom.class, "ml", MlMetadata::new), new NamedWriteableRegistry.Entry(NamedDiff.class, "ml", MlMetadata.MlMetadataDiff::new), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index dd482c4e22d..70eb047c8ed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -19,6 +19,8 @@ public final class XPackField { public static final String MACHINE_LEARNING = "ml"; /** Name constant for the Logstash feature. */ public static final String LOGSTASH = "logstash"; + /** Name constant for the Beats feature. */ + public static final String BEATS = "beats"; /** Name constant for the Deprecation API feature. 
*/ public static final String DEPRECATION = "deprecation"; /** Name constant for the upgrade feature. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index b0d0c4f2c2e..8c4c5e2c760 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -67,6 +67,10 @@ public class XPackSettings { public static final Setting LOGSTASH_ENABLED = Setting.boolSetting("xpack.logstash.enabled", true, Setting.Property.NodeScope); + /** Setting for enabling or disabling Beats extensions. Defaults to true. */ + public static final Setting BEATS_ENABLED = Setting.boolSetting("xpack.beats.enabled", true, + Setting.Property.NodeScope); + /** Setting for enabling or disabling TLS. Defaults to false. */ public static final Setting TRANSPORT_SSL_ENABLED = Setting.boolSetting("xpack.security.transport.ssl.enabled", false, Property.NodeScope); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java new file mode 100644 index 00000000000..1702bf3869d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.beats; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; + +import java.io.IOException; + +public final class BeatsFeatureSetUsage extends XPackFeatureSet.Usage { + + public BeatsFeatureSetUsage(StreamInput in) throws IOException { + super(in); + } + + public BeatsFeatureSetUsage(boolean available, boolean enabled) { + super(XPackField.BEATS, available, enabled); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 059c4dfbb65..aeb448faa9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -80,11 +80,19 @@ public class ReservedRolesStore { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*", ".reporting-*").privileges("all").build(), RoleDescriptor.IndicesPrivileges.builder() - .indices(".monitoring-*").privileges("read", "read_cross_cluster").build() + .indices(".monitoring-*").privileges("read", "read_cross_cluster").build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices(".management-beats").privileges("create_index", "read", "write").build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("logstash_system", new RoleDescriptor("logstash_system", new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("beats_admin", new RoleDescriptor("beats_admin", + null, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices(".management-beats").privileges("all").build() + }, + null, 
MetadataUtils.DEFAULT_RESERVED_METADATA)) .put(UsernamesField.BEATS_ROLE, new RoleDescriptor(UsernamesField.BEATS_ROLE, new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("machine_learning_user", new RoleDescriptor("machine_learning_user", new String[] { "monitor_ml" }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index b25f3f374b3..85d2bc16dd0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -132,6 +132,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(ReservedRolesStore.isReserved("watcher_user"), is(true)); assertThat(ReservedRolesStore.isReserved("watcher_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_dashboard_only_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("beats_admin"), is(true)); assertThat(ReservedRolesStore.isReserved(XPackUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(LogstashSystemUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(BeatsSystemUser.ROLE_NAME), is(true)); @@ -220,6 +221,20 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(index), is(true)); }); + + // Beats management index + final String index = ".management-beats"; + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); 
+ assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(index), is(false)); } public void testKibanaUserRole() { @@ -478,6 +493,39 @@ public class ReservedRolesStoreTests extends ESTestCase { is(false)); } + public void testBeatsAdminRole() { + final RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("beats_admin"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + final Role beatsAdminRole = Role.builder(roleDescriptor, null).build(); + assertThat(beatsAdminRole.cluster().check(ClusterHealthAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterStateAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterStatsAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + 
assertThat(beatsAdminRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); + + assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:foo").test(randomAlphaOfLengthBetween(8, 24)), + is(false)); + + final String index = ".management-beats"; + logger.info("index name [{}]", index); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + } + public void testBeatsSystemRole() { RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor(BeatsSystemUser.ROLE_NAME); assertNotNull(roleDescriptor); From 88c270d8443c573cf593a3d7d429be5bdad14001 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Tue, 10 Jul 2018 23:11:50 +0200 Subject: [PATCH 05/10] Added lenient flag for synonym token filter (#31484) * Added lenient flag for synonym-tokenfilter. 
Relates to #30968 * added docs for synonym-graph-tokenfilter -- Also made lenient final -- changed from !lenient to lenient == false * Changes after review (1) -- Renamed to ElasticsearchSynonymParser -- Added explanation for ElasticsearchSynonymParser::add method -- Changed ElasticsearchSynonymParser::logger instance to static * Added lenient option for WordnetSynonymParser -- also added more documentation * Added additional documentation * Improved documentation --- .../synonym-graph-tokenfilter.asciidoc | 44 +++++++++- .../tokenfilters/synonym-tokenfilter.asciidoc | 47 +++++++++- .../index/analysis/ESSolrSynonymParser.java | 68 ++++++++++++++ .../analysis/ESWordnetSynonymParser.java | 68 ++++++++++++++ .../SynonymGraphTokenFilterFactory.java | 10 +-- .../analysis/SynonymTokenFilterFactory.java | 12 +-- .../analysis/ESSolrSynonymParserTests.java | 78 ++++++++++++++++ .../analysis/ESWordnetSynonymParserTests.java | 88 +++++++++++++++++++ 8 files changed, 400 insertions(+), 15 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java create mode 100644 server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java create mode 100644 server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index a8f2108b57a..8be5647e10f 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -50,7 +50,49 @@ PUT /test_index The above configures a `search_synonyms` filter, with a path of `analysis/synonym.txt` (relative to the `config` location). The `search_synonyms` analyzer is then configured with the filter. 
-Additional settings are: `expand` (defaults to `true`). + +Additional settings are: + +* `expand` (defaults to `true`). +* `lenient` (defaults to `false`). If `true` ignores exceptions while parsing the synonym configuration. It is important +to note that only those synonym rules which cannot get parsed are ignored. For instance consider the following request: + +[source,js] +-------------------------------------------------- +PUT /test_index +{ + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "synonym" : { + "tokenizer" : "standard", + "filter" : ["my_stop", "synonym_graph"] + } + }, + "filter" : { + "my_stop": { + "type" : "stop", + "stopwords": ["bar"] + }, + "synonym_graph" : { + "type" : "synonym_graph", + "lenient": true, + "synonyms" : ["foo, bar => baz"] + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +With the above request the word `bar` gets skipped but a mapping `foo => baz` is still added. However, if the mapping +being added was "foo, baz => bar" nothing would get added to the synonym list. This is because the target word for the +mapping is itself eliminated because it was a stop word. Similarly, if the mapping was "bar, foo, baz" and `expand` was +set to `false` no mapping would get added as when `expand=false` the target mapping is the first word. However, if +`expand=true` then the mappings added would be equivalent to `foo, baz => foo, baz` i.e, all mappings other than the +stop word. 
[float] ==== `tokenizer` and `ignore_case` are deprecated diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 68d3f444b2d..5a6a84493ab 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -33,12 +33,55 @@ PUT /test_index The above configures a `synonym` filter, with a path of `analysis/synonym.txt` (relative to the `config` location). The -`synonym` analyzer is then configured with the filter. Additional -settings is: `expand` (defaults to `true`). +`synonym` analyzer is then configured with the filter. This filter tokenize synonyms with whatever tokenizer and token filters appear before it in the chain. +Additional settings are: + +* `expand` (defaults to `true`). +* `lenient` (defaults to `false`). If `true` ignores exceptions while parsing the synonym configuration. It is important +to note that only those synonym rules which cannot get parsed are ignored. For instance consider the following request: + +[source,js] +-------------------------------------------------- +PUT /test_index +{ + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "synonym" : { + "tokenizer" : "standard", + "filter" : ["my_stop", "synonym"] + } + }, + "filter" : { + "my_stop": { + "type" : "stop", + "stopwords": ["bar"] + }, + "synonym" : { + "type" : "synonym", + "lenient": true, + "synonyms" : ["foo, bar => baz"] + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +With the above request the word `bar` gets skipped but a mapping `foo => baz` is still added. However, if the mapping +being added was "foo, baz => bar" nothing would get added to the synonym list. This is because the target word for the +mapping is itself eliminated because it was a stop word. 
Similarly, if the mapping was "bar, foo, baz" and `expand` was +set to `false` no mapping would get added as when `expand=false` the target mapping is the first word. However, if +`expand=true` then the mappings added would be equivalent to `foo, baz => foo, baz` i.e, all mappings other than the +stop word. + + [float] ==== `tokenizer` and `ignore_case` are deprecated diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java new file mode 100644 index 00000000000..bcc249f8a8a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.synonym.SolrSynonymParser; +import org.apache.lucene.util.CharsRef; +import org.apache.lucene.util.CharsRefBuilder; +import org.elasticsearch.common.logging.Loggers; + +import java.io.IOException; + +public class ESSolrSynonymParser extends SolrSynonymParser { + + private final boolean lenient; + private static final Logger logger = + Loggers.getLogger(ESSolrSynonymParser.class, "ESSolrSynonymParser"); + + public ESSolrSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { + super(dedup, expand, analyzer); + this.lenient = lenient; + } + + @Override + public void add(CharsRef input, CharsRef output, boolean includeOrig) { + // This condition follows up on the overridden analyze method. In case lenient was set to true and there was an + // exception during super.analyze we return a zero-length CharsRef for that word which caused an exception. When + // the synonym mappings for the words are added using the add method we skip the ones that were left empty by + // analyze i.e., in the case when lenient is set we only add those combinations which are non-zero-length. The + // else would happen only in the case when the input or output is empty and lenient is set, in which case we + // quietly ignore it. For more details on the control-flow see SolrSynonymParser::addInternal. 
+ if (lenient == false || (input.length > 0 && output.length > 0)) { + super.add(input, output, includeOrig); + } + } + + @Override + public CharsRef analyze(String text, CharsRefBuilder reuse) throws IOException { + try { + return super.analyze(text, reuse); + } catch (IllegalArgumentException ex) { + if (lenient) { + logger.info("Synonym rule for [" + text + "] was ignored"); + return new CharsRef(""); + } else { + throw ex; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java new file mode 100644 index 00000000000..3764820c434 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.synonym.WordnetSynonymParser; +import org.apache.lucene.util.CharsRef; +import org.apache.lucene.util.CharsRefBuilder; +import org.elasticsearch.common.logging.Loggers; + +import java.io.IOException; + +public class ESWordnetSynonymParser extends WordnetSynonymParser { + + private final boolean lenient; + private static final Logger logger = + Loggers.getLogger(ESWordnetSynonymParser.class, "ESWordnetSynonymParser"); + + public ESWordnetSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { + super(dedup, expand, analyzer); + this.lenient = lenient; + } + + @Override + public void add(CharsRef input, CharsRef output, boolean includeOrig) { + // This condition follows up on the overridden analyze method. In case lenient was set to true and there was an + // exception during super.analyze we return a zero-length CharsRef for that word which caused an exception. When + // the synonym mappings for the words are added using the add method we skip the ones that were left empty by + // analyze i.e., in the case when lenient is set we only add those combinations which are non-zero-length. The + // else would happen only in the case when the input or output is empty and lenient is set, in which case we + // quietly ignore it. For more details on the control-flow see SolrSynonymParser::addInternal. 
+ if (lenient == false || (input.length > 0 && output.length > 0)) { + super.add(input, output, includeOrig); + } + } + + @Override + public CharsRef analyze(String text, CharsRefBuilder reuse) throws IOException { + try { + return super.analyze(text, reuse); + } catch (IllegalArgumentException ex) { + if (lenient) { + logger.info("Synonym rule for [" + text + "] was ignored"); + return new CharsRef(""); + } else { + throw ex; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java index 2f7964f63d6..24dcb6d33fe 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java @@ -21,10 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.analysis.synonym.SynonymGraphFilter; import org.apache.lucene.analysis.synonym.SynonymMap; -import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -58,11 +56,11 @@ public class SynonymGraphTokenFilterFactory extends SynonymTokenFilterFactory { try { SynonymMap.Builder parser; if ("wordnet".equalsIgnoreCase(format)) { - parser = new WordnetSynonymParser(true, expand, analyzerForParseSynonym); - ((WordnetSynonymParser) parser).parse(rulesReader); + parser = new ESWordnetSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESWordnetSynonymParser) parser).parse(rulesReader); } else { - parser = new SolrSynonymParser(true, expand, analyzerForParseSynonym); - ((SolrSynonymParser) parser).parse(rulesReader); + parser = 
new ESSolrSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESSolrSynonymParser) parser).parse(rulesReader); } synonymMap = parser.build(); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 56bae571988..61c9aba7a3e 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -21,10 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.analysis.synonym.SynonymFilter; import org.apache.lucene.analysis.synonym.SynonymMap; -import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -38,6 +36,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { protected final String format; protected final boolean expand; + protected final boolean lenient; protected final Settings settings; public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry, @@ -52,6 +51,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { } this.expand = settings.getAsBoolean("expand", true); + this.lenient = settings.getAsBoolean("lenient", false); this.format = settings.get("format", ""); } @@ -93,11 +93,11 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { try { SynonymMap.Builder parser; if ("wordnet".equalsIgnoreCase(format)) { - parser = new WordnetSynonymParser(true, expand, analyzerForParseSynonym); - ((WordnetSynonymParser) 
parser).parse(rulesReader); + parser = new ESWordnetSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESWordnetSynonymParser) parser).parse(rulesReader); } else { - parser = new SolrSynonymParser(true, expand, analyzerForParseSynonym); - ((SolrSynonymParser) parser).parse(rulesReader); + parser = new ESSolrSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESSolrSynonymParser) parser).parse(rulesReader); } synonymMap = parser.build(); } catch (Exception e) { diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java b/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java new file mode 100644 index 00000000000..31aa1a9be25 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.standard.StandardTokenizer; +import org.apache.lucene.analysis.synonym.SynonymFilter; +import org.apache.lucene.analysis.synonym.SynonymMap; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; +import java.io.StringReader; +import java.text.ParseException; + +import static org.hamcrest.Matchers.containsString; + +public class ESSolrSynonymParserTests extends ESTokenStreamTestCase { + + public void testLenientParser() throws IOException, ParseException { + ESSolrSynonymParser parser = new ESSolrSynonymParser(true, false, true, new StandardAnalyzer()); + String rules = + "&,and\n" + + "come,advance,approach\n"; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("approach quietly then advance & destroy")); + TokenStream ts = new SynonymFilter(tokenizer, synonymMap, false); + assertTokenStreamContents(ts, new String[]{"come", "quietly", "then", "come", "destroy"}); + } + + public void testLenientParserWithSomeIncorrectLines() throws IOException, ParseException { + CharArraySet stopSet = new CharArraySet(1, true); + stopSet.add("bar"); + ESSolrSynonymParser parser = + new ESSolrSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); + String rules = "foo,bar,baz"; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("first word is foo, then bar and lastly baz")); + TokenStream ts 
= new SynonymFilter(new StopFilter(tokenizer, stopSet), synonymMap, false); + assertTokenStreamContents(ts, new String[]{"first", "word", "is", "foo", "then", "and", "lastly", "foo"}); + } + + public void testNonLenientParser() { + ESSolrSynonymParser parser = new ESSolrSynonymParser(true, false, false, new StandardAnalyzer()); + String rules = + "&,and=>and\n" + + "come,advance,approach\n"; + StringReader rulesReader = new StringReader(rules); + ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader)); + assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1")); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java b/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java new file mode 100644 index 00000000000..6d0fd8944d4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.standard.StandardTokenizer; +import org.apache.lucene.analysis.synonym.SynonymFilter; +import org.apache.lucene.analysis.synonym.SynonymMap; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; +import java.io.StringReader; +import java.text.ParseException; + +import static org.hamcrest.Matchers.containsString; + +public class ESWordnetSynonymParserTests extends ESTokenStreamTestCase { + + public void testLenientParser() throws IOException, ParseException { + ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, true, new StandardAnalyzer()); + String rules = + "s(100000001,1,'&',a,1,0).\n" + + "s(100000001,2,'and',a,1,0).\n" + + "s(100000002,1,'come',v,1,0).\n" + + "s(100000002,2,'advance',v,1,0).\n" + + "s(100000002,3,'approach',v,1,0)."; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("approach quietly then advance & destroy")); + TokenStream ts = new SynonymFilter(tokenizer, synonymMap, false); + assertTokenStreamContents(ts, new String[]{"come", "quietly", "then", "come", "destroy"}); + } + + public void testLenientParserWithSomeIncorrectLines() throws IOException, ParseException { + CharArraySet stopSet = new CharArraySet(1, true); + stopSet.add("bar"); + ESWordnetSynonymParser parser = + new ESWordnetSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); + String rules = + "s(100000001,1,'foo',v,1,0).\n" + + "s(100000001,2,'bar',v,1,0).\n" + + "s(100000001,3,'baz',v,1,0)."; + StringReader rulesReader = 
new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("first word is foo, then bar and lastly baz")); + TokenStream ts = new SynonymFilter(new StopFilter(tokenizer, stopSet), synonymMap, false); + assertTokenStreamContents(ts, new String[]{"first", "word", "is", "foo", "then", "and", "lastly", "foo"}); + } + + public void testNonLenientParser() { + ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, false, new StandardAnalyzer()); + String rules = + "s(100000001,1,'&',a,1,0).\n" + + "s(100000001,2,'and',a,1,0).\n" + + "s(100000002,1,'come',v,1,0).\n" + + "s(100000002,2,'advance',v,1,0).\n" + + "s(100000002,3,'approach',v,1,0)."; + StringReader rulesReader = new StringReader(rules); + ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader)); + assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1")); + } + +} From 25cd8350105d62c34245e7e36636838a72b9b7ff Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 10 Jul 2018 17:44:26 -0400 Subject: [PATCH 06/10] Increase logging level for testStressMaybeFlush Relates #31629 --- .../elasticsearch/index/shard/IndexShardIT.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index d6d50b24d1f..bda6de8aa7d 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -73,6 +73,7 @@ import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.junit.annotations.TestLogging; import 
java.io.IOException; import java.io.UncheckedIOException; @@ -84,6 +85,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -404,6 +406,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { } } + @TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.index.engine:TRACE") public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { createIndex("test"); ensureGreen(); @@ -446,13 +449,14 @@ public class IndexShardIT extends ESSingleNodeTestCase { barrier.await(); final CheckedRunnable check; if (flush) { - final FlushStats flushStats = shard.flushStats(); - final long total = flushStats.getTotal(); - final long periodic = flushStats.getPeriodic(); + final FlushStats initialStats = shard.flushStats(); client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); check = () -> { - assertThat(shard.flushStats().getTotal(), equalTo(total + 1)); - assertThat(shard.flushStats().getPeriodic(), equalTo(periodic + 1)); + final FlushStats currentStats = shard.flushStats(); + String msg = String.format(Locale.ROOT, "flush stats: total=[%d vs %d], periodic=[%d vs %d]", + initialStats.getTotal(), currentStats.getTotal(), initialStats.getPeriodic(), currentStats.getPeriodic()); + assertThat(msg, currentStats.getPeriodic(), equalTo(initialStats.getPeriodic() + 1)); + assertThat(msg, currentStats.getTotal(), equalTo(initialStats.getTotal() + 1)); }; } else { final long generation = getTranslog(shard).currentFileGeneration(); From 4d83a0dd5af469cbb6ae04b1ba13deb73c617a4a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 10 Jul 2018 17:59:15 -0400 Subject: [PATCH 07/10] HLREST: Bundle the x-pack protocol project (#31904) The `:x-pack:protocol` project is an implementation detail shared 
by the xpack projects and the high level rest client and really doesn't deserve its own maven coordinants and published javadoc. This change bundles `:x-pack:protocol` into the high level rest client. Relates to #29827 --- client/rest-high-level/build.gradle | 123 ++++++++++++++++++++--- qa/ccs-unavailable-clusters/build.gradle | 4 +- 2 files changed, 113 insertions(+), 14 deletions(-) diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 05b1cb25ed5..451452759f5 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -18,30 +16,86 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks * specific language governing permissions and limitations * under the License. */ + +import org.elasticsearch.gradle.precommit.PrecommitTasks +import org.gradle.api.XmlProvider +import org.gradle.api.publish.maven.MavenPublication + +buildscript { + repositories { + maven { + url 'https://plugins.gradle.org/m2/' + } + } + dependencies { + classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' + } +} + apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.rest-test' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' +apply plugin: 'com.github.johnrengelman.shadow' group = 'org.elasticsearch.client' archivesBaseName = 'elasticsearch-rest-high-level-client' publishing { - publications { - nebula { - artifactId = archivesBaseName + publications { + nebula(MavenPublication) { + artifact shadowJar + artifactId = archivesBaseName + /* + * Configure the pom to include the "shadow" as compile dependencies + * because that is how we're using them but remove all other dependencies + * because they've been shaded into the jar. 
+ */ + pom.withXml { XmlProvider xml -> + Node root = xml.asNode() + root.remove(root.dependencies) + Node dependenciesNode = root.appendNode('dependencies') + project.configurations.shadow.allDependencies.each { + if (false == it instanceof SelfResolvingDependency) { + Node dependencyNode = dependenciesNode.appendNode('dependency') + dependencyNode.appendNode('groupId', it.group) + dependencyNode.appendNode('artifactId', it.name) + dependencyNode.appendNode('version', it.version) + dependencyNode.appendNode('scope', 'compile') + } } + } } + } +} + +/* + * We need somewhere to configure dependencies that we don't wish to shade + * into the high level REST client. The shadow plugin creates a "shadow" + * configuration which is *almost* exactly that. It is never bundled into + * the shaded jar but is used for main source compilation. Unfortunately, + * by default it is not used for *test* source compilation and isn't used + * in tests at all. This change makes it available for test compilation. + * A change below makes it available for testing. + */ +sourceSets { + test { + compileClasspath += configurations.shadow + } } dependencies { - compile "org.elasticsearch:elasticsearch:${version}" - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" - compile "org.elasticsearch.plugin:parent-join-client:${version}" - compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" - compile "org.elasticsearch.plugin:rank-eval-client:${version}" - compile "org.elasticsearch.plugin:lang-mustache-client:${version}" - compile project(':x-pack:protocol') // TODO bundle into the jar + /* + * Everything in the "shadow" configuration is *not* copied into the + * shadowJar. 
+ */ + shadow "org.elasticsearch:elasticsearch:${version}" + shadow "org.elasticsearch.client:elasticsearch-rest-client:${version}" + shadow "org.elasticsearch.plugin:parent-join-client:${version}" + shadow "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" + shadow "org.elasticsearch.plugin:rank-eval-client:${version}" + shadow "org.elasticsearch.plugin:lang-mustache-client:${version}" + compile project(':x-pack:protocol') testCompile "org.elasticsearch.client:test:${version}" testCompile "org.elasticsearch.test:framework:${version}" @@ -64,3 +118,48 @@ forbiddenApisMain { signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] } + +shadowJar { + classifier = null + mergeServiceFiles() +} + +// We don't need normal jar, we use shadow jar instead +jar.enabled = false +assemble.dependsOn shadowJar + +javadoc { + /* + * Bundle all of the javadoc from all of the shaded projects into this one + * so we don't *have* to publish javadoc for all of the "client" jars. + */ + configurations.compile.dependencies.all { Dependency dep -> + Project p = dependencyToProject(dep) + if (p != null) { + evaluationDependsOn(p.path) + source += p.sourceSets.main.allJava + } + } +} + +/* + * Use the jar for testing so we have tests of the bundled jar. + * Use the "shadow" configuration for testing because we need things + * in it. 
+ */ +test { + classpath -= compileJava.outputs.files + classpath -= configurations.compile + classpath -= configurations.runtime + classpath += configurations.shadow + classpath += shadowJar.outputs.files + dependsOn shadowJar +} +integTestRunner { + classpath -= compileJava.outputs.files + classpath -= configurations.compile + classpath -= configurations.runtime + classpath += configurations.shadow + classpath += shadowJar.outputs.files + dependsOn shadowJar +} diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index 86d0cb64f65..d9de422bb43 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -21,5 +21,5 @@ apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test-with-dependencies' dependencies { - testCompile project(path: ':client:rest-high-level', configuration: 'runtime') -} \ No newline at end of file + testCompile project(path: ':client:rest-high-level', configuration: 'shadow') +} From c6666fc6cbb7afd3b4114453c9c9327a3361f130 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 10 Jul 2018 10:52:03 +0200 Subject: [PATCH 08/10] rolling upgrade should use a replica to prevent relocations while running a scroll A scroll holds a reference to the shard store. If the cluster is moving shards around that reference can prevent a shard from relocating back to node it used to be on, causing test failures. 
Closes #31827 --- .../resources/rest-api-spec/test/mixed_cluster/10_basic.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 3dd1f708959..66c92797eef 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -6,7 +6,10 @@ wait_for_active_shards: all body: settings: - number_of_replicas: 0 +# we use 1 replica to make sure we don't have shards relocating. Relocating a shard with +# a scroll on it prevents shards from moving back into a where a scroll is running (it holds the shard lock) +# see https://github.com/elastic/elasticsearch/issues/31827 + number_of_replicas: 1 index.routing.allocation.include.upgraded: true - do: From 4b8b831517f4448efa83acfe7060b0f4ee5e6beb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 11 Jul 2018 09:56:21 +0200 Subject: [PATCH 09/10] Remove unused reference to filePermissionsCache (#31923) Currently Role.Builder keeps a reference to the FieldPermissionsCache that is passed into its constructors. This seems to be unused except for passing it on to convertFromIndicesPrivileges() in the second ctor itself, but we don't need to keep the internal reference in that case, so it can be removed. 
Relates to #31876 --- .../xpack/core/security/authz/permission/Role.java | 11 ++--------- .../security/authz/store/CompositeRolesStore.java | 2 +- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java index 8fed501ece2..a850a4a16f6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java @@ -55,11 +55,7 @@ public final class Role { } public static Builder builder(String... names) { - return new Builder(names, null); - } - - public static Builder builder(String[] names, FieldPermissionsCache fieldPermissionsCache) { - return new Builder(names, fieldPermissionsCache); + return new Builder(names); } public static Builder builder(RoleDescriptor rd, FieldPermissionsCache fieldPermissionsCache) { @@ -94,16 +90,13 @@ public final class Role { private ClusterPermission cluster = ClusterPermission.NONE; private RunAsPermission runAs = RunAsPermission.NONE; private List groups = new ArrayList<>(); - private FieldPermissionsCache fieldPermissionsCache = null; - private Builder(String[] names, FieldPermissionsCache fieldPermissionsCache) { + private Builder(String[] names) { this.names = names; - this.fieldPermissionsCache = fieldPermissionsCache; } private Builder(RoleDescriptor rd, @Nullable FieldPermissionsCache fieldPermissionsCache) { this.names = new String[] { rd.getName() }; - this.fieldPermissionsCache = fieldPermissionsCache; if (rd.getClusterPrivileges().length == 0) { cluster = ClusterPermission.NONE; } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index b5a20af8d30..1018ceeda09 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -278,7 +278,7 @@ public class CompositeRolesStore extends AbstractComponent { final Set clusterPrivs = clusterPrivileges.isEmpty() ? null : clusterPrivileges; final Privilege runAsPrivilege = runAs.isEmpty() ? Privilege.NONE : new Privilege(runAs, runAs.toArray(Strings.EMPTY_ARRAY)); - Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()]), fieldPermissionsCache) + Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()])) .cluster(ClusterPrivilege.get(clusterPrivs)) .runAs(runAsPrivilege); indicesPrivilegesMap.entrySet().forEach((entry) -> { From b4087d69d2f27d1640d2ada5d5f5cc0e9d05afcf Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 11 Jul 2018 10:24:21 +0200 Subject: [PATCH 10/10] Fix assertIngestDocument wrongfully passing (#31913) * Fix assertIngestDocument wrongfully passing * Previously docA being subset of docB passed because iteration was over docA's keys only * Scalars in nested fields were not compared in all cases * Assertion errors were hard to interpret (message wasn't correct since it only mentioned the class type) * In cases where two paths contained different types a ClassCastException was thrown instead of an AssertionError * Fixes #28492 --- .../ingest/common/GrokProcessorTests.java | 2 +- .../ingest/common/JsonProcessorTests.java | 14 ++-- .../ingest/IngestDocumentMatcher.java | 57 ++++++++----- .../ingest/IngestDocumentMatcherTests.java | 82 +++++++++++++++++++ 4 files changed, 123 insertions(+), 32 deletions(-) create mode 100644 test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java 
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java index 0eba79523ac..68654923ae9 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java @@ -129,7 +129,7 @@ public class GrokProcessorTests extends ESTestCase { public void testMissingFieldWithIgnoreMissing() throws Exception { String fieldName = "foo.bar"; IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, true, ThreadWatchdog.noop()); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 245285259b4..2867ed1d240 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -33,7 +33,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -55,7 +54,7 @@ public class JsonProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); 
jsonProcessor.execute(ingestDocument); Map jsonified = ingestDocument.getFieldValue(randomTargetField, Map.class); - assertIngestDocument(ingestDocument.getFieldValue(randomTargetField, Object.class), jsonified); + assertEquals(ingestDocument.getFieldValue(randomTargetField, Object.class), jsonified); } public void testInvalidValue() { @@ -161,13 +160,10 @@ public class JsonProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); jsonProcessor.execute(ingestDocument); - Map expected = new HashMap<>(); - expected.put("a", 1); - expected.put("b", 2); - expected.put("c", "see"); - IngestDocument expectedIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), expected); - - assertIngestDocument(ingestDocument, expectedIngestDocument); + Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); + assertEquals(1, sourceAndMetadata.get("a")); + assertEquals(2, sourceAndMetadata.get("b")); + assertEquals("see", sourceAndMetadata.get("c")); } public void testAddBoolToRoot() { diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java index bb058c5cfdb..a7a8a3f11b1 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java @@ -20,48 +20,61 @@ package org.elasticsearch.ingest; import java.util.List; -import java.util.Locale; import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertThat; +import java.util.Objects; public class IngestDocumentMatcher { /** * Helper method to assert the equivalence between two IngestDocuments. 
* - * @param a first object to compare - * @param b second object to compare + * @param docA first document to compare + * @param docB second document to compare */ - public static void assertIngestDocument(Object a, Object b) { + public static void assertIngestDocument(IngestDocument docA, IngestDocument docB) { + if ((deepEquals(docA.getIngestMetadata(), docB.getIngestMetadata(), true) && + deepEquals(docA.getSourceAndMetadata(), docB.getSourceAndMetadata(), false)) == false) { + throw new AssertionError("Expected [" + docA + "] but received [" + docB + "]."); + } + } + + private static boolean deepEquals(Object a, Object b, boolean isIngestMeta) { if (a instanceof Map) { Map mapA = (Map) a; + if (b instanceof Map == false) { + return false; + } Map mapB = (Map) b; + if (mapA.size() != mapB.size()) { + return false; + } for (Map.Entry entry : mapA.entrySet()) { - if (entry.getValue() instanceof List || entry.getValue() instanceof Map) { - assertIngestDocument(entry.getValue(), mapB.get(entry.getKey())); + Object key = entry.getKey(); + // Don't compare the timestamp of ingest metadata since it will differ between executions + if ((isIngestMeta && "timestamp".equals(key)) == false + && deepEquals(entry.getValue(), mapB.get(key), false) == false) { + return false; } } + return true; } else if (a instanceof List) { List listA = (List) a; + if (b instanceof List == false) { + return false; + } List listB = (List) b; - for (int i = 0; i < listA.size(); i++) { + int countA = listA.size(); + if (countA != listB.size()) { + return false; + } + for (int i = 0; i < countA; i++) { Object value = listA.get(i); - if (value instanceof List || value instanceof Map) { - assertIngestDocument(value, listB.get(i)); + if (deepEquals(value, listB.get(i), false) == false) { + return false; } } - } else if (a instanceof byte[]) { - assertArrayEquals((byte[]) a, (byte[])b); - } else if (a instanceof IngestDocument) { - IngestDocument docA = (IngestDocument) a; - IngestDocument docB = 
(IngestDocument) b; - assertIngestDocument(docA.getSourceAndMetadata(), docB.getSourceAndMetadata()); - assertIngestDocument(docA.getIngestMetadata(), docB.getIngestMetadata()); + return true; } else { - String msg = String.format(Locale.ROOT, "Expected %s class to be equal to %s", a.getClass().getName(), b.getClass().getName()); - assertThat(msg, a, equalTo(b)); + return Objects.deepEquals(a, b); } } } diff --git a/test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java b/test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java new file mode 100644 index 00000000000..bff9a923b9f --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; + +public class IngestDocumentMatcherTests extends ESTestCase { + + public void testDifferentMapData() { + Map sourceAndMetadata1 = new HashMap<>(); + sourceAndMetadata1.put("foo", "bar"); + IngestDocument document1 = new IngestDocument(sourceAndMetadata1, new HashMap<>()); + IngestDocument document2 = new IngestDocument(new HashMap<>(), new HashMap<>()); + assertThrowsOnComparision(document1, document2); + } + + public void testDifferentLengthListData() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Arrays.asList("bar", "baz")), new HashMap<>()); + IngestDocument document2 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.emptyList()), new HashMap<>()); + assertThrowsOnComparision(document1, document2); + } + + public void testDifferentNestedListFieldData() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Arrays.asList("bar", "baz")), new HashMap<>()); + IngestDocument document2 = + new IngestDocument(Collections.singletonMap(rootKey, Arrays.asList("bar", "blub")), new HashMap<>()); + assertThrowsOnComparision(document1, document2); + } + + public void testDifferentNestedMapFieldData() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.singletonMap("bar", "baz")), new HashMap<>()); + IngestDocument document2 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.singletonMap("bar", "blub")), new HashMap<>()); + assertThrowsOnComparision(document1, document2); + } + + public void testOnTypeConflict() { + String rootKey = "foo"; + IngestDocument document1 = + 
new IngestDocument(Collections.singletonMap(rootKey, Collections.singletonList("baz")), new HashMap<>()); + IngestDocument document2 = new IngestDocument( + Collections.singletonMap(rootKey, Collections.singletonMap("blub", "blab")), new HashMap<>() + ); + assertThrowsOnComparision(document1, document2); + } + + private static void assertThrowsOnComparision(IngestDocument document1, IngestDocument document2) { + expectThrows(AssertionError.class, () -> assertIngestDocument(document1, document2)); + expectThrows(AssertionError.class, () -> assertIngestDocument(document2, document1)); + } +}