commit 2ccb4df005
Merge branch 'master' into index-lifecycle
@@ -70,6 +70,11 @@ public class RestIntegTestTask extends DefaultTask {
         runner.parallelism = '1'
         runner.include('**/*IT.class')
         runner.systemProperty('tests.rest.load_packaged', 'false')
+
+        if (System.getProperty("tests.rest.cluster") == null) {
+            if (System.getProperty("tests.cluster") != null) {
+                throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null")
+            }
             // we pass all nodes to the rest cluster to allow the clients to round-robin between them
             // this is more realistic than just talking to a single node
             runner.systemProperty('tests.rest.cluster', "${-> nodes.collect{it.httpUri()}.join(",")}")
@@ -96,6 +101,14 @@ public class RestIntegTestTask extends DefaultTask {
             runner.doLast {
                 project.gradle.removeListener(logDumpListener)
             }
+        } else {
+            if (System.getProperty("tests.cluster") == null) {
+                throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null")
+            }
+            // an external cluster was specified and all responsibility for cluster configuration is taken by the user
+            runner.systemProperty('tests.rest.cluster', System.getProperty("tests.rest.cluster"))
+            runner.systemProperty('test.cluster', System.getProperty("tests.cluster"))
+        }

         // copy the rest spec/tests into the test resources
         Task copyRestSpec = createCopyRestSpecTask(project, includePackaged)
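The two system properties travel together: tests.rest.cluster tells the REST clients which HTTP endpoints to hit, while tests.cluster points at the external cluster itself, so the task rejects configurations that set only one of them. A minimal standalone sketch of that invariant (the class and method names here are illustrative, not part of the build):

public final class PairedClusterProperties {
    // Hypothetical helper mirroring the guard in RestIntegTestTask: both
    // properties must be null, or both must be non-null.
    static void validate(final String restCluster, final String cluster) {
        if ((restCluster == null) != (cluster == null)) {
            throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null");
        }
    }

    public static void main(final String[] args) {
        validate(null, null);                          // ok: the build manages the cluster
        validate("localhost:9200", "localhost:9300");  // ok: fully external cluster
        validate("localhost:9200", null);              // throws IllegalArgumentException
    }
}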
@@ -109,7 +122,10 @@ public class RestIntegTestTask extends DefaultTask {
                 clusterInit.enabled = false
                 return // no need to add cluster formation tasks if the task won't run!
             }
+            // only create the cluster if needed as otherwise an external cluster to use was specified
+            if (System.getProperty("tests.rest.cluster") == null) {
                 nodes = ClusterFormationTasks.setup(project, "${name}Cluster", runner, clusterConfig)
+            }
             super.dependsOn(runner.finalizedBy)
         }
     }
@@ -49,7 +49,9 @@ task createPluginsDir(type: EmptyDirTask) {
 CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean oss) {
   return copySpec {
     into("elasticsearch-${version}") {
+      into('lib') {
         with libFiles
+      }
       into('config') {
         dirMode 0750
         fileMode 0660
@@ -227,13 +227,15 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
    * Common files in all distributions *
    *****************************************************************************/
   libFiles = copySpec {
-    into 'lib'
+    // delay by using closures, since they have not yet been configured, so no jar task exists yet
     from { project(':server').jar }
     from { project(':server').configurations.runtime }
     from { project(':libs:plugin-classloader').jar }
-    // delay add tools using closures, since they have not yet been configured, so no jar task exists yet
     from { project(':distribution:tools:launchers').jar }
+    into('tools/plugin-cli') {
       from { project(':distribution:tools:plugin-cli').jar }
+      from { project(':distribution:tools:plugin-cli').configurations.runtime }
+    }
   }

   modulesFiles = { oss ->
@@ -124,13 +124,23 @@ Closure commonPackageConfig(String type, boolean oss) {
           include 'README.textile'
           fileMode 0644
         }
+        into('lib') {
+          with copySpec {
+            with libFiles
+            // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
+            eachFile { FileCopyDetails fcp ->
+              String[] segments = fcp.relativePath.segments
+              for (int i = segments.length - 2; i > 0 && segments[i] != 'lib'; --i) {
+                directory('/' + segments[0..i].join('/'), 0755)
+              }
+              fcp.mode = 0644
+            }
+          }
+        }
         into('modules') {
           with copySpec {
             with modulesFiles(oss)
-            // we need to specify every intermediate directory, but modules could have sub directories
-            // and there might not be any files as direct children of intermediates (eg platform)
-            // so we must iterate through the parents, but duplicate calls with the same path
-            // are ok (they don't show up in the built packages)
+            // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
             eachFile { FileCopyDetails fcp ->
               String[] segments = fcp.relativePath.segments
               for (int i = segments.length - 2; i > 0 && segments[i] != 'modules'; --i) {
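The eachFile hook has to register every intermediate directory explicitly because the package metadata only records paths it is told about, and there may be no files as direct children of an intermediate directory. A minimal Java sketch of the same parent-segment walk (the segment values are illustrative):

public final class IntermediateDirsDemo {
    public static void main(final String[] args) {
        // Relative path of one packaged file, e.g. modules/x-pack/platform/linux-x86_64/some.jar
        final String[] segments = {"modules", "x-pack", "platform", "linux-x86_64", "some.jar"};
        // Walk from the file's immediate parent up to (but excluding) the mount point,
        // emitting each intermediate directory; duplicate registrations are harmless.
        for (int i = segments.length - 2; i > 0 && segments[i].equals("modules") == false; --i) {
            final StringBuilder dir = new StringBuilder();
            for (int j = 0; j <= i; j++) {
                dir.append('/').append(segments[j]);
            }
            System.out.println(dir); // /modules/x-pack/platform/linux-x86_64, /modules/x-pack/platform, /modules/x-pack
        }
    }
}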
@@ -263,7 +273,6 @@ ospackage {
   permissionGroup 'root'

   into '/usr/share/elasticsearch'
-  with libFiles
   with noticeFile
 }

@@ -10,6 +10,12 @@ do
   source "`dirname "$0"`"/$additional_source
 done

+IFS=';' read -r -a additional_classpath_directories <<< "$ES_ADDITIONAL_CLASSPATH_DIRECTORIES"
+for additional_classpath_directory in "${additional_classpath_directories[@]}"
+do
+  ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/$additional_classpath_directory/*"
+done
+
 exec \
   "$JAVA" \
   $ES_JAVA_OPTS \
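Both launcher scripts implement the same contract: ES_ADDITIONAL_CLASSPATH_DIRECTORIES is a ';'-separated list of directories relative to ES_HOME, and each entry contributes a wildcard classpath element. A rough Java model of what the bash loop computes (the paths and starting classpath are illustrative assumptions):

public final class AdditionalClasspathDemo {
    public static void main(final String[] args) {
        final String esHome = "/usr/share/elasticsearch";  // stands in for $ES_HOME
        final String additional = "lib/tools/plugin-cli";  // stands in for $ES_ADDITIONAL_CLASSPATH_DIRECTORIES
        String esClasspath = esHome + "/lib/*";            // assumed starting value of $ES_CLASSPATH
        for (final String dir : additional.split(";")) {
            esClasspath = esClasspath + ":" + esHome + "/" + dir + "/*";
        }
        // /usr/share/elasticsearch/lib/*:/usr/share/elasticsearch/lib/tools/plugin-cli/*
        System.out.println(esClasspath);
    }
}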
@@ -11,6 +11,12 @@ for /f "tokens=1*" %%a in ("%*") do (
   set arguments=%%b
 )

+if defined ES_ADDITIONAL_CLASSPATH_DIRECTORIES (
+  for %%a in ("%ES_ADDITIONAL_CLASSPATH_DIRECTORIES:;=","%") do (
+    set ES_CLASSPATH=!ES_CLASSPATH!;!ES_HOME!/%%a/*
+  )
+)
+
 %JAVA% ^
   %ES_JAVA_OPTS% ^
   -Des.path.home="%ES_HOME%" ^
@@ -1,5 +1,6 @@
 #!/bin/bash

-"`dirname "$0"`"/elasticsearch-cli \
+ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/plugin-cli \
+  "`dirname "$0"`"/elasticsearch-cli \
     org.elasticsearch.plugins.PluginCli \
     "$@"
@@ -3,6 +3,7 @@
 setlocal enabledelayedexpansion
 setlocal enableextensions

+set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/plugin-cli
 call "%~dp0elasticsearch-cli.bat" ^
   org.elasticsearch.plugins.PluginCli ^
   %* ^
@@ -19,14 +19,22 @@

 apply plugin: 'elasticsearch.build'

+archivesBaseName = 'elasticsearch-plugin-cli'
+
 dependencies {
   compileOnly "org.elasticsearch:elasticsearch:${version}"
   compileOnly "org.elasticsearch:elasticsearch-cli:${version}"
+  compile "org.bouncycastle:bcpg-jdk15on:1.59"
+  compile "org.bouncycastle:bcprov-jdk15on:1.59"
   testCompile "org.elasticsearch.test:framework:${version}"
   testCompile 'com.google.jimfs:jimfs:1.1'
   testCompile 'com.google.guava:guava:18.0'
 }

+dependencyLicenses {
+  mapping from: /bc.*/, to: 'bouncycastle'
+}
+
 test {
   // TODO: find a way to add permissions for the tests in this module
   systemProperty 'tests.security.manager', 'false'
@@ -0,0 +1 @@
+ee93e5376bb6cf0a15c027b5f5e4393f2738e709

@@ -0,0 +1 @@
+2507204241ab450456bdb8e8c0a8f986e418bd99
@@ -0,0 +1,17 @@
+Copyright (c) 2000-2015 The Legion of the Bouncy Castle Inc. (http://www.bouncycastle.org)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial
+portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
@@ -0,0 +1 @@
+
@@ -23,6 +23,16 @@ import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.apache.lucene.search.spell.LevensteinDistance;
 import org.apache.lucene.util.CollectionUtil;
+import org.bouncycastle.jce.provider.BouncyCastleProvider;
+import org.bouncycastle.openpgp.PGPException;
+import org.bouncycastle.openpgp.PGPPublicKey;
+import org.bouncycastle.openpgp.PGPPublicKeyRingCollection;
+import org.bouncycastle.openpgp.PGPSignature;
+import org.bouncycastle.openpgp.PGPSignatureList;
+import org.bouncycastle.openpgp.PGPUtil;
+import org.bouncycastle.openpgp.jcajce.JcaPGPObjectFactory;
+import org.bouncycastle.openpgp.operator.jcajce.JcaKeyFingerprintCalculator;
+import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentVerifierBuilderProvider;
 import org.elasticsearch.Build;
 import org.elasticsearch.Version;
 import org.elasticsearch.bootstrap.JarHell;
@@ -37,12 +47,14 @@ import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.env.Environment;

 import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLConnection;
 import java.net.URLDecoder;
@@ -59,8 +71,10 @@ import java.nio.file.attribute.PosixFileAttributes;
 import java.nio.file.attribute.PosixFilePermission;
 import java.nio.file.attribute.PosixFilePermissions;
 import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -116,7 +130,6 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
     /** The plugin zip is not properly structured. */
     static final int PLUGIN_MALFORMED = 2;

-
     /** The builtin modules, which are plugins, but cannot be installed or removed. */
     static final Set<String> MODULES;
     static {
@@ -241,7 +254,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
         if (OFFICIAL_PLUGINS.contains(pluginId)) {
             final String url = getElasticUrl(terminal, getStagingHash(), Version.CURRENT, isSnapshot(), pluginId, Platforms.PLATFORM_NAME);
             terminal.println("-> Downloading " + pluginId + " from elastic");
-            return downloadZipAndChecksum(terminal, url, tmpDir, false);
+            return downloadAndValidate(terminal, url, tmpDir, true);
         }

         // now try as maven coordinates, a valid URL would only have a colon and slash
@@ -249,7 +262,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
         if (coordinates.length == 3 && pluginId.contains("/") == false && pluginId.startsWith("file:") == false) {
             String mavenUrl = getMavenUrl(terminal, coordinates, Platforms.PLATFORM_NAME);
             terminal.println("-> Downloading " + pluginId + " from maven central");
-            return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, true);
+            return downloadAndValidate(terminal, mavenUrl, tmpDir, false);
         }

         // fall back to plain old URL
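The pluginId dispatch relies on a small syntactic heuristic: three colon-separated fields with no slash and no file: prefix are treated as Maven coordinates, since any valid URL would carry a slash. A standalone sketch of that check (the class and method names are illustrative):

public final class MavenCoordinatesCheck {
    // Hypothetical helper mirroring the dispatch condition above.
    static boolean looksLikeMavenCoordinates(final String pluginId) {
        final String[] coordinates = pluginId.split(":");
        return coordinates.length == 3 && pluginId.contains("/") == false && pluginId.startsWith("file:") == false;
    }

    public static void main(final String[] args) {
        System.out.println(looksLikeMavenCoordinates("mygroup:myplugin:1.0.0"));          // true
        System.out.println(looksLikeMavenCoordinates("https://example.org/plugin.zip"));  // false (contains a slash)
        System.out.println(looksLikeMavenCoordinates("file:plugin:zip"));                 // false (file: prefix)
    }
}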
@@ -406,16 +419,44 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
         }
     }

-    /** Downloads a zip from the url, as well as a SHA512 (or SHA1) checksum, and checks the checksum. */
-    // pkg private for tests
-    @SuppressForbidden(reason = "We use openStream to download plugins")
-    private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, boolean allowSha1) throws Exception {
+    @SuppressForbidden(reason = "URL#openStream")
+    private InputStream urlOpenStream(final URL url) throws IOException {
+        return url.openStream();
+    }
+
+    /**
+     * Downloads a ZIP from the URL. This method also validates the downloaded plugin ZIP via the following means:
+     * <ul>
+     * <li>
+     * For an official plugin we download the SHA-512 checksum and validate the integrity of the downloaded ZIP. We also download the
+     * armored signature and validate the authenticity of the downloaded ZIP.
+     * </li>
+     * <li>
+     * For a non-official plugin we download the SHA-512 checksum, or fall back to the SHA-1 checksum, and validate the integrity of the
+     * downloaded ZIP.
+     * </li>
+     * </ul>
+     *
+     * @param terminal a terminal to log messages to
+     * @param urlString the URL of the plugin ZIP
+     * @param tmpDir a temporary directory to write downloaded files to
+     * @param officialPlugin true if the plugin is an official plugin
+     * @return the path to the downloaded plugin ZIP
+     * @throws IOException if an I/O exception occurs downloading or reading files and resources
+     * @throws PGPException if an exception occurs verifying the downloaded ZIP signature
+     * @throws UserException if checksum validation fails
+     */
+    private Path downloadAndValidate(
+            final Terminal terminal,
+            final String urlString,
+            final Path tmpDir,
+            final boolean officialPlugin) throws IOException, PGPException, UserException {
         Path zip = downloadZip(terminal, urlString, tmpDir);
         pathsToDeleteOnShutdown.add(zip);
         String checksumUrlString = urlString + ".sha512";
         URL checksumUrl = openUrl(checksumUrlString);
         String digestAlgo = "SHA-512";
-        if (checksumUrl == null && allowSha1) {
+        if (checksumUrl == null && officialPlugin == false) {
             // fallback to sha1, until 7.0, but with warning
             terminal.println("Warning: sha512 not found, falling back to sha1. This behavior is deprecated and will be removed in a " +
                 "future release. Please update the plugin to use a sha512 checksum.");
@@ -427,7 +468,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
             throw new UserException(ExitCodes.IO_ERROR, "Plugin checksum missing: " + checksumUrlString);
         }
         final String expectedChecksum;
-        try (InputStream in = checksumUrl.openStream()) {
+        try (InputStream in = urlOpenStream(checksumUrl)) {
             /*
              * The supported format of the SHA-1 files is a single-line file containing the SHA-1. The supported format of the SHA-512 files
              * is a single-line file containing the SHA-512 and the filename, separated by two spaces. For SHA-1, we verify that the hash
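The checksum file read above feeds the comparison in the next hunk: hash the downloaded bytes with the negotiated algorithm and compare hex digests. A self-contained sketch of that comparison (the class and helper names are illustrative; the command itself uses MessageDigests.toHexString):

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

public final class ChecksumCheckDemo {
    // Hash the file with the given algorithm ("SHA-512" or "SHA-1") and compare
    // against the expected lowercase hex digest from the .sha512/.sha1 file.
    static void checkDigest(final Path zip, final String digestAlgo, final String expectedChecksum) throws Exception {
        final byte[] digest = MessageDigest.getInstance(digestAlgo).digest(Files.readAllBytes(zip));
        final StringBuilder actualChecksum = new StringBuilder();
        for (final byte b : digest) {
            actualChecksum.append(String.format("%02x", b));
        }
        if (expectedChecksum.contentEquals(actualChecksum) == false) {
            throw new IllegalStateException(digestAlgo + " mismatch, expected " + expectedChecksum + " but got " + actualChecksum);
        }
    }
}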
@@ -465,23 +506,119 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
             }
         }

-        byte[] zipbytes = Files.readAllBytes(zip);
-        String gotChecksum = MessageDigests.toHexString(MessageDigest.getInstance(digestAlgo).digest(zipbytes));
-        if (expectedChecksum.equals(gotChecksum) == false) {
-            throw new UserException(ExitCodes.IO_ERROR,
-                digestAlgo + " mismatch, expected " + expectedChecksum + " but got " + gotChecksum);
+        try {
+            final byte[] zipBytes = Files.readAllBytes(zip);
+            final String actualChecksum = MessageDigests.toHexString(MessageDigest.getInstance(digestAlgo).digest(zipBytes));
+            if (expectedChecksum.equals(actualChecksum) == false) {
+                throw new UserException(
+                        ExitCodes.IO_ERROR,
+                        digestAlgo + " mismatch, expected " + expectedChecksum + " but got " + actualChecksum);
+            }
+        } catch (final NoSuchAlgorithmException e) {
+            // this should never happen as we are using SHA-1 and SHA-512 here
+            throw new AssertionError(e);
+        }
+
+        if (officialPlugin) {
+            verifySignature(zip, urlString);
         }

         return zip;
     }

+    /**
+     * Verify the signature of the downloaded plugin ZIP. The signature is obtained from the source of the downloaded plugin by appending
+     * ".asc" to the URL. It is expected that the plugin is signed with the Elastic signing key with ID D27D666CD88E42B4.
+     *
+     * @param zip the path to the downloaded plugin ZIP
+     * @param urlString the URL source of the downloaded plugin ZIP
+     * @throws IOException if an I/O exception occurs reading from various input streams
+     * @throws PGPException if the PGP implementation throws an internal exception during verification
+     */
+    void verifySignature(final Path zip, final String urlString) throws IOException, PGPException {
+        final String ascUrlString = urlString + ".asc";
+        final URL ascUrl = openUrl(ascUrlString);
+        try (
+                // fin is a file stream over the downloaded plugin zip whose signature to verify
+                InputStream fin = pluginZipInputStream(zip);
+                // sin is a URL stream to the signature corresponding to the downloaded plugin zip
+                InputStream sin = urlOpenStream(ascUrl);
+                // pin is an input stream to the public key in ASCII-Armor format (RFC4880); the Armor data is in RFC2045 format
+                InputStream pin = getPublicKey()) {
+            final JcaPGPObjectFactory factory = new JcaPGPObjectFactory(PGPUtil.getDecoderStream(sin));
+            final PGPSignature signature = ((PGPSignatureList) factory.nextObject()).get(0);
+
+            // validate the signature has key ID matching our public key ID
+            final String keyId = Long.toHexString(signature.getKeyID()).toUpperCase(Locale.ROOT);
+            if (getPublicKeyId().equals(keyId) == false) {
+                throw new IllegalStateException("key id [" + keyId + "] does not match expected key id [" + getPublicKeyId() + "]");
+            }
+
+            // compute the signature of the downloaded plugin zip
+            final List<String> lines =
+                    new BufferedReader(new InputStreamReader(pin, StandardCharsets.UTF_8)).lines().collect(Collectors.toList());
+            // skip armor headers and possible blank line
+            int index = 1;
+            for (; index < lines.size(); index++) {
+                if (lines.get(index).matches(".*: .*") == false && lines.get(index).matches("\\s*") == false) {
+                    break;
+                }
+            }
+            final byte[] armoredData =
+                    lines.subList(index, lines.size() - 1).stream().collect(Collectors.joining("\n")).getBytes(StandardCharsets.UTF_8);
+            final InputStream ain = Base64.getMimeDecoder().wrap(new ByteArrayInputStream(armoredData));
+            final PGPPublicKeyRingCollection collection = new PGPPublicKeyRingCollection(ain, new JcaKeyFingerprintCalculator());
+            final PGPPublicKey key = collection.getPublicKey(signature.getKeyID());
+            signature.init(new JcaPGPContentVerifierBuilderProvider().setProvider(new BouncyCastleProvider()), key);
+            final byte[] buffer = new byte[1024];
+            int read;
+            while ((read = fin.read(buffer)) != -1) {
+                signature.update(buffer, 0, read);
+            }
+
+            // finally we verify the signature of the downloaded plugin zip matches the expected signature
+            if (signature.verify() == false) {
+                throw new IllegalStateException("signature verification for [" + urlString + "] failed");
+            }
+        }
+    }
+
+    /**
+     * An input stream to the raw bytes of the plugin ZIP.
+     *
+     * @param zip the path to the downloaded plugin ZIP
+     * @return an input stream to the raw bytes of the plugin ZIP.
+     * @throws IOException if an I/O exception occurs preparing the input stream
+     */
+    InputStream pluginZipInputStream(final Path zip) throws IOException {
+        return Files.newInputStream(zip);
+    }
+
+    /**
+     * Return the public key ID of the signing key that is expected to have signed the official plugin.
+     *
+     * @return the public key ID
+     */
+    String getPublicKeyId() {
+        return "D27D666CD88E42B4";
+    }
+
+    /**
+     * An input stream to the public key of the signing key.
+     *
+     * @return an input stream to the public key
+     */
+    InputStream getPublicKey() {
+        return InstallPluginCommand.class.getResourceAsStream("/public_key.asc");
+    }
+
     /**
      * Creates a URL and opens a connection.
      *
      * If the URL returns a 404, {@code null} is returned, otherwise the open URL object is returned.
      */
     // pkg private for tests
-    URL openUrl(String urlString) throws Exception {
+    URL openUrl(String urlString) throws IOException {
         URL checksumUrl = new URL(urlString);
         HttpURLConnection connection = (HttpURLConnection)checksumUrl.openConnection();
         if (connection.getResponseCode() == 404) {
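For readers who want the verification flow without the command's plumbing, here is a condensed sketch of the same BouncyCastle calls: parse the armored detached signature, look the signer's key up in a public-key ring, and stream the ZIP bytes through the signature object. It assumes the caller supplies the three streams, and it leans on PGPUtil.getDecoderStream to un-armor the key instead of the manual RFC4880 parsing above.

import java.io.InputStream;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openpgp.PGPPublicKey;
import org.bouncycastle.openpgp.PGPPublicKeyRingCollection;
import org.bouncycastle.openpgp.PGPSignature;
import org.bouncycastle.openpgp.PGPSignatureList;
import org.bouncycastle.openpgp.PGPUtil;
import org.bouncycastle.openpgp.jcajce.JcaPGPObjectFactory;
import org.bouncycastle.openpgp.operator.jcajce.JcaKeyFingerprintCalculator;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentVerifierBuilderProvider;

public final class DetachedSignatureCheck {
    // zipIn: the signed bytes; sigIn: the armored .asc signature; keyIn: the armored public key.
    static boolean verify(final InputStream zipIn, final InputStream sigIn, final InputStream keyIn) throws Exception {
        final JcaPGPObjectFactory factory = new JcaPGPObjectFactory(PGPUtil.getDecoderStream(sigIn));
        final PGPSignature signature = ((PGPSignatureList) factory.nextObject()).get(0);
        final PGPPublicKeyRingCollection collection =
                new PGPPublicKeyRingCollection(PGPUtil.getDecoderStream(keyIn), new JcaKeyFingerprintCalculator());
        final PGPPublicKey key = collection.getPublicKey(signature.getKeyID());
        signature.init(new JcaPGPContentVerifierBuilderProvider().setProvider(new BouncyCastleProvider()), key);
        final byte[] buffer = new byte[1024];
        int read;
        while ((read = zipIn.read(buffer)) != -1) {
            signature.update(buffer, 0, read);
        }
        return signature.verify();
    }
}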
@@ -605,11 +742,27 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
         return info;
     }

+    private static final String LIB_TOOLS_PLUGIN_CLI_CLASSPATH_JAR;
+
+    static {
+        LIB_TOOLS_PLUGIN_CLI_CLASSPATH_JAR =
+                String.format(Locale.ROOT, ".+%1$slib%1$stools%1$splugin-cli%1$s[^%1$s]+\\.jar", "(/|\\\\)");
+    }
+
     /** check a candidate plugin for jar hell before installing it */
     void jarHellCheck(PluginInfo candidateInfo, Path candidateDir, Path pluginsDir, Path modulesDir) throws Exception {
         // create list of current jars in classpath
-        final Set<URL> jars = new HashSet<>(JarHell.parseClassPath());
+        final Set<URL> classpath =
+                JarHell.parseClassPath()
+                        .stream()
+                        .filter(url -> {
+                            try {
+                                return url.toURI().getPath().matches(LIB_TOOLS_PLUGIN_CLI_CLASSPATH_JAR) == false;
+                            } catch (final URISyntaxException e) {
+                                throw new AssertionError(e);
+                            }
+                        })
+                        .collect(Collectors.toSet());

         // read existing bundles. this does some checks on the installation too.
         Set<PluginsService.Bundle> bundles = new HashSet<>(PluginsService.getPluginBundles(pluginsDir));

@@ -621,7 +774,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
         // TODO: optimize to skip any bundles not connected to the candidate plugin?
         Map<String, Set<URL>> transitiveUrls = new HashMap<>();
         for (PluginsService.Bundle bundle : sortedBundles) {
-            PluginsService.checkBundleJarHell(bundle, transitiveUrls);
+            PluginsService.checkBundleJarHell(classpath, bundle, transitiveUrls);
         }

         // TODO: no jars should be an error
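Because the plugin-cli jars now live on the CLI's own classpath under lib/tools/plugin-cli, they would otherwise trip the jar-hell check against jars a plugin legitimately bundles. The pattern built above matches those jars under either path-separator style, as this small demonstration shows (the paths are illustrative):

import java.util.Locale;

public final class ClasspathFilterDemo {
    public static void main(final String[] args) {
        final String pattern =
                String.format(Locale.ROOT, ".+%1$slib%1$stools%1$splugin-cli%1$s[^%1$s]+\\.jar", "(/|\\\\)");
        // plugin-cli jars are excluded from the jar-hell classpath...
        System.out.println("/usr/share/elasticsearch/lib/tools/plugin-cli/bcpg-jdk15on-1.59.jar".matches(pattern)); // true
        System.out.println("C:\\elasticsearch\\lib\\tools\\plugin-cli\\bcpg-jdk15on-1.59.jar".matches(pattern));    // true
        // ...while everything else stays subject to the check
        System.out.println("/usr/share/elasticsearch/lib/elasticsearch-6.4.0.jar".matches(pattern));                // false
    }
}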
@@ -0,0 +1,29 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: SKS 1.1.6
+Comment: Hostname: pgp.mit.edu
+
+mQENBFI3HsoBCADXDtbNJnxbPqB1vDNtCsqhe49vFYsZN9IOZsZXgp7aHjh6CJBDA+bGFOwy
+hbd7at35jQjWAw1O3cfYsKAmFy+Ar3LHCMkV3oZspJACTIgCrwnkic/9CUliQe324qvObU2Q
+RtP4Fl0zWcfb/S8UYzWXWIFuJqMvE9MaRY1bwUBvzoqavLGZj3SF1SPO+TB5QrHkrQHBsmX+
+Jda6d4Ylt8/t6CvMwgQNlrlzIO9WT+YN6zS+sqHd1YK/aY5qhoLNhp9G/HxhcSVCkLq8SStj
+1ZZ1S9juBPoXV1ZWNbxFNGwOh/NYGldD2kmBf3YgCqeLzHahsAEpvAm8TBa7Q9W21C8vABEB
+AAG0RUVsYXN0aWNzZWFyY2ggKEVsYXN0aWNzZWFyY2ggU2lnbmluZyBLZXkpIDxkZXZfb3Bz
+QGVsYXN0aWNzZWFyY2gub3JnPokBOAQTAQIAIgUCUjceygIbAwYLCQgHAwIGFQgCCQoLBBYC
+AwECHgECF4AACgkQ0n1mbNiOQrRzjAgAlTUQ1mgo3nK6BGXbj4XAJvuZDG0HILiUt+pPnz75
+nsf0NWhqR4yGFlmpuctgCmTD+HzYtV9fp9qW/bwVuJCNtKXk3sdzYABY+Yl0Cez/7C2GuGCO
+lbn0luCNT9BxJnh4mC9h/cKI3y5jvZ7wavwe41teqG14V+EoFSn3NPKmTxcDTFrV7SmVPxCB
+cQze00cJhprKxkuZMPPVqpBS+JfDQtzUQD/LSFfhHj9eD+Xe8d7sw+XvxB2aN4gnTlRzjL1n
+TRp0h2/IOGkqYfIG9rWmSLNlxhB2t+c0RsjdGM4/eRlPWylFbVMc5pmDpItrkWSnzBfkmXL3
+vO2X3WvwmSFiQbkBDQRSNx7KAQgA5JUlzcMW5/cuyZR8alSacKqhSbvoSqqbzHKcUQZmlzNM
+KGTABFG1yRx9r+wa/fvqP6OTRzRDvVS/cycws8YX7Ddum7x8uI95b9ye1/Xy5noPEm8cD+hp
+lnpU+PBQZJ5XJ2I+1l9Nixx47wPGXeClLqcdn0ayd+v+Rwf3/XUJrvccG2YZUiQ4jWZkoxsA
+07xx7Bj+Lt8/FKG7sHRFvePFU0ZS6JFx9GJqjSBbHRRkam+4emW3uWgVfZxuwcUCn1ayNgRt
+KiFv9jQrg2TIWEvzYx9tywTCxc+FFMWAlbCzi+m4WD+QUWWfDQ009U/WM0ks0KwwEwSk/UDu
+ToxGnKU2dQARAQABiQEfBBgBAgAJBQJSNx7KAhsMAAoJENJ9ZmzYjkK0c3MIAIE9hAR20mqJ
+WLcsxLtrRs6uNF1VrpB+4n/55QU7oxA1iVBO6IFu4qgsF12JTavnJ5MLaETlggXY+zDef9sy
+TPXoQctpzcaNVDmedwo1SiL03uMoblOvWpMR/Y0j6rm7IgrMWUDXDPvoPGjMl2q1iTeyHkMZ
+EyUJ8SKsaHh4jV9wp9KmC8C+9CwMukL7vM5w8cgvJoAwsp3Fn59AxWthN3XJYcnMfStkIuWg
+R7U2r+a210W6vnUxU4oN0PmMcursYPyeV0NX/KQeUeNMwGTFB6QHS/anRaGQewijkrYYoTNt
+fllxIu9XYmiBERQ/qPDlGRlOgVTd9xUfHFkzB52c70E=
+=92oX
+-----END PGP PUBLIC KEY BLOCK-----
@@ -23,6 +23,26 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 import com.google.common.jimfs.Configuration;
 import com.google.common.jimfs.Jimfs;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
+import org.bouncycastle.bcpg.ArmoredOutputStream;
+import org.bouncycastle.bcpg.BCPGOutputStream;
+import org.bouncycastle.bcpg.HashAlgorithmTags;
+import org.bouncycastle.jce.provider.BouncyCastleProvider;
+import org.bouncycastle.openpgp.PGPEncryptedData;
+import org.bouncycastle.openpgp.PGPException;
+import org.bouncycastle.openpgp.PGPKeyPair;
+import org.bouncycastle.openpgp.PGPPrivateKey;
+import org.bouncycastle.openpgp.PGPPublicKey;
+import org.bouncycastle.openpgp.PGPSecretKey;
+import org.bouncycastle.openpgp.PGPSignature;
+import org.bouncycastle.openpgp.PGPSignatureGenerator;
+import org.bouncycastle.openpgp.operator.PGPDigestCalculator;
+import org.bouncycastle.openpgp.operator.bc.BcPBESecretKeyDecryptorBuilder;
+import org.bouncycastle.openpgp.operator.bc.BcPGPContentSignerBuilder;
+import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder;
+import org.bouncycastle.openpgp.operator.jcajce.JcaPGPDigestCalculatorProviderBuilder;
+import org.bouncycastle.openpgp.operator.jcajce.JcaPGPKeyPair;
+import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyEncryptorBuilder;
 import org.elasticsearch.Build;
 import org.elasticsearch.Version;
 import org.elasticsearch.cli.ExitCodes;
@@ -44,6 +64,8 @@ import org.junit.After;
 import org.junit.Before;

 import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringReader;
@@ -66,13 +88,19 @@ import java.nio.file.attribute.PosixFileAttributeView;
 import java.nio.file.attribute.PosixFileAttributes;
 import java.nio.file.attribute.PosixFilePermission;
 import java.nio.file.attribute.UserPrincipal;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
 import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
+import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -88,6 +116,7 @@ import static org.hamcrest.Matchers.hasToString;
 import static org.hamcrest.Matchers.not;

 @LuceneTestCase.SuppressFileSystems("*")
+@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30900")
 public class InstallPluginCommandTests extends ESTestCase {

     private InstallPluginCommand skipJarHellCommand;
@@ -800,8 +829,16 @@ public class InstallPluginCommandTests extends ESTestCase {
         skipJarHellCommand.execute(terminal, pluginZip, isBatch, env.v2());
     }

-    void assertInstallPluginFromUrl(String pluginId, String name, String url, String stagingHash, boolean isSnapshot,
-                                    String shaExtension, Function<byte[], String> shaCalculator) throws Exception {
+    void assertInstallPluginFromUrl(
+            final String pluginId,
+            final String name,
+            final String url,
+            final String stagingHash,
+            final boolean isSnapshot,
+            final String shaExtension,
+            final Function<byte[], String> shaCalculator,
+            final PGPSecretKey secretKey,
+            final BiFunction<byte[], PGPSecretKey, String> signature) throws Exception {
         Tuple<Path, Environment> env = createEnv(fs, temp);
         Path pluginDir = createPluginDir(temp);
         Path pluginZip = createPlugin(name, pluginDir);
@@ -814,18 +851,56 @@ public class InstallPluginCommandTests extends ESTestCase {
                 return downloadedPath;
             }
             @Override
-            URL openUrl(String urlString) throws Exception {
-                String expectedUrl = url + shaExtension;
-                if (expectedUrl.equals(urlString)) {
+            URL openUrl(String urlString) throws IOException {
+                if ((url + shaExtension).equals(urlString)) {
                     // calc sha and return file URL to it
                     Path shaFile = temp.apply("shas").resolve("downloaded.zip" + shaExtension);
                     byte[] zipbytes = Files.readAllBytes(pluginZip);
                     String checksum = shaCalculator.apply(zipbytes);
                     Files.write(shaFile, checksum.getBytes(StandardCharsets.UTF_8));
                     return shaFile.toUri().toURL();
+                } else if ((url + ".asc").equals(urlString)) {
+                    final Path ascFile = temp.apply("asc").resolve("downloaded.zip" + ".asc");
+                    final byte[] zipBytes = Files.readAllBytes(pluginZip);
+                    final String asc = signature.apply(zipBytes, secretKey);
+                    Files.write(ascFile, asc.getBytes(StandardCharsets.UTF_8));
+                    return ascFile.toUri().toURL();
                 }
                 return null;
             }

+            @Override
+            void verifySignature(Path zip, String urlString) throws IOException, PGPException {
+                if (InstallPluginCommand.OFFICIAL_PLUGINS.contains(name)) {
+                    super.verifySignature(zip, urlString);
+                } else {
+                    throw new UnsupportedOperationException("verify signature should not be called for unofficial plugins");
+                }
+            }
+
+            @Override
+            InputStream pluginZipInputStream(Path zip) throws IOException {
+                return new ByteArrayInputStream(Files.readAllBytes(zip));
+            }
+
+            @Override
+            String getPublicKeyId() {
+                return Long.toHexString(secretKey.getKeyID()).toUpperCase(Locale.ROOT);
+            }
+
+            @Override
+            InputStream getPublicKey() {
+                try {
+                    final ByteArrayOutputStream output = new ByteArrayOutputStream();
+                    final ArmoredOutputStream armored = new ArmoredOutputStream(output);
+                    secretKey.getPublicKey().encode(armored);
+                    armored.close();
+                    return new ByteArrayInputStream(output.toByteArray());
+                } catch (final IOException e) {
+                    throw new AssertionError(e);
+                }
+            }
+
             @Override
             boolean urlExists(Terminal terminal, String urlString) throws IOException {
                 return urlString.equals(url);
@@ -851,11 +926,12 @@ public class InstallPluginCommandTests extends ESTestCase {

     public void assertInstallPluginFromUrl(
             final String pluginId, final String name, final String url, final String stagingHash, boolean isSnapshot) throws Exception {
-        MessageDigest digest = MessageDigest.getInstance("SHA-512");
-        assertInstallPluginFromUrl(pluginId, name, url, stagingHash, isSnapshot, ".sha512", checksumAndFilename(digest, url));
+        final MessageDigest digest = MessageDigest.getInstance("SHA-512");
+        assertInstallPluginFromUrl(
+                pluginId, name, url, stagingHash, isSnapshot, ".sha512", checksumAndFilename(digest, url), newSecretKey(), this::signature);
     }

-    public void testOfficalPlugin() throws Exception {
+    public void testOfficialPlugin() throws Exception {
         String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
         assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false);
     }

@@ -883,13 +959,13 @@ public class InstallPluginCommandTests extends ESTestCase {
             e, hasToString(containsString("attempted to install release build of official plugin on snapshot build of Elasticsearch")));
     }

-    public void testOfficalPluginStaging() throws Exception {
+    public void testOfficialPluginStaging() throws Exception {
         String url = "https://staging.elastic.co/" + Version.CURRENT + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
                 + Version.CURRENT + ".zip";
         assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", false);
     }

-    public void testOfficalPlatformPlugin() throws Exception {
+    public void testOfficialPlatformPlugin() throws Exception {
         String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Platforms.PLATFORM_NAME +
                 "-" + Version.CURRENT + ".zip";
         assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false);
@@ -905,7 +981,7 @@ public class InstallPluginCommandTests extends ESTestCase {
         assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", true);
     }

-    public void testOfficalPlatformPluginStaging() throws Exception {
+    public void testOfficialPlatformPluginStaging() throws Exception {
         String url = "https://staging.elastic.co/" + Version.CURRENT + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
                 + Platforms.PLATFORM_NAME + "-"+ Version.CURRENT + ".zip";
         assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", false);
@@ -924,7 +1000,7 @@ public class InstallPluginCommandTests extends ESTestCase {
     public void testMavenSha1Backcompat() throws Exception {
         String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
         MessageDigest digest = MessageDigest.getInstance("SHA-1");
-        assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".sha1", checksum(digest));
+        assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".sha1", checksum(digest), null, (b, p) -> null);
         assertTrue(terminal.getOutput(), terminal.getOutput().contains("sha512 not found, falling back to sha1"));
     }

@@ -932,7 +1008,7 @@ public class InstallPluginCommandTests extends ESTestCase {
         String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
         MessageDigest digest = MessageDigest.getInstance("SHA-1");
         UserException e = expectThrows(UserException.class, () ->
-            assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false, ".sha1", checksum(digest)));
+            assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false, ".sha1", checksum(digest), null, (b, p) -> null));
         assertEquals(ExitCodes.IO_ERROR, e.exitCode);
         assertEquals("Plugin checksum missing: " + url + ".sha512", e.getMessage());
     }
@@ -940,7 +1016,8 @@ public class InstallPluginCommandTests extends ESTestCase {
     public void testMavenShaMissing() throws Exception {
         String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
         UserException e = expectThrows(UserException.class, () ->
-            assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".dne", bytes -> null));
+            assertInstallPluginFromUrl(
+                "mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".dne", bytes -> null, null, (b, p) -> null));
         assertEquals(ExitCodes.IO_ERROR, e.exitCode);
         assertEquals("Plugin checksum missing: " + url + ".sha1", e.getMessage());
     }
@@ -948,8 +1025,9 @@ public class InstallPluginCommandTests extends ESTestCase {
     public void testInvalidShaFileMissingFilename() throws Exception {
         String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
         MessageDigest digest = MessageDigest.getInstance("SHA-512");
-        UserException e = expectThrows(UserException.class, () ->
-            assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false, ".sha512", checksum(digest)));
+        UserException e = expectThrows(UserException.class,
+            () -> assertInstallPluginFromUrl(
+                "analysis-icu", "analysis-icu", url, null, false, ".sha512", checksum(digest), null, (b, p) -> null));
         assertEquals(ExitCodes.IO_ERROR, e.exitCode);
         assertTrue(e.getMessage(), e.getMessage().startsWith("Invalid checksum file"));
     }
@@ -965,7 +1043,9 @@ public class InstallPluginCommandTests extends ESTestCase {
                 null,
                 false,
                 ".sha512",
-                checksumAndString(digest, "  repository-s3-" + Version.CURRENT + ".zip")));
+                checksumAndString(digest, "  repository-s3-" + Version.CURRENT + ".zip"),
+                null,
+                (b, p) -> null));
         assertEquals(ExitCodes.IO_ERROR, e.exitCode);
         assertThat(e, hasToString(matches("checksum file at \\[.*\\] is not for this plugin")));
     }
@@ -981,7 +1061,9 @@ public class InstallPluginCommandTests extends ESTestCase {
                 null,
                 false,
                 ".sha512",
-                checksumAndString(digest, "  analysis-icu-" + Version.CURRENT + ".zip\nfoobar")));
+                checksumAndString(digest, "  analysis-icu-" + Version.CURRENT + ".zip\nfoobar"),
+                null,
+                (b, p) -> null));
         assertEquals(ExitCodes.IO_ERROR, e.exitCode);
         assertTrue(e.getMessage(), e.getMessage().startsWith("Invalid checksum file"));
     }
@@ -996,7 +1078,9 @@ public class InstallPluginCommandTests extends ESTestCase {
                 null,
                 false,
                 ".sha512",
-                bytes -> "foobar  analysis-icu-" + Version.CURRENT + ".zip"));
+                bytes -> "foobar  analysis-icu-" + Version.CURRENT + ".zip",
+                null,
+                (b, p) -> null));
         assertEquals(ExitCodes.IO_ERROR, e.exitCode);
         assertTrue(e.getMessage(), e.getMessage().contains("SHA-512 mismatch, expected foobar"));
     }
@ -1004,11 +1088,77 @@ public class InstallPluginCommandTests extends ESTestCase {
|
||||||
public void testSha1Mismatch() throws Exception {
|
public void testSha1Mismatch() throws Exception {
|
||||||
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
|
String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
|
||||||
UserException e = expectThrows(UserException.class, () ->
|
UserException e = expectThrows(UserException.class, () ->
|
||||||
assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".sha1", bytes -> "foobar"));
|
assertInstallPluginFromUrl(
|
||||||
|
"mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".sha1", bytes -> "foobar", null, (b, p) -> null));
|
||||||
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
|
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
|
||||||
assertTrue(e.getMessage(), e.getMessage().contains("SHA-1 mismatch, expected foobar"));
|
assertTrue(e.getMessage(), e.getMessage().contains("SHA-1 mismatch, expected foobar"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testPublicKeyIdMismatchToExpectedPublicKeyId() throws Exception {
|
||||||
|
final String icu = "analysis-icu";
|
||||||
|
final String url =
|
||||||
|
"https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" + Version.CURRENT + ".zip";
|
||||||
|
final MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||||
|
/*
|
||||||
|
* To setup a situation where the expected public key ID does not match the public key ID used for signing, we generate a new public
|
||||||
|
* key at the moment of signing (see the signature invocation). Note that this key will not match the key that we push down to the
|
||||||
|
* install plugin command.
|
||||||
|
*/
|
||||||
|
final PGPSecretKey signingKey = newSecretKey(); // the actual key used for signing
|
||||||
|
final String actualID = Long.toHexString(signingKey.getKeyID()).toUpperCase(Locale.ROOT);
|
||||||
|
final BiFunction<byte[], PGPSecretKey, String> signature = (b, p) -> signature(b, signingKey);
|
||||||
|
final PGPSecretKey verifyingKey = newSecretKey(); // the expected key used for signing
|
||||||
|
final String expectedID = Long.toHexString(verifyingKey.getKeyID()).toUpperCase(Locale.ROOT);
|
||||||
|
final IllegalStateException e = expectThrows(
|
||||||
|
IllegalStateException.class,
|
||||||
|
() ->
|
||||||
|
assertInstallPluginFromUrl(
|
||||||
|
icu, icu, url, null, false, ".sha512", checksumAndFilename(digest, url), verifyingKey, signature));
|
||||||
|
assertThat(e, hasToString(containsString("key id [" + actualID + "] does not match expected key id [" + expectedID + "]")));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testFailedSignatureVerification() throws Exception {
|
||||||
|
final String icu = "analysis-icu";
|
||||||
|
final String url =
|
||||||
|
"https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" + Version.CURRENT + ".zip";
|
||||||
|
final MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||||
|
/*
|
||||||
|
* To setup a situation where signature verification fails, we will mutate the input byte array by modifying a single byte to some
|
||||||
|
* random byte value other than the actual value. This is enough to change the signature and cause verification to intentionally
|
||||||
|
* fail.
|
||||||
|
*/
|
||||||
|
final BiFunction<byte[], PGPSecretKey, String> signature = (b, p) -> {
|
||||||
|
final byte[] bytes = Arrays.copyOf(b, b.length);
|
||||||
|
bytes[0] = randomValueOtherThan(b[0], ESTestCase::randomByte);
|
||||||
|
return signature(bytes, p);
|
||||||
|
};
|
||||||
|
final IllegalStateException e = expectThrows(
|
||||||
|
IllegalStateException.class,
|
||||||
|
() ->
|
||||||
|
assertInstallPluginFromUrl(
|
||||||
|
icu, icu, url, null, false, ".sha512", checksumAndFilename(digest, url), newSecretKey(), signature));
|
||||||
|
assertThat(e, hasToString(equalTo("java.lang.IllegalStateException: signature verification for [" + url + "] failed")));
|
||||||
|
}
|
||||||
|
|
||||||
|
public PGPSecretKey newSecretKey() throws NoSuchAlgorithmException, NoSuchProviderException, PGPException {
|
||||||
|
final KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
|
||||||
|
kpg.initialize(2048);
|
||||||
|
final KeyPair pair = kpg.generateKeyPair();
|
||||||
|
final PGPDigestCalculator sha1Calc = new JcaPGPDigestCalculatorProviderBuilder().build().get(HashAlgorithmTags.SHA1);
|
||||||
|
final PGPKeyPair pkp = new JcaPGPKeyPair(PGPPublicKey.RSA_GENERAL, pair, new Date());
|
||||||
|
return new PGPSecretKey(
|
||||||
|
PGPSignature.DEFAULT_CERTIFICATION,
|
||||||
|
pkp,
|
||||||
|
"example@example.com",
|
||||||
|
sha1Calc,
|
||||||
|
null,
|
||||||
|
null,
|
||||||
|
new JcaPGPContentSignerBuilder(pkp.getPublicKey().getAlgorithm(), HashAlgorithmTags.SHA1),
|
||||||
|
new JcePBESecretKeyEncryptorBuilder(PGPEncryptedData.CAST5, sha1Calc)
|
||||||
|
.setProvider(new BouncyCastleProvider())
|
||||||
|
.build("passphrase".toCharArray()));
|
||||||
|
}
|
||||||
|
|
||||||
private Function<byte[], String> checksum(final MessageDigest digest) {
|
private Function<byte[], String> checksum(final MessageDigest digest) {
|
||||||
return checksumAndString(digest, "");
|
return checksumAndString(digest, "");
|
||||||
}
|
}
|
||||||
|
@@ -1022,6 +1172,32 @@ public class InstallPluginCommandTests extends ESTestCase {
         return bytes -> MessageDigests.toHexString(digest.digest(bytes)) + s;
     }
 
+    private String signature(final byte[] bytes, final PGPSecretKey secretKey) {
+        try {
+            final PGPPrivateKey privateKey
+                = secretKey.extractPrivateKey(
+                    new BcPBESecretKeyDecryptorBuilder(
+                        new JcaPGPDigestCalculatorProviderBuilder().build()).build("passphrase".toCharArray()));
+            final PGPSignatureGenerator generator =
+                new PGPSignatureGenerator(
+                    new BcPGPContentSignerBuilder(privateKey.getPublicKeyPacket().getAlgorithm(), HashAlgorithmTags.SHA512));
+            generator.init(PGPSignature.BINARY_DOCUMENT, privateKey);
+            final ByteArrayOutputStream output = new ByteArrayOutputStream();
+            try (BCPGOutputStream pout = new BCPGOutputStream(new ArmoredOutputStream(output));
+                 InputStream is = new ByteArrayInputStream(bytes)) {
+                final byte[] buffer = new byte[1024];
+                int read;
+                while ((read = is.read(buffer)) != -1) {
+                    generator.update(buffer, 0, read);
+                }
+                generator.generate().encode(pout);
+            }
+            return new String(output.toByteArray(), "UTF-8");
+        } catch (IOException | PGPException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
     // checks the plugin requires a policy confirmation, and does not install when that is rejected by the user
     // the plugin is installed after this method completes
     private void assertPolicyConfirmation(Tuple<Path, Environment> env, String pluginZip, String... warnings) throws Exception {
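An aside on the PGP pieces above: `signature(...)` produces an ASCII-armored detached signature, and the install command is expected to verify it against the downloaded bytes. A minimal sketch of that verifying side with the BouncyCastle OpenPGP API (illustrative only; the class and method below are not from the Elasticsearch codebase):

[source,java]
--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.bouncycastle.openpgp.PGPPublicKey;
import org.bouncycastle.openpgp.PGPSignature;
import org.bouncycastle.openpgp.PGPSignatureList;
import org.bouncycastle.openpgp.PGPUtil;
import org.bouncycastle.openpgp.jcajce.JcaPGPObjectFactory;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentVerifierBuilderProvider;

final class SignatureVerifySketch {

    // Verifies an ASCII-armored detached signature (like the one produced by
    // the signature(...) helper in the diff) against the original bytes.
    static boolean verify(final byte[] data, final String armoredSignature, final PGPPublicKey publicKey) throws Exception {
        try (InputStream in = PGPUtil.getDecoderStream(
                 new ByteArrayInputStream(armoredSignature.getBytes(StandardCharsets.UTF_8)))) {
            final PGPSignatureList signatures = (PGPSignatureList) new JcaPGPObjectFactory(in).nextObject();
            final PGPSignature signature = signatures.get(0);
            signature.init(new JcaPGPContentVerifierBuilderProvider(), publicKey);
            signature.update(data); // must be the exact bytes that were signed
            return signature.verify();
        }
    }
}
--------------------------------------------------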
@@ -432,7 +432,15 @@ Remote hosts have to be explicitly whitelisted in elasticsearch.yaml using the
 `reindex.remote.whitelist` property. It can be set to a comma delimited list
 of allowed remote `host` and `port` combinations (e.g.
 `otherhost:9200, another:9200, 127.0.10.*:9200, localhost:*`). Scheme is
-ignored by the whitelist - only host and port are used.
+ignored by the whitelist - only host and port are used, for example:
+
+[source,yaml]
+--------------------------------------------------
+reindex.remote.whitelist: "otherhost:9200, another:9200, 127.0.10.*:9200, localhost:*"
+--------------------------------------------------
+
+The whitelist must be configured on any nodes that will coordinate the reindex.
+
 This feature should work with remote clusters of any version of Elasticsearch
 you are likely to find. This should allow you to upgrade from any version of
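To make the whitelist semantics concrete: entries are literal `host:port` pairs in which `*` acts as a wildcard. A self-contained sketch of that matching (Elasticsearch's actual implementation differs; `WhitelistSketch` and its helper are hypothetical names):

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public final class WhitelistSketch {

    static boolean matches(final List<String> patterns, final String hostAndPort) {
        for (final String p : patterns) {
            // translate the * wildcard into a regex, quoting everything else literally
            final String regex = ("\\Q" + p + "\\E").replace("*", "\\E.*\\Q");
            if (Pattern.matches(regex, hostAndPort)) {
                return true;
            }
        }
        return false;
    }

    public static void main(final String[] args) {
        final String whitelist = "otherhost:9200, another:9200, 127.0.10.*:9200, localhost:*";
        final List<String> patterns = Arrays.asList(whitelist.split("\\s*,\\s*"));
        System.out.println(matches(patterns, "127.0.10.42:9200")); // true: wildcard host
        System.out.println(matches(patterns, "evilhost:9200"));    // false: not whitelisted
    }
}
--------------------------------------------------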
@@ -1,16 +1,39 @@
 [[indices-create-index]]
 == Create Index
 
-The create index API allows to instantiate an index. Elasticsearch
-provides support for multiple indices, including executing operations
-across several indices.
+The Create Index API is used to manually create an index in Elasticsearch. All documents in Elasticsearch
+are stored inside of one index or another.
+
+The most basic command is the following:
+
+[source,js]
+--------------------------------------------------
+PUT twitter
+--------------------------------------------------
+// CONSOLE
+
+This creates an index named `twitter` with all default settings.
+
+[NOTE]
+.Index name limitations
+======================================================
+There are several limitations to what you can name your index. The complete list of limitations is:
+
+- Lowercase only
+- Cannot include `\`, `/`, `*`, `?`, `"`, `<`, `>`, `|`, ` ` (space character), `,`, `#`
+- Indices prior to 7.0 could contain a colon (`:`), but that's been deprecated and won't be supported in 7.0+
+- Cannot start with `-`, `_`, `+`
+- Cannot be `.` or `..`
+- Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster)
+
+======================================================
 
 [float]
 [[create-index-settings]]
 === Index Settings
 
 Each index created can have specific settings
-associated with it.
+associated with it, defined in the body:
 
 [source,js]
 --------------------------------------------------
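The naming rules in the NOTE above translate directly into a validation routine. A standalone sketch (the real checks live in Elasticsearch's index-creation path; this class is illustrative, and it treats the colon rule as a plain rejection rather than a deprecation warning):

[source,java]
--------------------------------------------------
import java.nio.charset.StandardCharsets;
import java.util.Locale;

public final class IndexNameSketch {

    static boolean isValidIndexName(final String name) {
        if (name.isEmpty() || name.equals(".") || name.equals("..")) {
            return false;
        }
        if (name.equals(name.toLowerCase(Locale.ROOT)) == false) {
            return false; // lowercase only
        }
        if (name.startsWith("-") || name.startsWith("_") || name.startsWith("+")) {
            return false; // may not start with -, _ or +
        }
        for (final char c : "\\/*?\"<>| ,#:".toCharArray()) {
            if (name.indexOf(c) >= 0) {
                return false; // forbidden character (':' is deprecated; rejected here for simplicity)
            }
        }
        // the 255 limit is measured in bytes, so multi-byte characters count more than once
        return name.getBytes(StandardCharsets.UTF_8).length <= 255;
    }

    public static void main(final String[] args) {
        System.out.println(isValidIndexName("twitter"));   // true
        System.out.println(isValidIndexName("Twitter"));   // false: uppercase
        System.out.println(isValidIndexName("_internal")); // false: leading underscore
    }
}
--------------------------------------------------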
@@ -28,25 +51,6 @@ PUT twitter
 <1> Default for `number_of_shards` is 1
 <2> Default for `number_of_replicas` is 1 (ie one replica for each primary shard)
 
-The above second curl example shows how an index called `twitter` can be
-created with specific settings for it using http://www.yaml.org[YAML].
-In this case, creating an index with 3 shards, each with 2 replicas. The
-index settings can also be defined with http://www.json.org[JSON]:
-
-[source,js]
---------------------------------------------------
-PUT twitter
-{
-    "settings" : {
-        "index" : {
-            "number_of_shards" : 3,
-            "number_of_replicas" : 2
-        }
-    }
-}
---------------------------------------------------
-// CONSOLE
-
 or more simplified
 
 [source,js]
@@ -35,7 +35,8 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
 * <<breaking_70_java_changes>>
 * <<breaking_70_settings_changes>>
 * <<breaking_70_scripting_changes>>
+* <<breaking_70_snapshotstats_changes>>
 
 include::migrate_7_0/aggregations.asciidoc[]
 include::migrate_7_0/analysis.asciidoc[]
@@ -49,3 +49,4 @@ include::migrate_7_0/api.asciidoc[]
 include::migrate_7_0/java.asciidoc[]
 include::migrate_7_0/settings.asciidoc[]
 include::migrate_7_0/scripting.asciidoc[]
+include::migrate_7_0/snapshotstats.asciidoc[]
@@ -0,0 +1,13 @@
+[[breaking_70_snapshotstats_changes]]
+=== Snapshot stats changes
+
+Snapshot stats details are provided in a new structured way:
+
+* `total` section for all the files that are referenced by the snapshot.
+* `incremental` section for those files that actually needed to be copied over as part of the incremental snapshotting.
+* In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied.
+
+==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed
+
+* Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`.
+* Properties `processed_files` and `processed_size_in_bytes` are removed and should be replaced by values of nested object `processed`.
@@ -563,6 +563,54 @@ GET /_snapshot/my_backup/snapshot_1/_status
 // CONSOLE
 // TEST[continued]
 
+The output looks similar to the following:
+
+[source,js]
+--------------------------------------------------
+{
+  "snapshots": [
+    {
+      "snapshot": "snapshot_1",
+      "repository": "my_backup",
+      "uuid": "XuBo4l4ISYiVg0nYUen9zg",
+      "state": "SUCCESS",
+      "include_global_state": true,
+      "shards_stats": {
+        "initializing": 0,
+        "started": 0,
+        "finalizing": 0,
+        "done": 5,
+        "failed": 0,
+        "total": 5
+      },
+      "stats": {
+        "incremental": {
+          "file_count": 8,
+          "size_in_bytes": 4704
+        },
+        "processed": {
+          "file_count": 7,
+          "size_in_bytes": 4254
+        },
+        "total": {
+          "file_count": 8,
+          "size_in_bytes": 4704
+        },
+        "start_time_in_millis": 1526280280355,
+        "time_in_millis": 358
+      }
+    }
+  ]
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+The output is composed of different sections. The `stats` sub-object provides details on the number and size of files that were
+snapshotted. As snapshots are incremental, copying only the Lucene segments that are not already in the repository,
+the `stats` object contains a `total` section for all the files that are referenced by the snapshot, as well as an `incremental` section
+for those files that actually needed to be copied over as part of the incremental snapshotting. In case of a snapshot that's still
+in progress, there's also a `processed` section that contains information about the files that are in the process of being copied.
+
 Multiple ids are also supported:
 
 [source,sh]
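A small arithmetic illustration of the `total`/`incremental` split described above, using the numbers from the sample response (the derived "reused" figure is computed here for illustration; it is not a field in the API):

[source,java]
--------------------------------------------------
public final class SnapshotStatsSummary {
    public static void main(final String[] args) {
        // values copied from the sample JSON response above
        final int totalFileCount = 8;
        final long totalSizeInBytes = 4704;
        final int incrementalFileCount = 8;
        final long incrementalSizeInBytes = 4704;

        // bytes already present in the repository that did not need copying
        final long reusedBytes = totalSizeInBytes - incrementalSizeInBytes;
        System.out.printf("copied %d of %d files, reused %d bytes%n",
            incrementalFileCount, totalFileCount, reusedBytes);
    }
}
--------------------------------------------------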
@@ -1,8 +1,10 @@
 
-When you shut down a node, the allocation process waits for one minute
-before starting to replicate the shards on that node to other nodes
-in the cluster, causing a lot of wasted I/O. You can avoid racing the clock
-by disabling allocation before shutting down the node:
+When you shut down a node, the allocation process waits for
+`index.unassigned.node_left.delayed_timeout` (by default, one minute) before
+starting to replicate the shards on that node to other nodes in the cluster,
+which can involve a lot of I/O. Since the node is shortly going to be
+restarted, this I/O is unnecessary. You can avoid racing the clock by disabling
+allocation before shutting down the node:
 
 [source,js]
 --------------------------------------------------
|
||||||
import org.apache.lucene.expressions.SimpleBindings;
|
import org.apache.lucene.expressions.SimpleBindings;
|
||||||
import org.apache.lucene.expressions.js.JavascriptCompiler;
|
import org.apache.lucene.expressions.js.JavascriptCompiler;
|
||||||
import org.apache.lucene.expressions.js.VariableContext;
|
import org.apache.lucene.expressions.js.VariableContext;
|
||||||
|
import org.apache.lucene.index.LeafReaderContext;
|
||||||
import org.apache.lucene.queries.function.ValueSource;
|
import org.apache.lucene.queries.function.ValueSource;
|
||||||
import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
|
import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
|
||||||
|
import org.apache.lucene.search.Scorer;
|
||||||
import org.apache.lucene.search.SortField;
|
import org.apache.lucene.search.SortField;
|
||||||
import org.elasticsearch.SpecialPermission;
|
import org.elasticsearch.SpecialPermission;
|
||||||
import org.elasticsearch.common.Nullable;
|
import org.elasticsearch.common.Nullable;
|
||||||
|
@@ -39,12 +41,14 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.script.ClassPermission;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.FilterScript;
+import org.elasticsearch.script.ScoreScript;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptEngine;
 import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.lookup.SearchLookup;
 
+import java.io.IOException;
 import java.security.AccessControlContext;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
@@ -111,6 +115,9 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
         } else if (context.instanceClazz.equals(FilterScript.class)) {
             FilterScript.Factory factory = (p, lookup) -> newFilterScript(expr, lookup, p);
             return context.factoryClazz.cast(factory);
+        } else if (context.instanceClazz.equals(ScoreScript.class)) {
+            ScoreScript.Factory factory = (p, lookup) -> newScoreScript(expr, lookup, p);
+            return context.factoryClazz.cast(factory);
         }
         throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]");
     }
@@ -261,6 +268,42 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
         };
     }
 
+    private ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) {
+        SearchScript.LeafFactory searchLeafFactory = newSearchScript(expr, lookup, vars);
+        return new ScoreScript.LeafFactory() {
+            @Override
+            public boolean needs_score() {
+                return searchLeafFactory.needs_score();
+            }
+
+            @Override
+            public ScoreScript newInstance(LeafReaderContext ctx) throws IOException {
+                SearchScript script = searchLeafFactory.newInstance(ctx);
+                return new ScoreScript(vars, lookup, ctx) {
+                    @Override
+                    public double execute() {
+                        return script.runAsDouble();
+                    }
+
+                    @Override
+                    public void setDocument(int docid) {
+                        script.setDocument(docid);
+                    }
+
+                    @Override
+                    public void setScorer(Scorer scorer) {
+                        script.setScorer(scorer);
+                    }
+
+                    @Override
+                    public double get_score() {
+                        return script.getScore();
+                    }
+                };
+            }
+        };
+    }
+
     /**
      * converts a ParseException at compile-time or link-time to a ScriptException
      */
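The `newScoreScript` addition above is a textbook adapter: the new `ScoreScript` API is implemented by delegating every call to the existing `SearchScript` machinery. A self-contained model of that pattern with stand-in interfaces (not the real Elasticsearch types):

[source,java]
--------------------------------------------------
public final class AdapterSketch {

    interface LegacyScript { double runAsDouble(); }

    interface ScoreScript { double execute(); }

    // execute() delegates straight to runAsDouble(), mirroring the diff above
    static ScoreScript adapt(final LegacyScript legacy) {
        return legacy::runAsDouble;
    }

    public static void main(final String[] args) {
        final LegacyScript legacy = () -> 42.0;
        System.out.println(adapt(legacy).execute()); // prints 42.0
    }
}
--------------------------------------------------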
@@ -30,9 +30,9 @@ import org.apache.lucene.index.Term;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.ScriptPlugin;
+import org.elasticsearch.script.ScoreScript;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptEngine;
-import org.elasticsearch.script.SearchScript;
 
 /**
  * An example script plugin that adds a {@link ScriptEngine} implementing expert scoring.
@@ -54,12 +54,12 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin {
 
         @Override
         public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) {
-            if (context.equals(SearchScript.SCRIPT_SCORE_CONTEXT) == false) {
+            if (context.equals(ScoreScript.CONTEXT) == false) {
                 throw new IllegalArgumentException(getType() + " scripts cannot be used for context [" + context.name + "]");
             }
             // we use the script "source" as the script identifier
             if ("pure_df".equals(scriptSource)) {
-                SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() {
+                ScoreScript.Factory factory = (p, lookup) -> new ScoreScript.LeafFactory() {
                     final String field;
                     final String term;
                     {
@@ -74,18 +74,18 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin {
                     }
 
                     @Override
-                    public SearchScript newInstance(LeafReaderContext context) throws IOException {
+                    public ScoreScript newInstance(LeafReaderContext context) throws IOException {
                         PostingsEnum postings = context.reader().postings(new Term(field, term));
                         if (postings == null) {
                             // the field and/or term don't exist in this segment, so always return 0
-                            return new SearchScript(p, lookup, context) {
+                            return new ScoreScript(p, lookup, context) {
                                 @Override
-                                public double runAsDouble() {
+                                public double execute() {
                                     return 0.0d;
                                 }
                             };
                         }
-                        return new SearchScript(p, lookup, context) {
+                        return new ScoreScript(p, lookup, context) {
                             int currentDocid = -1;
                             @Override
                             public void setDocument(int docid) {
@@ -100,7 +100,7 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin {
                                 currentDocid = docid;
                             }
                             @Override
-                            public double runAsDouble() {
+                            public double execute() {
                                 if (postings.docID() != currentDocid) {
                                     // advance moved past the current doc, so this doc has no occurrences of the term
                                     return 0.0d;
@@ -25,9 +25,3 @@ dependencies {
     testCompile project(path: ':modules:rank-eval', configuration: 'runtime')
     testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
 }
-
-/*
- * One of the integration tests doesn't work with the zip distribution
- * and will be fixed later.
- * Tracked by https://github.com/elastic/elasticsearch/issues/30628
- */
@@ -0,0 +1,28 @@
+---
+setup:
+  - do:
+      indices.create:
+        body:
+          settings:
+            index:
+              number_of_shards: 1
+              number_of_replicas: 1
+        index: test-index
+---
+Test retrieval of default settings:
+  - skip:
+      version: " - 6.3.99"
+      reason: include_defaults will not work in mixed-mode clusters containing nodes pre-6.4
+  - do:
+      indices.get_settings:
+        flat_settings: true
+        index: test-index
+  - is_false:
+      test-index.settings.index\.refresh_interval
+  - do:
+      indices.get_settings:
+        include_defaults: true
+        flat_settings: true
+        index: test-index
+  - match:
+      test-index.defaults.index\.refresh_interval: "1s"
@@ -23,15 +23,3 @@ Test reset index settings:
     indices.get_settings:
       flat_settings: false
   - is_false: test-index.settings.index\.refresh_interval
-
-# Disabled until https://github.com/elastic/elasticsearch/pull/29229 is back-ported
-# That PR changed the execution path of index settings default to be on the master
-# until the PR is back-ported the old master will not return default settings.
-#
-# - do:
-#     indices.get_settings:
-#       include_defaults: true
-#       flat_settings: true
-#       index: test-index
-# - match:
-#     test-index.defaults.index\.refresh_interval: "1s"
@@ -52,7 +52,7 @@ setup:
 ---
 "Verify created repository":
   - skip:
-      version: " - 6.99.99"
+      version: "all"
       reason: AwaitsFix for https://github.com/elastic/elasticsearch/issues/30807
   - do:
     snapshot.verify_repository:
@@ -11,7 +11,6 @@ setup:
 
 ---
 "Get snapshot status":
-
   - do:
       indices.create:
         index: test_index
@@ -32,6 +31,14 @@ setup:
         snapshot: test_snapshot
 
   - is_true: snapshots
+  - match: { snapshots.0.snapshot: test_snapshot }
+  - match: { snapshots.0.state: SUCCESS }
+  - gt: { snapshots.0.stats.incremental.file_count: 0 }
+  - gt: { snapshots.0.stats.incremental.size_in_bytes: 0 }
+  - gt: { snapshots.0.stats.total.file_count: 0 }
+  - gt: { snapshots.0.stats.total.size_in_bytes: 0 }
+  - is_true: snapshots.0.stats.start_time_in_millis
+  - is_true: snapshots.0.stats.time_in_millis
 
 ---
 "Get missing snapshot status throws an exception":
@@ -140,7 +140,7 @@ public class VerifyRepositoryResponse extends ActionResponse implements ToXConte
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
-        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
            this.nodes = in.readList(NodeView::new).stream().map(n -> n.convertToDiscoveryNode()).collect(Collectors.toList());
         } else {
             clusterName = new ClusterName(in);
@@ -151,7 +151,7 @@ public class VerifyRepositoryResponse extends ActionResponse implements ToXConte
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        if (Version.CURRENT.onOrAfter(Version.V_7_0_0_alpha1)) {
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
             out.writeList(getNodes());
         } else {
             clusterName.writeTo(out);
@@ -74,8 +74,8 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
             throw new IllegalArgumentException("Unknown stage type " + indexShardStatus.getStage());
         }
         this.stats = new SnapshotStats(indexShardStatus.getStartTime(), indexShardStatus.getTotalTime(),
-            indexShardStatus.getNumberOfFiles(), indexShardStatus.getProcessedFiles(),
-            indexShardStatus.getTotalSize(), indexShardStatus.getProcessedSize());
+            indexShardStatus.getIncrementalFileCount(), indexShardStatus.getTotalFileCount(), indexShardStatus.getProcessedFileCount(),
+            indexShardStatus.getIncrementalSize(), indexShardStatus.getTotalSize(), indexShardStatus.getProcessedSize());
         this.failure = indexShardStatus.getFailure();
         this.nodeId = nodeId;
     }
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.action.admin.cluster.snapshots.status;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -34,19 +35,25 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
 
     private long startTime;
     private long time;
-    private int numberOfFiles;
-    private int processedFiles;
+    private int incrementalFileCount;
+    private int totalFileCount;
+    private int processedFileCount;
+    private long incrementalSize;
     private long totalSize;
     private long processedSize;
 
     SnapshotStats() {
     }
 
-    SnapshotStats(long startTime, long time, int numberOfFiles, int processedFiles, long totalSize, long processedSize) {
+    SnapshotStats(long startTime, long time,
+                  int incrementalFileCount, int totalFileCount, int processedFileCount,
+                  long incrementalSize, long totalSize, long processedSize) {
         this.startTime = startTime;
         this.time = time;
-        this.numberOfFiles = numberOfFiles;
-        this.processedFiles = processedFiles;
+        this.incrementalFileCount = incrementalFileCount;
+        this.totalFileCount = totalFileCount;
+        this.processedFileCount = processedFileCount;
+        this.incrementalSize = incrementalSize;
         this.totalSize = totalSize;
         this.processedSize = processedSize;
     }
@@ -66,17 +73,31 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
     }
 
     /**
-     * Returns number of files in the snapshot
+     * Returns incremental file count of the snapshot
      */
-    public int getNumberOfFiles() {
-        return numberOfFiles;
+    public int getIncrementalFileCount() {
+        return incrementalFileCount;
+    }
+
+    /**
+     * Returns total number of files in the snapshot
+     */
+    public int getTotalFileCount() {
+        return totalFileCount;
     }
 
     /**
      * Returns number of files in the snapshot that were processed so far
     */
-    public int getProcessedFiles() {
-        return processedFiles;
+    public int getProcessedFileCount() {
+        return processedFileCount;
+    }
+
+    /**
+     * Returns incremental size of the snapshot
+     */
+    public long getIncrementalSize() {
+        return incrementalSize;
     }
 
     /**
@@ -105,11 +126,16 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
         out.writeVLong(startTime);
         out.writeVLong(time);
 
-        out.writeVInt(numberOfFiles);
-        out.writeVInt(processedFiles);
+        out.writeVInt(incrementalFileCount);
+        out.writeVInt(processedFileCount);
 
-        out.writeVLong(totalSize);
+        out.writeVLong(incrementalSize);
         out.writeVLong(processedSize);
+
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
+            out.writeVInt(totalFileCount);
+            out.writeVLong(totalSize);
+        }
     }
 
     @Override
@@ -117,21 +143,32 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
         startTime = in.readVLong();
         time = in.readVLong();
 
-        numberOfFiles = in.readVInt();
-        processedFiles = in.readVInt();
+        incrementalFileCount = in.readVInt();
+        processedFileCount = in.readVInt();
 
-        totalSize = in.readVLong();
+        incrementalSize = in.readVLong();
         processedSize = in.readVLong();
+
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
+            totalFileCount = in.readVInt();
+            totalSize = in.readVLong();
+        } else {
+            totalFileCount = incrementalFileCount;
+            totalSize = incrementalSize;
+        }
     }
 
     static final class Fields {
         static final String STATS = "stats";
-        static final String NUMBER_OF_FILES = "number_of_files";
-        static final String PROCESSED_FILES = "processed_files";
-        static final String TOTAL_SIZE_IN_BYTES = "total_size_in_bytes";
-        static final String TOTAL_SIZE = "total_size";
-        static final String PROCESSED_SIZE_IN_BYTES = "processed_size_in_bytes";
-        static final String PROCESSED_SIZE = "processed_size";
+
+        static final String INCREMENTAL = "incremental";
+        static final String PROCESSED = "processed";
+        static final String TOTAL = "total";
+
+        static final String FILE_COUNT = "file_count";
+        static final String SIZE = "size";
+        static final String SIZE_IN_BYTES = "size_in_bytes";
+
         static final String START_TIME_IN_MILLIS = "start_time_in_millis";
         static final String TIME_IN_MILLIS = "time_in_millis";
         static final String TIME = "time";
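The `writeTo`/`readFrom` changes above follow Elasticsearch's usual wire-compatibility pattern: new fields are appended only when the remote node is new enough, and readers fall back to a sensible default otherwise. A self-contained model using plain `java.io` streams in place of `StreamOutput`/`StreamInput` (the version constants here are invented for the sketch):

[source,java]
--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public final class BwcSketch {

    static final int V_6_4_0 = 6_04_00; // hypothetical numeric version id

    static byte[] write(final int peerVersion, final int incrementalFileCount, final int totalFileCount) throws IOException {
        final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(bytes);
        out.writeInt(incrementalFileCount);
        if (peerVersion >= V_6_4_0) {
            out.writeInt(totalFileCount); // only newer peers understand the extra field
        }
        return bytes.toByteArray();
    }

    static int[] read(final int peerVersion, final byte[] payload) throws IOException {
        final DataInputStream in = new DataInputStream(new ByteArrayInputStream(payload));
        final int incremental = in.readInt();
        // older peers never sent "total", so fall back to the incremental value
        final int total = peerVersion >= V_6_4_0 ? in.readInt() : incremental;
        return new int[] { incremental, total };
    }

    public static void main(final String[] args) throws IOException {
        final int[] fromOldPeer = read(6_03_00, write(6_03_00, 8, 0));
        System.out.println(fromOldPeer[1]); // 8: total falls back to incremental
    }
}
--------------------------------------------------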
@@ -139,25 +176,44 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject(Fields.STATS);
-        builder.field(Fields.NUMBER_OF_FILES, getNumberOfFiles());
-        builder.field(Fields.PROCESSED_FILES, getProcessedFiles());
-        builder.humanReadableField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, new ByteSizeValue(getTotalSize()));
-        builder.humanReadableField(Fields.PROCESSED_SIZE_IN_BYTES, Fields.PROCESSED_SIZE, new ByteSizeValue(getProcessedSize()));
-        builder.field(Fields.START_TIME_IN_MILLIS, getStartTime());
-        builder.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
-        builder.endObject();
-        return builder;
+        builder.startObject(Fields.STATS)
+            // incremental starts
+            .startObject(Fields.INCREMENTAL)
+            .field(Fields.FILE_COUNT, getIncrementalFileCount())
+            .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()))
+            // incremental ends
+            .endObject();
+
+        if (getProcessedFileCount() != getIncrementalFileCount()) {
+            // processed starts
+            builder.startObject(Fields.PROCESSED)
+                .field(Fields.FILE_COUNT, getProcessedFileCount())
+                .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()))
+                // processed ends
+                .endObject();
+        }
+        // total starts
+        builder.startObject(Fields.TOTAL)
+            .field(Fields.FILE_COUNT, getTotalFileCount())
+            .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()))
+            // total ends
+            .endObject();
+        // timings stats
+        builder.field(Fields.START_TIME_IN_MILLIS, getStartTime())
+            .humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
+
+        return builder.endObject();
     }
 
     void add(SnapshotStats stats) {
-        numberOfFiles += stats.numberOfFiles;
-        processedFiles += stats.processedFiles;
+        incrementalFileCount += stats.incrementalFileCount;
+        totalFileCount += stats.totalFileCount;
+        processedFileCount += stats.processedFileCount;
+
+        incrementalSize += stats.incrementalSize;
         totalSize += stats.totalSize;
         processedSize += stats.processedSize;
 
         if (startTime == 0) {
             // First time here
             startTime = stats.startTime;
@@ -18,12 +18,9 @@
  */
 package org.elasticsearch.common.geo.parsers;
 
-import org.locationtech.jts.geom.Coordinate;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoShapeType;
-
-import java.io.StringReader;
 import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
 import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
 import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
@@ -37,9 +34,11 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
+import org.locationtech.jts.geom.Coordinate;
 
 import java.io.IOException;
 import java.io.StreamTokenizer;
+import java.io.StringReader;
 import java.util.List;
 
 /**
@@ -77,8 +76,7 @@ public class GeoWKTParser {
     public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType,
                                                  final GeoShapeFieldMapper shapeMapper)
             throws IOException, ElasticsearchParseException {
-        StringReader reader = new StringReader(parser.text());
-        try {
+        try (StringReader reader = new StringReader(parser.text())) {
             boolean ignoreZValue = (shapeMapper != null && shapeMapper.ignoreZValue().value() == true);
             // setup the tokenizer; configured to read words w/o numbers
             StreamTokenizer tokenizer = new StreamTokenizer(reader);
@@ -95,8 +93,6 @@ public class GeoWKTParser {
             ShapeBuilder builder = parseGeometry(tokenizer, shapeType, ignoreZValue);
             checkEOF(tokenizer);
             return builder;
-        } finally {
-            reader.close();
         }
     }
 
@@ -24,8 +24,8 @@ import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.Scorer;
 import org.elasticsearch.script.ExplainableSearchScript;
+import org.elasticsearch.script.ScoreScript;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.SearchScript;
 
 import java.io.IOException;
 import java.util.Objects;
@@ -58,10 +58,10 @@ public class ScriptScoreFunction extends ScoreFunction {
 
     private final Script sScript;
 
-    private final SearchScript.LeafFactory script;
+    private final ScoreScript.LeafFactory script;
 
 
-    public ScriptScoreFunction(Script sScript, SearchScript.LeafFactory script) {
+    public ScriptScoreFunction(Script sScript, ScoreScript.LeafFactory script) {
         super(CombineFunction.REPLACE);
         this.sScript = sScript;
         this.script = script;
@@ -69,7 +69,7 @@ public class ScriptScoreFunction extends ScoreFunction {
 
     @Override
     public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
-        final SearchScript leafScript = script.newInstance(ctx);
+        final ScoreScript leafScript = script.newInstance(ctx);
         final CannedScorer scorer = new CannedScorer();
         leafScript.setScorer(scorer);
         return new LeafScoreFunction() {
@@ -78,7 +78,7 @@ public class ScriptScoreFunction extends ScoreFunction {
                 leafScript.setDocument(docId);
                 scorer.docid = docId;
                 scorer.score = subQueryScore;
-                double result = leafScript.runAsDouble();
+                double result = leafScript.execute();
                 return result;
             }
 
@@ -234,8 +234,8 @@ public class Analysis {
 
         final Path path = env.configFile().resolve(wordListPath);
 
-        try (BufferedReader reader = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
-            return loadWordList(reader, "#");
+        try {
+            return loadWordList(path, "#");
         } catch (CharacterCodingException ex) {
             String message = String.format(Locale.ROOT,
                 "Unsupported character encoding detected while reading %s_path: %s - files must be UTF-8 encoded",
@@ -247,15 +247,9 @@ public class Analysis {
         }
     }
 
-    public static List<String> loadWordList(Reader reader, String comment) throws IOException {
+    private static List<String> loadWordList(Path path, String comment) throws IOException {
         final List<String> result = new ArrayList<>();
-        BufferedReader br = null;
-        try {
-            if (reader instanceof BufferedReader) {
-                br = (BufferedReader) reader;
-            } else {
-                br = new BufferedReader(reader);
-            }
+        try (BufferedReader br = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
             String word;
             while ((word = br.readLine()) != null) {
                 if (!Strings.hasText(word)) {
@@ -265,9 +259,6 @@ public class Analysis {
                     result.add(word.trim());
                 }
             }
-        } finally {
-            if (br != null)
-                br.close();
         }
         return result;
     }
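The `loadWordList` rewrite above trades manual `close()` bookkeeping for try-with-resources. A standalone, simplified equivalent of the resulting helper (the `stopwords.txt` path in `main` is hypothetical):

[source,java]
--------------------------------------------------
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

public final class WordListSketch {

    static List<String> loadWordList(final Path path, final String comment) throws IOException {
        final List<String> result = new ArrayList<>();
        try (BufferedReader br = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
            String word;
            while ((word = br.readLine()) != null) {
                if (word.trim().isEmpty()) {
                    continue; // skip blank lines
                }
                if (word.startsWith(comment)) {
                    continue; // skip comment lines
                }
                result.add(word.trim());
            }
        } // the reader is closed here even if readLine() throws
        return result;
    }

    public static void main(final String[] args) throws IOException {
        System.out.println(loadWordList(Paths.get("stopwords.txt"), "#"));
    }
}
--------------------------------------------------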
@@ -1424,10 +1424,6 @@ public abstract class Engine implements Closeable {
 
         @Override
         public void close() {
-            release();
-        }
-
-        public void release() {
             Releasables.close(searcher);
         }
     }
@@ -159,7 +159,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
             get = indexShard.get(new Engine.Get(realtime, readFromTranslog, type, id, uidTerm)
                 .version(version).versionType(versionType));
             if (get.exists() == false) {
-                get.release();
+                get.close();
             }
         }
     }
@@ -172,7 +172,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
             // break between having loaded it from translog (so we only have _source), and having a document to load
             return innerGetLoadFromStoredFields(type, id, gFields, fetchSourceContext, get, mapperService);
         } finally {
-            get.release();
+            get.close();
         }
     }
 
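Renaming `release()` to `close()` matters because `close()` is the `AutoCloseable` contract, so callers can lean on try-with-resources instead of pairing calls by hand. A toy illustration with a stand-in type (not the real Elasticsearch class):

[source,java]
--------------------------------------------------
public final class CloseSketch {

    static final class GetResult implements AutoCloseable {
        boolean exists() {
            return false;
        }

        @Override
        public void close() {
            System.out.println("released searcher");
        }
    }

    public static void main(final String[] args) {
        try (GetResult get = new GetResult()) {
            if (get.exists()) {
                System.out.println("found");
            }
        } // close() runs on every path, matching the finally blocks in the diff
    }
}
--------------------------------------------------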
@@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
+import org.elasticsearch.script.ScoreScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.SearchScript;
 
@@ -92,8 +93,8 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
     @Override
     protected ScoreFunction doToFunction(QueryShardContext context) {
         try {
-            SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.SCRIPT_SCORE_CONTEXT);
-            SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());
+            ScoreScript.Factory factory = context.getScriptService().compile(script, ScoreScript.CONTEXT);
+            ScoreScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());
             return new ScriptScoreFunction(script, searchScript);
         } catch (Exception e) {
             throw new QueryShardException(context, "script_score: the script could not be loaded", e);
@ -60,31 +60,39 @@ public class IndexShardSnapshotStatus {
|
||||||
private final AtomicReference<Stage> stage;
|
private final AtomicReference<Stage> stage;
|
||||||
private long startTime;
|
private long startTime;
|
||||||
private long totalTime;
|
private long totalTime;
|
||||||
private int numberOfFiles;
|
private int incrementalFileCount;
|
||||||
private int processedFiles;
|
private int totalFileCount;
|
||||||
|
private int processedFileCount;
|
||||||
private long totalSize;
|
private long totalSize;
|
||||||
|
private long incrementalSize;
|
||||||
private long processedSize;
|
private long processedSize;
|
||||||
private long indexVersion;
|
private long indexVersion;
|
||||||
private String failure;
|
private String failure;
|
||||||
|
|
||||||
private IndexShardSnapshotStatus(final Stage stage, final long startTime, final long totalTime,
|
private IndexShardSnapshotStatus(final Stage stage, final long startTime, final long totalTime,
|
||||||
final int numberOfFiles, final int processedFiles, final long totalSize, final long processedSize,
|
final int incrementalFileCount, final int totalFileCount, final int processedFileCount,
|
||||||
|
final long incrementalSize, final long totalSize, final long processedSize,
|
||||||
final long indexVersion, final String failure) {
|
final long indexVersion, final String failure) {
|
||||||
this.stage = new AtomicReference<>(Objects.requireNonNull(stage));
|
this.stage = new AtomicReference<>(Objects.requireNonNull(stage));
|
||||||
this.startTime = startTime;
|
this.startTime = startTime;
|
||||||
this.totalTime = totalTime;
|
this.totalTime = totalTime;
|
||||||
this.numberOfFiles = numberOfFiles;
|
this.incrementalFileCount = incrementalFileCount;
|
||||||
this.processedFiles = processedFiles;
|
this.totalFileCount = totalFileCount;
|
||||||
|
this.processedFileCount = processedFileCount;
|
||||||
this.totalSize = totalSize;
|
this.totalSize = totalSize;
|
||||||
this.processedSize = processedSize;
|
this.processedSize = processedSize;
|
||||||
|
this.incrementalSize = incrementalSize;
|
||||||
this.indexVersion = indexVersion;
|
this.indexVersion = indexVersion;
|
||||||
this.failure = failure;
|
this.failure = failure;
|
||||||
}
|
}
|
||||||
|
|
||||||
public synchronized Copy moveToStarted(final long startTime, final int numberOfFiles, final long totalSize) {
|
public synchronized Copy moveToStarted(final long startTime, final int incrementalFileCount, final int totalFileCount,
|
||||||
|
final long incrementalSize, final long totalSize) {
|
||||||
if (stage.compareAndSet(Stage.INIT, Stage.STARTED)) {
|
if (stage.compareAndSet(Stage.INIT, Stage.STARTED)) {
|
||||||
this.startTime = startTime;
|
this.startTime = startTime;
|
||||||
this.numberOfFiles = numberOfFiles;
|
this.incrementalFileCount = incrementalFileCount;
|
||||||
|
this.totalFileCount = totalFileCount;
|
||||||
|
this.incrementalSize = incrementalSize;
|
||||||
this.totalSize = totalSize;
|
this.totalSize = totalSize;
|
||||||
} else {
|
} else {
|
||||||
throw new IllegalStateException("Unable to move the shard snapshot status to [STARTED]: " +
|
throw new IllegalStateException("Unable to move the shard snapshot status to [STARTED]: " +
|
||||||
|
@ -135,7 +143,7 @@ public class IndexShardSnapshotStatus {
|
||||||
* Increments number of processed files
|
* Increments number of processed files
|
||||||
*/
|
*/
|
||||||
public synchronized void addProcessedFile(long size) {
|
public synchronized void addProcessedFile(long size) {
|
||||||
processedFiles++;
|
processedFileCount++;
|
||||||
processedSize += size;
|
processedSize += size;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -146,12 +154,14 @@ public class IndexShardSnapshotStatus {
|
||||||
* @return a {@link IndexShardSnapshotStatus.Copy}
|
* @return a {@link IndexShardSnapshotStatus.Copy}
|
||||||
*/
|
*/
|
||||||
public synchronized IndexShardSnapshotStatus.Copy asCopy() {
|
public synchronized IndexShardSnapshotStatus.Copy asCopy() {
|
||||||
return new IndexShardSnapshotStatus.Copy(stage.get(), startTime, totalTime, numberOfFiles, processedFiles, totalSize, processedSize,
|
return new IndexShardSnapshotStatus.Copy(stage.get(), startTime, totalTime,
|
||||||
|
incrementalFileCount, totalFileCount, processedFileCount,
|
||||||
|
incrementalSize, totalSize, processedSize,
|
||||||
indexVersion, failure);
|
indexVersion, failure);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static IndexShardSnapshotStatus newInitializing() {
|
public static IndexShardSnapshotStatus newInitializing() {
|
||||||
return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, null);
|
+        return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, 0, 0, null);
     }

     public static IndexShardSnapshotStatus newFailed(final String failure) {
@@ -159,12 +169,15 @@ public class IndexShardSnapshotStatus {
         if (failure == null) {
             throw new IllegalArgumentException("A failure description is required for a failed IndexShardSnapshotStatus");
         }
-        return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, failure);
+        return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, 0, failure);
     }

-    public static IndexShardSnapshotStatus newDone(final long startTime, final long totalTime, final int files, final long size) {
+    public static IndexShardSnapshotStatus newDone(final long startTime, final long totalTime,
+                                                   final int incrementalFileCount, final int fileCount,
+                                                   final long incrementalSize, final long size) {
         // The snapshot is done which means the number of processed files is the same as total
-        return new IndexShardSnapshotStatus(Stage.DONE, startTime, totalTime, files, files, size, size, 0, null);
+        return new IndexShardSnapshotStatus(Stage.DONE, startTime, totalTime, incrementalFileCount, fileCount, incrementalFileCount,
+            incrementalSize, size, incrementalSize, 0, null);
     }

     /**
@@ -175,23 +188,28 @@ public class IndexShardSnapshotStatus {
         private final Stage stage;
         private final long startTime;
         private final long totalTime;
-        private final int numberOfFiles;
-        private final int processedFiles;
+        private final int incrementalFileCount;
+        private final int totalFileCount;
+        private final int processedFileCount;
         private final long totalSize;
         private final long processedSize;
+        private final long incrementalSize;
         private final long indexVersion;
         private final String failure;

         public Copy(final Stage stage, final long startTime, final long totalTime,
-                    final int numberOfFiles, final int processedFiles, final long totalSize, final long processedSize,
+                    final int incrementalFileCount, final int totalFileCount, final int processedFileCount,
+                    final long incrementalSize, final long totalSize, final long processedSize,
                     final long indexVersion, final String failure) {
             this.stage = stage;
             this.startTime = startTime;
             this.totalTime = totalTime;
-            this.numberOfFiles = numberOfFiles;
-            this.processedFiles = processedFiles;
+            this.incrementalFileCount = incrementalFileCount;
+            this.totalFileCount = totalFileCount;
+            this.processedFileCount = processedFileCount;
             this.totalSize = totalSize;
             this.processedSize = processedSize;
+            this.incrementalSize = incrementalSize;
             this.indexVersion = indexVersion;
             this.failure = failure;
         }
@@ -208,12 +226,20 @@ public class IndexShardSnapshotStatus {
             return totalTime;
         }

-        public int getNumberOfFiles() {
-            return numberOfFiles;
+        public int getIncrementalFileCount() {
+            return incrementalFileCount;
         }

-        public int getProcessedFiles() {
-            return processedFiles;
+        public int getTotalFileCount() {
+            return totalFileCount;
+        }
+
+        public int getProcessedFileCount() {
+            return processedFileCount;
+        }
+
+        public long getIncrementalSize() {
+            return incrementalSize;
         }

         public long getTotalSize() {
@@ -238,8 +264,10 @@ public class IndexShardSnapshotStatus {
                 "stage=" + stage +
                 ", startTime=" + startTime +
                 ", totalTime=" + totalTime +
-                ", numberOfFiles=" + numberOfFiles +
-                ", processedFiles=" + processedFiles +
+                ", incrementalFileCount=" + incrementalFileCount +
+                ", totalFileCount=" + totalFileCount +
+                ", processedFileCount=" + processedFileCount +
+                ", incrementalSize=" + incrementalSize +
                 ", totalSize=" + totalSize +
                 ", processedSize=" + processedSize +
                 ", indexVersion=" + indexVersion +
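With the incremental fields in place, a completed shard snapshot reports both what was copied and what is referenced. A minimal sketch of calling the reworked factory; the literal values below are made up for illustration:

    // Hypothetical values; a real caller derives these from the finished snapshot.
    IndexShardSnapshotStatus done = IndexShardSnapshotStatus.newDone(
        /* startTime */ 0L, /* totalTime */ 5_000L,
        /* incrementalFileCount */ 3,   // files this snapshot actually copied
        /* fileCount */ 42,             // files the snapshot references in total
        /* incrementalSize */ 1_024L,   // bytes actually copied
        /* size */ 65_536L);            // bytes referenced in total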
@@ -356,9 +356,9 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {

    private final long time;

-    private final int numberOfFiles;
+    private final int incrementalFileCount;

-    private final long totalSize;
+    private final long incrementalSize;

    private final List<FileInfo> indexFiles;

@@ -370,11 +370,14 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
     * @param indexFiles list of files in the shard
     * @param startTime  snapshot start time
     * @param time       snapshot running time
-     * @param numberOfFiles number of files that where snapshotted
-     * @param totalSize  total size of all files snapshotted
+     * @param incrementalFileCount incremental of files that were snapshotted
+     * @param incrementalSize      incremental size of snapshot
     */
-    public BlobStoreIndexShardSnapshot(String snapshot, long indexVersion, List<FileInfo> indexFiles, long startTime, long time,
-                                       int numberOfFiles, long totalSize) {
+    public BlobStoreIndexShardSnapshot(String snapshot, long indexVersion, List<FileInfo> indexFiles,
+                                       long startTime, long time,
+                                       int incrementalFileCount,
+                                       long incrementalSize
+    ) {
        assert snapshot != null;
        assert indexVersion >= 0;
        this.snapshot = snapshot;
@@ -382,8 +385,8 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
        this.indexFiles = Collections.unmodifiableList(new ArrayList<>(indexFiles));
        this.startTime = startTime;
        this.time = time;
-        this.numberOfFiles = numberOfFiles;
-        this.totalSize = totalSize;
+        this.incrementalFileCount = incrementalFileCount;
+        this.incrementalSize = incrementalSize;
    }

    /**
@@ -395,8 +398,8 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
        this.indexFiles = Collections.emptyList();
        this.startTime = 0;
        this.time = 0;
-        this.numberOfFiles = 0;
-        this.totalSize = 0;
+        this.incrementalFileCount = 0;
+        this.incrementalSize = 0;
    }

    /**
@@ -441,34 +444,51 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
    }

    /**
-     * Returns number of files that where snapshotted
+     * Returns incremental of files that were snapshotted
     */
-    public int numberOfFiles() {
-        return numberOfFiles;
+    public int incrementalFileCount() {
+        return incrementalFileCount;
+    }
+
+    /**
+     * Returns total number of files that are referenced by this snapshot
+     */
+    public int totalFileCount() {
+        return indexFiles.size();
+    }
+
+    /**
+     * Returns incremental of files size that were snapshotted
+     */
+    public long incrementalSize() {
+        return incrementalSize;
    }

    /**
     * Returns total size of all files that where snapshotted
     */
    public long totalSize() {
-        return totalSize;
+        return indexFiles.stream().mapToLong(fi -> fi.metadata().length()).sum();
    }

    private static final String NAME = "name";
    private static final String INDEX_VERSION = "index_version";
    private static final String START_TIME = "start_time";
    private static final String TIME = "time";
-    private static final String NUMBER_OF_FILES = "number_of_files";
-    private static final String TOTAL_SIZE = "total_size";
    private static final String FILES = "files";
+    // for the sake of BWC keep the actual property names as in 6.x
+    // + there is a constraint in #fromXContent() that leads to ElasticsearchParseException("unknown parameter [incremental_file_count]");
+    private static final String INCREMENTAL_FILE_COUNT = "number_of_files";
+    private static final String INCREMENTAL_SIZE = "total_size";

-    private static final ParseField PARSE_NAME = new ParseField("name");
-    private static final ParseField PARSE_INDEX_VERSION = new ParseField("index_version", "index-version");
-    private static final ParseField PARSE_START_TIME = new ParseField("start_time");
-    private static final ParseField PARSE_TIME = new ParseField("time");
-    private static final ParseField PARSE_NUMBER_OF_FILES = new ParseField("number_of_files");
-    private static final ParseField PARSE_TOTAL_SIZE = new ParseField("total_size");
-    private static final ParseField PARSE_FILES = new ParseField("files");
+    private static final ParseField PARSE_NAME = new ParseField(NAME);
+    private static final ParseField PARSE_INDEX_VERSION = new ParseField(INDEX_VERSION, "index-version");
+    private static final ParseField PARSE_START_TIME = new ParseField(START_TIME);
+    private static final ParseField PARSE_TIME = new ParseField(TIME);
+    private static final ParseField PARSE_INCREMENTAL_FILE_COUNT = new ParseField(INCREMENTAL_FILE_COUNT);
+    private static final ParseField PARSE_INCREMENTAL_SIZE = new ParseField(INCREMENTAL_SIZE);
+    private static final ParseField PARSE_FILES = new ParseField(FILES);

    /**
     * Serializes shard snapshot metadata info into JSON
@@ -482,8 +502,8 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
        builder.field(INDEX_VERSION, indexVersion);
        builder.field(START_TIME, startTime);
        builder.field(TIME, time);
-        builder.field(NUMBER_OF_FILES, numberOfFiles);
-        builder.field(TOTAL_SIZE, totalSize);
+        builder.field(INCREMENTAL_FILE_COUNT, incrementalFileCount);
+        builder.field(INCREMENTAL_SIZE, incrementalSize);
        builder.startArray(FILES);
        for (FileInfo fileInfo : indexFiles) {
            FileInfo.toXContent(fileInfo, builder, params);
@@ -503,8 +523,8 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
        long indexVersion = -1;
        long startTime = 0;
        long time = 0;
-        int numberOfFiles = 0;
-        long totalSize = 0;
+        int incrementalFileCount = 0;
+        long incrementalSize = 0;

        List<FileInfo> indexFiles = new ArrayList<>();
        if (parser.currentToken() == null) { // fresh parser? move to the first token
@@ -526,10 +546,10 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
                        startTime = parser.longValue();
                    } else if (PARSE_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
                        time = parser.longValue();
-                    } else if (PARSE_NUMBER_OF_FILES.match(currentFieldName, parser.getDeprecationHandler())) {
-                        numberOfFiles = parser.intValue();
-                    } else if (PARSE_TOTAL_SIZE.match(currentFieldName, parser.getDeprecationHandler())) {
-                        totalSize = parser.longValue();
+                    } else if (PARSE_INCREMENTAL_FILE_COUNT.match(currentFieldName, parser.getDeprecationHandler())) {
+                        incrementalFileCount = parser.intValue();
+                    } else if (PARSE_INCREMENTAL_SIZE.match(currentFieldName, parser.getDeprecationHandler())) {
+                        incrementalSize = parser.longValue();
                    } else {
                        throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
                    }
@@ -549,7 +569,8 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment {
                }
            }
        }

        return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, Collections.unmodifiableList(indexFiles),
-            startTime, time, numberOfFiles, totalSize);
+            startTime, time, incrementalFileCount, incrementalSize);
    }
 }
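Two things are easy to miss in this hunk: the new incremental values are serialized under the old 6.x property names for format compatibility, and totalSize() is no longer a stored field but is derived from the file list. A rough sketch of the resulting accessors, assuming a snapshot instance parsed via fromXContent():

    // snap is assumed to come from BlobStoreIndexShardSnapshot.fromXContent(parser)
    int copied      = snap.incrementalFileCount(); // parsed from the legacy "number_of_files" key
    long copiedSize = snap.incrementalSize();      // parsed from the legacy "total_size" key
    int referenced  = snap.totalFileCount();       // derived: indexFiles.size()
    long totalBytes = snap.totalSize();            // derived: sum of FileInfo metadata lengths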
@@ -85,8 +85,6 @@ public class TermVectorsService {
            termVectorsResponse.setExists(false);
            return termVectorsResponse;
        }
-        Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), false, request.type(), request.id(), uidTerm)
-                .version(request.version()).versionType(request.versionType()));

        Fields termVectorsByField = null;
        AggregatedDfs dfs = null;
@@ -97,8 +95,9 @@ public class TermVectorsService {
            handleFieldWildcards(indexShard, request);
        }

-        final Engine.Searcher searcher = indexShard.acquireSearcher("term_vector");
-        try {
+        try (Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), false, request.type(), request.id(), uidTerm)
+                .version(request.version()).versionType(request.versionType()));
+             Engine.Searcher searcher = indexShard.acquireSearcher("term_vector")) {
            Fields topLevelFields = MultiFields.getFields(get.searcher() != null ? get.searcher().reader() : searcher.reader());
            DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
            /* from an artificial document */
@@ -143,14 +142,12 @@ public class TermVectorsService {
                }
            }
            // write term vectors
-            termVectorsResponse.setFields(termVectorsByField, request.selectedFields(), request.getFlags(), topLevelFields, dfs, termVectorsFilter);
+            termVectorsResponse.setFields(termVectorsByField, request.selectedFields(), request.getFlags(), topLevelFields, dfs,
+                termVectorsFilter);
            }
            termVectorsResponse.setTookInMillis(TimeUnit.NANOSECONDS.toMillis(nanoTimeSupplier.getAsLong() - startTime));
        } catch (Exception ex) {
            throw new ElasticsearchException("failed to execute term vector request", ex);
-        } finally {
-            searcher.close();
-            get.release();
        }
        return termVectorsResponse;
    }
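The try-with-resources rewrite above guarantees that both the get result and the searcher are released on every exit path, replacing the manual finally block. A self-contained sketch of the same pattern with stand-in resources:

    import java.io.Closeable;

    public class TryWithResourcesDemo {
        static Closeable resource(String name) {
            return () -> System.out.println("closed " + name);
        }

        public static void main(String[] args) throws Exception {
            // Resources close in reverse declaration order, even if the body throws,
            // the same guarantee the removed finally { searcher.close(); get.release(); } gave.
            try (Closeable get = resource("get");
                 Closeable searcher = resource("searcher")) {
                System.out.println("using get and searcher");
            }
        }
    }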
@@ -26,7 +26,6 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.tasks.Task;
-import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.tasks.TaskManager;

@@ -38,18 +37,16 @@ import java.util.function.Predicate;
 * Represents a executor node operation that corresponds to a persistent task
 */
 public class AllocatedPersistentTask extends CancellableTask {
-    private volatile String persistentTaskId;
-    private volatile long allocationId;

    private final AtomicReference<State> state;
-    @Nullable
-    private volatile Exception failure;

+    private volatile String persistentTaskId;
+    private volatile long allocationId;
+    private volatile @Nullable Exception failure;
    private volatile PersistentTasksService persistentTasksService;
    private volatile Logger logger;
    private volatile TaskManager taskManager;


    public AllocatedPersistentTask(long id, String type, String action, String description, TaskId parentTask,
                                   Map<String, String> headers) {
        super(id, type, action, description, parentTask, headers);
@@ -101,24 +98,10 @@ public class AllocatedPersistentTask extends CancellableTask {
        return failure;
    }

-    boolean markAsCancelled() {
-        return state.compareAndSet(AllocatedPersistentTask.State.STARTED, AllocatedPersistentTask.State.PENDING_CANCEL);
-    }
-
-    public State getState() {
-        return state.get();
-    }
-
    public long getAllocationId() {
        return allocationId;
    }

-    public enum State {
-        STARTED, // the task is currently running
-        PENDING_CANCEL, // the task is cancelled on master, cancelling it locally
-        COMPLETED // the task is done running and trying to notify caller
-    }
-
    /**
     * Waits for this persistent task to have the desired state.
     */
@@ -128,6 +111,14 @@ public class AllocatedPersistentTask extends CancellableTask {
        persistentTasksService.waitForPersistentTaskStatus(persistentTaskId, predicate, timeout, listener);
    }

+    final boolean isCompleted() {
+        return state.get() == State.COMPLETED;
+    }
+
+    boolean markAsCancelled() {
+        return state.compareAndSet(State.STARTED, State.PENDING_CANCEL);
+    }
+
    public void markAsCompleted() {
        completeAndNotifyIfNeeded(null);
    }
@@ -138,11 +129,10 @@ public class AllocatedPersistentTask extends CancellableTask {
        } else {
            completeAndNotifyIfNeeded(e);
        }
-
    }

    private void completeAndNotifyIfNeeded(@Nullable Exception failure) {
-        State prevState = state.getAndSet(AllocatedPersistentTask.State.COMPLETED);
+        final State prevState = state.getAndSet(State.COMPLETED);
        if (prevState == State.COMPLETED) {
            logger.warn("attempt to complete task [{}] with id [{}] in the [{}] state", getAction(), getPersistentTaskId(), prevState);
        } else {
@@ -173,4 +163,10 @@ public class AllocatedPersistentTask extends CancellableTask {
            }
        }
    }
+
+    public enum State {
+        STARTED, // the task is currently running
+        PENDING_CANCEL, // the task is cancelled on master, cancelling it locally
+        COMPLETED // the task is done running and trying to notify caller
+    }
 }
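The reshuffled class keeps all lifecycle transitions on a single AtomicReference, so cancellation and completion race safely. A runnable sketch of the two operations the diff adds or moves, isCompleted() and markAsCancelled():

    import java.util.concurrent.atomic.AtomicReference;

    public class TaskStateDemo {
        enum State { STARTED, PENDING_CANCEL, COMPLETED }

        public static void main(String[] args) {
            AtomicReference<State> state = new AtomicReference<>(State.STARTED);
            // markAsCancelled(): compare-and-set, so only a STARTED task can move to PENDING_CANCEL.
            System.out.println(state.compareAndSet(State.STARTED, State.PENDING_CANCEL)); // true
            System.out.println(state.compareAndSet(State.STARTED, State.PENDING_CANCEL)); // false: already moved
            // isCompleted(): a plain read of the current state.
            System.out.println(state.get() == State.COMPLETED); // false
        }
    }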
@@ -123,7 +123,7 @@ public class PersistentTasksNodeService extends AbstractComponent implements Clu

        for (Long id : notVisitedTasks) {
            AllocatedPersistentTask task = runningTasks.get(id);
-            if (task.getState() == AllocatedPersistentTask.State.COMPLETED) {
+            if (task.isCompleted()) {
                // Result was sent to the caller and the caller acknowledged acceptance of the result
                logger.trace("Found completed persistent task [{}] with id [{}] and allocation id [{}] - removing",
                    task.getAction(), task.getPersistentTaskId(), task.getAllocationId());
@@ -440,7 +440,7 @@ public class PluginsService extends AbstractComponent {
        List<Bundle> sortedBundles = sortBundles(bundles);

        for (Bundle bundle : sortedBundles) {
-            checkBundleJarHell(bundle, transitiveUrls);
+            checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveUrls);

            final Plugin plugin = loadBundle(bundle, loaded);
            plugins.add(new Tuple<>(bundle.plugin, plugin));
@@ -451,7 +451,7 @@ public class PluginsService extends AbstractComponent {

    // jar-hell check the bundle against the parent classloader and extended plugins
    // the plugin cli does it, but we do it again, in case lusers mess with jar files manually
-    static void checkBundleJarHell(Bundle bundle, Map<String, Set<URL>> transitiveUrls) {
+    static void checkBundleJarHell(Set<URL> classpath, Bundle bundle, Map<String, Set<URL>> transitiveUrls) {
        // invariant: any plugins this plugin bundle extends have already been added to transitiveUrls
        List<String> exts = bundle.plugin.getExtendedPlugins();

@@ -484,7 +484,6 @@ public class PluginsService extends AbstractComponent {
            JarHell.checkJarHell(urls, logger::debug); // check jarhell of each extended plugin against this plugin
            transitiveUrls.put(bundle.plugin.getName(), urls);

-            Set<URL> classpath = JarHell.parseClassPath();
            // check we don't have conflicting codebases with core
            Set<URL> intersection = new HashSet<>(classpath);
            intersection.retainAll(bundle.urls);
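Passing the classpath in, instead of computing it inside checkBundleJarHell, is a small inversion that makes the conflict check testable with a synthetic classpath. A stand-in sketch of the idea; the real method works on Set<URL> codebases rather than strings:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class InjectedClasspathDemo {
        static void check(Set<String> classpath, Set<String> bundleUrls) {
            // Same retainAll-based intersection as the real check above.
            Set<String> intersection = new HashSet<>(classpath);
            intersection.retainAll(bundleUrls);
            if (intersection.isEmpty() == false) {
                throw new IllegalStateException("jar hell: duplicate codebases " + intersection);
            }
        }

        public static void main(String[] args) {
            Set<String> cp = new HashSet<>(Arrays.asList("core.jar", "lucene.jar"));
            check(cp, new HashSet<>(Arrays.asList("plugin.jar")));     // passes
            try {
                check(cp, new HashSet<>(Arrays.asList("lucene.jar"))); // conflicts with core
            } catch (IllegalStateException e) {
                System.out.println(e.getMessage());
            }
        }
    }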
@@ -818,7 +818,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
    public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) {
        Context context = new Context(snapshotId, version, indexId, shardId);
        BlobStoreIndexShardSnapshot snapshot = context.loadSnapshot();
-        return IndexShardSnapshotStatus.newDone(snapshot.startTime(), snapshot.time(), snapshot.numberOfFiles(), snapshot.totalSize());
+        return IndexShardSnapshotStatus.newDone(snapshot.startTime(), snapshot.time(),
+            snapshot.incrementalFileCount(), snapshot.totalFileCount(),
+            snapshot.incrementalSize(), snapshot.totalSize());
    }

    @Override
@@ -1139,9 +1141,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = new ArrayList<>();

            store.incRef();
+            int indexIncrementalFileCount = 0;
+            int indexTotalNumberOfFiles = 0;
+            long indexIncrementalSize = 0;
+            long indexTotalFileCount = 0;
            try {
-                int indexNumberOfFiles = 0;
-                long indexTotalFilesSize = 0;
                ArrayList<BlobStoreIndexShardSnapshot.FileInfo> filesToSnapshot = new ArrayList<>();
                final Store.MetadataSnapshot metadata;
                // TODO apparently we don't use the MetadataSnapshot#.recoveryDiff(...) here but we should
@@ -1182,9 +1186,13 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                        }
                    }
                }

+                indexTotalFileCount += md.length();
+                indexTotalNumberOfFiles++;
+
                if (existingFileInfo == null) {
-                    indexNumberOfFiles++;
-                    indexTotalFilesSize += md.length();
+                    indexIncrementalFileCount++;
+                    indexIncrementalSize += md.length();
                    // create a new FileInfo
                    BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), md, chunkSize());
                    indexCommitPointFiles.add(snapshotFileInfo);
@@ -1194,7 +1202,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                    }
                }

-                snapshotStatus.moveToStarted(startTime, indexNumberOfFiles, indexTotalFilesSize);
+                snapshotStatus.moveToStarted(startTime, indexIncrementalFileCount,
+                    indexTotalNumberOfFiles, indexIncrementalSize, indexTotalFileCount);

                for (BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo : filesToSnapshot) {
                    try {
@@ -1217,8 +1226,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            // snapshotStatus.startTime() is assigned on the same machine,
            // so it's safe to use with VLong
            System.currentTimeMillis() - lastSnapshotStatus.getStartTime(),
-            lastSnapshotStatus.getNumberOfFiles(),
-            lastSnapshotStatus.getTotalSize());
+            lastSnapshotStatus.getIncrementalFileCount(),
+            lastSnapshotStatus.getIncrementalSize()
+        );

        //TODO: The time stored in snapshot doesn't include cleanup time.
        logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId);
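The new counters above separate what a snapshot copies from what it references. A self-contained sketch of the same bookkeeping over stand-in file metadata; names and values are illustrative only:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class IncrementalAccountingDemo {
        public static void main(String[] args) {
            // Files in the commit point: names with matching lengths, kept as parallel arrays.
            List<String> names = Arrays.asList("seg_1", "seg_2", "seg_3");
            long[] lengths = {100, 200, 50};
            // Files already present in an earlier snapshot of the same shard.
            Set<String> alreadySnapshotted = new HashSet<>(Arrays.asList("seg_1", "seg_2"));

            int incrementalFileCount = 0, totalNumberOfFiles = 0;
            long incrementalSize = 0, totalSize = 0;
            for (int i = 0; i < names.size(); i++) {
                totalSize += lengths[i];       // every file counts toward the totals
                totalNumberOfFiles++;
                if (alreadySnapshotted.contains(names.get(i)) == false) {
                    incrementalFileCount++;        // only new files are copied...
                    incrementalSize += lengths[i]; // ...and only their bytes are incremental
                }
            }
            System.out.println(incrementalFileCount + " files / " + incrementalSize + " bytes copied; "
                + totalNumberOfFiles + " files / " + totalSize + " bytes referenced");
        }
    }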
@@ -0,0 +1,102 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.script;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Scorer;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.search.lookup.LeafSearchLookup;
+import org.elasticsearch.search.lookup.SearchLookup;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Map;
+import java.util.function.DoubleSupplier;
+
+/**
+ * A script used for adjusting the score on a per document basis.
+ */
+public abstract class ScoreScript {
+
+    public static final String[] PARAMETERS = new String[]{};
+
+    /** The generic runtime parameters for the script. */
+    private final Map<String, Object> params;
+
+    /** A leaf lookup for the bound segment this script will operate on. */
+    private final LeafSearchLookup leafLookup;
+
+    private DoubleSupplier scoreSupplier = () -> 0.0;
+
+    public ScoreScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
+        this.params = params;
+        this.leafLookup = lookup.getLeafSearchLookup(leafContext);
+    }
+
+    public abstract double execute();
+
+    /** Return the parameters for this script. */
+    public Map<String, Object> getParams() {
+        return params;
+    }
+
+    /** The doc lookup for the Lucene segment this script was created for. */
+    public final Map<String, ScriptDocValues<?>> getDoc() {
+        return leafLookup.doc();
+    }
+
+    /** Set the current document to run the script on next. */
+    public void setDocument(int docid) {
+        leafLookup.setDocument(docid);
+    }
+
+    public void setScorer(Scorer scorer) {
+        this.scoreSupplier = () -> {
+            try {
+                return scorer.score();
+            } catch (IOException e) {
+                throw new UncheckedIOException(e);
+            }
+        };
+    }
+
+    public double get_score() {
+        return scoreSupplier.getAsDouble();
+    }
+
+    /** A factory to construct {@link ScoreScript} instances. */
+    public interface LeafFactory {
+
+        /**
+         * Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise.
+         */
+        boolean needs_score();
+
+        ScoreScript newInstance(LeafReaderContext ctx) throws IOException;
+    }
+
+    /** A factory to construct stateful {@link ScoreScript} factories for a specific index. */
+    public interface Factory {
+
+        ScoreScript.LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
+
+    }
+
+    public static final ScriptContext<ScoreScript.Factory> CONTEXT = new ScriptContext<>("score", ScoreScript.Factory.class);
+}
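For orientation, a hypothetical concrete ScoreScript, as a script engine might produce one via LeafFactory.newInstance(). The lookup and leafContext variables and the "boost" parameter are assumptions made for this sketch:

    SearchLookup lookup = /* assumed to be in scope */ null;
    LeafReaderContext leafContext = /* assumed to be in scope */ null;
    ScoreScript boosted = new ScoreScript(Collections.singletonMap("boost", 2.0), lookup, leafContext) {
        @Override
        public double execute() {
            double boost = (double) getParams().get("boost");
            return get_score() * boost;   // rescale the original query score
        }
    };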
@@ -42,7 +42,7 @@ public class ScriptModule {
        CORE_CONTEXTS = Stream.of(
            SearchScript.CONTEXT,
            SearchScript.AGGS_CONTEXT,
-            SearchScript.SCRIPT_SCORE_CONTEXT,
+            ScoreScript.CONTEXT,
            SearchScript.SCRIPT_SORT_CONTEXT,
            SearchScript.TERMS_SET_QUERY_CONTEXT,
            ExecutableScript.CONTEXT,
@@ -162,8 +162,6 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
    public static final ScriptContext<Factory> AGGS_CONTEXT = new ScriptContext<>("aggs", Factory.class);
    // Can return a double. (For ScriptSortType#NUMBER only, for ScriptSortType#STRING normal CONTEXT should be used)
    public static final ScriptContext<Factory> SCRIPT_SORT_CONTEXT = new ScriptContext<>("sort", Factory.class);
-    // Can return a float
-    public static final ScriptContext<Factory> SCRIPT_SCORE_CONTEXT = new ScriptContext<>("score", Factory.class);
    // Can return a long
    public static final ScriptContext<Factory> TERMS_SET_QUERY_CONTEXT = new ScriptContext<>("terms_set", Factory.class);
 }
@@ -21,7 +21,6 @@ package org.elasticsearch.search;

 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.TopDocs;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
@@ -39,6 +38,7 @@ import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
@@ -92,8 +92,8 @@ import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
-import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.threadpool.Scheduler.Cancellable;
+import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.threadpool.ThreadPool.Names;
 import org.elasticsearch.transport.TransportRequest;

@@ -646,21 +646,18 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv


    public boolean freeContext(long id) {
-        final SearchContext context = removeContext(id);
+        try (SearchContext context = removeContext(id)) {
            if (context != null) {
                assert context.refCount() > 0 : " refCount must be > 0: " + context.refCount();
-                try {
                context.indexShard().getSearchOperationListener().onFreeContext(context);
                if (context.scrollContext() != null) {
                    context.indexShard().getSearchOperationListener().onFreeScrollContext(context);
                }
-                } finally {
-                    context.close();
-                }
                return true;
            }
            return false;
        }
+    }

    public void freeAllScrollContexts() {
        for (SearchContext searchContext : activeContexts.values()) {
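Moving the null check inside the try block works because try-with-resources simply skips close() for a null resource, so freeContext no longer needs the nested try/finally. A one-liner proof of that language rule:

    public class NullResourceDemo {
        public static void main(String[] args) throws Exception {
            try (AutoCloseable context = null) {   // no NullPointerException: close() is skipped
                System.out.println("body ran; nothing to close");
            }
        }
    }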
@@ -65,8 +65,8 @@ public class SnapshotBlocksIT extends ESIntegTestCase {
            client().prepareIndex(OTHER_INDEX_NAME, "type").setSource("test", "init").execute().actionGet();
        }

        logger.info("--> register a repository");

        assertAcked(client().admin().cluster().preparePutRepository(REPOSITORY_NAME)
            .setType("fs")
            .setSettings(Settings.builder().put("location", randomRepoPath())));
@@ -91,10 +91,14 @@ public class SnapshotStatusTests extends ESTestCase {
            " \"total\" : " + totalShards + "\n" +
            " },\n" +
            " \"stats\" : {\n" +
-            " \"number_of_files\" : 0,\n" +
-            " \"processed_files\" : 0,\n" +
-            " \"total_size_in_bytes\" : 0,\n" +
-            " \"processed_size_in_bytes\" : 0,\n" +
+            " \"incremental\" : {\n" +
+            " \"file_count\" : 0,\n" +
+            " \"size_in_bytes\" : 0\n" +
+            " },\n" +
+            " \"total\" : {\n" +
+            " \"file_count\" : 0,\n" +
+            " \"size_in_bytes\" : 0\n" +
+            " },\n" +
            " \"start_time_in_millis\" : 0,\n" +
            " \"time_in_millis\" : 0\n" +
            " },\n" +
@@ -109,10 +113,14 @@ public class SnapshotStatusTests extends ESTestCase {
            " \"total\" : " + totalShards + "\n" +
            " },\n" +
            " \"stats\" : {\n" +
-            " \"number_of_files\" : 0,\n" +
-            " \"processed_files\" : 0,\n" +
-            " \"total_size_in_bytes\" : 0,\n" +
-            " \"processed_size_in_bytes\" : 0,\n" +
+            " \"incremental\" : {\n" +
+            " \"file_count\" : 0,\n" +
+            " \"size_in_bytes\" : 0\n" +
+            " },\n" +
+            " \"total\" : {\n" +
+            " \"file_count\" : 0,\n" +
+            " \"size_in_bytes\" : 0\n" +
+            " },\n" +
            " \"start_time_in_millis\" : 0,\n" +
            " \"time_in_millis\" : 0\n" +
            " },\n" +
@@ -120,10 +128,14 @@ public class SnapshotStatusTests extends ESTestCase {
            " \"" + shardId + "\" : {\n" +
            " \"stage\" : \"" + shardStage.toString() + "\",\n" +
            " \"stats\" : {\n" +
-            " \"number_of_files\" : 0,\n" +
-            " \"processed_files\" : 0,\n" +
-            " \"total_size_in_bytes\" : 0,\n" +
-            " \"processed_size_in_bytes\" : 0,\n" +
+            " \"incremental\" : {\n" +
+            " \"file_count\" : 0,\n" +
+            " \"size_in_bytes\" : 0\n" +
+            " },\n" +
+            " \"total\" : {\n" +
+            " \"file_count\" : 0,\n" +
+            " \"size_in_bytes\" : 0\n" +
+            " },\n" +
            " \"start_time_in_millis\" : 0,\n" +
            " \"time_in_millis\" : 0\n" +
            " }\n" +
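Read back as plain JSON, the shard-stats layout these tests now expect nests the counters in two sub-objects:

    "stats" : {
      "incremental" : { "file_count" : 0, "size_in_bytes" : 0 },
      "total" : { "file_count" : 0, "size_in_bytes" : 0 },
      "start_time_in_millis" : 0,
      "time_in_millis" : 0
    }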
@@ -23,6 +23,7 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.settings.Settings;
@@ -111,7 +112,8 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
        checkTermTexts(response.getResponses()[1].getResponse().getFields().terms("field"), new String[]{"value1"});
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
-        assertThat(response.getResponses()[2].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));
+        assertThat(response.getResponses()[2].getFailure().getCause(), instanceOf(ElasticsearchException.class));
+        assertThat(response.getResponses()[2].getFailure().getCause().getCause(), instanceOf(VersionConflictEngineException.class));

        //Version from Lucene index
        refresh();
@@ -132,7 +134,8 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
        checkTermTexts(response.getResponses()[1].getResponse().getFields().terms("field"), new String[]{"value1"});
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
-        assertThat(response.getResponses()[2].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));
+        assertThat(response.getResponses()[2].getFailure().getCause(), instanceOf(ElasticsearchException.class));
+        assertThat(response.getResponses()[2].getFailure().getCause().getCause(), instanceOf(VersionConflictEngineException.class));

        for (int i = 0; i < 3; i++) {
@@ -155,7 +158,8 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
-        assertThat(response.getResponses()[1].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));
+        assertThat(response.getResponses()[1].getFailure().getCause(), instanceOf(ElasticsearchException.class));
+        assertThat(response.getResponses()[1].getFailure().getCause().getCause(), instanceOf(VersionConflictEngineException.class));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
@@ -180,7 +184,8 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
-        assertThat(response.getResponses()[1].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));
+        assertThat(response.getResponses()[1].getFailure().getCause(), instanceOf(ElasticsearchException.class));
+        assertThat(response.getResponses()[1].getFailure().getCause().getCause(), instanceOf(VersionConflictEngineException.class));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
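Because the engine failure now arrives wrapped one level deeper, the assertions check the cause and the cause of the cause. A generic, runnable illustration of the same unwrapping, with stand-in exception types:

    public class WrappedCauseDemo {
        public static void main(String[] args) {
            Exception conflict = new IllegalStateException("version conflict");   // stands in for VersionConflictEngineException
            Exception wrapper = new RuntimeException("shard failure", conflict);  // stands in for ElasticsearchException
            Throwable failure = new RuntimeException("request failed", wrapper);

            System.out.println(failure.getCause() instanceof RuntimeException);                  // the outer wrapper
            System.out.println(failure.getCause().getCause() instanceof IllegalStateException);  // the original conflict
        }
    }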
@@ -471,7 +471,7 @@ public class CacheTests extends ESTestCase {
                        keys.add(key);
                    } else {
                        // invalidate with incorrect value
-                        cache.invalidate(key, Integer.toString(key * randomIntBetween(2, 10)));
+                        cache.invalidate(key, Integer.toString(key + randomIntBetween(2, 10)));
                    }
                }
            }
@@ -506,7 +506,7 @@ public class CacheTests extends ESTestCase {
                invalidated.add(i);
            } else {
                // invalidate with incorrect value
-                cache.invalidate(i, Integer.toString(i * randomIntBetween(2, 10)));
+                cache.invalidate(i, Integer.toString(i + randomIntBetween(2, 10)));
            }
        }
    }
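The switch from multiplication to addition fixes a subtle test bug: for key 0, multiplying by any random factor still yields 0, so the supposedly "incorrect" invalidation value matched the cached value and the entry was invalidated for real. Adding a positive offset always produces a different value. A runnable illustration:

    public class InvalidValueDemo {
        public static void main(String[] args) {
            int key = 0;
            String cached = Integer.toString(key);            // "0"
            String viaMultiply = Integer.toString(key * 7);   // "0": collides with the cached value
            String viaAdd = Integer.toString(key + 7);        // "7": always differs for a positive offset
            System.out.println(cached.equals(viaMultiply));   // true  -> the old test could invalidate for real
            System.out.println(cached.equals(viaAdd));        // false -> the new test never does
        }
    }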
@@ -21,6 +21,7 @@ package org.elasticsearch.index.engine;

 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -793,7 +794,7 @@ public class InternalEngineTests extends EngineTestCase {
            while (flushFinished.get() == false) {
                Engine.GetResult previousGetResult = latestGetResult.get();
                if (previousGetResult != null) {
-                    previousGetResult.release();
+                    previousGetResult.close();
                }
                latestGetResult.set(engine.get(newGet(true, doc), searcherFactory));
                if (latestGetResult.get().exists() == false) {
@@ -807,7 +808,7 @@ public class InternalEngineTests extends EngineTestCase {
        flushFinished.set(true);
        getThread.join();
        assertTrue(latestGetResult.get().exists());
-        latestGetResult.get().release();
+        latestGetResult.get().close();
    }

    public void testSimpleOperations() throws Exception {
@@ -830,21 +831,20 @@ public class InternalEngineTests extends EngineTestCase {
        searchResult.close();

        // but, not there non realtime
-        Engine.GetResult getResult = engine.get(newGet(false, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(false, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(false));
-        getResult.release();
+        }

        // but, we can still get it (in realtime)
-        getResult = engine.get(newGet(true, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(true, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(true));
            assertThat(getResult.docIdAndVersion(), notNullValue());
-        getResult.release();
+        }

        // but not real time is not yet visible
-        getResult = engine.get(newGet(false, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(false, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(false));
-        getResult.release();
+        }

        // refresh and it should be there
        engine.refresh("test");
@@ -856,10 +856,10 @@ public class InternalEngineTests extends EngineTestCase {
        searchResult.close();

        // also in non realtime
-        getResult = engine.get(newGet(false, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(false, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(true));
            assertThat(getResult.docIdAndVersion(), notNullValue());
-        getResult.release();
+        }

        // now do an update
        document = testDocument();
@@ -876,10 +876,10 @@ public class InternalEngineTests extends EngineTestCase {
        searchResult.close();

        // but, we can still get it (in realtime)
-        getResult = engine.get(newGet(true, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(true, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(true));
            assertThat(getResult.docIdAndVersion(), notNullValue());
-        getResult.release();
+        }

        // refresh and it should be updated
        engine.refresh("test");
@@ -901,9 +901,9 @@ public class InternalEngineTests extends EngineTestCase {
        searchResult.close();

        // but, get should not see it (in realtime)
-        getResult = engine.get(newGet(true, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(true, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(false));
-        getResult.release();
+        }

        // refresh and it should be deleted
        engine.refresh("test");
@@ -941,10 +941,10 @@ public class InternalEngineTests extends EngineTestCase {
        engine.flush();

        // and, verify get (in real time)
-        getResult = engine.get(newGet(true, doc), searcherFactory);
+        try (Engine.GetResult getResult = engine.get(newGet(true, doc), searcherFactory)) {
            assertThat(getResult.exists(), equalTo(true));
            assertThat(getResult.docIdAndVersion(), notNullValue());
-        getResult.release();
+        }

        // make sure we can still work with the engine
        // now do an update
@@ -4156,7 +4156,7 @@ public class InternalEngineTests extends EngineTestCase {
            new Term("_id", parsedDocument.id()),
            parsedDocument,
            SequenceNumbers.UNASSIGNED_SEQ_NO,
-            (long) randomIntBetween(1, 8),
+            randomIntBetween(1, 8),
            Versions.MATCH_ANY,
            VersionType.INTERNAL,
            Engine.Operation.Origin.PRIMARY,
@@ -4172,7 +4172,7 @@ public class InternalEngineTests extends EngineTestCase {
            id,
            new Term("_id", parsedDocument.id()),
            SequenceNumbers.UNASSIGNED_SEQ_NO,
-            (long) randomIntBetween(1, 8),
+            randomIntBetween(1, 8),
            Versions.MATCH_ANY,
            VersionType.INTERNAL,
            Engine.Operation.Origin.PRIMARY,
@@ -1861,10 +1861,11 @@ public class IndexShardTests extends IndexShardTestCase {
         indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}");
         shard.refresh("test");

-        Engine.GetResult getResult = shard.get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))));
-        assertTrue(getResult.exists());
-        assertNotNull(getResult.searcher());
-        getResult.release();
+        try (Engine.GetResult getResult = shard
+                 .get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
+            assertTrue(getResult.exists());
+            assertNotNull(getResult.searcher());
+        }
         try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
             TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
             assertEquals(search.totalHits, 1);
@@ -1895,11 +1896,12 @@ public class IndexShardTests extends IndexShardTestCase {
             search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
             assertEquals(search.totalHits, 1);
         }
-        getResult = newShard.get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))));
-        assertTrue(getResult.exists());
-        assertNotNull(getResult.searcher()); // make sure get uses the wrapped reader
-        assertTrue(getResult.searcher().reader() instanceof FieldMaskingReader);
-        getResult.release();
+        try (Engine.GetResult getResult = newShard
+                 .get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
+            assertTrue(getResult.exists());
+            assertNotNull(getResult.searcher()); // make sure get uses the wrapped reader
+            assertTrue(getResult.searcher().reader() instanceof FieldMaskingReader);
+        }

         closeShards(newShard);
     }
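Note: the hunks above replace manual getResult.release() calls with try-with-resources blocks. A minimal sketch of why that is safer, assuming only that the result type implements AutoCloseable (GetResultDemo is an illustrative stand-in, not the real Engine.GetResult):

    // Sketch: try-with-resources runs close() even when an assertion inside
    // the block throws, which the old trailing release() call did not.
    final class GetResultDemo implements AutoCloseable {
        boolean exists() { return true; }
        @Override
        public void close() { /* release the underlying searcher here */ }

        public static void main(String[] args) {
            try (GetResultDemo getResult = new GetResultDemo()) {
                if (getResult.exists() == false) {
                    throw new AssertionError("doc should exist"); // close() still runs
                }
            }
        }
    }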
@@ -52,6 +52,7 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;

 import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.sameInstance;
 import static org.hamcrest.core.IsEqual.equalTo;
 import static org.mockito.Matchers.any;
@@ -73,7 +74,6 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
         threadPool = new TestThreadPool(getClass().getName());
     }

-
     @Override
     @After
     public void tearDown() throws Exception {
@@ -95,7 +95,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
         return state.build();
     }

-    public void testStartTask() throws Exception {
+    public void testStartTask() {
         PersistentTasksService persistentTasksService = mock(PersistentTasksService.class);
         @SuppressWarnings("unchecked") PersistentTasksExecutor<TestParams> action = mock(PersistentTasksExecutor.class);
         when(action.getExecutor()).thenReturn(ThreadPool.Names.SAME);
@@ -131,8 +131,8 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {

         if (added == false) {
             logger.info("No local node action was added");
-
         }
+
         MetaData.Builder metaData = MetaData.builder(state.metaData());
         metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks.build());
         ClusterState newClusterState = ClusterState.builder(state).metaData(metaData).build();
@@ -149,6 +149,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {

         // Make sure action wasn't called again
         assertThat(executor.executions.size(), equalTo(1));
+        assertThat(executor.get(0).task.isCompleted(), is(false));

         // Start another task on this node
         state = newClusterState;
@@ -157,10 +158,15 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {

         // Make sure action was called this time
         assertThat(executor.size(), equalTo(2));
+        assertThat(executor.get(1).task.isCompleted(), is(false));

         // Finish both tasks
         executor.get(0).task.markAsFailed(new RuntimeException());
         executor.get(1).task.markAsCompleted();
+
+        assertThat(executor.get(0).task.isCompleted(), is(true));
+        assertThat(executor.get(1).task.isCompleted(), is(true));
+
         String failedTaskId = executor.get(0).task.getPersistentTaskId();
         String finishedTaskId = executor.get(1).task.getPersistentTaskId();
         executor.clear();
@@ -186,7 +192,6 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
             // Make sure action was only allocated on this node once
             assertThat(executor.size(), equalTo(1));
         }
-
     }

     public void testParamsStatusAndNodeTaskAreDelegated() throws Exception {
@@ -300,7 +305,6 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {

         // Check the the task is now removed from task manager
         assertThat(taskManager.getTasks().values(), empty());
-
     }

     private <Params extends PersistentTaskParams> ClusterState addTask(ClusterState state, String action, Params params,
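Note: the assertions added above pin down that isCompleted() flips only after markAsCompleted() or markAsFailed(). A minimal sketch of that contract, assuming (this is not taken from the Elasticsearch sources) that completion is a one-way flag shared by both terminal transitions:

    import java.util.concurrent.atomic.AtomicBoolean;

    // Sketch of the lifecycle the new assertions exercise: a task reports
    // isCompleted() == false while running and true after either terminal call.
    final class TaskDemo {
        private final AtomicBoolean completed = new AtomicBoolean(false);

        void markAsCompleted() { completed.set(true); }
        void markAsFailed(Exception e) { completed.set(true); }
        boolean isCompleted() { return completed.get(); }

        public static void main(String[] args) {
            TaskDemo task = new TaskDemo();
            assert task.isCompleted() == false;
            task.markAsFailed(new RuntimeException());
            assert task.isCompleted();
        }
    }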
@@ -23,6 +23,7 @@ import org.apache.log4j.Level;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.Version;
+import org.elasticsearch.bootstrap.JarHell;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
@@ -443,7 +444,7 @@ public class PluginsServiceTests extends ESTestCase {
             "MyPlugin", Collections.singletonList("dep"), false);
         PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir);
         IllegalStateException e = expectThrows(IllegalStateException.class, () ->
-            PluginsService.checkBundleJarHell(bundle, transitiveDeps));
+            PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps));
         assertEquals("failed to load plugin myplugin due to jar hell", e.getMessage());
         assertThat(e.getCause().getMessage(), containsString("jar hell! duplicate codebases with extended plugin"));
     }
@@ -462,7 +463,7 @@ public class PluginsServiceTests extends ESTestCase {
             "MyPlugin", Arrays.asList("dep1", "dep2"), false);
         PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir);
         IllegalStateException e = expectThrows(IllegalStateException.class, () ->
-            PluginsService.checkBundleJarHell(bundle, transitiveDeps));
+            PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps));
         assertEquals("failed to load plugin myplugin due to jar hell", e.getMessage());
         assertThat(e.getCause().getMessage(), containsString("jar hell!"));
         assertThat(e.getCause().getMessage(), containsString("duplicate codebases"));
@@ -479,7 +480,7 @@ public class PluginsServiceTests extends ESTestCase {
             "MyPlugin", Collections.emptyList(), false);
         PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir);
         IllegalStateException e = expectThrows(IllegalStateException.class, () ->
-            PluginsService.checkBundleJarHell(bundle, new HashMap<>()));
+            PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, new HashMap<>()));
         assertEquals("failed to load plugin myplugin due to jar hell", e.getMessage());
         assertThat(e.getCause().getMessage(), containsString("jar hell!"));
         assertThat(e.getCause().getMessage(), containsString("Level"));
@@ -498,7 +499,7 @@ public class PluginsServiceTests extends ESTestCase {
             "MyPlugin", Collections.singletonList("dep"), false);
         PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir);
         IllegalStateException e = expectThrows(IllegalStateException.class, () ->
-            PluginsService.checkBundleJarHell(bundle, transitiveDeps));
+            PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps));
         assertEquals("failed to load plugin myplugin due to jar hell", e.getMessage());
         assertThat(e.getCause().getMessage(), containsString("jar hell!"));
         assertThat(e.getCause().getMessage(), containsString("DummyClass1"));
@@ -521,7 +522,7 @@ public class PluginsServiceTests extends ESTestCase {
             "MyPlugin", Arrays.asList("dep1", "dep2"), false);
         PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir);
         IllegalStateException e = expectThrows(IllegalStateException.class, () ->
-            PluginsService.checkBundleJarHell(bundle, transitiveDeps));
+            PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps));
         assertEquals("failed to load plugin myplugin due to jar hell", e.getMessage());
         assertThat(e.getCause().getMessage(), containsString("jar hell!"));
         assertThat(e.getCause().getMessage(), containsString("DummyClass2"));
@@ -543,7 +544,7 @@ public class PluginsServiceTests extends ESTestCase {
         PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8",
             "MyPlugin", Arrays.asList("dep1", "dep2"), false);
         PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir);
-        PluginsService.checkBundleJarHell(bundle, transitiveDeps);
+        PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps);
         Set<URL> deps = transitiveDeps.get("myplugin");
         assertNotNull(deps);
         assertThat(deps, containsInAnyOrder(pluginJar.toUri().toURL(), dep1Jar.toUri().toURL(), dep2Jar.toUri().toURL()));
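Note: every call site above gains a leading JarHell.parseClassPath() argument, so the JVM classpath is parsed once by the caller instead of inside each check. A hedged sketch of that refactoring shape (DemoChecker and its methods are illustrative stand-ins, not the real PluginsService API):

    import java.io.File;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    // Sketch: hoist an invariant computation out of a repeatedly-invoked
    // check and thread it through as an explicit parameter.
    final class DemoChecker {
        static Set<String> parseClassPath() {
            return new HashSet<>(Arrays.asList(
                System.getProperty("java.class.path", "").split(File.pathSeparator)));
        }

        static void checkAll(Iterable<String> bundles) {
            Set<String> classpath = parseClassPath(); // computed once...
            for (String bundle : bundles) {
                checkBundle(classpath, bundle);       // ...reused for every bundle
            }
        }

        static void checkBundle(Set<String> classpath, String bundle) {
            if (classpath.contains(bundle)) {
                throw new IllegalStateException("jar hell! duplicate codebases: " + bundle);
            }
        }
    }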
@@ -30,14 +30,14 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.script.ExplainableSearchScript;
+import org.elasticsearch.script.ScoreScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptEngine;
 import org.elasticsearch.script.ScriptType;
-import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.lookup.LeafDocLookup;
+import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -76,16 +76,17 @@ public class ExplainableScriptIT extends ESIntegTestCase {
         @Override
         public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) {
             assert scriptSource.equals("explainable_script");
-            assert context == SearchScript.SCRIPT_SCORE_CONTEXT;
-            SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() {
-                @Override
-                public SearchScript newInstance(LeafReaderContext context) throws IOException {
-                    return new MyScript(lookup.doc().getLeafDocLookup(context));
-                }
+            assert context == ScoreScript.CONTEXT;
+            ScoreScript.Factory factory = (params1, lookup) -> new ScoreScript.LeafFactory() {
                 @Override
                 public boolean needs_score() {
                     return false;
                 }
+
+                @Override
+                public ScoreScript newInstance(LeafReaderContext ctx) throws IOException {
+                    return new MyScript(params1, lookup, ctx);
+                }
             };
             return context.factoryClazz.cast(factory);
         }
@@ -93,28 +94,21 @@ public class ExplainableScriptIT extends ESIntegTestCase {
         }
     }

-    static class MyScript extends SearchScript implements ExplainableSearchScript {
-        LeafDocLookup docLookup;
+    static class MyScript extends ScoreScript implements ExplainableSearchScript {

-        MyScript(LeafDocLookup docLookup) {
-            super(null, null, null);
-            this.docLookup = docLookup;
-        }
-
-        @Override
-        public void setDocument(int doc) {
-            docLookup.setDocument(doc);
+        MyScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
+            super(params, lookup, leafContext);
         }

         @Override
         public Explanation explain(Explanation subQueryScore) throws IOException {
             Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore);
-            return Explanation.match((float) (runAsDouble()), "This script returned " + runAsDouble(), scoreExp);
+            return Explanation.match((float) (execute()), "This script returned " + execute(), scoreExp);
         }

         @Override
-        public double runAsDouble() {
-            return ((Number) ((ScriptDocValues) docLookup.get("number_field")).getValues().get(0)).doubleValue();
+        public double execute() {
+            return ((Number) ((ScriptDocValues) getDoc().get("number_field")).getValues().get(0)).doubleValue();
         }
     }
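Note: the hunks above migrate the test script from the retired SearchScript contract to ScoreScript: a Factory yields a per-search LeafFactory, the LeafFactory yields one script per Lucene segment, and execute() replaces runAsDouble(). A hedged sketch of that three-level shape using stand-in Demo types (the real interfaces live in org.elasticsearch.script):

    import java.util.Map;

    // Sketch of the Factory -> LeafFactory -> per-segment script chain.
    interface DemoFactory {
        DemoLeafFactory newFactory(Map<String, Object> params);
    }

    interface DemoLeafFactory {
        boolean needsScore();                    // mirrors needs_score()
        DemoScript newInstance(int leafOrdinal); // one script per segment
    }

    interface DemoScript {
        double execute();                        // replaces runAsDouble()
    }

    class DemoUsage {
        public static void main(String[] args) {
            DemoFactory factory = params -> new DemoLeafFactory() {
                @Override
                public boolean needsScore() { return false; }
                @Override
                public DemoScript newInstance(int leafOrdinal) {
                    return () -> 42.0; // constant score for the sketch
                }
            };
            System.out.println(factory.newFactory(Map.of()).newInstance(0).execute());
        }
    }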
@@ -23,10 +23,12 @@ import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.IntSet;
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
+import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats;
 import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
 import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
@@ -83,7 +85,12 @@ import org.elasticsearch.test.TestCustomMetaData;
 import org.elasticsearch.test.rest.FakeRestRequest;

 import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -102,6 +109,7 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.notNullValue;
@@ -1019,6 +1027,129 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         assertThat(snapshots.get(0).getState().completed(), equalTo(true));
     }

+    public void testSnapshotTotalAndIncrementalSizes() throws IOException {
+        Client client = client();
+        final String indexName = "test-blocks-1";
+        final String repositoryName = "repo-" + indexName;
+        final String snapshot0 = "snapshot-0";
+        final String snapshot1 = "snapshot-1";
+
+        createIndex(indexName);
+
+        int docs = between(10, 100);
+        for (int i = 0; i < docs; i++) {
+            client.prepareIndex(indexName, "type").setSource("test", "init").execute().actionGet();
+        }
+
+        logger.info("--> register a repository");
+
+        final Path repoPath = randomRepoPath();
+        assertAcked(client.admin().cluster().preparePutRepository(repositoryName)
+            .setType("fs")
+            .setSettings(Settings.builder().put("location", repoPath)));
+
+        logger.info("--> create a snapshot");
+        client.admin().cluster().prepareCreateSnapshot(repositoryName, snapshot0)
+            .setIncludeGlobalState(true)
+            .setWaitForCompletion(true)
+            .get();
+
+        SnapshotsStatusResponse response = client.admin().cluster().prepareSnapshotStatus(repositoryName)
+            .setSnapshots(snapshot0)
+            .get();
+
+        List<SnapshotStatus> snapshots = response.getSnapshots();
+
+        List<Path> snapshot0Files = scanSnapshotFolder(repoPath);
+        assertThat(snapshots, hasSize(1));
+
+        final int snapshot0FileCount = snapshot0Files.size();
+        final long snapshot0FileSize = calculateTotalFilesSize(snapshot0Files);
+
+        SnapshotStats stats = snapshots.get(0).getStats();
+
+        assertThat(stats.getTotalFileCount(), is(snapshot0FileCount));
+        assertThat(stats.getTotalSize(), is(snapshot0FileSize));
+
+        assertThat(stats.getIncrementalFileCount(), equalTo(snapshot0FileCount));
+        assertThat(stats.getIncrementalSize(), equalTo(snapshot0FileSize));
+
+        assertThat(stats.getIncrementalFileCount(), equalTo(stats.getProcessedFileCount()));
+        assertThat(stats.getIncrementalSize(), equalTo(stats.getProcessedSize()));
+
+        // add few docs - less than initially
+        docs = between(1, 5);
+        for (int i = 0; i < docs; i++) {
+            client.prepareIndex(indexName, "type").setSource("test", "test" + i).execute().actionGet();
+        }
+
+        // create another snapshot
+        // total size has to grow and has to be equal to files on fs
+        assertThat(client.admin().cluster()
+                .prepareCreateSnapshot(repositoryName, snapshot1)
+                .setWaitForCompletion(true).get().status(),
+            equalTo(RestStatus.OK));
+
+        // drop 1st one to avoid miscalculation as snapshot reuses some files of prev snapshot
+        assertTrue(client.admin().cluster()
+            .prepareDeleteSnapshot(repositoryName, snapshot0)
+            .get().isAcknowledged());
+
+        response = client.admin().cluster().prepareSnapshotStatus(repositoryName)
+            .setSnapshots(snapshot1)
+            .get();
+
+        final List<Path> snapshot1Files = scanSnapshotFolder(repoPath);
+
+        final int snapshot1FileCount = snapshot1Files.size();
+        final long snapshot1FileSize = calculateTotalFilesSize(snapshot1Files);
+
+        snapshots = response.getSnapshots();
+
+        SnapshotStats anotherStats = snapshots.get(0).getStats();
+
+        ArrayList<Path> snapshotFilesDiff = new ArrayList<>(snapshot1Files);
+        snapshotFilesDiff.removeAll(snapshot0Files);
+
+        assertThat(anotherStats.getIncrementalFileCount(), equalTo(snapshotFilesDiff.size()));
+        assertThat(anotherStats.getIncrementalSize(), equalTo(calculateTotalFilesSize(snapshotFilesDiff)));
+
+        assertThat(anotherStats.getIncrementalFileCount(), equalTo(anotherStats.getProcessedFileCount()));
+        assertThat(anotherStats.getIncrementalSize(), equalTo(anotherStats.getProcessedSize()));
+
+        assertThat(stats.getTotalSize(), lessThan(anotherStats.getTotalSize()));
+        assertThat(stats.getTotalFileCount(), lessThan(anotherStats.getTotalFileCount()));
+
+        assertThat(anotherStats.getTotalFileCount(), is(snapshot1FileCount));
+        assertThat(anotherStats.getTotalSize(), is(snapshot1FileSize));
+    }
+
+    private long calculateTotalFilesSize(List<Path> files) {
+        return files.stream().mapToLong(f -> {
+            try {
+                return Files.size(f);
+            } catch (IOException e) {
+                throw new UncheckedIOException(e);
+            }
+        }).sum();
+    }
+
+    private List<Path> scanSnapshotFolder(Path repoPath) throws IOException {
+        List<Path> files = new ArrayList<>();
+        Files.walkFileTree(repoPath, new SimpleFileVisitor<Path>() {
+            @Override
+            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+                if (file.getFileName().toString().startsWith("__")) {
+                    files.add(file);
+                }
+                return super.visitFile(file, attrs);
+            }
+        });
+        return files;
+    }
+
     public static class SnapshottableMetadata extends TestCustomMetaData {
         public static final String TYPE = "test_snapshottable";
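Note: the new test above asserts that a snapshot's incremental stats cover exactly the "__"-prefixed segment blobs that the previous snapshot did not already store, while the total stats cover everything the snapshot references. A minimal sketch of that accounting, assuming snapshots can be modelled as plain sets of file names:

    import java.util.HashSet;
    import java.util.Set;

    // Sketch: incremental = current snapshot's files minus the files the
    // previous snapshot already stored; total = all files it references.
    final class SnapshotAccountingDemo {
        static Set<String> incremental(Set<String> previous, Set<String> current) {
            Set<String> diff = new HashSet<>(current);
            diff.removeAll(previous);
            return diff;
        }

        public static void main(String[] args) {
            Set<String> snapshot0 = Set.of("__seg_a", "__seg_b");
            Set<String> snapshot1 = Set.of("__seg_a", "__seg_b", "__seg_c");
            // Mirrors the test's expectation: only the new blob is incremental.
            System.out.println(incremental(snapshot0, snapshot1)); // [__seg_c]
        }
    }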
@@ -2066,7 +2066,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test").get().getSnapshots().get(0);
         List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
         for (SnapshotIndexShardStatus status : shards) {
-            assertThat(status.getStats().getProcessedFiles(), greaterThan(1));
+            assertThat(status.getStats().getProcessedFileCount(), greaterThan(1));
         }
     }

@@ -2078,7 +2078,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-1").get().getSnapshots().get(0);
         List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
         for (SnapshotIndexShardStatus status : shards) {
-            assertThat(status.getStats().getProcessedFiles(), equalTo(0));
+            assertThat(status.getStats().getProcessedFileCount(), equalTo(0));
         }
     }

@@ -2091,7 +2091,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-2").get().getSnapshots().get(0);
         List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
         for (SnapshotIndexShardStatus status : shards) {
-            assertThat(status.getStats().getProcessedFiles(), equalTo(2)); // we flush before the snapshot such that we have to process the segments_N files plus the .del file
+            assertThat(status.getStats().getProcessedFileCount(), equalTo(2)); // we flush before the snapshot such that we have to process the segments_N files plus the .del file
         }
     }
 }
@@ -632,7 +632,7 @@ public abstract class IndexShardTestCase extends ESTestCase {

         final IndexShardSnapshotStatus.Copy lastSnapshotStatus = snapshotStatus.asCopy();
         assertEquals(IndexShardSnapshotStatus.Stage.DONE, lastSnapshotStatus.getStage());
-        assertEquals(shard.snapshotStoreMetadata().size(), lastSnapshotStatus.getNumberOfFiles());
+        assertEquals(shard.snapshotStoreMetadata().size(), lastSnapshotStatus.getTotalFileCount());
         assertNull(lastSnapshotStatus.getFailure());
     }
@@ -25,7 +25,6 @@ import org.elasticsearch.index.similarity.ScriptedSimilarity.Doc;
 import org.elasticsearch.index.similarity.ScriptedSimilarity.Field;
 import org.elasticsearch.index.similarity.ScriptedSimilarity.Query;
 import org.elasticsearch.index.similarity.ScriptedSimilarity.Term;
-import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript;
 import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions;
 import org.elasticsearch.search.lookup.LeafSearchLookup;
@@ -36,7 +35,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.function.Function;
-import java.util.function.Predicate;

 import static java.util.Collections.emptyMap;
@@ -114,6 +112,9 @@ public class MockScriptEngine implements ScriptEngine {
         } else if (context.instanceClazz.equals(MovingFunctionScript.class)) {
             MovingFunctionScript.Factory factory = mockCompiled::createMovingFunctionScript;
             return context.factoryClazz.cast(factory);
+        } else if (context.instanceClazz.equals(ScoreScript.class)) {
+            ScoreScript.Factory factory = new MockScoreScript(script);
+            return context.factoryClazz.cast(factory);
         }
         throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]");
     }
@@ -343,4 +344,44 @@ public class MockScriptEngine implements ScriptEngine {
         }
     }

+    public class MockScoreScript implements ScoreScript.Factory {
+
+        private final Function<Map<String, Object>, Object> scripts;
+
+        MockScoreScript(Function<Map<String, Object>, Object> scripts) {
+            this.scripts = scripts;
+        }
+
+        @Override
+        public ScoreScript.LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup) {
+            return new ScoreScript.LeafFactory() {
+                @Override
+                public boolean needs_score() {
+                    return true;
+                }
+
+                @Override
+                public ScoreScript newInstance(LeafReaderContext ctx) throws IOException {
+                    Scorer[] scorerHolder = new Scorer[1];
+                    return new ScoreScript(params, lookup, ctx) {
+                        @Override
+                        public double execute() {
+                            Map<String, Object> vars = new HashMap<>(getParams());
+                            vars.put("doc", getDoc());
+                            if (scorerHolder[0] != null) {
+                                vars.put("_score", new ScoreAccessor(scorerHolder[0]));
+                            }
+                            return ((Number) scripts.apply(vars)).doubleValue();
+                        }
+
+                        @Override
+                        public void setScorer(Scorer scorer) {
+                            scorerHolder[0] = scorer;
+                        }
+                    };
+                }
+            };
+        }
+    }
+
 }
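Note: with the MockScoreScript wiring above, a mock score script is just a function over a vars map; the engine copies the compiled params and then injects "doc" and, once a scorer is attached, "_score" before applying it. A hedged usage sketch (the map contents are schematic, not the exact runtime types):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    // Sketch: what the mock function sees when the engine invokes execute().
    final class MockScoreDemo {
        public static void main(String[] args) {
            Function<Map<String, Object>, Object> script =
                vars -> vars.containsKey("_score") ? 2.0 : 1.0;

            Map<String, Object> vars = new HashMap<>();
            vars.put("doc", Map.of());              // always present
            System.out.println(script.apply(vars)); // 1.0: no scorer yet

            vars.put("_score", 0.5f);               // added once a Scorer is set
            System.out.println(script.apply(vars)); // 2.0
        }
    }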
@@ -123,13 +123,15 @@ public class XPackInfoResponse extends ActionResponse {

         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            return builder.startObject()
+            builder.startObject()
                 .field("uid", uid)
                 .field("type", type)
                 .field("mode", mode)
-                .field("status", status.label())
-                .timeField("expiry_date_in_millis", "expiry_date", expiryDate)
-                .endObject();
+                .field("status", status.label());
+            if (expiryDate != LicenseService.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) {
+                builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate);
+            }
+            return builder.endObject();
         }

         public void writeTo(StreamOutput out) throws IOException {
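Note: the hunk above unchains the builder so the expiry fields can be emitted conditionally; a basic self-generated license, whose expiry equals the LicenseService.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS sentinel, now omits expiry_date from the response. A hedged sketch of the same conditional-field pattern, with a StringBuilder standing in for XContentBuilder and an assumed sentinel value:

    // Sketch: break a fluent chain where one field is conditional.
    final class ConditionalFieldDemo {
        static final long SENTINEL = Long.MAX_VALUE; // stand-in for the real constant

        static String toJson(String status, long expiryDateMillis) {
            StringBuilder b = new StringBuilder("{\"status\":\"").append(status).append('"');
            if (expiryDateMillis != SENTINEL) {      // basic licenses skip the field
                b.append(",\"expiry_date_in_millis\":").append(expiryDateMillis);
            }
            return b.append('}').toString();
        }

        public static void main(String[] args) {
            System.out.println(toJson("active", SENTINEL)); // {"status":"active"}
            System.out.println(toJson("active", 1234L));    // includes the expiry field
        }
    }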
@@ -52,24 +52,20 @@ dependencies {
     compile (xpackProject('plugin:sql:sql-shared-client')) {
         transitive = false
     }
-    compile (xpackProject('plugin:sql:sql-proto')) {
+    compile (xpackProject('plugin:sql:sql-shared-proto')) {
         transitive = false
     }
 } else {
     bundled (xpackProject('plugin:sql:sql-shared-client')) {
         transitive = false
     }
-    bundled (xpackProject('plugin:sql:sql-proto')) {
+    bundled (xpackProject('plugin:sql:sql-shared-proto')) {
         transitive = false
     }
 }
-compile (project(':server')) {
-    transitive = false
-}
 compile (project(':libs:x-content')) {
     transitive = false
 }
-compile "org.apache.lucene:lucene-core:${versions.lucene}"
 compile 'joda-time:joda-time:2.9.9'
 compile project(':libs:elasticsearch-core')
 runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
@@ -80,15 +76,13 @@ dependencies {
 }

 dependencyLicenses {
-    mapping from: /sql-proto.*/, to: 'elasticsearch'
+    mapping from: /sql-shared-proto.*/, to: 'elasticsearch'
     mapping from: /sql-shared-client.*/, to: 'elasticsearch'
     mapping from: /jackson-.*/, to: 'jackson'
-    mapping from: /lucene-.*/, to: 'lucene'
     mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
-    ignoreSha 'sql-proto'
+    ignoreSha 'sql-shared-proto'
     ignoreSha 'sql-shared-client'
     ignoreSha 'elasticsearch'
-    ignoreSha 'elasticsearch-core'
 }

 /*
@@ -1,475 +0,0 @@
-[entire file deleted: the Apache License 2.0 text together with Lucene's bundled
- third-party notices (Unicode, Python, Brics automaton, moman/finenight FSA,
- ICU, Snowball stemmers), removed along with the lucene-core dependency above]
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
|
||||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
The following license applies to the KStemmer:
|
|
||||||
|
|
||||||
Copyright © 2003,
|
|
||||||
Center for Intelligent Information Retrieval,
|
|
||||||
University of Massachusetts, Amherst.
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
3. The names "Center for Intelligent Information Retrieval" and
|
|
||||||
"University of Massachusetts" must not be used to endorse or promote products
|
|
||||||
derived from this software without prior written permission. To obtain
|
|
||||||
permission, contact info@ciir.cs.umass.edu.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS
|
|
||||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
|
|
||||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
|
|
||||||
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
|
||||||
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
||||||
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
|
|
||||||
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGE.
|
|
||||||
|
|
||||||
The following license applies to the Morfologik project:
|
|
||||||
|
|
||||||
Copyright (c) 2006 Dawid Weiss
|
|
||||||
Copyright (c) 2007-2011 Dawid Weiss, Marcin Miłkowski
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
* Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
* Neither the name of Morfologik nor the names of its contributors
|
|
||||||
may be used to endorse or promote products derived from this software
|
|
||||||
without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
|
||||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
||||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
||||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
|
||||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
The dictionary comes from Morfologik project. Morfologik uses data from
|
|
||||||
Polish ispell/myspell dictionary hosted at http://www.sjp.pl/slownik/en/ and
|
|
||||||
is licenced on the terms of (inter alia) LGPL and Creative Commons
|
|
||||||
ShareAlike. The part-of-speech tags were added in Morfologik project and
|
|
||||||
are not found in the data from sjp.pl. The tagset is similar to IPI PAN
|
|
||||||
tagset.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
The following license applies to the Morfeusz project,
|
|
||||||
used by org.apache.lucene.analysis.morfologik.
|
|
||||||
|
|
||||||
BSD-licensed dictionary of Polish (SGJP)
|
|
||||||
http://sgjp.pl/morfeusz/
|
|
||||||
|
|
||||||
Copyright © 2011 Zygmunt Saloni, Włodzimierz Gruszczyński,
|
|
||||||
Marcin Woliński, Robert Wołosz
|
|
||||||
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are
|
|
||||||
met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer in the
|
|
||||||
documentation and/or other materials provided with the
|
|
||||||
distribution.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS “AS IS” AND ANY EXPRESS
|
|
||||||
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE
|
|
||||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
|
||||||
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
|
||||||
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
|
||||||
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
|
||||||
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
@ -1,192 +0,0 @@
Apache Lucene
Copyright 2014 The Apache Software Foundation

This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

Includes software from other Apache Software Foundation projects,
including, but not limited to:
 - Apache Ant
 - Apache Jakarta Regexp
 - Apache Commons
 - Apache Xerces

ICU4J, (under analysis/icu) is licensed under an MIT styles license
and Copyright (c) 1995-2008 International Business Machines Corporation and others

Some data files (under analysis/icu/src/data) are derived from Unicode data such
as the Unicode Character Database. See http://unicode.org/copyright.html for more
details.

Brics Automaton (under core/src/java/org/apache/lucene/util/automaton) is
BSD-licensed, created by Anders Møller. See http://www.brics.dk/automaton/

The levenshtein automata tables (under core/src/java/org/apache/lucene/util/automaton) were
automatically generated with the moman/finenight FSA library, created by
Jean-Philippe Barrette-LaPierre. This library is available under an MIT license,
see http://sites.google.com/site/rrettesite/moman and
http://bitbucket.org/jpbarrette/moman/overview/

The class org.apache.lucene.util.WeakIdentityMap was derived from
the Apache CXF project and is Apache License 2.0.

The Google Code Prettify is Apache License 2.0.
See http://code.google.com/p/google-code-prettify/

JUnit (junit-4.10) is licensed under the Common Public License v. 1.0
See http://junit.sourceforge.net/cpl-v10.html

This product includes code (JaspellTernarySearchTrie) from Java Spelling Checkin
g Package (jaspell): http://jaspell.sourceforge.net/
License: The BSD License (http://www.opensource.org/licenses/bsd-license.php)

The snowball stemmers in
  analysis/common/src/java/net/sf/snowball
were developed by Martin Porter and Richard Boulton.
The snowball stopword lists in
  analysis/common/src/resources/org/apache/lucene/analysis/snowball
were developed by Martin Porter and Richard Boulton.
The full snowball package is available from
  http://snowball.tartarus.org/

The KStem stemmer in
  analysis/common/src/org/apache/lucene/analysis/en
was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst)
under the BSD-license.

The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default
stopword list that is BSD-licensed created by Jacques Savoy. These files reside in:
analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt
See http://members.unine.ch/jacques.savoy/clef/index.html.

The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers
(common) are based on BSD-licensed reference implementations created by Jacques Savoy and
Ljiljana Dolamic. These files reside in:
analysis/common/src/java/org/apache/lucene/analysis/de/GermanLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/de/GermanMinimalStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/es/SpanishLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchMinimalStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/it/ItalianLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/ru/RussianLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishLightStemmer.java

The Stempel analyzer (stempel) includes BSD-licensed software developed
by the Egothor project http://egothor.sf.net/, created by Leo Galambos, Martin Kvapil,
and Edmond Nolan.

The Polish analyzer (stempel) comes with a default
stopword list that is BSD-licensed created by the Carrot2 project. The file resides
in stempel/src/resources/org/apache/lucene/analysis/pl/stopwords.txt.
See http://project.carrot2.org/license.html.

The SmartChineseAnalyzer source code (smartcn) was
provided by Xiaoping Gao and copyright 2009 by www.imdict.net.

WordBreakTestUnicode_*.java (under modules/analysis/common/src/test/)
is derived from Unicode data such as the Unicode Character Database.
See http://unicode.org/copyright.html for more details.

The Morfologik analyzer (morfologik) includes BSD-licensed software
developed by Dawid Weiss and Marcin Miłkowski (http://morfologik.blogspot.com/).

Morfologik uses data from Polish ispell/myspell dictionary
(http://www.sjp.pl/slownik/en/) licenced on the terms of (inter alia)
LGPL and Creative Commons ShareAlike.

Morfologic includes data from BSD-licensed dictionary of Polish (SGJP)
(http://sgjp.pl/morfeusz/)

Servlet-api.jar and javax.servlet-*.jar are under the CDDL license, the original
source code for this can be found at http://www.eclipse.org/jetty/downloads.php

===========================================================================
Kuromoji Japanese Morphological Analyzer - Apache Lucene Integration
===========================================================================

This software includes a binary and/or source version of data from

  mecab-ipadic-2.7.0-20070801

which can be obtained from

  http://atilika.com/releases/mecab-ipadic/mecab-ipadic-2.7.0-20070801.tar.gz

or

  http://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/mecab-ipadic-2.7.0-20070801.tar.gz

===========================================================================
mecab-ipadic-2.7.0-20070801 Notice
===========================================================================

Nara Institute of Science and Technology (NAIST),
the copyright holders, disclaims all warranties with regard to this
software, including all implied warranties of merchantability and
fitness, in no event shall NAIST be liable for
any special, indirect or consequential damages or any damages
whatsoever resulting from loss of use, data or profits, whether in an
action of contract, negligence or other tortuous action, arising out
of or in connection with the use or performance of this software.

A large portion of the dictionary entries
originate from ICOT Free Software. The following conditions for ICOT
Free Software applies to the current dictionary as well.

Each User may also freely distribute the Program, whether in its
original form or modified, to any third party or parties, PROVIDED
that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
on, or be attached to, the Program, which is distributed substantially
in the same form as set out herein and that such intended
distribution, if actually made, will neither violate or otherwise
contravene any of the laws and regulations of the countries having
jurisdiction over the User or the intended distribution itself.

NO WARRANTY

The program was produced on an experimental basis in the course of the
research and development conducted during the project and is provided
to users as so produced on an experimental basis. Accordingly, the
program is provided without any warranty whatsoever, whether express,
implied, statutory or otherwise. The term "warranty" used herein
includes, but is not limited to, any warranty of the quality,
performance, merchantability and fitness for a particular purpose of
the program and the nonexistence of any infringement or violation of
any right of any third party.

Each user of the program will agree and understand, and be deemed to
have agreed and understood, that there is no warranty whatsoever for
the program and, accordingly, the entire risk arising from or
otherwise connected with the program is assumed by the user.

Therefore, neither ICOT, the copyright holder, or any other
organization that participated in or was otherwise related to the
development of the program and their respective officials, directors,
officers and other employees shall be held liable for any and all
damages, including, without limitation, general, special, incidental
and consequential damages, arising out of or otherwise in connection
with the use or inability to use the program or any product, material
or result produced or otherwise obtained by using the program,
regardless of whether they have been advised of, or otherwise had
knowledge of, the possibility of such damages at any time during the
project or thereafter. Each user will be deemed to have agreed to the
foregoing by his or her commencement of use of the program. The term
"use" as used herein includes, but is not limited to, the use,
modification, copying and distribution of the program and the
production of secondary products from the program.

In the case where the program, whether in its original form or
modified, was distributed or delivered to or received by a user from
any person, organization or entity other than ICOT, unless it makes or
grants independently of ICOT any specific warranty to the user in
writing, such person, organization or entity, will also be exempted
from and not be held liable to the user for any such damages as noted
above as far as the program is concerned.
@ -1 +0,0 @@
e118e4d05070378516b9055184b74498ba528dee
@ -10,9 +10,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
-import org.elasticsearch.xpack.sql.proto.Mode;
 import org.joda.time.DateTime;
+import org.joda.time.ReadableDateTime;

 import java.sql.JDBCType;

@ -51,7 +50,11 @@ public class TypeConverterTests extends ESTestCase {
         XContentBuilder builder = JsonXContent.contentBuilder();
         builder.startObject();
         builder.field("value");
-        SqlQueryResponse.value(builder, Mode.JDBC, value);
+        if (value instanceof ReadableDateTime) {
+            builder.value(((ReadableDateTime) value).getMillis());
+        } else {
+            builder.value(value);
+        }
         builder.endObject();
         builder.close();
         Object copy = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2().get("value");
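The test no longer routes the expected value through the server-side SqlQueryResponse.value(builder, Mode.JDBC, value); it encodes the wire convention directly: Joda date/time values are written as epoch millis, everything else is written as-is. A minimal self-contained sketch of that convention (the class and helper names below are hypothetical, for illustration only):

import org.joda.time.DateTime;
import org.joda.time.ReadableDateTime;

public class MillisConventionSketch {
    // Hypothetical helper mirroring the branch in the test above.
    static Object wireValue(Object value) {
        if (value instanceof ReadableDateTime) {
            return ((ReadableDateTime) value).getMillis();
        }
        return value;
    }

    public static void main(String[] args) {
        System.out.println(wireValue(new DateTime(0L))); // dates become epoch millis: 0
        System.out.println(wireValue(42));               // non-dates pass through: 42
    }
}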
@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql.cli.command;
 import org.elasticsearch.xpack.sql.client.HttpClient;
 import org.elasticsearch.xpack.sql.client.shared.ClientException;
 import org.elasticsearch.xpack.sql.client.shared.Version;
-import org.elasticsearch.xpack.sql.plugin.AbstractSqlQueryRequest;
 import org.elasticsearch.xpack.sql.proto.MainResponse;
 import org.elasticsearch.xpack.sql.proto.Protocol;

@ -28,7 +28,7 @@ public class CliSessionTests extends ESTestCase {
     public void testProperConnection() throws Exception {
         HttpClient httpClient = mock(HttpClient.class);
         when(httpClient.serverInfo()).thenReturn(new MainResponse(randomAlphaOfLength(5), org.elasticsearch.Version.CURRENT.toString(),
-                ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID(), Build.CURRENT));
+                ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID()));
         CliSession cliSession = new CliSession(httpClient);
         cliSession.checkConnection();
         verify(httpClient, times(1)).serverInfo();
@ -58,7 +58,7 @@ public class CliSessionTests extends ESTestCase {
         }
         when(httpClient.serverInfo()).thenReturn(new MainResponse(randomAlphaOfLength(5),
                 org.elasticsearch.Version.fromString(major + "." + minor + ".23").toString(),
-                ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID(), Build.CURRENT));
+                ClusterName.DEFAULT.value(), UUIDs.randomBase64UUID()));
         CliSession cliSession = new CliSession(httpClient);
         expectThrows(ClientException.class, cliSession::checkConnection);
         verify(httpClient, times(1)).serverInfo();
@ -36,7 +36,7 @@ public class ServerInfoCliCommandTests extends ESTestCase {
         HttpClient client = mock(HttpClient.class);
         CliSession cliSession = new CliSession(client);
         when(client.serverInfo()).thenReturn(new MainResponse("my_node", "1.2.3",
-                new ClusterName("my_cluster").value(), UUIDs.randomBase64UUID(), Build.CURRENT));
+                new ClusterName("my_cluster").value(), UUIDs.randomBase64UUID()));
         ServerInfoCliCommand cliCommand = new ServerInfoCliCommand();
         assertTrue(cliCommand.handle(testTerminal, cliSession, "info"));
         assertEquals(testTerminal.toString(), "Node:my_node Cluster:my_cluster Version:1.2.3\n");
@ -24,6 +24,7 @@ dependencies {
     compile (project(':libs:x-content')) {
         transitive = false
     }
+    compile xpackProject('plugin:sql:sql-shared-proto')
     compile "org.apache.lucene:lucene-core:${versions.lucene}"
     compile 'joda-time:joda-time:2.9.9'
     runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
@ -10,14 +10,14 @@ apply plugin: 'elasticsearch.build'
 description = 'Code shared between jdbc and cli'

 dependencies {
-    compile xpackProject('plugin:sql:sql-proto')
+    compile xpackProject('plugin:sql:sql-shared-proto')
     compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
     testCompile "org.elasticsearch.test:framework:${version}"
 }

 dependencyLicenses {
     mapping from: /jackson-.*/, to: 'jackson'
-    mapping from: /sql-proto.*/, to: 'elasticsearch'
+    mapping from: /sql-shared-proto.*/, to: 'elasticsearch'
     mapping from: /elasticsearch-cli.*/, to: 'elasticsearch'
     mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
     mapping from: /lucene-.*/, to: 'lucene'
@ -5,14 +5,12 @@
  */
 package org.elasticsearch.xpack.sql.client;

-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.core.internal.io.Streams;
@ -30,6 +28,8 @@ import org.elasticsearch.xpack.sql.proto.SqlClearCursorResponse;
 import org.elasticsearch.xpack.sql.proto.SqlQueryRequest;
 import org.elasticsearch.xpack.sql.proto.SqlQueryResponse;

+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.security.AccessController;
@ -92,13 +92,13 @@ public class HttpClient {
     private <Request extends AbstractSqlRequest, Response> Response post(String path, Request request,
                                                                          CheckedFunction<XContentParser, Response, IOException> responseParser)
             throws SQLException {
-        BytesReference requestBytes = toXContent(request);
+        byte[] requestBytes = toXContent(request);
         String query = "error_trace&mode=" + request.mode();
-        Tuple<XContentType, BytesReference> response =
-                AccessController.doPrivileged((PrivilegedAction<ResponseOrException<Tuple<XContentType, BytesReference>>>) () ->
+        Tuple<XContentType, byte[]> response =
+                AccessController.doPrivileged((PrivilegedAction<ResponseOrException<Tuple<XContentType, byte[]>>>) () ->
                         JreHttpUrlConnection.http(path, query, cfg, con ->
                                 con.request(
-                                        requestBytes::writeTo,
+                                        (out) -> out.write(requestBytes),
                                         this::readFrom,
                                         "POST"
                                 )
@ -120,8 +120,8 @@ public class HttpClient {

     private <Response> Response get(String path, CheckedFunction<XContentParser, Response, IOException> responseParser)
             throws SQLException {
-        Tuple<XContentType, BytesReference> response =
-                AccessController.doPrivileged((PrivilegedAction<ResponseOrException<Tuple<XContentType, BytesReference>>>) () ->
+        Tuple<XContentType, byte[]> response =
+                AccessController.doPrivileged((PrivilegedAction<ResponseOrException<Tuple<XContentType, byte[]>>>) () ->
                         JreHttpUrlConnection.http(path, "error_trace", cfg, con ->
                                 con.request(
                                         null,
@ -132,35 +132,44 @@ public class HttpClient {
         return fromXContent(response.v1(), response.v2(), responseParser);
     }

-    private static <Request extends ToXContent> BytesReference toXContent(Request xContent) {
-        try {
-            return XContentHelper.toXContent(xContent, REQUEST_BODY_CONTENT_TYPE, false);
+    private static <Request extends ToXContent> byte[] toXContent(Request xContent) {
+        try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
+            try (XContentBuilder xContentBuilder = new XContentBuilder(REQUEST_BODY_CONTENT_TYPE.xContent(), buffer)) {
+                if (xContent.isFragment()) {
+                    xContentBuilder.startObject();
+                }
+                xContent.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
+                if (xContent.isFragment()) {
+                    xContentBuilder.endObject();
+                }
+            }
+            return buffer.toByteArray();
         } catch (IOException ex) {
             throw new ClientException("Cannot serialize request", ex);
         }
     }

-    private Tuple<XContentType, BytesReference> readFrom(InputStream inputStream, Function<String, String> headers) {
+    private Tuple<XContentType, byte[]> readFrom(InputStream inputStream, Function<String, String> headers) {
         String contentType = headers.apply("Content-Type");
         XContentType xContentType = XContentType.fromMediaTypeOrFormat(contentType);
         if (xContentType == null) {
             throw new IllegalStateException("Unsupported Content-Type: " + contentType);
         }
-        BytesStreamOutput out = new BytesStreamOutput();
+        ByteArrayOutputStream out = new ByteArrayOutputStream();
         try {
             Streams.copy(inputStream, out);
         } catch (IOException ex) {
             throw new ClientException("Cannot deserialize response", ex);
         }
-        return new Tuple<>(xContentType, out.bytes());
+        return new Tuple<>(xContentType, out.toByteArray());

     }

-    private <Response> Response fromXContent(XContentType xContentType, BytesReference bytesReference,
+    private <Response> Response fromXContent(XContentType xContentType, byte[] bytesReference,
                                              CheckedFunction<XContentParser, Response, IOException> responseParser) {
-        try (InputStream stream = bytesReference.streamInput();
+        try (InputStream stream = new ByteArrayInputStream(bytesReference);
              XContentParser parser = xContentType.xContent().createParser(registry,
-                     LoggingDeprecationHandler.INSTANCE, stream)) {
+                     DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) {
             return responseParser.apply(parser);
         } catch (IOException ex) {
             throw new ClientException("Cannot parse response", ex);
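The rewrite above removes the client's last dependencies on server-side types (BytesReference, BytesStreamOutput, XContentHelper): requests are now serialized straight into a byte[], wrapping ToXContent fragments in startObject()/endObject() so the bytes always form a complete document. A self-contained sketch of that fragment-wrapping pattern, assuming only the x-content library on the classpath (the demo class and the sample field are hypothetical):

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FragmentToBytesSketch {
    public static void main(String[] args) throws IOException {
        // A fragment has no enclosing braces of its own, so it must be wrapped.
        ToXContent fragment = (builder, params) -> builder.field("query", "SELECT 1");
        try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
            try (XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, buffer)) {
                if (fragment.isFragment()) {
                    builder.startObject();
                }
                fragment.toXContent(builder, ToXContent.EMPTY_PARAMS);
                if (fragment.isFragment()) {
                    builder.endObject();
                }
            }
            // prints {"query":"SELECT 1"}
            System.out.println(new String(buffer.toByteArray(), StandardCharsets.UTF_8));
        }
    }
}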
@ -0,0 +1,35 @@
/*
 * This project contains XContent protocol classes shared between server and http client
 */

import org.elasticsearch.gradle.precommit.PrecommitTasks

apply plugin: 'elasticsearch.build'

description = 'Request and response objects shared by the cli, jdbc ' +
        'and the Elasticsearch plugin'

dependencies {
    compile (project(':libs:elasticsearch-core')) {
        transitive = false
    }
    compile (project(':libs:x-content')) {
        transitive = false
    }
    compile 'joda-time:joda-time:2.9.9'
    runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"

    testCompile "org.elasticsearch.test:framework:${version}"
}

forbiddenApisMain {
    // sql does not depend on server, so only jdk signatures should be checked
    signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')]
}

dependencyLicenses {
    mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
    mapping from: /jackson-.*/, to: 'jackson'
    ignoreSha 'elasticsearch-core'
}
@ -0,0 +1,8 @@
This copy of Jackson JSON processor streaming parser/generator is licensed under the
Apache (Software) License, version 2.0 ("the License").
See the License for details about distribution rights, and the
specific rights regarding derivate works.

You may obtain a copy of the License at:

http://www.apache.org/licenses/LICENSE-2.0
@ -0,0 +1,20 @@
# Jackson JSON processor

Jackson is a high-performance, Free/Open Source JSON processing library.
It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has
been in development since 2007.
It is currently developed by a community of developers, as well as supported
commercially by FasterXML.com.

## Licensing

Jackson core and extension components may licensed under different licenses.
To find the details that apply to this artifact see the accompanying LICENSE file.
For more information, including possible other licensing options, contact
FasterXML.com (http://fasterxml.com).

## Credits

A list of contributors may be found from CREDITS file, which is included
in some artifacts (usually source distributions); but is always available
from the source code management (SCM) system project uses.
@ -0,0 +1 @@
eb21a035c66ad307e66ec8fce37f5d50fd62d039
@ -0,0 +1 @@
f7b520c458572890807d143670c9b24f4de90897
@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
@ -0,0 +1,5 @@
=============================================================================
= NOTICE file corresponding to section 4d of the Apache License Version 2.0 =
=============================================================================
This product includes software developed by
Joda.org (http://www.joda.org/).
@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.proto;

 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@ -74,7 +73,7 @@ public class ColumnInfo implements ToXContentObject {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        if (Strings.hasText(table)) {
+        if (table != null && table.isEmpty() == false) {
             builder.field("table", table);
         }
         builder.field("name", name);
@ -146,6 +145,6 @@ public class ColumnInfo implements ToXContentObject {

     @Override
     public String toString() {
-        return Strings.toString(this);
+        return ProtoUtils.toString(this);
     }
 }
@ -6,7 +6,6 @@

 package org.elasticsearch.xpack.sql.proto;

-import org.elasticsearch.Build;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentParser;
@ -21,18 +20,16 @@ public class MainResponse {
     private String version;
     private String clusterName;
     private String clusterUuid;
-    // TODO: Add parser for Build
-    private Build build;

     private MainResponse() {
     }

-    public MainResponse(String nodeName, String version, String clusterName, String clusterUuid, Build build) {
+    public MainResponse(String nodeName, String version, String clusterName, String clusterUuid) {
         this.nodeName = nodeName;
         this.version = version;
         this.clusterName = clusterName;
         this.clusterUuid = clusterUuid;
-        this.build = build;
     }

     public String getNodeName() {
@ -51,10 +48,6 @@ public class MainResponse {
         return clusterUuid;
     }

-    public Build getBuild() {
-        return build;
-    }
-
     private static final ObjectParser<MainResponse, Void> PARSER = new ObjectParser<>(MainResponse.class.getName(), true,
             MainResponse::new);

@@ -65,15 +58,6 @@ public class MainResponse {
         PARSER.declareString((response, value) -> {
         }, new ParseField("tagline"));
         PARSER.declareObject((response, value) -> {
-            final String buildFlavor = (String) value.get("build_flavor");
-            final String buildType = (String) value.get("build_type");
-            response.build =
-                    new Build(
-                            buildFlavor == null ? Build.Flavor.UNKNOWN : Build.Flavor.fromDisplayName(buildFlavor),
-                            buildType == null ? Build.Type.UNKNOWN : Build.Type.fromDisplayName(buildType),
-                            (String) value.get("build_hash"),
-                            (String) value.get("build_date"),
-                            (boolean) value.get("build_snapshot"));
             response.version = (String) value.get("number");
         }, (parser, context) -> parser.map(), new ParseField("version"));
     }
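After this removal, the handler for the "version" object keeps only the "number" lookup; the build_* keys in that map are now ignored. A standalone sketch of the surviving extraction (the map literal stands in for what parser.map() returns; values hypothetical):

import java.util.HashMap;
import java.util.Map;

public class VersionExtractionSketch {
    public static void main(String[] args) {
        // Shape of the "version" object in the main action response.
        Map<String, Object> value = new HashMap<>();
        value.put("number", "6.3.0");
        value.put("build_flavor", "default");   // previously fed into Build, now ignored
        value.put("build_snapshot", false);     // likewise ignored

        String version = (String) value.get("number");
        System.out.println(version); // 6.3.0
    }
}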
@@ -94,12 +78,11 @@ public class MainResponse {
         return Objects.equals(nodeName, other.nodeName) &&
                 Objects.equals(version, other.version) &&
                 Objects.equals(clusterUuid, other.clusterUuid) &&
-                Objects.equals(build, other.build) &&
                 Objects.equals(clusterName, other.clusterName);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(nodeName, version, clusterUuid, build, clusterName);
+        return Objects.hash(nodeName, version, clusterUuid, clusterName);
     }
 }
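Dropping build from equals and hashCode in the same hunk is what keeps the equals/hashCode contract intact: had equals stopped comparing build while hashCode still mixed it in, two equal responses could hash differently. A compact illustration of the invariant (PairSketch is a hypothetical stand-in):

import java.util.Objects;

final class PairSketch {
    final String a;
    final String b;

    PairSketch(String a, String b) {
        this.a = a;
        this.b = b;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof PairSketch == false) {
            return false;
        }
        PairSketch other = (PairSketch) obj;
        // equals and hashCode must agree on the same field set...
        return Objects.equals(a, other.a) && Objects.equals(b, other.b);
    }

    @Override
    public int hashCode() {
        // ...so a field is either in both methods or in neither.
        return Objects.hash(a, b);
    }
}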
@@ -0,0 +1,85 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.proto;
+
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Locale;
+
+public final class ProtoUtils {
+
+    private ProtoUtils() {
+
+    }
+
+    /**
+     * Parses a generic value from the XContent stream
+     */
+    public static Object parseFieldsValue(XContentParser parser) throws IOException {
+        XContentParser.Token token = parser.currentToken();
+        if (token == XContentParser.Token.VALUE_STRING) {
+            // binary values will be parsed back and returned as base64 strings when reading from json and yaml
+            return parser.text();
+        } else if (token == XContentParser.Token.VALUE_NUMBER) {
+            return parser.numberValue();
+        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
+            return parser.booleanValue();
+        } else if (token == XContentParser.Token.VALUE_NULL) {
+            return null;
+        } else if (token == XContentParser.Token.START_OBJECT) {
+            return parser.mapOrdered();
+        } else if (token == XContentParser.Token.START_ARRAY) {
+            return parser.listOrderedMap();
+        } else {
+            String message = "Failed to parse object: unexpected token [%s] found";
+            throw new IllegalStateException(String.format(Locale.ROOT, message, token));
+        }
+    }
+
+    /**
+     * Returns a string representation of the builder (only applicable for text based xcontent).
+     *
+     * @param xContentBuilder builder containing an object to be converted to a string
+     */
+    public static String toString(XContentBuilder xContentBuilder) {
+        byte[] byteArray = ((ByteArrayOutputStream) xContentBuilder.getOutputStream()).toByteArray();
+        return new String(byteArray, StandardCharsets.UTF_8);
+    }
+
+    public static String toString(ToXContent toXContent) {
+        try {
+            XContentBuilder builder = JsonXContent.contentBuilder();
+            if (toXContent.isFragment()) {
+                builder.startObject();
+            }
+            toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS);
+            if (toXContent.isFragment()) {
+                builder.endObject();
+            }
+            builder.close();
+            return toString(builder);
+        } catch (IOException e) {
+            try {
+                XContentBuilder builder = JsonXContent.contentBuilder();
+                builder.startObject();
+                builder.field("error", "error building toString out of XContent: " + e.getMessage());
+                builder.endObject();
+                builder.close();
+                return toString(builder);
+            } catch (IOException e2) {
+                throw new IllegalArgumentException("cannot generate error message for deserialization", e);
+            }
+        }
+    }
+}
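A quick usage sketch of the two new helpers, mirroring how the test added later in this commit exercises them (assumes the sketch sits alongside ProtoUtils in the org.elasticsearch.xpack.sql.proto package):

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class ProtoUtilsUsageSketch {
    public static void main(String[] args) throws Exception {
        // toString renders any ToXContent (here a lambda fragment) as a JSON string,
        // wrapping fragments in an enclosing object.
        String json = ProtoUtils.toString((builder, params) -> builder.field("name", "city"));
        System.out.println(json); // {"name":"city"}

        // parseFieldsValue reads a generic value off a parser positioned on it.
        XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json);
        parser.nextToken(); // START_OBJECT
        parser.nextToken(); // FIELD_NAME
        parser.nextToken(); // VALUE_STRING
        System.out.println(ProtoUtils.parseFieldsValue(parser)); // city
    }
}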
@@ -18,7 +18,7 @@ import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue;
+import static org.elasticsearch.xpack.sql.proto.ProtoUtils.parseFieldsValue;
 
 /**
  * Response to perform an sql query for JDBC/CLI client
@@ -11,13 +11,13 @@ import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentParserUtils;
 import org.elasticsearch.xpack.sql.type.DataType;
 
 import java.io.IOException;
 import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.xpack.sql.proto.ProtoUtils.parseFieldsValue;
 
 /**
  * Represent a strongly typed parameter value
@@ -33,7 +33,7 @@ public class SqlTypedParamValue implements ToXContentObject {
     private static final ParseField TYPE = new ParseField("type");
 
     static {
-        PARSER.declareField(constructorArg(), (p, c) -> XContentParserUtils.parseFieldsValue(p), VALUE, ObjectParser.ValueType.VALUE);
+        PARSER.declareField(constructorArg(), (p, c) -> parseFieldsValue(p), VALUE, ObjectParser.ValueType.VALUE);
         PARSER.declareString(constructorArg(), TYPE);
     }
 
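The static import means the declareField lambda now routes through the new ProtoUtils helper with no change to the accepted wire format. A self-contained sketch of the same declareField pattern (TypedValueSketch is a hypothetical stand-in for SqlTypedParamValue):

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.xpack.sql.proto.ProtoUtils.parseFieldsValue;

final class TypedValueSketch {
    final Object value;
    final String type;

    TypedValueSketch(Object value, String type) {
        this.value = value;
        this.type = type;
    }

    static final ConstructingObjectParser<TypedValueSketch, Void> PARSER = new ConstructingObjectParser<>(
            "typed_value_sketch", a -> new TypedValueSketch(a[0], (String) a[1]));

    static {
        // A free-form value parsed generically, followed by a type string:
        // accepts documents like {"value": 42, "type": "integer"}.
        PARSER.declareField(constructorArg(), (p, c) -> parseFieldsValue(p), new ParseField("value"), ObjectParser.ValueType.VALUE);
        PARSER.declareString(constructorArg(), new ParseField("type"));
    }
}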
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.proto;
+
+import org.elasticsearch.common.xcontent.DeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+public class ProtoUtilsTests extends ESTestCase {
+
+    public void testGenericValueParsing() throws IOException {
+
+        String json = ProtoUtils.toString((builder, params) -> {
+            builder.field("int", 42);
+            builder.field("double", 42.5);
+            builder.field("string", "foobar");
+            builder.nullField("null");
+            return builder;
+        });
+
+        XContentParser parser =
+            JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json);
+
+        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
+            assertEquals(XContentParser.Token.FIELD_NAME, parser.currentToken());
+            String fieldName = parser.currentName();
+            parser.nextToken();
+            Object val = ProtoUtils.parseFieldsValue(parser);
+            switch (fieldName) {
+                case "int":
+                    assertEquals(42, val);
+                    break;
+                case "double":
+                    assertEquals(42.5, val);
+                    break;
+                case "string":
+                    assertEquals("foobar", val);
+                    break;
+                case "null":
+                    assertNull(val);
+                    break;
+                default:
+                    fail("Unexpected value " + fieldName);
+            }
+        }
+        assertNull(parser.nextToken());
+
+    }
+
+}
@@ -96,6 +96,9 @@ subprojects {
 
     // CLI testing dependencies
    testRuntime project(path: xpackModule('sql:sql-cli'), configuration: 'nodeps')
+    testRuntime (xpackProject('plugin:sql:sql-proto')) {
+        transitive = false
+    }
     testRuntime "org.jline:jline:3.6.0"
 }