Merge branch 'master' into ccr
* master:
  [Rollup] Better error message when trying to set non-rollup index (#32965)
  HLRC: Use Optional in validation logic (#33104)
  Remove unused User class from protocol (#33137)
  ingest: Introduce the dissect processor (#32884)
  [Docs] Add link to es-kotlin-wrapper-client (#32618)
  [Docs] Remove repeating words (#33087)
  Minor spelling and grammar fix (#32931)
  Remove support for deprecated params._agg/_aggs for scripted metric aggregations (#32979)
  Watcher: Simplify finding next date in cron schedule (#33015)
  Run Third party audit with forbidden APIs CLI (part3/3) (#33052)
  Fix plugin build test on Windows (#33078)
  HLRC+MINOR: Remove Unused Private Method (#33165)
  Remove old unused test script files (#32970)
  Build analysis-icu client JAR (#33184)
  Ensure to generate identical NoOp for the same failure (#33141)
  ShardSearchFailure#readFrom to set index and shardId (#33161)
commit c42dc77896
@@ -102,7 +102,6 @@ dependencies {
    compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
    compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
    compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
    compile 'de.thetaphi:forbiddenapis:2.5'
    compile 'org.apache.rat:apache-rat:0.11'
    compile "org.elasticsearch:jna:4.5.1"
    compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
@@ -802,8 +802,6 @@ class BuildPlugin implements Plugin<Project> {
        systemProperty 'tests.task', path
        systemProperty 'tests.security.manager', 'true'
        systemProperty 'jna.nosys', 'true'
        // TODO: remove this deprecation compatibility setting for 7.0
        systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false'
        systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion()
        if (project.ext.inFipsJvm) {
            systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS"
@@ -31,6 +31,11 @@ class PrecommitTasks {

    /** Adds a precommit task, which depends on non-test verification tasks. */
    public static Task create(Project project, boolean includeDependencyLicenses) {
        Configuration forbiddenApisConfiguration = project.configurations.create("forbiddenApisCliJar")
        project.dependencies {
            forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5')
        }

        List<Task> precommitTasks = [
            configureCheckstyle(project),
            configureForbiddenApisCli(project),
@@ -39,7 +44,7 @@ class PrecommitTasks {
            project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
            project.tasks.create('filepermissions', FilePermissionsTask.class),
            project.tasks.create('jarHell', JarHellTask.class),
            project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
            configureThirdPartyAudit(project)
        ]

        // tasks with just tests don't need dependency licenses, so this flag makes adding
@@ -75,32 +80,26 @@ class PrecommitTasks {
        return project.tasks.create(precommitOptions)
    }

    private static Task configureForbiddenApisCli(Project project) {
        Configuration forbiddenApisConfiguration = project.configurations.create("forbiddenApisCliJar")
        project.dependencies {
            forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5')
    private static Task configureThirdPartyAudit(Project project) {
        ThirdPartyAuditTask thirdPartyAuditTask = project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
        ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
        thirdPartyAuditTask.configure {
            dependsOn(buildResources)
            signatureFile = buildResources.copy("forbidden/third-party-audit.txt")
            javaHome = project.runtimeJavaHome
        }
        return thirdPartyAuditTask
    }
        Task forbiddenApisCli = project.tasks.create('forbiddenApis')

    private static Task configureForbiddenApisCli(Project project) {
        Task forbiddenApisCli = project.tasks.create('forbiddenApis')
        project.sourceSets.forEach { sourceSet ->
            forbiddenApisCli.dependsOn(
                project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) {
                    ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
                    dependsOn(buildResources)
                    execAction = { spec ->
                        spec.classpath = project.files(
                            project.configurations.forbiddenApisCliJar,
                            sourceSet.compileClasspath,
                            sourceSet.runtimeClasspath
                        )
                        spec.executable = "${project.runtimeJavaHome}/bin/java"
                    }
                    inputs.files(
                        forbiddenApisConfiguration,
                        sourceSet.compileClasspath,
                        sourceSet.runtimeClasspath
                    )

                    it.sourceSet = sourceSet
                    javaHome = project.runtimeJavaHome
                    targetCompatibility = project.compilerJavaVersion
                    bundledSignatures = [
                        "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"
@@ -1,297 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit;

import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import org.apache.tools.ant.BuildEvent;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.BuildListener;
import org.apache.tools.ant.BuildLogger;
import org.apache.tools.ant.DefaultLogger;
import org.apache.tools.ant.Project;
import org.elasticsearch.gradle.AntTask;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile

import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Basic static checking to keep tabs on third party JARs
 */
public class ThirdPartyAuditTask extends AntTask {

    // patterns for classes to exclude, because we understand their issues
    private List<String> excludes = [];

    /**
     * Input for the task. Set javadoc for {#link getJars} for more. Protected
     * so the afterEvaluate closure in the constructor can write it.
     */
    protected FileCollection jars;

    /**
     * Classpath against which to run the third patty audit. Protected so the
     * afterEvaluate closure in the constructor can write it.
     */
    protected FileCollection classpath;

    /**
     * We use a simple "marker" file that we touch when the task succeeds
     * as the task output. This is compared against the modified time of the
     * inputs (ie the jars/class files).
     */
    @OutputFile
    File successMarker = new File(project.buildDir, 'markers/thirdPartyAudit')

    ThirdPartyAuditTask() {
        // we depend on this because its the only reliable configuration
        // this probably makes the build slower: gradle you suck here when it comes to configurations, you pay the price.
        dependsOn(project.configurations.testCompile);
        description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors'";

        project.afterEvaluate {
            Configuration configuration = project.configurations.findByName('runtime')
            Configuration compileOnly = project.configurations.findByName('compileOnly')
            if (configuration == null) {
                // some projects apparently do not have 'runtime'? what a nice inconsistency,
                // basically only serves to waste time in build logic!
                configuration = project.configurations.findByName('testCompile')
            }
            assert configuration != null
            if (project.plugins.hasPlugin(ShadowPlugin)) {
                Configuration original = configuration
                configuration = project.configurations.create('thirdPartyAudit')
                configuration.extendsFrom(original, project.configurations.bundle)
            }
            if (compileOnly == null) {
                classpath = configuration
            } else {
                classpath = project.files(configuration, compileOnly)
            }

            // we only want third party dependencies.
            jars = configuration.fileCollection({ dependency ->
                dependency.group.startsWith("org.elasticsearch") == false
            });

            // we don't want provided dependencies, which we have already scanned. e.g. don't
            // scan ES core's dependencies for every single plugin
            if (compileOnly != null) {
                jars -= compileOnly
            }
            inputs.files(jars)
            onlyIf { jars.isEmpty() == false }
        }
    }

    /**
     * classes that should be excluded from the scan,
     * e.g. because we know what sheisty stuff those particular classes are up to.
     */
    public void setExcludes(String[] classes) {
        for (String s : classes) {
            if (s.indexOf('*') != -1) {
                throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!");
            }
        }
        excludes = classes.sort();
    }

    /**
     * Returns current list of exclusions.
     */
    @Input
    public List<String> getExcludes() {
        return excludes;
    }

    // yes, we parse Uwe Schindler's errors to find missing classes, and to keep a continuous audit. Just don't let him know!
    static final Pattern MISSING_CLASS_PATTERN =
        Pattern.compile(/WARNING: The referenced class '(.*)' cannot be loaded\. Please fix the classpath\!/);

    static final Pattern VIOLATION_PATTERN =
        Pattern.compile(/\s\sin ([a-zA-Z0-9\$\.]+) \(.*\)/);

    // we log everything and capture errors and handle them with our whitelist
    // this is important, as we detect stale whitelist entries, workaround forbidden apis bugs,
    // and it also allows whitelisting missing classes!
    static class EvilLogger extends DefaultLogger {
        final Set<String> missingClasses = new TreeSet<>();
        final Map<String,List<String>> violations = new TreeMap<>();
        String previousLine = null;

        @Override
        public void messageLogged(BuildEvent event) {
            if (event.getTask().getClass() == de.thetaphi.forbiddenapis.ant.AntTask.class) {
                if (event.getPriority() == Project.MSG_WARN) {
                    Matcher m = MISSING_CLASS_PATTERN.matcher(event.getMessage());
                    if (m.matches()) {
                        missingClasses.add(m.group(1).replace('.', '/') + ".class");
                    }

                    // Reset the priority of the event to DEBUG, so it doesn't
                    // pollute the build output
                    event.setMessage(event.getMessage(), Project.MSG_DEBUG);
                } else if (event.getPriority() == Project.MSG_ERR) {
                    Matcher m = VIOLATION_PATTERN.matcher(event.getMessage());
                    if (m.matches()) {
                        String violation = previousLine + '\n' + event.getMessage();
                        String clazz = m.group(1).replace('.', '/') + ".class";
                        List<String> current = violations.get(clazz);
                        if (current == null) {
                            current = new ArrayList<>();
                            violations.put(clazz, current);
                        }
                        current.add(violation);
                    }
                    previousLine = event.getMessage();
                }
            }
            super.messageLogged(event);
        }
    }

    @Override
    protected BuildLogger makeLogger(PrintStream stream, int outputLevel) {
        DefaultLogger log = new EvilLogger();
        log.errorPrintStream = stream;
        log.outputPrintStream = stream;
        log.messageOutputLevel = outputLevel;
        return log;
    }

    @Override
    protected void runAnt(AntBuilder ant) {
        ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask);

        // print which jars we are going to scan, always
        // this is not the time to try to be succinct! Forbidden will print plenty on its own!
        Set<String> names = new TreeSet<>();
        for (File jar : jars) {
            names.add(jar.getName());
        }

        // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first,
        // and then remove our temp dir afterwards. don't complain: try it yourself.
        // we don't use gradle temp dir handling, just google it, or try it yourself.

        File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit');

        // clean up any previous mess (if we failed), then unzip everything to one directory
        ant.delete(dir: tmpDir.getAbsolutePath());
        tmpDir.mkdirs();
        for (File jar : jars) {
            ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath());
        }

        // convert exclusion class names to binary file names
        List<String> excludedFiles = excludes.collect {it.replace('.', '/') + ".class"}
        Set<String> excludedSet = new TreeSet<>(excludedFiles);

        // jarHellReprise
        Set<String> sheistySet = getSheistyClasses(tmpDir.toPath());

        try {
            ant.thirdPartyAudit(failOnUnsupportedJava: false,
                                failOnMissingClasses: false,
                                classpath: classpath.asPath) {
                fileset(dir: tmpDir)
                signatures {
                    string(value: getClass().getResourceAsStream('/forbidden/third-party-audit.txt').getText('UTF-8'))
                }
            }
        } catch (BuildException ignore) {}

        EvilLogger evilLogger = null;
        for (BuildListener listener : ant.project.getBuildListeners()) {
            if (listener instanceof EvilLogger) {
                evilLogger = (EvilLogger) listener;
                break;
            }
        }
        assert evilLogger != null;

        // keep our whitelist up to date
        Set<String> bogusExclusions = new TreeSet<>(excludedSet);
        bogusExclusions.removeAll(sheistySet);
        bogusExclusions.removeAll(evilLogger.missingClasses);
        bogusExclusions.removeAll(evilLogger.violations.keySet());
        if (!bogusExclusions.isEmpty()) {
            throw new IllegalStateException("Invalid exclusions, nothing is wrong with these classes: " + bogusExclusions);
        }

        // don't duplicate classes with the JDK
        sheistySet.removeAll(excludedSet);
        if (!sheistySet.isEmpty()) {
            throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet);
        }

        // don't allow a broken classpath
        evilLogger.missingClasses.removeAll(excludedSet);
        if (!evilLogger.missingClasses.isEmpty()) {
            throw new IllegalStateException("CLASSES ARE MISSING! " + evilLogger.missingClasses);
        }

        // don't use internal classes
        evilLogger.violations.keySet().removeAll(excludedSet);
        if (!evilLogger.violations.isEmpty()) {
            throw new IllegalStateException("VIOLATIONS WERE FOUND! " + evilLogger.violations);
        }

        // clean up our mess (if we succeed)
        ant.delete(dir: tmpDir.getAbsolutePath());

        successMarker.setText("", 'UTF-8')
    }

    /**
     * check for sheisty classes: if they also exist in the extensions classloader, its jar hell with the jdk!
     */
    private Set<String> getSheistyClasses(Path root) {
        // system.parent = extensions loader.
        // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!).
        // but groovy/gradle needs to work at all first!
        ClassLoader ext = ClassLoader.getSystemClassLoader().getParent();
        assert ext != null;

        Set<String> sheistySet = new TreeSet<>();
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                String entry = root.relativize(file).toString().replace('\\', '/');
                if (entry.endsWith(".class")) {
                    if (ext.getResource(entry) != null) {
                        sheistySet.add(entry);
                    }
                }
                return FileVisitResult.CONTINUE;
            }
        });
        return sheistySet;
    }
}
@@ -53,6 +53,8 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {

        // only setup tests to build
        project.sourceSets.create('test')
        // create a compileOnly configuration as others might expect it
        project.configurations.create("compileOnly")
        project.dependencies.add('testCompile', "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}")

        project.eclipse.classpath.sourceSets = [project.sourceSets.test]
@@ -0,0 +1,81 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class JdkJarHellCheck {

    private Set<String> detected = new HashSet<>();

    private void scanForJDKJarHell(Path root) throws IOException {
        // system.parent = extensions loader.
        // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!)
        ClassLoader ext = ClassLoader.getSystemClassLoader().getParent();
        assert ext != null;

        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                String entry = root.relativize(file).toString().replace('\\', '/');
                if (entry.endsWith(".class")) {
                    if (ext.getResource(entry) != null) {
                        detected.add(
                            entry
                                .replace("/", ".")
                                .replace(".class","")
                        );
                    }
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }

    public Set<String> getDetected() {
        return Collections.unmodifiableSet(detected);
    }

    public static void main(String[] argv) throws IOException {
        JdkJarHellCheck checker = new JdkJarHellCheck();
        for (String location : argv) {
            Path path = Paths.get(location);
            if (Files.exists(path) == false) {
                throw new IllegalArgumentException("Path does not exist: " + path);
            }
            checker.scanForJDKJarHell(path);
        }
        if (checker.getDetected().isEmpty()) {
            System.exit(0);
        } else {
            checker.getDetected().forEach(System.out::println);
            System.exit(1);
        }
    }

}
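The class above is run in a forked JVM by the new ThirdPartyAuditTask later in this commit. As a rough, hypothetical illustration of its command-line contract (not part of the commit; the directory path and driver class are invented), exit code 0 means no extracted class shadows a JDK class, and exit code 1 means the offending class names were printed to standard out:

    // Hypothetical driver, for illustration only.
    public class JdkJarHellCheckDriver {
        public static void main(String[] args) throws java.io.IOException {
            // Directory of extracted jar contents; ThirdPartyAuditTask passes its own
            // jar expand directory when it invokes this class via javaexec.
            org.elasticsearch.gradle.JdkJarHellCheck.main(
                new String[] { "build/precommit/thirdPartyAudit/thirdPartyAudit" });
        }
    }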
@@ -18,10 +18,9 @@
 */
package org.elasticsearch.gradle.precommit;

import de.thetaphi.forbiddenapis.cli.CliMain;
import org.gradle.api.Action;
import org.gradle.api.DefaultTask;
import org.gradle.api.JavaVersion;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
@@ -29,6 +28,7 @@ import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.JavaExecSpec;
@@ -50,7 +50,8 @@ public class ForbiddenApisCliTask extends DefaultTask {
    private Set<String> suppressAnnotations = new LinkedHashSet<>();
    private JavaVersion targetCompatibility;
    private FileCollection classesDirs;
    private Action<JavaExecSpec> execAction;
    private SourceSet sourceSet;
    private String javaHome;

    @Input
    public JavaVersion getTargetCompatibility() {
@@ -69,14 +70,6 @@ public class ForbiddenApisCliTask extends DefaultTask {
        }
    }

    public Action<JavaExecSpec> getExecAction() {
        return execAction;
    }

    public void setExecAction(Action<JavaExecSpec> execAction) {
        this.execAction = execAction;
    }

    @OutputFile
    public File getMarkerFile() {
        return new File(
@@ -131,11 +124,41 @@ public class ForbiddenApisCliTask extends DefaultTask {
        this.suppressAnnotations = suppressAnnotations;
    }

    @InputFiles
    public FileCollection getClassPathFromSourceSet() {
        return getProject().files(
            sourceSet.getCompileClasspath(),
            sourceSet.getRuntimeClasspath()
        );
    }

    public void setSourceSet(SourceSet sourceSet) {
        this.sourceSet = sourceSet;
    }

    @InputFiles
    public Configuration getForbiddenAPIsConfiguration() {
        return getProject().getConfigurations().getByName("forbiddenApisCliJar");
    }

    @Input
    public String getJavaHome() {
        return javaHome;
    }

    public void setJavaHome(String javaHome) {
        this.javaHome = javaHome;
    }

    @TaskAction
    public void runForbiddenApisAndWriteMarker() throws IOException {
        getProject().javaexec((JavaExecSpec spec) -> {
            execAction.execute(spec);
            spec.setMain(CliMain.class.getName());
            spec.classpath(
                getForbiddenAPIsConfiguration(),
                getClassPathFromSourceSet()
            );
            spec.setExecutable(getJavaHome() + "/bin/java");
            spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
            // build the command line
            getSignaturesFiles().forEach(file -> spec.args("-f", file.getAbsolutePath()));
            getSuppressAnnotations().forEach(annotation -> spec.args("--suppressannotation", annotation));
@@ -0,0 +1,288 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit;

import org.apache.commons.io.output.NullOutputStream;
import org.elasticsearch.gradle.JdkJarHellCheck;
import org.elasticsearch.test.NamingConventionsCheck;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.StopExecutionException;
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.ExecResult;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class ThirdPartyAuditTask extends DefaultTask {

    private static final Pattern MISSING_CLASS_PATTERN = Pattern.compile(
        "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!"
    );

    private static final Pattern VIOLATION_PATTERN = Pattern.compile(
        "\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)"
    );

    /**
     * patterns for classes to exclude, because we understand their issues
     */
    private Set<String> excludes = new TreeSet<>();

    private File signatureFile;

    private String javaHome;

    @InputFiles
    public Configuration getForbiddenAPIsConfiguration() {
        return getProject().getConfigurations().getByName("forbiddenApisCliJar");
    }

    @InputFile
    public File getSignatureFile() {
        return signatureFile;
    }

    public void setSignatureFile(File signatureFile) {
        this.signatureFile = signatureFile;
    }

    @InputFiles
    public Configuration getRuntimeConfiguration() {
        Configuration runtime = getProject().getConfigurations().findByName("runtime");
        if (runtime == null) {
            return getProject().getConfigurations().getByName("testCompile");
        }
        return runtime;
    }

    @Input
    public String getJavaHome() {
        return javaHome;
    }

    public void setJavaHome(String javaHome) {
        this.javaHome = javaHome;
    }

    @InputFiles
    public Configuration getCompileOnlyConfiguration() {
        return getProject().getConfigurations().getByName("compileOnly");
    }

    @OutputDirectory
    public File getJarExpandDir() {
        return new File(
            new File(getProject().getBuildDir(), "precommit/thirdPartyAudit"),
            getName()
        );
    }

    public void setExcludes(String... classes) {
        excludes.clear();
        for (String each : classes) {
            if (each.indexOf('*') != -1) {
                throw new IllegalArgumentException("illegal third party audit exclusion: '" + each + "', wildcards are not permitted!");
            }
            excludes.add(each);
        }
    }

    @Input
    public Set<String> getExcludes() {
        return Collections.unmodifiableSet(excludes);
    }

    @TaskAction
    public void runThirdPartyAudit() throws IOException {
        FileCollection jars = getJarsToScan();

        extractJars(jars);

        final String forbiddenApisOutput = runForbiddenAPIsCli();

        final Set<String> missingClasses = new TreeSet<>();
        Matcher missingMatcher = MISSING_CLASS_PATTERN.matcher(forbiddenApisOutput);
        while (missingMatcher.find()) {
            missingClasses.add(missingMatcher.group(1));
        }

        final Set<String> violationsClasses = new TreeSet<>();
        Matcher violationMatcher = VIOLATION_PATTERN.matcher(forbiddenApisOutput);
        while (violationMatcher.find()) {
            violationsClasses.add(violationMatcher.group(1));
        }

        Set<String> jdkJarHellClasses = runJdkJarHellCheck();

        assertNoPointlessExclusions(missingClasses, violationsClasses, jdkJarHellClasses);

        assertNoMissingAndViolations(missingClasses, violationsClasses);

        assertNoJarHell(jdkJarHellClasses);
    }

    private void extractJars(FileCollection jars) {
        File jarExpandDir = getJarExpandDir();
        jars.forEach(jar ->
            getProject().copy(spec -> {
                spec.from(getProject().zipTree(jar));
                spec.into(jarExpandDir);
            })
        );
    }

    private void assertNoJarHell(Set<String> jdkJarHellClasses) {
        jdkJarHellClasses.removeAll(excludes);
        if (jdkJarHellClasses.isEmpty() == false) {
            throw new IllegalStateException("Jar Hell with the JDK:" + formatClassList(jdkJarHellClasses));
        }
    }

    private void assertNoMissingAndViolations(Set<String> missingClasses, Set<String> violationsClasses) {
        missingClasses.removeAll(excludes);
        violationsClasses.removeAll(excludes);
        String missingText = formatClassList(missingClasses);
        String violationsText = formatClassList(violationsClasses);
        if (missingText.isEmpty() && violationsText.isEmpty()) {
            getLogger().info("Third party audit passed successfully");
        } else {
            throw new IllegalStateException(
                "Audit of third party dependencies failed:\n" +
                    (missingText.isEmpty() ? "" : "Missing classes:\n" + missingText) +
                    (violationsText.isEmpty() ? "" : "Classes with violations:\n" + violationsText)
            );
        }
    }

    private void assertNoPointlessExclusions(Set<String> missingClasses, Set<String> violationsClasses, Set<String> jdkJarHellClasses) {
        // keep our whitelist up to date
        Set<String> bogusExclusions = new TreeSet<>(excludes);
        bogusExclusions.removeAll(missingClasses);
        bogusExclusions.removeAll(jdkJarHellClasses);
        bogusExclusions.removeAll(violationsClasses);
        if (bogusExclusions.isEmpty() == false) {
            throw new IllegalStateException(
                "Invalid exclusions, nothing is wrong with these classes: " + formatClassList(bogusExclusions)
            );
        }
    }

    private String runForbiddenAPIsCli() throws IOException {
        ByteArrayOutputStream errorOut = new ByteArrayOutputStream();
        getProject().javaexec(spec -> {
            spec.setExecutable(javaHome + "/bin/java");
            spec.classpath(
                getForbiddenAPIsConfiguration(),
                getRuntimeConfiguration(),
                getCompileOnlyConfiguration()
            );
            spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
            spec.args(
                "-f", getSignatureFile().getAbsolutePath(),
                "-d", getJarExpandDir(),
                "--allowmissingclasses"
            );
            spec.setErrorOutput(errorOut);
            if (getLogger().isInfoEnabled() == false) {
                spec.setStandardOutput(new NullOutputStream());
            }
            spec.setIgnoreExitValue(true);
        });
        final String forbiddenApisOutput;
        try (ByteArrayOutputStream outputStream = errorOut) {
            forbiddenApisOutput = outputStream.toString(StandardCharsets.UTF_8.name());
        }
        if (getLogger().isInfoEnabled()) {
            getLogger().info(forbiddenApisOutput);
        }
        return forbiddenApisOutput;
    }

    private FileCollection getJarsToScan() {
        FileCollection jars = getRuntimeConfiguration()
            .fileCollection(dep -> dep.getGroup().startsWith("org.elasticsearch") == false);
        Configuration compileOnlyConfiguration = getCompileOnlyConfiguration();
        // don't scan provided dependencies that we already scanned, e.x. don't scan cores dependencies for every plugin
        if (compileOnlyConfiguration != null) {
            jars.minus(compileOnlyConfiguration);
        }
        if (jars.isEmpty()) {
            throw new StopExecutionException("No jars to scan");
        }
        return jars;
    }

    private String formatClassList(Set<String> classList) {
        return classList.stream()
            .map(name -> "  * " + name)
            .collect(Collectors.joining("\n"));
    }

    private Set<String> runJdkJarHellCheck() throws IOException {
        ByteArrayOutputStream standardOut = new ByteArrayOutputStream();
        ExecResult execResult = getProject().javaexec(spec -> {
            URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
            if (location.getProtocol().equals("file") == false) {
                throw new GradleException("Unexpected location for NamingConventionCheck class: " + location);
            }
            try {
                spec.classpath(
                    location.toURI().getPath(),
                    getRuntimeConfiguration(),
                    getCompileOnlyConfiguration()
                );
            } catch (URISyntaxException e) {
                throw new AssertionError(e);
            }
            spec.setMain(JdkJarHellCheck.class.getName());
            spec.args(getJarExpandDir());
            spec.setIgnoreExitValue(true);
            spec.setExecutable(javaHome + "/bin/java");
            spec.setStandardOutput(standardOut);
        });
        if (execResult.getExitValue() == 0) {
            return Collections.emptySet();
        }
        final String jdkJarHellCheckList;
        try (ByteArrayOutputStream outputStream = standardOut) {
            jdkJarHellCheckList = outputStream.toString(StandardCharsets.UTF_8.name());
        }
        return new TreeSet<>(Arrays.asList(jdkJarHellCheckList.split("\\r?\\n")));
    }

}
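As an illustration only (not part of the commit; the sample output lines and class names are invented), this is how the two regular expressions declared at the top of the new task pull class names out of forbidden-apis CLI output:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class AuditOutputParsingExample {
        private static final Pattern MISSING_CLASS_PATTERN = Pattern.compile(
            "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!"
        );
        private static final Pattern VIOLATION_PATTERN = Pattern.compile(
            "\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)"
        );

        public static void main(String[] args) {
            // Invented sample of what the CLI writes to its error stream.
            String output =
                "WARNING: The referenced class 'com.example.Missing' cannot be loaded. Please fix the classpath!\n"
                + "  in com.example.Offender (Offender.java:42)\n";
            Matcher missing = MISSING_CLASS_PATTERN.matcher(output);
            while (missing.find()) {
                System.out.println("missing: " + missing.group(1));      // prints com.example.Missing
            }
            Matcher violation = VIOLATION_PATTERN.matcher(output);
            while (violation.find()) {
                System.out.println("violation: " + violation.group(1));  // prints com.example.Offender
            }
        }
    }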
@@ -158,7 +158,12 @@ public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
        Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
        File file = new File(property);
        assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
        if (File.separator.equals("\\")) {
            // Use / on Windows too, the build script is not happy with \
            return file.getAbsolutePath().replace(File.separator, "/");
        } else {
            return file.getAbsolutePath();
        }
    }

}
@@ -19,7 +19,6 @@

package org.elasticsearch.client;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
@@ -177,6 +176,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.function.Function;
@@ -1011,9 +1011,9 @@ public class RestHighLevelClient implements Closeable {
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            Set<Integer> ignores) throws IOException {
        ValidationException validationException = request.validate();
        if (validationException != null && validationException.validationErrors().isEmpty() == false) {
            throw validationException;
        Optional<ValidationException> validationException = request.validate();
        if (validationException != null && validationException.isPresent()) {
            throw validationException.get();
        }
        return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
    }
@@ -1106,9 +1106,9 @@ public class RestHighLevelClient implements Closeable {
            RequestOptions options,
            CheckedFunction<Response, Resp, IOException> responseConverter,
            ActionListener<Resp> listener, Set<Integer> ignores) {
        ValidationException validationException = request.validate();
        if (validationException != null && validationException.validationErrors().isEmpty() == false) {
            listener.onFailure(validationException);
        Optional<ValidationException> validationException = request.validate();
        if (validationException != null && validationException.isPresent()) {
            listener.onFailure(validationException.get());
            return;
        }
        internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
@@ -1218,15 +1218,6 @@ public class RestHighLevelClient implements Closeable {
        }
    }

    private static RequestOptions optionsForHeaders(Header[] headers) {
        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        for (Header header : headers) {
            Objects.requireNonNull(header, "header cannot be null");
            options.addHeader(header.getName(), header.getValue());
        }
        return options.build();
    }

    static boolean convertExistsResponse(Response response) {
        return response.getStatusLine().getStatusCode() == 200;
    }
@@ -18,24 +18,20 @@
 */
package org.elasticsearch.client;

import java.util.Optional;

/**
 * Defines a validation layer for Requests.
 */
public interface Validatable {
    ValidationException EMPTY_VALIDATION = new ValidationException() {
        @Override
        public void addValidationError(String error) {
            throw new UnsupportedOperationException("Validation messages should not be added to the empty validation");
        }
    };

    /**
     * Perform validation. This method does not have to be overridden in the event that no validation needs to be done.
     * Perform validation. This method does not have to be overridden in the event that no validation needs to be done,
     * or the validation was done during object construction time. A {@link ValidationException} that is not null is
     * assumed to contain validation errors and will be thrown.
     *
     * @return potentially null, in the event of older actions, an empty {@link ValidationException} in newer actions, or finally a
     * {@link ValidationException} that contains a list of all failed validation.
     * @return An {@link Optional} {@link ValidationException} that contains a list of validation errors.
     */
    default ValidationException validate() {
        return EMPTY_VALIDATION;
    default Optional<ValidationException> validate() {
        return Optional.empty();
    }
}
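To make the new contract concrete, here is a minimal, hypothetical request class (not part of the commit; the class name and its fields are invented) implementing the Optional-based validate() that RestHighLevelClient now checks before sending a request:

    // Hypothetical example of a request opting in to the new validation contract.
    public class RenameIndexRequest implements Validatable {
        private final String source;
        private final String target;

        public RenameIndexRequest(String source, String target) {
            this.source = source;
            this.target = target;
        }

        @Override
        public Optional<ValidationException> validate() {
            ValidationException exception = new ValidationException();
            if (source == null || source.isEmpty()) {
                exception.addValidationError("source index is missing");
            }
            if (target == null || target.isEmpty()) {
                exception.addValidationError("target index is missing");
            }
            // Only surface the exception when something is actually wrong;
            // an empty Optional means the request is valid.
            return exception.validationErrors().isEmpty() ? Optional.empty() : Optional.of(exception);
        }
    }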
@@ -41,9 +41,6 @@ integTestCluster {
    // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
    systemProperty 'es.scripting.use_java_time', 'false'
    systemProperty 'es.scripting.update.ctx_in_params', 'false'

    // TODO: remove this deprecation compatibility setting for 7.0
    systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false'
}

// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed
@@ -133,6 +133,9 @@ The following project appears to be abandoned:
* https://github.com/mbuhot/eskotlin[ES Kotlin]:
  Elasticsearch Query DSL for kotlin based on the {client}/java-api/current/index.html[official Elasticsearch Java client].

* https://github.com/jillesvangurp/es-kotlin-wrapper-client[ES Kotlin Wrapper Client]:
  Kotlin extension functions and abstractions for the {client}/java-api/current/index.html[official Elasticsearch Highlevel Client]. Aims to reduce the amount of boilerplate needed to do searches, bulk indexing and other common things users do with the client.

[[lua]]
== Lua
@@ -10,7 +10,7 @@ The license can be added or updated using the `putLicense()` method:
--------------------------------------------------
include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-execute]
--------------------------------------------------
<1> Set the categories of information to retrieve. The the default is to
<1> Set the categories of information to retrieve. The default is to
return no information which is useful for checking if {xpack} is installed
but not much else.
<2> A JSON document containing the license information.
@@ -270,7 +270,7 @@ include-tagged::{doc-tests}/MigrationDocumentationIT.java[migration-cluster-heal
helper requires the content type of the response to be passed as an argument and returns
a `Map` of objects. Values in the map can be of any type, including inner `Map` that are
used to represent the JSON object hierarchy.
<5> Retrieve the value of the `status` field in the response map, casts it as a a `String`
<5> Retrieve the value of the `status` field in the response map, casts it as a `String`
object and use the `ClusterHealthStatus.fromString()` method to convert it as a `ClusterHealthStatus`
object. This method throws an exception if the value does not corresponds to a valid cluster
health status.
@@ -13,7 +13,7 @@ include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execut
--------------------------------------------------
<1> Enable verbose mode. The default is `false` but `true` will return
more information.
<2> Set the categories of information to retrieve. The the default is to
<2> Set the categories of information to retrieve. The default is to
return no information which is useful for checking if {xpack} is installed
but not much else.
@@ -5,7 +5,7 @@

Painless doesn't have a
https://en.wikipedia.org/wiki/Read%E2%80%93eval%E2%80%93print_loop[REPL]
and while it'd be nice for it to have one one day, it wouldn't tell you the
and while it'd be nice for it to have one day, it wouldn't tell you the
whole story around debugging painless scripts embedded in Elasticsearch because
the data that the scripts have access to or "context" is so important. For now
the best way to debug embedded scripts is by throwing exceptions at choice
@@ -254,7 +254,7 @@ and `]` tokens.
*Errors*

* If a value other than an `int` type value or a value that is castable to an
  `int` type value is specified for for a dimension's size.
  `int` type value is specified for a dimension's size.

*Grammar*
@@ -433,8 +433,8 @@ Scripts can be inline (as in above example), indexed or stored on disk. For deta
Available parameters in the script are

[horizontal]
`_subset_freq`:: Number of documents the term appears in in the subset.
`_superset_freq`:: Number of documents the term appears in in the superset.
`_subset_freq`:: Number of documents the term appears in the subset.
`_superset_freq`:: Number of documents the term appears in the superset.
`_subset_size`:: Number of documents in the subset.
`_superset_size`:: Number of documents in the superset.
@@ -307,7 +307,7 @@ POST /_search

===== stdDev Function

This function accepts a collection of doubles and and average, then returns the standard deviation of the values in that window.
This function accepts a collection of doubles and average, then returns the standard deviation of the values in that window.
`null` and `NaN` values are ignored; the sum is only calculated over the real values. If the window is empty, or all values are
`null`/`NaN`, `0.0` is returned as the result.
@@ -10,7 +10,7 @@ GET /_remote/info
----------------------------------
// CONSOLE

This command returns returns connection and endpoint information keyed by
This command returns connection and endpoint information keyed by
the configured remote cluster alias.

[float]
@@ -31,7 +31,7 @@ POST /_cluster/reroute
// CONSOLE
// TEST[skip:doc tests run with only a single node]

It is important to note that that after processing any reroute commands
It is important to note that after processing any reroute commands
Elasticsearch will perform rebalancing as normal (respecting the values of
settings such as `cluster.routing.rebalance.enable`) in order to remain in a
balanced state. For example, if the requested allocation includes moving a
@@ -127,7 +127,7 @@ might look like:
The new `description` field contains human readable text that identifies the
particular request that the task is performing such as identifying the search
request being performed by a search task like the example above. Other kinds of
task have have different descriptions, like <<docs-reindex,`_reindex`>> which
task have different descriptions, like <<docs-reindex,`_reindex`>> which
has the search and the destination, or <<docs-bulk,`_bulk`>> which just has the
number of requests and the destination indices. Many requests will only have an
empty description because more detailed information about the request is not
@@ -1049,6 +1049,199 @@ understands this to mean `2016-04-01` as is explained in the <<date-math-index-n
| `index_name_format` | no | yyyy-MM-dd | The format to be used when printing the parsed date into the index name. A valid Joda pattern is expected here.
|======

[[dissect-processor]]
=== Dissect Processor

Similar to the <<grok-processor,Grok Processor>>, dissect also extracts structured fields out of a single text field
within a document. However unlike the <<grok-processor,Grok Processor>>, dissect does not use
https://en.wikipedia.org/wiki/Regular_expression[Regular Expressions]. This allows dissect's syntax to be simple and,
in some cases, faster than the <<grok-processor,Grok Processor>>.

Dissect matches a single text field against a defined pattern.

For example the following pattern:
[source,txt]
--------------------------------------------------
%{clientip} %{ident} %{auth} [%{@timestamp}] \"%{verb} %{request} HTTP/%{httpversion}\" %{status} %{size}
--------------------------------------------------
will match a log line of this format:
[source,txt]
--------------------------------------------------
1.2.3.4 - - [30/Apr/1998:22:00:52 +0000] \"GET /english/venues/cities/images/montpellier/18.gif HTTP/1.0\" 200 3171
--------------------------------------------------
and result in a document with the following fields:
[source,js]
--------------------------------------------------
"doc": {
  "_index": "_index",
  "_type": "_type",
  "_id": "_id",
  "_source": {
    "request": "/english/venues/cities/images/montpellier/18.gif",
    "auth": "-",
    "ident": "-",
    "verb": "GET",
    "@timestamp": "30/Apr/1998:22:00:52 +0000",
    "size": "3171",
    "clientip": "1.2.3.4",
    "httpversion": "1.0",
    "status": "200"
  }
}
--------------------------------------------------
// NOTCONSOLE

A dissect pattern is defined by the parts of the string that will be discarded. In the example above the first part
to be discarded is a single space. Dissect finds this space, then assigns everything up
until that space to `clientip`.
Later dissect matches the `[` and then `]` and then assigns `@timestamp` to everything in-between `[` and `]`.
Paying special attention to the parts of the string to discard will help build successful dissect patterns.

Successful matches require all keys in a pattern to have a value. If any of the `%{keyname}` defined in the pattern do
not have a value, then an exception is thrown and may be handled by the <<handling-failure-in-pipelines,on_failure>> directive.
An empty key `%{}` or a <<dissect-modifier-named-skip-key, named skip key>> can be used to match values, but exclude the value from
the final document. All matched values are represented as string data types. The <<convert-processor, convert processor>>
may be used to convert to the expected data type.

Dissect also supports <<dissect-key-modifiers,key modifiers>> that can change dissect's default
behavior. For example you can instruct dissect to ignore certain fields, append fields, skip over padding, etc.
See <<dissect-key-modifiers, below>> for more information.

[[dissect-options]]
.Dissect Options
[options="header"]
|======
| Name              | Required | Default           | Description
| `field`           | yes      | -                 | The field to dissect
| `pattern`         | yes      | -                 | The pattern to apply to the field
| `append_separator`| no       | "" (empty string) | The character(s) that separate the appended fields.
| `ignore_missing`  | no       | false             | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document
|======

[source,js]
--------------------------------------------------
{
  "dissect": {
    "field": "message",
    "pattern" : "%{clientip} %{ident} %{auth} [%{@timestamp}] \"%{verb} %{request} HTTP/%{httpversion}\" %{status} %{size}"
  }
}
--------------------------------------------------
// NOTCONSOLE
[[dissect-key-modifiers]]
==== Dissect key modifiers
Key modifiers can change the default behavior for dissection. Key modifiers may be found on the left or right
of the `%{keyname}` always inside the `%{` and `}`. For example `%{+keyname ->}` has the append and right padding
modifiers.

.Dissect Key Modifiers
[options="header"]
|======
| Modifier      | Name               | Position       | Example                       | Description                                                   | Details
| `->`          | Skip right padding | (far) right    | `%{keyname1->}`               | Skips any repeated characters to the right                    | <<dissect-modifier-skip-right-padding,link>>
| `+`           | Append             | left           | `%{+keyname} %{+keyname}`     | Appends two or more fields together                           | <<dissect-modifier-append-key,link>>
| `+` with `/n` | Append with order  | left and right | `%{+keyname/2} %{+keyname/1}` | Appends two or more fields together in the order specified    | <<dissect-modifier-append-key-with-order,link>>
| `?`           | Named skip key     | left           | `%{?ignoreme}`                | Skips the matched value in the output. Same behavior as `%{}` | <<dissect-modifier-named-skip-key,link>>
| `*` and `&`   | Reference keys     | left           | `%{*r1} %{&r1}`               | Sets the output key as value of `*` and output value of `&`   | <<dissect-modifier-reference-keys,link>>
|======

[[dissect-modifier-skip-right-padding]]
===== Right padding modifier (`->`)

The algorithm that performs the dissection is very strict in that it requires all characters in the pattern to match
the source string. For example, the pattern `%{fookey} %{barkey}` (1 space), will match the string "foo{nbsp}bar"
(1 space), but will not match the string "foo{nbsp}{nbsp}bar" (2 spaces) since the pattern has only 1 space and the
source string has 2 spaces.

The right padding modifier helps with this case. With the right padding modifier added, the pattern `%{fookey->} %{barkey}`
will now match "foo{nbsp}bar" (1 space) and "foo{nbsp}{nbsp}bar" (2 spaces)
and even "foo{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}bar" (10 spaces).

Use the right padding modifier to allow for repetition of the characters after a `%{keyname->}`.

The right padding modifier may be placed on any key with any other modifiers. It should always be the furthest right
modifier. For example: `%{+keyname/1->}` and `%{->}`

Right padding modifier example
|======
| *Pattern* | `%{ts->} %{level}`
| *Input* | 1998-08-10T17:15:42,466{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}WARN
| *Result* a|
* ts = 1998-08-10T17:15:42,466
* level = WARN
|======

The right padding modifier may be used with an empty key to help skip unwanted data. For example, the same input string, but wrapped with brackets, requires the use of an empty right padded key to achieve the same result.

Right padding modifier with empty key example
|======
| *Pattern* | `[%{ts}]%{->}[%{level}]`
| *Input* | [1998-08-10T17:15:42,466]{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}{nbsp}[WARN]
| *Result* a|
* ts = 1998-08-10T17:15:42,466
* level = WARN
|======

===== Append modifier (`+`)
[[dissect-modifier-append-key]]
Dissect supports appending two or more results together for the output.
Values are appended left to right. An append separator can be specified.
In this example the append_separator is defined as a space.

Append modifier example
|======
| *Pattern* | `%{+name} %{+name} %{+name} %{+name}`
| *Input* | john jacob jingleheimer schmidt
| *Result* a|
* name = john jacob jingleheimer schmidt
|======

===== Append with order modifier (`+` and `/n`)
[[dissect-modifier-append-key-with-order]]
Dissect supports appending two or more results together for the output.
Values are appended based on the order defined (`/n`). An append separator can be specified.
In this example the append_separator is defined as a comma.

Append with order modifier example
|======
| *Pattern* | `%{+name/2} %{+name/4} %{+name/3} %{+name/1}`
| *Input* | john jacob jingleheimer schmidt
| *Result* a|
* name = schmidt,john,jingleheimer,jacob
|======

===== Named skip key (`?`)
[[dissect-modifier-named-skip-key]]
Dissect supports ignoring matches in the final result. This can be done with an empty key `%{}`, but for readability
it may be desired to give that empty key a name.

Named skip key modifier example
|======
| *Pattern* | `%{clientip} %{?ident} %{?auth} [%{@timestamp}]`
| *Input* | 1.2.3.4 - - [30/Apr/1998:22:00:52 +0000]
| *Result* a|
* clientip = 1.2.3.4
* @timestamp = 30/Apr/1998:22:00:52 +0000
|======

===== Reference keys (`*` and `&`)
[[dissect-modifier-reference-keys]]
Dissect supports using parsed values as the key/value pairings for the structured content. Imagine a system that
partially logs in key/value pairs. Reference keys allow you to maintain that key/value relationship.

Reference key modifier example
|======
| *Pattern* | `[%{ts}] [%{level}] %{*p1}:%{&p1} %{*p2}:%{&p2}`
| *Input* | [2018-08-10T17:15:42,466] [ERR] ip:1.2.3.4 error:REFUSED
| *Result* a|
* ts = 2018-08-10T17:15:42,466
* level = ERR
* ip = 1.2.3.4
* error = REFUSED
|======

[[dot-expand-processor]]
=== Dot Expander Processor
@@ -21,5 +21,3 @@ has been removed. `missing_bucket` should be used instead.
The object used to share aggregation state between the scripts in a Scripted Metric
Aggregation is now a variable called `state` available in the script context, rather than
being provided via the `params` object as `params._agg`.

The old `params._agg` variable is still available as well.
@@ -51,7 +51,7 @@ NOTE: These settings only take effect on a full cluster restart.
 
 === Dangling indices
 
-When a node joins the cluster, any shards stored in its local data directory
+When a node joins the cluster, any shards stored in its local data
 directory which do not already exist in the cluster will be imported into the
 cluster. This functionality is intended as a best effort to help users who
 lose all master nodes. If a new master node is started which is unaware of
@@ -96,7 +96,7 @@ see <<http-exporter-settings>>.
 [[http-exporter-dns]]
 ==== Using DNS Hosts in HTTP Exporters
 
-{monitoring} runs inside of the the JVM security manager. When the JVM has the
+{monitoring} runs inside of the JVM security manager. When the JVM has the
 security manager enabled, the JVM changes the duration so that it caches DNS
 lookups indefinitely (for example, the mapping of a DNS hostname to an IP
 address). For this reason, if you are in an environment where the DNS response
@@ -41,5 +41,5 @@ WARNING: `span_multi` queries will hit too many clauses failure if the number of
 boolean query limit (defaults to 1024).To avoid an unbounded expansion you can set the <<query-dsl-multi-term-rewrite,
 rewrite method>> of the multi term query to `top_terms_*` rewrite. Or, if you use `span_multi` on `prefix` query only,
 you can activate the <<index-prefix-config,`index_prefixes`>> field option of the `text` field instead. This will
-rewrite any prefix query on the field to a a single term query that matches the indexed prefix.
+rewrite any prefix query on the field to a single term query that matches the indexed prefix.
@@ -217,4 +217,4 @@ Response:
 --------------------------------------------------
 // NOTCONSOLE
 
-NOTE: Second level of of collapsing doesn't allow `inner_hits`.
+NOTE: Second level of collapsing doesn't allow `inner_hits`.
@@ -334,7 +334,7 @@ the filter. If not set, the user DN is passed into the filter. Defaults to Empt
 `unmapped_groups_as_roles`::
 If set to `true`, the names of any unmapped LDAP groups are used as role names
 and assigned to the user. A group is considered to be _unmapped_ if it is not
-not referenced in a
+referenced in a
 {xpack-ref}/mapping-roles.html#mapping-roles-file[role-mapping file]. API-based
 role mappings are not considered. Defaults to `false`.
 
@@ -479,7 +479,7 @@ this setting controls the amount of time to cache DNS lookups. Defaults
 to `1h`.
 
 `domain_name`::
-The domain name of Active Directory. If the the `url` and `user_search_dn`
+The domain name of Active Directory. If the `url` and the `user_search_dn`
 settings are not specified, the cluster can derive those values from this
 setting. Required.
 
@@ -25,7 +25,7 @@ So let's start from the bottom; these roughly are:
 
 |`column`
 |`field`
-|In both cases, at the lowest level, data is stored in in _named_ entries, of a variety of <<sql-data-types, data types>>, containing _one_ value. SQL calls such an entry a _column_ while {es} a _field_.
+|In both cases, at the lowest level, data is stored in _named_ entries, of a variety of <<sql-data-types, data types>>, containing _one_ value. SQL calls such an entry a _column_ while {es} a _field_.
 Notice that in {es} a field can contain _multiple_ values of the same type (esentially a list) while in SQL, a _column_ can contain _exactly_ one value of said type.
 {es-sql} will do its best to preserve the SQL semantic and, depending on the query, reject those that return fields with more than one value.
 
@@ -28,7 +28,7 @@ No need for additional hardware, processes, runtimes or libraries to query {es};
 
 Lightweight and efficient::
 
-{es-sql} does not abstract {es} and its search capabilities - on the contrary, it embrases and exposes to SQL to allow proper full-text search, in real-time, in the same declarative, succint fashion.
+{es-sql} does not abstract {es} and its search capabilities - on the contrary, it embraces and exposes SQL to allow proper full-text search, in real-time, in the same declarative, succint fashion.
 
 
@@ -230,7 +230,7 @@ As many Elasticsearch tests are checking for a similar output, like the amount o
 `assertMatchCount()`:: Asserts a matching count from a percolation response
 `assertFirstHit()`:: Asserts the first hit hits the specified matcher
 `assertSecondHit()`:: Asserts the second hit hits the specified matcher
-`assertThirdHit()`:: Asserts the third hits hits the specified matcher
+`assertThirdHit()`:: Asserts the third hit hits the specified matcher
 `assertSearchHit()`:: Assert a certain element in a search response hits the specified matcher
 `assertNoFailures()`:: Asserts that no shard failures have occurred in the response
 `assertFailures()`:: Asserts that shard failures have happened during a search request
@@ -459,7 +459,7 @@ Upgrading indices create with Lucene 3.x (Elasticsearch v0.20 and before) to Luc
 [float]
 === Improve error handling when deleting files (STATUS: DONE, v1.4.0.Beta1)
 
-Lucene uses reference counting to prevent files that are still in use from being deleted. Lucene testing discovered a bug ({JIRA}5919[LUCENE-5919]) when decrementing the ref count on a batch of files. If deleting some of the files resulted in an exception (e.g. due to interference from a virus scanner), the files that had had their ref counts decremented successfully could later have their ref counts deleted again, incorrectly, resulting in files being physically deleted before their time. This is fixed in Lucene 4.10.
+Lucene uses reference counting to prevent files that are still in use from being deleted. Lucene testing discovered a bug ({JIRA}5919[LUCENE-5919]) when decrementing the ref count on a batch of files. If deleting some of the files resulted in an exception (e.g. due to interference from a virus scanner), the files that had their ref counts decremented successfully could later have their ref counts deleted again, incorrectly, resulting in files being physically deleted before their time. This is fixed in Lucene 4.10.
 
 [float]
 === Using Lucene Checksums to verify shards during snapshot/restore (STATUS:DONE, v1.3.3)
@@ -1 +0,0 @@
-Math.log(_score * 2) + params.my_modifier

@@ -1,5 +0,0 @@
-double profit = 0;
-for (t in params._agg.transactions) {
-    profit += t
-}
-return profit

@@ -1 +0,0 @@
-params._agg.transactions = []

@@ -1 +0,0 @@
-params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)

@@ -1,5 +0,0 @@
-double profit = 0;
-for (a in params._aggs) {
-    profit += a
-}
-return profit

@@ -1,2 +0,0 @@
-// Simple script to load a field. Not really a good example, but a simple one.
-doc[params.field].value
@@ -26,6 +26,7 @@ esplugin {
 dependencies {
   compileOnly project(':modules:lang-painless')
   compile project(':libs:grok')
+  compile project(':libs:dissect')
 }
 
 compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
@@ -0,0 +1,76 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.ingest.common;

import org.elasticsearch.dissect.DissectParser;
import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.ConfigurationUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;

import java.util.Map;

public final class DissectProcessor extends AbstractProcessor {

    public static final String TYPE = "dissect";
    // package private members for testing
    final String field;
    final boolean ignoreMissing;
    final String pattern;
    final String appendSeparator;
    final DissectParser dissectParser;

    DissectProcessor(String tag, String field, String pattern, String appendSeparator, boolean ignoreMissing) {
        super(tag);
        this.field = field;
        this.ignoreMissing = ignoreMissing;
        this.pattern = pattern;
        this.appendSeparator = appendSeparator;
        this.dissectParser = new DissectParser(pattern, appendSeparator);
    }

    @Override
    public void execute(IngestDocument ingestDocument) {
        String input = ingestDocument.getFieldValue(field, String.class, ignoreMissing);
        if (input == null && ignoreMissing) {
            return;
        } else if (input == null) {
            throw new IllegalArgumentException("field [" + field + "] is null, cannot process it.");
        }
        dissectParser.parse(input).forEach(ingestDocument::setFieldValue);
    }

    @Override
    public String getType() {
        return TYPE;
    }

    public static final class Factory implements Processor.Factory {

        @Override
        public DissectProcessor create(Map<String, Processor.Factory> registry, String processorTag, Map<String, Object> config) {
            String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
            String pattern = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "pattern");
            String appendSeparator = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "append_separator", "");
            boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
            return new DissectProcessor(processorTag, field, pattern, appendSeparator, ignoreMissing);
        }
    }
}
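To make the factory and execute paths above concrete, here is a minimal usage sketch. It is illustrative only: the tag, field values, and the standalone `IngestDocument` constructor call (mirroring the arguments used by the unit tests later in this commit) are assumptions for the example, not part of the change itself.

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.common.DissectProcessor;

public class DissectProcessorUsageSketch {
    public static void main(String[] args) throws Exception {
        // Configuration as the ingest framework would hand it to the factory.
        Map<String, Object> config = new HashMap<>();
        config.put("field", "message");
        config.put("pattern", "%{clientip} %{?ident} %{?auth} [%{@timestamp}]");

        DissectProcessor processor = new DissectProcessor.Factory().create(null, "example-tag", config);

        // A standalone document; in a real pipeline this is built from the index request.
        Map<String, Object> source = new HashMap<>();
        source.put("message", "1.2.3.4 - - [30/Apr/1998:22:00:52 +0000]");
        IngestDocument document = new IngestDocument("index", "type", "id", null, null, null, source);

        processor.execute(document);
        System.out.println(document.getFieldValue("clientip", String.class));   // 1.2.3.4
        System.out.println(document.getFieldValue("@timestamp", String.class)); // 30/Apr/1998:22:00:52 +0000
    }
}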
@@ -82,6 +82,7 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
         processors.put(KeyValueProcessor.TYPE, new KeyValueProcessor.Factory());
         processors.put(URLDecodeProcessor.TYPE, new URLDecodeProcessor.Factory());
         processors.put(BytesProcessor.TYPE, new BytesProcessor.Factory());
+        processors.put(DissectProcessor.TYPE, new DissectProcessor.Factory());
         return Collections.unmodifiableMap(processors);
     }
 
@ -0,0 +1,92 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.ingest.common;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.dissect.DissectException;
|
||||
import org.elasticsearch.ingest.RandomDocumentPicks;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class DissectProcessorFactoryTests extends ESTestCase {
|
||||
|
||||
public void testCreate() {
|
||||
DissectProcessor.Factory factory = new DissectProcessor.Factory();
|
||||
String fieldName = RandomDocumentPicks.randomFieldName(random());
|
||||
String processorTag = randomAlphaOfLength(10);
|
||||
String pattern = "%{a},%{b},%{c}";
|
||||
String appendSeparator = ":";
|
||||
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("field", fieldName);
|
||||
config.put("pattern", pattern);
|
||||
config.put("append_separator", appendSeparator);
|
||||
config.put("ignore_missing", true);
|
||||
|
||||
DissectProcessor processor = factory.create(null, processorTag, config);
|
||||
assertThat(processor.getTag(), equalTo(processorTag));
|
||||
assertThat(processor.field, equalTo(fieldName));
|
||||
assertThat(processor.pattern, equalTo(pattern));
|
||||
assertThat(processor.appendSeparator, equalTo(appendSeparator));
|
||||
assertThat(processor.dissectParser, is(notNullValue()));
|
||||
assertThat(processor.ignoreMissing, is(true));
|
||||
}
|
||||
|
||||
public void testCreateMissingField() {
|
||||
DissectProcessor.Factory factory = new DissectProcessor.Factory();
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("pattern", "%{a},%{b},%{c}");
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing"));
|
||||
}
|
||||
|
||||
public void testCreateMissingPattern() {
|
||||
DissectProcessor.Factory factory = new DissectProcessor.Factory();
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("field", randomAlphaOfLength(10));
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), Matchers.equalTo("[pattern] required property is missing"));
|
||||
}
|
||||
|
||||
public void testCreateMissingOptionals() {
|
||||
DissectProcessor.Factory factory = new DissectProcessor.Factory();
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("pattern", "%{a},%{b},%{c}");
|
||||
config.put("field", randomAlphaOfLength(10));
|
||||
DissectProcessor processor = factory.create(null, "_tag", config);
|
||||
assertThat(processor.appendSeparator, equalTo(""));
|
||||
assertThat(processor.ignoreMissing, is(false));
|
||||
}
|
||||
|
||||
public void testCreateBadPattern() {
|
||||
DissectProcessor.Factory factory = new DissectProcessor.Factory();
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("pattern", "no keys defined");
|
||||
config.put("field", randomAlphaOfLength(10));
|
||||
expectThrows(DissectException.class, () -> factory.create(null, "_tag", config));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,114 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.ingest.common;
|
||||
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.dissect.DissectException;
|
||||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
import org.elasticsearch.ingest.RandomDocumentPicks;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
|
||||
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
||||
* Basic tests for the {@link DissectProcessor}. See the {@link org.elasticsearch.dissect.DissectParser} test suite for a comprehensive
|
||||
* set of dissect tests.
|
||||
*/
|
||||
public class DissectProcessorTests extends ESTestCase {
|
||||
|
||||
public void testMatch() {
|
||||
IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null,
|
||||
Collections.singletonMap("message", "foo,bar,baz"));
|
||||
DissectProcessor dissectProcessor = new DissectProcessor("", "message", "%{a},%{b},%{c}", "", true);
|
||||
dissectProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("foo"));
|
||||
assertThat(ingestDocument.getFieldValue("b", String.class), equalTo("bar"));
|
||||
assertThat(ingestDocument.getFieldValue("c", String.class), equalTo("baz"));
|
||||
}
|
||||
|
||||
public void testMatchOverwrite() {
|
||||
IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null,
|
||||
MapBuilder.<String, Object>newMapBuilder()
|
||||
.put("message", "foo,bar,baz")
|
||||
.put("a", "willgetstompped")
|
||||
.map());
|
||||
assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("willgetstompped"));
|
||||
DissectProcessor dissectProcessor = new DissectProcessor("", "message", "%{a},%{b},%{c}", "", true);
|
||||
dissectProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("foo"));
|
||||
assertThat(ingestDocument.getFieldValue("b", String.class), equalTo("bar"));
|
||||
assertThat(ingestDocument.getFieldValue("c", String.class), equalTo("baz"));
|
||||
}
|
||||
|
||||
public void testAdvancedMatch() {
|
||||
IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null,
|
||||
Collections.singletonMap("message", "foo bar,,,,,,,baz nope:notagain 😊 🐇 🙃"));
|
||||
DissectProcessor dissectProcessor =
|
||||
new DissectProcessor("", "message", "%{a->} %{*b->},%{&b} %{}:%{?skipme} %{+smile/2} 🐇 %{+smile/1}", "::::", true);
|
||||
dissectProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("foo"));
|
||||
assertThat(ingestDocument.getFieldValue("bar", String.class), equalTo("baz"));
|
||||
expectThrows(IllegalArgumentException.class, () -> ingestDocument.getFieldValue("nope", String.class));
|
||||
expectThrows(IllegalArgumentException.class, () -> ingestDocument.getFieldValue("notagain", String.class));
|
||||
assertThat(ingestDocument.getFieldValue("smile", String.class), equalTo("🙃::::😊"));
|
||||
}
|
||||
|
||||
public void testMiss() {
|
||||
IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null,
|
||||
Collections.singletonMap("message", "foo:bar,baz"));
|
||||
DissectProcessor dissectProcessor = new DissectProcessor("", "message", "%{a},%{b},%{c}", "", true);
|
||||
DissectException e = expectThrows(DissectException.class, () -> dissectProcessor.execute(ingestDocument));
|
||||
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to find match for dissect pattern"));
|
||||
}
|
||||
|
||||
public void testNonStringValueWithIgnoreMissing() {
|
||||
String fieldName = RandomDocumentPicks.randomFieldName(random());
|
||||
Processor processor = new DissectProcessor("", fieldName, "%{a},%{b},%{c}", "", true);
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
|
||||
ingestDocument.setFieldValue(fieldName, randomInt());
|
||||
Exception e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
||||
assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
|
||||
}
|
||||
|
||||
public void testNullValueWithIgnoreMissing() throws Exception {
|
||||
String fieldName = RandomDocumentPicks.randomFieldName(random());
|
||||
Processor processor = new DissectProcessor("", fieldName, "%{a},%{b},%{c}", "", true);
|
||||
IngestDocument originalIngestDocument = RandomDocumentPicks
|
||||
.randomIngestDocument(random(), Collections.singletonMap(fieldName, null));
|
||||
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
||||
processor.execute(ingestDocument);
|
||||
assertIngestDocument(originalIngestDocument, ingestDocument);
|
||||
}
|
||||
|
||||
public void testNullValueWithOutIgnoreMissing() {
|
||||
String fieldName = RandomDocumentPicks.randomFieldName(random());
|
||||
Processor processor = new DissectProcessor("", fieldName, "%{a},%{b},%{c}", "", false);
|
||||
IngestDocument originalIngestDocument = RandomDocumentPicks
|
||||
.randomIngestDocument(random(), Collections.singletonMap(fieldName, null));
|
||||
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
||||
expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,90 @@
|
|||
---
|
||||
teardown:
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "my_pipeline"
|
||||
ignore: 404
|
||||
|
||||
---
|
||||
"Test dissect processor match":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"dissect" : {
|
||||
"field" : "message",
|
||||
"pattern" : "%{a} %{b} %{c}"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {message: "foo bar baz"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.message: "foo bar baz" }
|
||||
- match: { _source.a: "foo" }
|
||||
- match: { _source.b: "bar" }
|
||||
- match: { _source.c: "baz" }
|
||||
---
|
||||
"Test dissect processor mismatch":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"dissect" : {
|
||||
"field" : "message",
|
||||
"pattern" : "%{a},%{b},%{c}"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
catch: '/Unable to find match for dissect pattern: \%\{a\},\%\{b\},\%\{c\} against source: foo bar baz/'
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 2
|
||||
pipeline: "my_pipeline"
|
||||
body: {message: "foo bar baz"}
|
||||
|
||||
---
|
||||
"Test fail to create dissect processor":
|
||||
- do:
|
||||
catch: '/Unable to parse pattern/'
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"dissect" : {
|
||||
"field" : "message",
|
||||
"pattern" : "bad pattern"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -22,6 +22,7 @@ import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
|
|||
esplugin {
|
||||
description 'The ICU Analysis plugin integrates Lucene ICU module into elasticsearch, adding ICU relates analysis components.'
|
||||
classname 'org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin'
|
||||
hasClientJar = true
|
||||
}
|
||||
|
||||
tasks.withType(ForbiddenApisCliTask) {
|
||||
|
|
|
@ -128,7 +128,7 @@ thirdPartyAudit.excludes = [
|
|||
]
|
||||
|
||||
// jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9)
|
||||
if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
|
||||
if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
'javax.xml.bind.Binder',
|
||||
'javax.xml.bind.ContextFinder$1',
|
||||
|
|
|
@ -87,7 +87,7 @@ thirdPartyAudit.excludes = [
|
|||
'org.apache.log.Logger',
|
||||
]
|
||||
|
||||
if (JavaVersion.current() > JavaVersion.VERSION_1_8) {
|
||||
if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
'javax.xml.bind.DatatypeConverter',
|
||||
'javax.xml.bind.JAXBContext'
|
||||
|
|
|
@ -2106,7 +2106,27 @@ thirdPartyAudit.excludes = [
|
|||
'ucar.nc2.dataset.NetcdfDataset'
|
||||
]
|
||||
|
||||
if (JavaVersion.current() > JavaVersion.VERSION_1_8) {
|
||||
if (project.runtimeJavaVersion == JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
// TODO: Why is this needed ?
|
||||
'com.sun.javadoc.ClassDoc',
|
||||
'com.sun.javadoc.Doc',
|
||||
'com.sun.javadoc.Doclet',
|
||||
'com.sun.javadoc.ExecutableMemberDoc',
|
||||
'com.sun.javadoc.FieldDoc',
|
||||
'com.sun.javadoc.MethodDoc',
|
||||
'com.sun.javadoc.PackageDoc',
|
||||
'com.sun.javadoc.Parameter',
|
||||
'com.sun.javadoc.ProgramElementDoc',
|
||||
'com.sun.javadoc.RootDoc',
|
||||
'com.sun.javadoc.SourcePosition',
|
||||
'com.sun.javadoc.Tag',
|
||||
'com.sun.javadoc.Type',
|
||||
'com.sun.tools.javadoc.Main'
|
||||
]
|
||||
}
|
||||
|
||||
if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
'javax.activation.ActivationDataFlavor',
|
||||
'javax.activation.CommandMap',
|
||||
|
|
|
@ -582,6 +582,25 @@ thirdPartyAudit.excludes = [
|
|||
'com.squareup.okhttp.ResponseBody'
|
||||
]
|
||||
|
||||
if (JavaVersion.current() > JavaVersion.VERSION_1_8) {
|
||||
if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += ['javax.xml.bind.annotation.adapters.HexBinaryAdapter']
|
||||
}
|
||||
|
||||
if (project.runtimeJavaVersion == JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
// TODO: Why is this needed ?
|
||||
'com.sun.javadoc.AnnotationDesc',
|
||||
'com.sun.javadoc.AnnotationTypeDoc',
|
||||
'com.sun.javadoc.ClassDoc',
|
||||
'com.sun.javadoc.ConstructorDoc',
|
||||
'com.sun.javadoc.Doc',
|
||||
'com.sun.javadoc.DocErrorReporter',
|
||||
'com.sun.javadoc.FieldDoc',
|
||||
'com.sun.javadoc.LanguageVersion',
|
||||
'com.sun.javadoc.MethodDoc',
|
||||
'com.sun.javadoc.PackageDoc',
|
||||
'com.sun.javadoc.ProgramElementDoc',
|
||||
'com.sun.javadoc.RootDoc',
|
||||
'com.sun.tools.doclets.standard.Standard'
|
||||
]
|
||||
}
|
||||
|
|
|
@ -447,7 +447,7 @@ thirdPartyAudit.excludes = [
|
|||
]
|
||||
|
||||
// jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9)
|
||||
if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
|
||||
if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
'javax.xml.bind.Binder',
|
||||
'javax.xml.bind.ContextFinder$1',
|
||||
|
|
|
@ -304,17 +304,22 @@ thirdPartyAudit.excludes = [
|
|||
'com.google.common.geometry.S2LatLng',
|
||||
]
|
||||
|
||||
if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
|
||||
// Used by Log4J 2.11.1
|
||||
if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += [
|
||||
// Used by Log4J 2.11.1
|
||||
'java.io.ObjectInputFilter',
|
||||
'java.io.ObjectInputFilter$Config',
|
||||
'java.io.ObjectInputFilter$FilterInfo',
|
||||
'java.io.ObjectInputFilter$Status'
|
||||
'java.io.ObjectInputFilter$Status',
|
||||
// added in 9
|
||||
'java.lang.ProcessHandle',
|
||||
'java.lang.StackWalker',
|
||||
'java.lang.StackWalker$Option',
|
||||
'java.lang.StackWalker$StackFrame'
|
||||
]
|
||||
}
|
||||
|
||||
if (JavaVersion.current() > JavaVersion.VERSION_1_8) {
|
||||
if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
|
||||
thirdPartyAudit.excludes += ['javax.xml.bind.DatatypeConverter']
|
||||
}
|
||||
|
||||
|
@ -341,16 +346,3 @@ if (isEclipse == false || project.path == ":server-tests") {
|
|||
check.dependsOn integTest
|
||||
integTest.mustRunAfter test
|
||||
}
|
||||
|
||||
// TODO: remove these compatibility tests in 7.0
|
||||
additionalTest('testScriptedMetricAggParamsV6Compatibility') {
|
||||
include '**/ScriptedMetricAggregatorAggStateV6CompatTests.class'
|
||||
include '**/InternalScriptedMetricAggStateV6CompatTests.class'
|
||||
systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'true'
|
||||
}
|
||||
|
||||
test {
|
||||
// these are tested explicitly in separate test tasks
|
||||
exclude '**/ScriptedMetricAggregatorAggStateV6CompatTests.class'
|
||||
exclude '**/InternalScriptedMetricAggStateV6CompatTests.class'
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ import java.util.Objects;
|
|||
public abstract class ShardOperationFailedException implements Streamable, ToXContent {
|
||||
|
||||
protected String index;
|
||||
protected int shardId;
|
||||
protected int shardId = -1;
|
||||
protected String reason;
|
||||
protected RestStatus status;
|
||||
protected Throwable cause;
|
||||
|
|
|
@ -54,8 +54,7 @@ public class ShardSearchFailure extends ShardOperationFailedException {
|
|||
|
||||
private SearchShardTarget shardTarget;
|
||||
|
||||
private ShardSearchFailure() {
|
||||
|
||||
ShardSearchFailure() {
|
||||
}
|
||||
|
||||
public ShardSearchFailure(Exception e) {
|
||||
|
@ -101,6 +100,8 @@ public class ShardSearchFailure extends ShardOperationFailedException {
|
|||
public void readFrom(StreamInput in) throws IOException {
|
||||
if (in.readBoolean()) {
|
||||
shardTarget = new SearchShardTarget(in);
|
||||
index = shardTarget.getFullyQualifiedIndexName();
|
||||
shardId = shardTarget.getShardId().getId();
|
||||
}
|
||||
reason = in.readString();
|
||||
status = RestStatus.readFrom(in);
|
||||
|
|
|
@ -271,7 +271,7 @@ public class ReplicationResponse extends ActionResponse {
|
|||
public void readFrom(StreamInput in) throws IOException {
|
||||
shardId = ShardId.readShardId(in);
|
||||
super.shardId = shardId.getId();
|
||||
super.index = shardId.getIndexName();
|
||||
index = shardId.getIndexName();
|
||||
nodeId = in.readOptionalString();
|
||||
cause = in.readException();
|
||||
status = RestStatus.readFrom(in);
|
||||
|
|
|
@ -846,7 +846,7 @@ public class InternalEngine extends Engine {
|
|||
} else if (indexResult.getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
// if we have document failure, record it as a no-op in the translog with the generated seq_no
|
||||
final NoOp noOp = new NoOp(indexResult.getSeqNo(), index.primaryTerm(), index.origin(),
|
||||
index.startTime(), indexResult.getFailure().getMessage());
|
||||
index.startTime(), indexResult.getFailure().toString());
|
||||
location = innerNoOp(noOp).getTranslogLocation();
|
||||
} else {
|
||||
location = null;
|
||||
|
@ -1183,7 +1183,7 @@ public class InternalEngine extends Engine {
|
|||
location = translog.add(new Translog.Delete(delete, deleteResult));
|
||||
} else if (deleteResult.getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
location = translog.add(new Translog.NoOp(deleteResult.getSeqNo(),
|
||||
delete.primaryTerm(), deleteResult.getFailure().getMessage()));
|
||||
delete.primaryTerm(), deleteResult.getFailure().toString()));
|
||||
} else {
|
||||
location = null;
|
||||
}
|
||||
|
|
|
@ -22,8 +22,6 @@ package org.elasticsearch.script;
|
|||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.search.lookup.LeafSearchLookup;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
@ -33,30 +31,11 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
public class ScriptedMetricAggContexts {
|
||||
private static final DeprecationLogger DEPRECATION_LOGGER =
|
||||
new DeprecationLogger(Loggers.getLogger(ScriptedMetricAggContexts.class));
|
||||
|
||||
// Public for access from tests
|
||||
public static final String AGG_PARAM_DEPRECATION_WARNING =
|
||||
"params._agg/_aggs for scripted metric aggregations are deprecated, use state/states (not in params) instead. " +
|
||||
"Use -Des.aggregations.enable_scripted_metric_agg_param=false to disable.";
|
||||
|
||||
public static boolean deprecatedAggParamEnabled() {
|
||||
boolean enabled = Boolean.parseBoolean(
|
||||
System.getProperty("es.aggregations.enable_scripted_metric_agg_param", "true"));
|
||||
|
||||
if (enabled) {
|
||||
DEPRECATION_LOGGER.deprecatedAndMaybeLog("enable_scripted_metric_agg_param", AGG_PARAM_DEPRECATION_WARNING);
|
||||
}
|
||||
|
||||
return enabled;
|
||||
}
|
||||
|
||||
private abstract static class ParamsAndStateBase {
|
||||
private final Map<String, Object> params;
|
||||
private final Object state;
|
||||
private final Map<String, Object> state;
|
||||
|
||||
ParamsAndStateBase(Map<String, Object> params, Object state) {
|
||||
ParamsAndStateBase(Map<String, Object> params, Map<String, Object> state) {
|
||||
this.params = params;
|
||||
this.state = state;
|
||||
}
|
||||
|
@ -71,14 +50,14 @@ public class ScriptedMetricAggContexts {
|
|||
}
|
||||
|
||||
public abstract static class InitScript extends ParamsAndStateBase {
|
||||
public InitScript(Map<String, Object> params, Object state) {
|
||||
public InitScript(Map<String, Object> params, Map<String, Object> state) {
|
||||
super(params, state);
|
||||
}
|
||||
|
||||
public abstract void execute();
|
||||
|
||||
public interface Factory {
|
||||
InitScript newInstance(Map<String, Object> params, Object state);
|
||||
InitScript newInstance(Map<String, Object> params, Map<String, Object> state);
|
||||
}
|
||||
|
||||
public static String[] PARAMETERS = {};
|
||||
|
@ -89,7 +68,7 @@ public class ScriptedMetricAggContexts {
|
|||
private final LeafSearchLookup leafLookup;
|
||||
private Scorer scorer;
|
||||
|
||||
public MapScript(Map<String, Object> params, Object state, SearchLookup lookup, LeafReaderContext leafContext) {
|
||||
public MapScript(Map<String, Object> params, Map<String, Object> state, SearchLookup lookup, LeafReaderContext leafContext) {
|
||||
super(params, state);
|
||||
|
||||
this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext);
|
||||
|
@ -131,7 +110,7 @@ public class ScriptedMetricAggContexts {
|
|||
}
|
||||
|
||||
public interface Factory {
|
||||
LeafFactory newFactory(Map<String, Object> params, Object state, SearchLookup lookup);
|
||||
LeafFactory newFactory(Map<String, Object> params, Map<String, Object> state, SearchLookup lookup);
|
||||
}
|
||||
|
||||
public static String[] PARAMETERS = new String[] {};
|
||||
|
@ -139,14 +118,14 @@ public class ScriptedMetricAggContexts {
|
|||
}
|
||||
|
||||
public abstract static class CombineScript extends ParamsAndStateBase {
|
||||
public CombineScript(Map<String, Object> params, Object state) {
|
||||
public CombineScript(Map<String, Object> params, Map<String, Object> state) {
|
||||
super(params, state);
|
||||
}
|
||||
|
||||
public abstract Object execute();
|
||||
|
||||
public interface Factory {
|
||||
CombineScript newInstance(Map<String, Object> params, Object state);
|
||||
CombineScript newInstance(Map<String, Object> params, Map<String, Object> state);
|
||||
}
|
||||
|
||||
public static String[] PARAMETERS = {};
|
||||
|
|
|
@ -95,11 +95,6 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip
|
|||
params.putAll(firstAggregation.reduceScript.getParams());
|
||||
}
|
||||
|
||||
// Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below).
|
||||
if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) {
|
||||
params.put("_aggs", aggregationObjects);
|
||||
}
|
||||
|
||||
ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile(
|
||||
firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT);
|
||||
ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, aggregationObjects);
|
||||
|
|
|
@ -41,10 +41,10 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
|
|||
private final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript;
|
||||
private final ScriptedMetricAggContexts.CombineScript combineScript;
|
||||
private final Script reduceScript;
|
||||
private Object aggState;
|
||||
private Map<String, Object> aggState;
|
||||
|
||||
protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScript.LeafFactory mapScript, ScriptedMetricAggContexts.CombineScript combineScript,
|
||||
Script reduceScript, Object aggState, SearchContext context, Aggregator parent,
|
||||
Script reduceScript, Map<String, Object> aggState, SearchContext context, Aggregator parent,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
|
|
|
@ -80,20 +80,7 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory<ScriptedM
|
|||
aggParams = new HashMap<>();
|
||||
}
|
||||
|
||||
// Add _agg to params map for backwards compatibility (redundant with context variables on the scripts created below).
|
||||
// When this is removed, aggState (as passed to ScriptedMetricAggregator) can be changed to Map<String, Object>, since
|
||||
// it won't be possible to completely replace it with another type as is possible when it's an entry in params.
|
||||
Object aggState = new HashMap<String, Object>();
|
||||
if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) {
|
||||
if (aggParams.containsKey("_agg") == false) {
|
||||
// Add _agg if it wasn't added manually
|
||||
aggParams.put("_agg", aggState);
|
||||
} else {
|
||||
// If it was added manually, also use it for the agg context variable to reduce the likelihood of
|
||||
// weird behavior due to multiple different variables.
|
||||
aggState = aggParams.get("_agg");
|
||||
}
|
||||
}
|
||||
Map<String, Object> aggState = new HashMap<String, Object>();
|
||||
|
||||
final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance(
|
||||
mergeParams(aggParams, initScriptParams), aggState);
|
||||
|
|
|
@ -102,7 +102,7 @@ public class SnapshotShardFailure extends ShardOperationFailedException {
|
|||
nodeId = in.readOptionalString();
|
||||
shardId = ShardId.readShardId(in);
|
||||
super.shardId = shardId.getId();
|
||||
super.index = shardId.getIndexName();
|
||||
index = shardId.getIndexName();
|
||||
reason = in.readString();
|
||||
status = RestStatus.readFrom(in);
|
||||
}
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.action.search;
|
||||
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
|
@ -180,7 +181,7 @@ public class SearchResponseTests extends ESTestCase {
|
|||
int numFailures = randomIntBetween(1, 5);
|
||||
ShardSearchFailure[] failures = new ShardSearchFailure[numFailures];
|
||||
for (int i = 0; i < failures.length; i++) {
|
||||
failures[i] = ShardSearchFailureTests.createTestItem();
|
||||
failures[i] = ShardSearchFailureTests.createTestItem(IndexMetaData.INDEX_UUID_NA_VALUE);
|
||||
}
|
||||
SearchResponse response = createTestItem(failures);
|
||||
XContentType xcontentType = randomFrom(XContentType.values());
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.elasticsearch.index.Index;
|
|||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.search.SearchShardTarget;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -38,7 +39,7 @@ import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
|
|||
|
||||
public class ShardSearchFailureTests extends ESTestCase {
|
||||
|
||||
public static ShardSearchFailure createTestItem() {
|
||||
public static ShardSearchFailure createTestItem(String indexUuid) {
|
||||
String randomMessage = randomAlphaOfLengthBetween(3, 20);
|
||||
Exception ex = new ParsingException(0, 0, randomMessage , new IllegalArgumentException("some bad argument"));
|
||||
SearchShardTarget searchShardTarget = null;
|
||||
|
@ -47,7 +48,7 @@ public class ShardSearchFailureTests extends ESTestCase {
|
|||
String indexName = randomAlphaOfLengthBetween(5, 10);
|
||||
String clusterAlias = randomBoolean() ? randomAlphaOfLengthBetween(5, 10) : null;
|
||||
searchShardTarget = new SearchShardTarget(nodeId,
|
||||
new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomInt()), clusterAlias, OriginalIndices.NONE);
|
||||
new ShardId(new Index(indexName, indexUuid), randomInt()), clusterAlias, OriginalIndices.NONE);
|
||||
}
|
||||
return new ShardSearchFailure(ex, searchShardTarget);
|
||||
}
|
||||
|
@ -66,7 +67,7 @@ public class ShardSearchFailureTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
|
||||
ShardSearchFailure response = createTestItem();
|
||||
ShardSearchFailure response = createTestItem(IndexMetaData.INDEX_UUID_NA_VALUE);
|
||||
XContentType xContentType = randomFrom(XContentType.values());
|
||||
boolean humanReadable = randomBoolean();
|
||||
BytesReference originalBytes = toShuffledXContent(response, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
|
||||
|
@ -134,4 +135,15 @@ public class ShardSearchFailureTests extends ESTestCase {
|
|||
+ "}",
|
||||
xContent.utf8ToString());
|
||||
}
|
||||
|
||||
public void testSerialization() throws IOException {
|
||||
ShardSearchFailure testItem = createTestItem(randomAlphaOfLength(12));
|
||||
ShardSearchFailure deserializedInstance = copyStreamable(testItem, writableRegistry(),
|
||||
ShardSearchFailure::new, VersionUtils.randomVersion(random()));
|
||||
assertEquals(testItem.index(), deserializedInstance.index());
|
||||
assertEquals(testItem.shard(), deserializedInstance.shard());
|
||||
assertEquals(testItem.shardId(), deserializedInstance.shardId());
|
||||
assertEquals(testItem.reason(), deserializedInstance.reason());
|
||||
assertEquals(testItem.status(), deserializedInstance.status());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.index.replication;
|
||||
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
|
@ -36,9 +37,7 @@ import org.elasticsearch.common.util.iterable.Iterables;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.engine.EngineConfig;
|
||||
import org.elasticsearch.index.engine.EngineFactory;
|
||||
import org.elasticsearch.index.engine.EngineTestCase;
|
||||
import org.elasticsearch.index.engine.InternalEngine;
|
||||
import org.elasticsearch.index.engine.InternalEngineTests;
|
||||
import org.elasticsearch.index.engine.SegmentsStats;
|
||||
|
@ -49,6 +48,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers;
|
|||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.IndexShardTests;
|
||||
import org.elasticsearch.index.store.Store;
|
||||
import org.elasticsearch.index.translog.SnapshotMatchers;
|
||||
import org.elasticsearch.index.translog.Translog;
|
||||
import org.elasticsearch.indices.recovery.RecoveryTarget;
|
||||
import org.elasticsearch.threadpool.TestThreadPool;
|
||||
|
@ -56,6 +56,7 @@ import org.elasticsearch.threadpool.ThreadPool;
|
|||
import org.hamcrest.Matcher;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@ -342,12 +343,11 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
|
|||
* for primary and replica shards
|
||||
*/
|
||||
public void testDocumentFailureReplication() throws Exception {
|
||||
String failureMessage = "simulated document failure";
|
||||
final EngineFactory failIndexingOpsEngine = new EngineFactory() {
|
||||
@Override
|
||||
public Engine newReadWriteEngine(EngineConfig config) {
|
||||
return EngineTestCase.createInternalEngine((directory, writerConfig) ->
|
||||
new IndexWriter(directory, writerConfig) {
|
||||
final IOException indexException = new IOException("simulated indexing failure");
|
||||
final IOException deleteException = new IOException("simulated deleting failure");
|
||||
final EngineFactory engineFactory = config -> InternalEngineTests.createInternalEngine((dir, iwc) ->
|
||||
new IndexWriter(dir, iwc) {
|
||||
final AtomicBoolean throwAfterIndexedOneDoc = new AtomicBoolean(); // need one document to trigger delete in IW.
|
||||
@Override
|
||||
public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
|
||||
boolean isTombstone = false;
|
||||
|
@ -356,42 +356,70 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
|
|||
isTombstone = true;
|
||||
}
|
||||
}
|
||||
if (isTombstone) {
|
||||
return super.addDocument(doc);
|
||||
if (isTombstone == false && throwAfterIndexedOneDoc.getAndSet(true)) {
|
||||
throw indexException;
|
||||
} else {
|
||||
throw new IOException(failureMessage);
|
||||
return super.addDocument(doc);
|
||||
}
|
||||
}
|
||||
@Override
|
||||
public long deleteDocuments(Term... terms) throws IOException {
|
||||
throw deleteException;
|
||||
}
|
||||
@Override
|
||||
public long softUpdateDocument(Term term, Iterable<? extends IndexableField> doc, Field...fields) throws IOException {
|
||||
throw deleteException; // a delete uses softUpdateDocument API if soft-deletes enabled
|
||||
}
|
||||
}, null, null, config);
|
||||
}
|
||||
};
|
||||
try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(0)) {
|
||||
@Override
|
||||
protected EngineFactory getEngineFactory(ShardRouting routing) { return failIndexingOpsEngine; }}) {
|
||||
protected EngineFactory getEngineFactory(ShardRouting routing) { return engineFactory; }}) {
|
||||
|
||||
// test only primary
|
||||
// start with the primary only so two first failures are replicated to replicas via recovery from the translog of the primary.
|
||||
shards.startPrimary();
|
||||
BulkItemResponse response = shards.index(
|
||||
new IndexRequest(index.getName(), "type", "1")
|
||||
.source("{}", XContentType.JSON)
|
||||
);
|
||||
assertTrue(response.isFailed());
|
||||
assertNoOpTranslogOperationForDocumentFailure(shards, 1, shards.getPrimary().getPendingPrimaryTerm(), failureMessage);
|
||||
shards.assertAllEqual(0);
|
||||
long primaryTerm = shards.getPrimary().getPendingPrimaryTerm();
|
||||
List<Translog.Operation> expectedTranslogOps = new ArrayList<>();
|
||||
BulkItemResponse indexResp = shards.index(new IndexRequest(index.getName(), "type", "1").source("{}", XContentType.JSON));
|
||||
assertThat(indexResp.isFailed(), equalTo(false));
|
||||
expectedTranslogOps.add(new Translog.Index("type", "1", 0, primaryTerm, 1, "{}".getBytes(StandardCharsets.UTF_8), null, -1));
|
||||
try (Translog.Snapshot snapshot = getTranslog(shards.getPrimary()).newSnapshot()) {
|
||||
assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps));
|
||||
}
|
||||
|
||||
indexResp = shards.index(new IndexRequest(index.getName(), "type", "any").source("{}", XContentType.JSON));
|
||||
assertThat(indexResp.getFailure().getCause(), equalTo(indexException));
|
||||
expectedTranslogOps.add(new Translog.NoOp(1, primaryTerm, indexException.toString()));
|
||||
|
||||
BulkItemResponse deleteResp = shards.delete(new DeleteRequest(index.getName(), "type", "1"));
|
||||
assertThat(deleteResp.getFailure().getCause(), equalTo(deleteException));
|
||||
expectedTranslogOps.add(new Translog.NoOp(2, primaryTerm, deleteException.toString()));
|
||||
shards.assertAllEqual(1);
|
||||
|
||||
// add some replicas
|
||||
int nReplica = randomIntBetween(1, 3);
|
||||
for (int i = 0; i < nReplica; i++) {
|
||||
shards.addReplica();
|
||||
}
|
||||
shards.startReplicas(nReplica);
|
||||
response = shards.index(
|
||||
new IndexRequest(index.getName(), "type", "1")
|
||||
.source("{}", XContentType.JSON)
|
||||
);
|
||||
assertTrue(response.isFailed());
|
||||
assertNoOpTranslogOperationForDocumentFailure(shards, 2, shards.getPrimary().getPendingPrimaryTerm(), failureMessage);
|
||||
shards.assertAllEqual(0);
|
||||
for (IndexShard shard : shards) {
|
||||
try (Translog.Snapshot snapshot = getTranslog(shard).newSnapshot()) {
|
||||
assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps));
|
||||
}
|
||||
}
|
||||
// unlike previous failures, these two failures replicated directly from the replication channel.
|
||||
indexResp = shards.index(new IndexRequest(index.getName(), "type", "any").source("{}", XContentType.JSON));
|
||||
assertThat(indexResp.getFailure().getCause(), equalTo(indexException));
|
||||
expectedTranslogOps.add(new Translog.NoOp(3, primaryTerm, indexException.toString()));
|
||||
|
||||
deleteResp = shards.delete(new DeleteRequest(index.getName(), "type", "1"));
|
||||
assertThat(deleteResp.getFailure().getCause(), equalTo(deleteException));
|
||||
expectedTranslogOps.add(new Translog.NoOp(4, primaryTerm, deleteException.toString()));
|
||||
|
||||
for (IndexShard shard : shards) {
|
||||
try (Translog.Snapshot snapshot = getTranslog(shard).newSnapshot()) {
|
||||
assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps));
|
||||
}
|
||||
}
|
||||
shards.assertAllEqual(1);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -564,26 +592,4 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
|
|||
shards.assertAllEqual(0);
|
||||
}
|
||||
}
|
||||
|
||||
private static void assertNoOpTranslogOperationForDocumentFailure(
|
||||
Iterable<IndexShard> replicationGroup,
|
||||
int expectedOperation,
|
||||
long expectedPrimaryTerm,
|
||||
String failureMessage) throws IOException {
|
||||
for (IndexShard indexShard : replicationGroup) {
|
||||
try(Translog.Snapshot snapshot = getTranslog(indexShard).newSnapshot()) {
|
||||
assertThat(snapshot.totalOperations(), equalTo(expectedOperation));
|
||||
long expectedSeqNo = 0L;
|
||||
Translog.Operation op = snapshot.next();
|
||||
do {
|
||||
assertThat(op.opType(), equalTo(Translog.Operation.Type.NO_OP));
|
||||
assertThat(op.seqNo(), equalTo(expectedSeqNo));
|
||||
assertThat(op.primaryTerm(), equalTo(expectedPrimaryTerm));
|
||||
assertThat(((Translog.NoOp) op).reason(), containsString(failureMessage));
|
||||
op = snapshot.next();
|
||||
expectedSeqNo++;
|
||||
} while (op != null);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,109 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.scripted;

import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;

import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.sameInstance;

/**
 * This test verifies that the _aggs param is added correctly when the system property
 * "es.aggregations.enable_scripted_metric_agg_param" is set to true.
 */
public class InternalScriptedMetricAggStateV6CompatTests extends InternalAggregationTestCase<InternalScriptedMetric> {

    private static final String REDUCE_SCRIPT_NAME = "reduceScript";

    @Override
    protected InternalScriptedMetric createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
                                                        Map<String, Object> metaData) {
        Script reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME, Collections.emptyMap());
        return new InternalScriptedMetric(name, "agg value", reduceScript, pipelineAggregators, metaData);
    }

    /**
     * Mock of the script service. The script that is run looks at the
     * "_aggs" parameter to verify that it was put in place by InternalScriptedMetric.
     */
    @Override
    protected ScriptService mockScriptService() {
        Function<Map<String, Object>, Object> script = params -> {
            Object aggs = params.get("_aggs");
            Object states = params.get("states");
            assertThat(aggs, instanceOf(List.class));
            assertThat(aggs, sameInstance(states));
            return aggs;
        };

        @SuppressWarnings("unchecked")
        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME,
                Collections.singletonMap(REDUCE_SCRIPT_NAME, script));
        Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
        return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
    }

    @Override
    protected void assertReduced(InternalScriptedMetric reduced, List<InternalScriptedMetric> inputs) {
        assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING);
    }

    @Override
    protected Reader<InternalScriptedMetric> instanceReader() {
        return InternalScriptedMetric::new;
    }

    @Override
    protected void assertFromXContent(InternalScriptedMetric aggregation, ParsedAggregation parsedAggregation) {}

    @Override
    protected Predicate<String> excludePathsFromXContentInsertion() {
        return path -> path.contains(CommonFields.VALUE.getPreferredName());
    }

    @Override
    protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) {
        String name = instance.getName();
        Object value = instance.aggregation();
        Script reduceScript = instance.reduceScript;
        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
        Map<String, Object> metaData = instance.getMetaData();
        return new InternalScriptedMetric(name + randomAlphaOfLength(5), value, reduceScript, pipelineAggregators,
                metaData);
    }
}
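The file above is deleted because reduce scripts no longer see the legacy "_aggs" key once the params._agg/params._aggs compatibility layer is removed. As a rough sketch only (not part of this change set; the class name and script name below are made up for illustration), a mock reduce script written against the new behaviour reads the shard states from the "states" parameter alone. The MockScriptEngine constructor is the same one used in the deleted test.

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import org.elasticsearch.script.MockScriptEngine;

// Illustrative sketch only: a reduce script that relies solely on "states".
class StatesOnlyReduceScriptSketch {
    static MockScriptEngine engine() {
        Function<Map<String, Object>, Object> reduceScript = params -> {
            Object states = params.get("states");        // per-shard states provided by the aggregation
            assert params.containsKey("_aggs") == false;  // assumption: the legacy key is no longer injected
            return ((List<?>) states).size();
        };
        return new MockScriptEngine(MockScriptEngine.NAME,
                Collections.singletonMap("reduceScript", reduceScript));
    }
}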
@@ -1,180 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.scripted;

import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;

/**
 * This test verifies that the _agg param is added correctly when the system property
 * "es.aggregations.enable_scripted_metric_agg_param" is set to true.
 */
public class ScriptedMetricAggregatorAggStateV6CompatTests extends AggregatorTestCase {

    private static final String AGG_NAME = "scriptedMetric";
    private static final Script INIT_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScript", Collections.emptyMap());
    private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap());
    private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript",
            Collections.emptyMap());

    private static final Script INIT_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME,
            "initScriptExplicitAgg", Collections.emptyMap());
    private static final Script MAP_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME,
            "mapScriptExplicitAgg", Collections.emptyMap());
    private static final Script COMBINE_SCRIPT_EXPLICIT_AGG = new Script(ScriptType.INLINE, MockScriptEngine.NAME,
            "combineScriptExplicitAgg", Collections.emptyMap());
    private static final String EXPLICIT_AGG_OBJECT = "Explicit agg object";

    private static final Map<String, Function<Map<String, Object>, Object>> SCRIPTS = new HashMap<>();

    @BeforeClass
    @SuppressWarnings("unchecked")
    public static void initMockScripts() {
        // If _agg is provided implicitly, it should be the same object as "state" from the context.
        SCRIPTS.put("initScript", params -> {
            Object agg = params.get("_agg");
            Object state = params.get("state");
            assertThat(agg, instanceOf(Map.class));
            assertThat(agg, sameInstance(state));
            return agg;
        });
        SCRIPTS.put("mapScript", params -> {
            Object agg = params.get("_agg");
            Object state = params.get("state");
            assertThat(agg, instanceOf(Map.class));
            assertThat(agg, sameInstance(state));
            return agg;
        });
        SCRIPTS.put("combineScript", params -> {
            Object agg = params.get("_agg");
            Object state = params.get("state");
            assertThat(agg, instanceOf(Map.class));
            assertThat(agg, sameInstance(state));
            return agg;
        });

        SCRIPTS.put("initScriptExplicitAgg", params -> {
            Object agg = params.get("_agg");
            assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT));
            return agg;
        });
        SCRIPTS.put("mapScriptExplicitAgg", params -> {
            Object agg = params.get("_agg");
            assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT));
            return agg;
        });
        SCRIPTS.put("combineScriptExplicitAgg", params -> {
            Object agg = params.get("_agg");
            assertThat(agg, equalTo(EXPLICIT_AGG_OBJECT));
            return agg;
        });
    }

    /**
     * Test that the _agg param is implicitly added
     */
    public void testWithImplicitAggParam() throws IOException {
        try (Directory directory = newDirectory()) {
            Integer numDocs = 10;
            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                for (int i = 0; i < numDocs; i++) {
                    indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
                }
            }
            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
                search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
            }
        }

        assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING);
    }

    /**
     * Test that an explicitly added _agg param is honored
     */
    public void testWithExplicitAggParam() throws IOException {
        try (Directory directory = newDirectory()) {
            Integer numDocs = 10;
            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                for (int i = 0; i < numDocs; i++) {
                    indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
                }
            }

            Map<String, Object> aggParams = new HashMap<>();
            aggParams.put("_agg", EXPLICIT_AGG_OBJECT);

            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                aggregationBuilder
                    .params(aggParams)
                    .initScript(INIT_SCRIPT_EXPLICIT_AGG)
                    .mapScript(MAP_SCRIPT_EXPLICIT_AGG)
                    .combineScript(COMBINE_SCRIPT_EXPLICIT_AGG);
                search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
            }
        }

        assertWarnings(ScriptedMetricAggContexts.AGG_PARAM_DEPRECATION_WARNING);
    }

    /**
     * We cannot use Mockito for mocking QueryShardContext in this case because
     * script-related methods (e.g. QueryShardContext#getLazyExecutableScript)
     * are final and cannot be mocked.
     */
    @Override
    protected QueryShardContext queryShardContextMock(MapperService mapperService) {
        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS);
        Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
        ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
        return new QueryShardContext(0, mapperService.getIndexSettings(), null, null, mapperService, null, scriptService,
            xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null);
    }
}
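The deleted test above exercised the deprecated "_agg" request parameter. For contrast, a minimal sketch of the non-deprecated setup follows (not part of this diff; the class name is illustrative). It passes no explicit params, since init, map, and combine scripts now receive their per-shard state through "state" directly; the builder and Script constructor calls mirror those in the deleted test.

import java.util.Collections;

import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;

// Illustrative sketch only: state-based scripted_metric setup with no "_agg" param.
class StateBasedScriptedMetricSketch {
    static ScriptedMetricAggregationBuilder build() {
        return new ScriptedMetricAggregationBuilder("scriptedMetric")
                .initScript(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScript", Collections.emptyMap()))
                .mapScript(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap()))
                .combineScript(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript", Collections.emptyMap()));
    }
}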
@@ -37,6 +37,7 @@ import org.elasticsearch.action.resync.ResyncReplicationRequest;
import org.elasticsearch.action.resync.ResyncReplicationResponse;
import org.elasticsearch.action.resync.TransportResyncReplicationAction;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.replication.ReplicatedWriteRequest;
import org.elasticsearch.action.support.replication.ReplicationOperation;
import org.elasticsearch.action.support.replication.ReplicationRequest;

@@ -206,26 +207,23 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
        }

        public BulkItemResponse index(IndexRequest indexRequest) throws Exception {
            PlainActionFuture<BulkItemResponse> listener = new PlainActionFuture<>();
            final ActionListener<BulkShardResponse> wrapBulkListener = ActionListener.wrap(
                    bulkShardResponse -> listener.onResponse(bulkShardResponse.getResponses()[0]),
                    listener::onFailure);
            BulkItemRequest[] items = new BulkItemRequest[1];
            items[0] = new BulkItemRequest(0, indexRequest);
            BulkShardRequest request = new BulkShardRequest(shardId, indexRequest.getRefreshPolicy(), items);
            new IndexingAction(request, wrapBulkListener, this).execute();
            return listener.get();
            return executeWriteRequest(indexRequest, indexRequest.getRefreshPolicy());
        }

        public BulkItemResponse delete(DeleteRequest deleteRequest) throws Exception {
            return executeWriteRequest(deleteRequest, deleteRequest.getRefreshPolicy());
        }

        private BulkItemResponse executeWriteRequest(
                DocWriteRequest<?> writeRequest, WriteRequest.RefreshPolicy refreshPolicy) throws Exception {
            PlainActionFuture<BulkItemResponse> listener = new PlainActionFuture<>();
            final ActionListener<BulkShardResponse> wrapBulkListener = ActionListener.wrap(
                    bulkShardResponse -> listener.onResponse(bulkShardResponse.getResponses()[0]),
                    listener::onFailure);
            BulkItemRequest[] items = new BulkItemRequest[1];
            items[0] = new BulkItemRequest(0, deleteRequest);
            BulkShardRequest request = new BulkShardRequest(shardId, deleteRequest.getRefreshPolicy(), items);
            new IndexingAction(request, wrapBulkListener, this).execute();
            items[0] = new BulkItemRequest(0, writeRequest);
            BulkShardRequest request = new BulkShardRequest(shardId, refreshPolicy, items);
            new WriteReplicationAction(request, wrapBulkListener, this).execute();
            return listener.get();
        }

@@ -623,9 +621,9 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase

    }

    class IndexingAction extends ReplicationAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {
    class WriteReplicationAction extends ReplicationAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {

        IndexingAction(BulkShardRequest request, ActionListener<BulkShardResponse> listener, ReplicationGroup replicationGroup) {
        WriteReplicationAction(BulkShardRequest request, ActionListener<BulkShardResponse> listener, ReplicationGroup replicationGroup) {
            super(request, listener, replicationGroup, "indexing");
        }
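With the refactor above, index() and delete() both funnel into executeWriteRequest, so replication tests can drive both write paths through one helper. A hedged usage sketch follows (not part of this diff). It assumes a test class extending ESIndexLevelReplicationTestCase, where createGroup, startAll, the protected "index" field, and assertFalse come from the base classes; the request types and XContentType.JSON are the usual ones for such tests and are an assumption here.

// Illustrative sketch only: a test method exercising the unified write path.
public void testIndexThenDeleteSketch() throws Exception {
    try (ReplicationGroup shards = createGroup(1)) {
        shards.startAll();
        BulkItemResponse indexed = shards.index(new IndexRequest(index.getName(), "type", "1").source("{}", XContentType.JSON));
        BulkItemResponse deleted = shards.delete(new DeleteRequest(index.getName(), "type", "1"));
        assertFalse(indexed.isFailed());   // both operations went through executeWriteRequest
        assertFalse(deleted.isFailed());
    }
}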
@@ -242,16 +242,18 @@ public class MockScriptEngine implements ScriptEngine {
        return new MockMovingFunctionScript();
    }

    public ScriptedMetricAggContexts.InitScript createMetricAggInitScript(Map<String, Object> params, Object state) {
    public ScriptedMetricAggContexts.InitScript createMetricAggInitScript(Map<String, Object> params, Map<String, Object> state) {
        return new MockMetricAggInitScript(params, state, script != null ? script : ctx -> 42d);
    }

    public ScriptedMetricAggContexts.MapScript.LeafFactory createMetricAggMapScript(Map<String, Object> params, Object state,
    public ScriptedMetricAggContexts.MapScript.LeafFactory createMetricAggMapScript(Map<String, Object> params,
                                                                                    Map<String, Object> state,
                                                                                    SearchLookup lookup) {
        return new MockMetricAggMapScript(params, state, lookup, script != null ? script : ctx -> 42d);
    }

    public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map<String, Object> params, Object state) {
    public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map<String, Object> params,
                                                                                Map<String, Object> state) {
        return new MockMetricAggCombineScript(params, state, script != null ? script : ctx -> 42d);
    }

@@ -415,7 +417,7 @@ public class MockScriptEngine implements ScriptEngine {
    public static class MockMetricAggInitScript extends ScriptedMetricAggContexts.InitScript {
        private final Function<Map<String, Object>, Object> script;

        MockMetricAggInitScript(Map<String, Object> params, Object state,
        MockMetricAggInitScript(Map<String, Object> params, Map<String, Object> state,
                                Function<Map<String, Object>, Object> script) {
            super(params, state);
            this.script = script;

@@ -436,11 +438,11 @@ public class MockScriptEngine implements ScriptEngine {

    public static class MockMetricAggMapScript implements ScriptedMetricAggContexts.MapScript.LeafFactory {
        private final Map<String, Object> params;
        private final Object state;
        private final Map<String, Object> state;
        private final SearchLookup lookup;
        private final Function<Map<String, Object>, Object> script;

        MockMetricAggMapScript(Map<String, Object> params, Object state, SearchLookup lookup,
        MockMetricAggMapScript(Map<String, Object> params, Map<String, Object> state, SearchLookup lookup,
                               Function<Map<String, Object>, Object> script) {
            this.params = params;
            this.state = state;

@@ -473,7 +475,7 @@ public class MockScriptEngine implements ScriptEngine {
    public static class MockMetricAggCombineScript extends ScriptedMetricAggContexts.CombineScript {
        private final Function<Map<String, Object>, Object> script;

        MockMetricAggCombineScript(Map<String, Object> params, Object state,
        MockMetricAggCombineScript(Map<String, Object> params, Map<String, Object> state,
                                   Function<Map<String, Object>, Object> script) {
            super(params, state);
            this.script = script;
@@ -44,7 +44,7 @@ thirdPartyAudit.excludes = [
    'org.osgi.framework.wiring.BundleWiring'
]

if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
    // Used by Log4J 2.11.1
    thirdPartyAudit.excludes += [
        'java.io.ObjectInputFilter',

@@ -53,3 +53,12 @@ if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
        'java.io.ObjectInputFilter$Status'
    ]
}

if (project.runtimeJavaVersion == JavaVersion.VERSION_1_8) {
    thirdPartyAudit.excludes += [
        'java.lang.ProcessHandle',
        'java.lang.StackWalker',
        'java.lang.StackWalker$Option',
        'java.lang.StackWalker$StackFrame'
    ]
}
@@ -13,7 +13,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext;
import org.elasticsearch.node.Node;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.io.IOException;
import java.util.Objects;

@@ -10,7 +10,7 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.io.IOException;

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.core.security.action.user;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.io.IOException;

@@ -18,7 +18,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.support.Validation;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.core.security.xcontent.XContentUtils;

import java.io.IOException;

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.core.security.action.user;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.io.IOException;
import java.util.Collection;

@@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.support.Validation;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.core.security.xcontent.XContentUtils;

import java.io.IOException;

@@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.xpack.core.security.user.InternalUserSerializationHelper;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.io.IOException;
import java.util.Base64;

@@ -6,7 +6,7 @@
package org.elasticsearch.xpack.core.security.authc;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.util.Objects;

@@ -9,7 +9,7 @@ import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.util.Collections;
import java.util.HashMap;

@@ -63,7 +63,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField;
import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
import org.elasticsearch.xpack.core.security.support.Exceptions;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;

import java.io.IOException;
import java.util.ArrayList;

@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.Version;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

/**

@@ -9,7 +9,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

import java.util.Collections;

@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.Version;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

/**

@@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.protocol.xpack.security.User;

import java.io.IOException;

@@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

/**

@@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

/**

@@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.authz.privilege.SystemPrivilege;

import java.util.function.Predicate;

@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.protocol.xpack.security;
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;

@@ -5,8 +5,6 @@
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.protocol.xpack.security.User;

/**
 * internal user that manages xpack security. Has all cluster/indices permissions.
 */

@@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.authz.permission.Role;
import org.elasticsearch.xpack.core.security.index.IndexAuditTrailField;

@@ -76,7 +76,7 @@ import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.junit.After;
import org.junit.Before;
import org.mockito.ArgumentCaptor;

@@ -3,9 +3,10 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.protocol.xpack.security;
package org.elasticsearch.xpack.core.security.user;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.security.user.User;

import java.util.Collections;
@@ -158,7 +158,8 @@ public class TransportPutRollupJobAction extends TransportMasterNodeAction<PutRo
            MappingMetaData mappings = getMappingResponse.getMappings().get(indexName).get(RollupField.TYPE_NAME);
            Object m = mappings.getSourceAsMap().get("_meta");
            if (m == null) {
                String msg = "Expected to find _meta key in mapping of rollup index [" + indexName + "] but not found.";
                String msg = "Rollup data cannot be added to existing indices that contain non-rollup data (expected " +
                    "to find _meta key in mapping of rollup index [" + indexName + "] but not found).";
                logger.error(msg);
                listener.onFailure(new RuntimeException(msg));
                return;

@@ -166,8 +167,9 @@ public class TransportPutRollupJobAction extends TransportMasterNodeAction<PutRo

            Map<String, Object> metadata = (Map<String, Object>) m;
            if (metadata.get(RollupField.ROLLUP_META) == null) {
                String msg = "Expected to find rollup meta key [" + RollupField.ROLLUP_META + "] in mapping of rollup index [" + indexName
                    + "] but not found.";
                String msg = "Rollup data cannot be added to existing indices that contain non-rollup data (expected " +
                    "to find rollup meta key [" + RollupField.ROLLUP_META + "] in mapping of rollup index ["
                    + indexName + "] but not found).";
                logger.error(msg);
                listener.onFailure(new RuntimeException(msg));
                return;

@@ -180,8 +180,9 @@ public class PutJobStateMachineTests extends ESTestCase {
        ActionListener<AcknowledgedResponse> testListener = ActionListener.wrap(response -> {
            fail("Listener success should not have been triggered.");
        }, e -> {
            assertThat(e.getMessage(), equalTo("Expected to find _meta key in mapping of rollup index ["
                + job.getConfig().getRollupIndex() + "] but not found."));
            assertThat(e.getMessage(), equalTo("Rollup data cannot be added to existing indices that contain " +
                "non-rollup data (expected to find _meta key in mapping of rollup index ["
                + job.getConfig().getRollupIndex() + "] but not found)."));
        });

        Logger logger = mock(Logger.class);

@@ -206,6 +207,44 @@ public class PutJobStateMachineTests extends ESTestCase {
        verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any());
    }

    @SuppressWarnings("unchecked")
    public void testMetadataButNotRollup() {
        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());

        ActionListener<AcknowledgedResponse> testListener = ActionListener.wrap(response -> {
            fail("Listener success should not have been triggered.");
        }, e -> {
            assertThat(e.getMessage(), equalTo("Rollup data cannot be added to existing indices that contain " +
                "non-rollup data (expected to find rollup meta key [_rollup] in mapping of rollup index ["
                + job.getConfig().getRollupIndex() + "] but not found)."));
        });

        Logger logger = mock(Logger.class);
        Client client = mock(Client.class);

        ArgumentCaptor<ActionListener> requestCaptor = ArgumentCaptor.forClass(ActionListener.class);
        doAnswer(invocation -> {
            GetMappingsResponse response = mock(GetMappingsResponse.class);
            Map<String, Object> m = new HashMap<>(2);
            m.put("random",
                Collections.singletonMap(job.getConfig().getId(), job.getConfig()));
            MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME,
                Collections.singletonMap("_meta", m));
            ImmutableOpenMap.Builder<String, MappingMetaData> builder = ImmutableOpenMap.builder(1);
            builder.put(RollupField.TYPE_NAME, meta);

            ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> builder2 = ImmutableOpenMap.builder(1);
            builder2.put(job.getConfig().getRollupIndex(), builder.build());

            when(response.getMappings()).thenReturn(builder2.build());
            requestCaptor.getValue().onResponse(response);
            return null;
        }).when(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), requestCaptor.capture());

        TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger);
        verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any());
    }

    @SuppressWarnings("unchecked")
    public void testNoMappingVersion() {
        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
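The reworded errors above fire when the target index exists but its mapping carries no rollup metadata. For contrast, a minimal sketch of the "_meta" layout that passes both checks (an assumption, written as if it were a helper inside the same test class so that RollupJob, RollupField, and the java.util imports referenced in the diff are already in scope; not part of this change):

// Illustrative sketch only: the _meta layout the updated checks accept, with the
// rollup meta key (RollupField.ROLLUP_META, i.e. "_rollup") nested under "_meta".
static Map<String, Object> validRollupMeta(RollupJob job) {
    Map<String, Object> rollupMeta = new HashMap<>();
    rollupMeta.put(RollupField.ROLLUP_META,
            Collections.singletonMap(job.getConfig().getId(), job.getConfig()));
    return Collections.singletonMap("_meta", rollupMeta);
}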
@@ -242,7 +242,7 @@ thirdPartyAudit.excludes = [
    'javax.persistence.EntityManagerFactory',
    'javax.persistence.EntityTransaction',
    'javax.persistence.LockModeType',
    'javax/persistence/Query',
    'javax.persistence.Query',
    // [missing classes] OpenSAML storage and HttpClient cache have optional memcache support
    'net.spy.memcached.CASResponse',
    'net.spy.memcached.CASValue',

@@ -266,7 +266,7 @@ thirdPartyAudit.excludes = [
    'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
]

if (JavaVersion.current() > JavaVersion.VERSION_1_8) {
if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
    thirdPartyAudit.excludes += [
        'javax.xml.bind.JAXBContext',
        'javax.xml.bind.JAXBElement',
@@ -19,7 +19,7 @@ import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest;
import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutResponse;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.Realm;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.authc.TokenService;
import org.elasticsearch.xpack.security.authc.saml.SamlNameId;

@@ -18,7 +18,7 @@ import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction;
import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest;
import org.elasticsearch.xpack.core.security.action.user.AuthenticateResponse;
import org.elasticsearch.xpack.core.security.user.SystemUser;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.core.security.user.XPackUser;

import java.util.function.Supplier;

@@ -18,7 +18,7 @@ import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest;
import org.elasticsearch.xpack.core.security.action.user.GetUsersResponse;
import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
import org.elasticsearch.xpack.core.security.user.SystemUser;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.core.security.user.XPackUser;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;

@@ -30,7 +30,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege;
import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege;
import org.elasticsearch.xpack.core.security.authz.privilege.Privilege;
import org.elasticsearch.xpack.core.security.support.Automatons;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;