From 30cc33e2e542810a9c893297cdc79c0ab8ea3bdb Mon Sep 17 00:00:00 2001
From: Jack Conradson <osjdconrad@gmail.com>
Date: Mon, 24 Apr 2017 09:58:02 -0700
Subject: [PATCH 01/34] Fix Painless Lambdas for Java 9 (#24070)

Replaces LambdaMetafactory with LambdaBootstrap, a custom solution for lambdas in Painless that uses a design similar to LambdaMetafactory but allows for custom adaptation of types, which recent changes to LambdaMetafactory no longer permit.
---
 .../org/elasticsearch/painless/Compiler.java  |  14 +-
 .../java/org/elasticsearch/painless/Def.java  |  65 +--
 .../elasticsearch/painless/FeatureTest.java   |  13 +-
 .../elasticsearch/painless/FunctionRef.java   | 206 ++++---
 .../painless/LambdaBootstrap.java             | 530 ++++++++++++++++++
 .../painless/WriterConstants.java             |  17 +-
 .../elasticsearch/painless/antlr/Walker.java  |   2 +-
 .../painless/node/ECapturingFunctionRef.java  |  47 +-
 .../painless/node/EFunctionRef.java           |  54 +-
 .../elasticsearch/painless/node/ELambda.java  |  70 ++-
 .../painless/node/SFunction.java              |   2 +-
 .../painless/org.elasticsearch.txt            |   1 +
 .../painless/AugmentationTests.java           | 144 ++---
 .../painless/FunctionRefTests.java            | 132 ++---
 .../elasticsearch/painless/LambdaTests.java   |  42 +-
 .../painless/node/NodeToStringTests.java      |   8 +-
 16 files changed, 888 insertions(+), 459 deletions(-)
 create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
index 9961dcbe156..976ef897aec 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
@@ -79,9 +79,19 @@ final class Compiler {
          * @param bytes The generated byte code.
          * @return A Class object extending {@link PainlessScript}.
          */
-        Class<? extends PainlessScript> define(String name, byte[] bytes) {
+        Class<? extends PainlessScript> defineScript(String name, byte[] bytes) {
             return defineClass(name, bytes, 0, bytes.length, CODESOURCE).asSubclass(PainlessScript.class);
         }
+
+        /**
+         * Generates a Class object for a lambda method.
+         * @param name The name of the class.
+         * @param bytes The generated byte code.
+         * @return A Class object.
+         */
+        Class<?> defineLambda(String name, byte[] bytes) {
+            return defineClass(name, bytes, 0, bytes.length);
+        }
     }
 
     /**
@@ -110,7 +120,7 @@ final class Compiler {
         root.write();
 
         try {
-            Class<? extends PainlessScript> clazz = loader.define(CLASS_NAME, root.getBytes());
+            Class<? extends PainlessScript> clazz = loader.defineScript(CLASS_NAME, root.getBytes());
             clazz.getField("$DEFINITION").set(null, definition);
             java.lang.reflect.Constructor<? extends PainlessScript> constructor =
                     clazz.getConstructor(String.class, String.class, BitSet.class);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
index 5250238c817..6d9fad7d79e 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
@@ -23,7 +23,6 @@ import org.elasticsearch.painless.Definition.Method;
 import org.elasticsearch.painless.Definition.RuntimeClass;
 
 import java.lang.invoke.CallSite;
-import java.lang.invoke.LambdaMetafactory;
 import java.lang.invoke.MethodHandle;
 import java.lang.invoke.MethodHandles;
 import java.lang.invoke.MethodHandles.Lookup;
@@ -132,7 +131,7 @@ public final class Def {
         } catch (final ReflectiveOperationException roe) {
             throw new AssertionError(roe);
         }
-        
+
         // lookup up the factory for arraylength MethodHandle (intrinsic) from Java 9:
         // https://bugs.openjdk.java.net/browse/JDK-8156915
         MethodHandle arrayLengthMHFactory;
@@ -150,7 +149,7 @@ public final class Def {
     static <T extends Throwable> void rethrow(Throwable t) throws T {
         throw (T) t;
     }
-    
+
     /** Returns an array length getter MethodHandle for the given array type */
     static MethodHandle arrayLengthGetter(Class<?> arrayType) {
         if (JAVA9_ARRAY_LENGTH_MH_FACTORY != null) {
@@ -206,7 +205,7 @@ public final class Def {
                 }
             }
         }
-        
+
         throw new IllegalArgumentException("Unable to find dynamic method [" + name + "] with [" + arity + "] arguments " +
                                            "for class [" + receiverClass.getCanonicalName() + "].");
     }
@@ -239,7 +238,7 @@ public final class Def {
          if (recipeString.isEmpty()) {
              return lookupMethodInternal(definition, receiverClass, name, numArguments - 1).handle;
          }
-         
+
          // convert recipe string to a bitset for convenience (the code below should be refactored...)
          BitSet lambdaArgs = new BitSet();
          for (int i = 0; i < recipeString.length(); i++) {
@@ -247,7 +246,7 @@ public final class Def {
          }
 
          // otherwise: first we have to compute the "real" arity. This is because we have extra arguments:
-         // e.g. f(a, g(x), b, h(y), i()) looks like f(a, g, x, b, h, y, i). 
+         // e.g. f(a, g(x), b, h(y), i()) looks like f(a, g, x, b, h, y, i).
          int arity = callSiteType.parameterCount() - 1;
          int upTo = 1;
          for (int i = 1; i < numArguments; i++) {
@@ -257,7 +256,7 @@ public final class Def {
                  arity -= numCaptures;
              }
          }
-         
+
          // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters).
          // based on these we can finally link any remaining lambdas that were deferred.
          Method method = lookupMethodInternal(definition, receiverClass, name, arity);
@@ -268,7 +267,7 @@ public final class Def {
          for (int i = 1; i < numArguments; i++) {
              // its a functional reference, replace the argument with an impl
              if (lambdaArgs.get(i - 1)) {
-                 // decode signature of form 'type.call,2' 
+                 // decode signature of form 'type.call,2'
                  String signature = (String) args[upTo++];
                  int separator = signature.lastIndexOf('.');
                  int separator2 = signature.indexOf(',');
@@ -313,10 +312,10 @@ public final class Def {
                  replaced += numCaptures;
              }
          }
-         
+
          return handle;
      }
-     
+
      /**
       * Returns an implementation of interfaceClass that calls receiverClass.name
       * <p>
@@ -335,7 +334,7 @@ public final class Def {
         return lookupReferenceInternal(definition, lookup, interfaceType, implMethod.owner.name,
                 implMethod.name, receiverClass);
      }
-     
+
      /** Returns a method handle to an implementation of clazz, given method reference signature. */
     private static MethodHandle lookupReferenceInternal(Definition definition, Lookup lookup,
             Definition.Type clazz, String type, String call, Class<?>... captures)
@@ -351,47 +350,37 @@ public final class Def {
              int arity = interfaceMethod.arguments.size() + captures.length;
              final MethodHandle handle;
              try {
-                 MethodHandle accessor = lookup.findStaticGetter(lookup.lookupClass(), 
-                                                                 getUserFunctionHandleFieldName(call, arity), 
+                 MethodHandle accessor = lookup.findStaticGetter(lookup.lookupClass(),
+                                                                 getUserFunctionHandleFieldName(call, arity),
                                                                  MethodHandle.class);
-                 handle = (MethodHandle) accessor.invokeExact();
+                 handle = (MethodHandle)accessor.invokeExact();
              } catch (NoSuchFieldException | IllegalAccessException e) {
                  // is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail
                  // because the arity does not match the expected interface type.
                  if (call.contains("$")) {
-                     throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + 
+                     throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name +
                                                         "] in [" + clazz.clazz + "]");
                  }
                  throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");
              }
-             ref = new FunctionRef(clazz, interfaceMethod, handle, captures.length);
+             ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length);
          } else {
              // whitelist lookup
              ref = new FunctionRef(definition, clazz, type, call, captures.length);
          }
-         final CallSite callSite;
-         if (ref.needsBridges()) {
-             callSite = LambdaMetafactory.altMetafactory(lookup, 
-                     ref.invokedName, 
-                     ref.invokedType,
-                     ref.samMethodType,
-                     ref.implMethod,
-                     ref.samMethodType,
-                     LambdaMetafactory.FLAG_BRIDGES,
-                     1,
-                     ref.interfaceMethodType);
-         } else {
-             callSite = LambdaMetafactory.altMetafactory(lookup, 
-                     ref.invokedName, 
-                     ref.invokedType,
-                     ref.samMethodType,
-                     ref.implMethod,
-                     ref.samMethodType,
-                     0);
-         }
+         final CallSite callSite = LambdaBootstrap.lambdaBootstrap(
+             lookup,
+             ref.interfaceMethodName,
+             ref.factoryMethodType,
+             ref.interfaceMethodType,
+             ref.delegateClassName,
+             ref.delegateInvokeType,
+             ref.delegateMethodName,
+             ref.delegateMethodType
+         );
          return callSite.dynamicInvoker().asType(MethodType.methodType(clazz.clazz, captures));
      }
-     
+
      /** gets the field name used to lookup up the MethodHandle for a function. */
      public static String getUserFunctionHandleFieldName(String name, int arity) {
          return "handle$" + name + "$" + arity;
@@ -595,7 +584,7 @@ public final class Def {
         throw new IllegalArgumentException("Attempting to address a non-array type " +
                                            "[" + receiverClass.getCanonicalName() + "] as an array.");
     }
-    
+
     /** Helper class for isolating MethodHandles and methods to get iterators over arrays
      * (to emulate "enhanced for loop" using MethodHandles). These cause boxing, and are not as efficient
      * as they could be, but works.
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java
index 603023e61fe..1561aeddf83 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java
@@ -1,5 +1,6 @@
 package org.elasticsearch.painless;
 
+import java.util.List;
 import java.util.function.Function;
 
 /*
@@ -25,11 +26,11 @@ import java.util.function.Function;
 public class FeatureTest {
     private int x;
     private int y;
-    
+
     /** empty ctor */
     public FeatureTest() {
     }
-    
+
     /** ctor with params */
     public FeatureTest(int x, int y) {
         this.x = x;
@@ -60,14 +61,18 @@ public class FeatureTest {
     public static boolean overloadedStatic() {
         return true;
     }
-    
+
     /** static method that returns what you ask it */
     public static boolean overloadedStatic(boolean whatToReturn) {
         return whatToReturn;
     }
-    
+
     /** method taking two functions! */
     public Object twoFunctionsOfX(Function<Object,Object> f, Function<Object,Object> g) {
         return f.apply(g.apply(x));
     }
+
+    public void listInput(List<Object> list) {
+
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java
index dddd9166311..6bfe911d974 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java
@@ -20,125 +20,138 @@
 package org.elasticsearch.painless;
 
 import org.elasticsearch.painless.Definition.Method;
-import org.objectweb.asm.Handle;
-import org.objectweb.asm.Opcodes;
+import org.elasticsearch.painless.Definition.Type;
+import org.elasticsearch.painless.api.Augmentation;
 
-import java.lang.invoke.MethodHandle;
 import java.lang.invoke.MethodType;
 import java.lang.reflect.Modifier;
 
-/** 
- * Reference to a function or lambda. 
+import static org.elasticsearch.painless.WriterConstants.CLASS_NAME;
+import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE;
+import static org.objectweb.asm.Opcodes.H_INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.H_INVOKEVIRTUAL;
+import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL;
+
+/**
+ * Reference to a function or lambda.
  * <p>
- * Once you have created one of these, you have "everything you need" to call LambdaMetaFactory
- * either statically from bytecode with invokedynamic, or at runtime from Java.  
+ * Once you have created one of these, you have "everything you need" to call {@link LambdaBootstrap}
+ * either statically from bytecode with invokedynamic, or at runtime from Java.
  */
 public class FunctionRef {
-    /** Function Object's method name */
-    public final String invokedName;
-    /** CallSite signature */
-    public final MethodType invokedType;
-    /** Implementation method */
-    public final MethodHandle implMethod;
-    /** Function Object's method signature */
-    public final MethodType samMethodType;
-    /** When bridging is required, request this bridge interface */
+
+    /** functional interface method name */
+    public final String interfaceMethodName;
+    /** factory (CallSite) method signature */
+    public final MethodType factoryMethodType;
+    /** functional interface method signature */
     public final MethodType interfaceMethodType;
-    
-    /** ASM "Handle" to the method, for the constant pool */
-    public final Handle implMethodASM;
-    
+    /** class of the delegate method to be called */
+    public final String delegateClassName;
+    /** the invocation type of the delegate method */
+    public final int delegateInvokeType;
+    /** the name of the delegate method */
+    public final String delegateMethodName;
+    /** delegate method signature */
+    public final MethodType delegateMethodType;
+
+    /** interface method */
+    public final Method interfaceMethod;
+    /** delegate method */
+    public final Method delegateMethod;
+
+    /** factory method type descriptor */
+    public final String factoryDescriptor;
+    /** functional interface method as type */
+    public final org.objectweb.asm.Type interfaceType;
+    /** delegate method type method as type */
+    public final org.objectweb.asm.Type delegateType;
+
     /**
      * Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist.
      * @param definition the whitelist against which this script is being compiled
-     * @param expected interface type to implement.
+     * @param expected functional interface type to implement.
      * @param type the left hand side of a method reference expression
      * @param call the right hand side of a method reference expression
      * @param numCaptures number of captured arguments
-     */    
-    public FunctionRef(Definition definition, Definition.Type expected, String type, String call,
-            int numCaptures) {
-        this(expected, expected.struct.getFunctionalMethod(),
-                lookup(definition, expected, type, call, numCaptures > 0), numCaptures);
+     */
+    public FunctionRef(Definition definition, Type expected, String type, String call, int numCaptures) {
+        this(expected, expected.struct.getFunctionalMethod(), lookup(definition, expected, type, call, numCaptures > 0), numCaptures);
     }
 
     /**
      * Creates a new FunctionRef (already resolved)
-     * @param expected interface type to implement
-     * @param method functional interface method
-     * @param impl implementation method
+     * @param expected functional interface type to implement
+     * @param interfaceMethod functional interface method
+     * @param delegateMethod implementation method
      * @param numCaptures number of captured arguments
-     */   
-    public FunctionRef(Definition.Type expected, Definition.Method method, Definition.Method impl, int numCaptures) {
-        // e.g. compareTo
-        invokedName = method.name;
-        // e.g. (Object)Comparator
-        MethodType implType = impl.getMethodType();
-        // only include captured parameters as arguments
-        invokedType = MethodType.methodType(expected.clazz, 
-                implType.dropParameterTypes(numCaptures, implType.parameterCount()));
-        // e.g. (Object,Object)int
-        interfaceMethodType = method.getMethodType().dropParameterTypes(0, 1);
+     */
+    public FunctionRef(Type expected, Method interfaceMethod, Method delegateMethod, int numCaptures) {
+        MethodType delegateMethodType = delegateMethod.getMethodType();
 
-        final int tag;
-        if ("<init>".equals(impl.name)) {
-            tag = Opcodes.H_NEWINVOKESPECIAL;
-        } else if (Modifier.isStatic(impl.modifiers)) {
-            tag = Opcodes.H_INVOKESTATIC;
-        } else if (impl.owner.clazz.isInterface()) {
-            tag = Opcodes.H_INVOKEINTERFACE;
+        interfaceMethodName = interfaceMethod.name;
+        factoryMethodType = MethodType.methodType(expected.clazz,
+                delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
+        interfaceMethodType = interfaceMethod.getMethodType().dropParameterTypes(0, 1);
+
+        // the Painless$Script class can be inferred if owner is null
+        if (delegateMethod.owner == null) {
+            delegateClassName = CLASS_NAME;
+        } else if (delegateMethod.augmentation) {
+            delegateClassName = Augmentation.class.getName();
         } else {
-            tag = Opcodes.H_INVOKEVIRTUAL;
+            delegateClassName = delegateMethod.owner.clazz.getName();
         }
-        final String owner;
-        final boolean ownerIsInterface;
-        if (impl.owner == null) {
-            // owner == null: script class itself
-            ownerIsInterface = false;
-            owner = WriterConstants.CLASS_TYPE.getInternalName();
-        } else if (impl.augmentation) {
-            ownerIsInterface = false;
-            owner = WriterConstants.AUGMENTATION_TYPE.getInternalName();
+
+        if ("<init>".equals(delegateMethod.name)) {
+            delegateInvokeType = H_NEWINVOKESPECIAL;
+        } else if (Modifier.isStatic(delegateMethod.modifiers)) {
+            delegateInvokeType = H_INVOKESTATIC;
+        } else if (delegateMethod.owner.clazz.isInterface()) {
+            delegateInvokeType = H_INVOKEINTERFACE;
         } else {
-            ownerIsInterface = impl.owner.clazz.isInterface();
-            owner = impl.owner.type.getInternalName();
+            delegateInvokeType = H_INVOKEVIRTUAL;
         }
-        implMethodASM = new Handle(tag, owner, impl.name, impl.method.getDescriptor(), ownerIsInterface);
-        implMethod = impl.handle;
-        
-        // remove any prepended captured arguments for the 'natural' signature.
-        samMethodType = adapt(interfaceMethodType, impl.getMethodType().dropParameterTypes(0, numCaptures));
+
+        delegateMethodName = delegateMethod.name;
+        this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
+
+        this.interfaceMethod = interfaceMethod;
+        this.delegateMethod = delegateMethod;
+
+        factoryDescriptor = factoryMethodType.toMethodDescriptorString();
+        interfaceType = org.objectweb.asm.Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
+        delegateType = org.objectweb.asm.Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
     }
 
     /**
-     * Creates a new FunctionRef (low level). 
-     * <p>
-     * This will <b>not</b> set implMethodASM. It is for runtime use only.
+     * Creates a new FunctionRef (low level).
+     * It is for runtime use only.
      */
-    public FunctionRef(Definition.Type expected, Definition.Method method, MethodHandle impl, int numCaptures) {
-        // e.g. compareTo
-        invokedName = method.name;
-        // e.g. (Object)Comparator
-        MethodType implType = impl.type();
-        // only include captured parameters as arguments
-        invokedType = MethodType.methodType(expected.clazz, 
-                implType.dropParameterTypes(numCaptures, implType.parameterCount()));
-        // e.g. (Object,Object)int
-        interfaceMethodType = method.getMethodType().dropParameterTypes(0, 1);
+    public FunctionRef(Type expected, Method interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) {
+        interfaceMethodName = interfaceMethod.name;
+        factoryMethodType = MethodType.methodType(expected.clazz,
+            delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
+        interfaceMethodType = interfaceMethod.getMethodType().dropParameterTypes(0, 1);
 
-        implMethod = impl;
-        
-        implMethodASM = null;
-        
-        // remove any prepended captured arguments for the 'natural' signature.
-        samMethodType = adapt(interfaceMethodType, impl.type().dropParameterTypes(0, numCaptures));
+        delegateClassName = CLASS_NAME;
+        delegateInvokeType = H_INVOKESTATIC;
+        this.delegateMethodName = delegateMethodName;
+        this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
+
+        this.interfaceMethod = null;
+        delegateMethod = null;
+
+        factoryDescriptor = null;
+        interfaceType = null;
+        delegateType = null;
     }
 
-    /** 
+    /**
      * Looks up {@code type::call} from the whitelist, and returns a matching method.
      */
     private static Definition.Method lookup(Definition definition, Definition.Type expected,
-            String type, String call, boolean receiverCaptured) {
+                                            String type, String call, boolean receiverCaptured) {
         // check its really a functional interface
         // for e.g. Comparable
         Method method = expected.struct.getFunctionalMethod();
@@ -177,27 +190,4 @@ public class FunctionRef {
         }
         return impl;
     }
-
-    /** Returns true if you should ask LambdaMetaFactory to construct a bridge for the interface signature */
-    public boolean needsBridges() {
-        // currently if the interface differs, we ask for a bridge, but maybe we should do smarter checking?
-        // either way, stuff will fail if its wrong :)
-        return interfaceMethodType.equals(samMethodType) == false;
-    }
-    
-    /** 
-     * If the interface expects a primitive type to be returned, we can't return Object,
-     * But we can set SAM to the wrapper version, and a cast will take place 
-     */
-    private MethodType adapt(MethodType expected, MethodType actual) {
-        // add some checks, now that we've set everything up, to deliver exceptions as early as possible.
-        if (expected.parameterCount() != actual.parameterCount()) {
-            throw new IllegalArgumentException("Incorrect number of parameters for [" + invokedName + 
-                                               "] in [" + invokedType.returnType() + "]");
-        }
-        if (expected.returnType().isPrimitive() && actual.returnType() == Object.class) {
-            actual = actual.changeReturnType(MethodType.methodType(expected.returnType()).wrap().returnType());
-        }
-        return actual;
-    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java
new file mode 100644
index 00000000000..746467c454b
--- /dev/null
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java
@@ -0,0 +1,530 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless;
+
+import org.objectweb.asm.ClassWriter;
+import org.objectweb.asm.FieldVisitor;
+import org.objectweb.asm.Handle;
+import org.objectweb.asm.Type;
+import org.objectweb.asm.commons.GeneratorAdapter;
+import org.objectweb.asm.commons.Method;
+
+import java.lang.invoke.CallSite;
+import java.lang.invoke.ConstantCallSite;
+import java.lang.invoke.LambdaConversionException;
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.invoke.MethodType;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static java.lang.invoke.MethodHandles.Lookup;
+import static org.elasticsearch.painless.Compiler.Loader;
+import static org.elasticsearch.painless.WriterConstants.CLASS_VERSION;
+import static org.elasticsearch.painless.WriterConstants.DELEGATE_BOOTSTRAP_HANDLE;
+import static org.objectweb.asm.Opcodes.ACC_FINAL;
+import static org.objectweb.asm.Opcodes.ACC_PRIVATE;
+import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
+import static org.objectweb.asm.Opcodes.ACC_STATIC;
+import static org.objectweb.asm.Opcodes.ACC_SUPER;
+import static org.objectweb.asm.Opcodes.ACC_SYNTHETIC;
+import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE;
+import static org.objectweb.asm.Opcodes.H_INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.H_INVOKEVIRTUAL;
+import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL;
+
+/**
+ * LambdaBootstrap is used to generate all the code necessary to execute
+ * lambda functions and method references within Painless.  The code generation
+ * used here is based upon the following article:
+ * http://cr.openjdk.java.net/~briangoetz/lambda/lambda-translation.html
+ * However, it is a simplified version as Painless has no concept of generics
+ * or serialization.  LambdaBootstrap is being used as a replacement for
+ * {@link java.lang.invoke.LambdaMetafactory} since the Painless casting model
+ * cannot be fully supported through this class.
+ *
+ * For each lambda function/method reference used within a Painless script
+ * a class will be generated at link-time using the
+ * {@link LambdaBootstrap#lambdaBootstrap} method that contains the following:
+ * 1. member fields for any captured variables
+ * 2. a constructor that will take in captured variables and assign them to
+ * their respective member fields
+ * 3. if there are captures, a factory method that will take in captured
+ * variables and delegate them to the constructor
+ * 4. a method that will load the member fields representing captured variables
+ * and take in any other necessary values based on the arguments passed into the
+ * lambda function/reference method; it will then make a delegated call to the
+ * actual lambda function/reference method
+ *
+ * Take for example the following Painless script:
+ *
+ * {@code
+ * List list1 = new ArrayList();
+ * list1.add(2);
+ * List list2 = new ArrayList();
+ * list1.forEach(x -> list2.add(x));
+ * return list2[0]
+ * }
+ *
+ * The script contains a lambda function with a captured variable.
+ * The following Lambda class would be generated:
+ *
+ * {@code
+ *     public static final class $$Lambda0 implements Consumer {
+ *         private List arg$0;
+ *
+ *         public $$Lambda0(List arg$0) {
+ *             this.arg$0 = arg$0;
+ *         }
+ *
+ *         public static $$Lambda0 get$Lambda(List arg$0) {
+ *             return new $$Lambda0(arg$0);
+ *         }
+ *
+ *         public void accept(Object val$0) {
+ *             Painless$Script.lambda$0(this.arg$0, val$0);
+ *         }
+ *     }
+ *
+ *     public class Painless$Script implements ... {
+ *         ...
+ *         public static void lambda$0(List list2, Object x) {
+ *             list2.add(x);
+ *         }
+ *         ...
+ *     }
+ * }
+ *
+ * Note if the above didn't have a captured variable then
+ * the factory method get$Lambda would not have been generated.
+ * Also the accept method actually uses an invokedynamic
+ * instruction to call the lambda$0 method so that
+ * {@link MethodHandle#asType} can be used to do the necessary
+ * conversions between argument types without having to hard
+ * code them.
+ *
+ * When the {@link CallSite} is linked the linked method depends
+ * on whether or not there are captures.  If there are no captures
+ * the same instance of the generated lambda class will be
+ * returned each time by the factory method as there are no
+ * changing values other than the arguments.  If there are
+ * captures a new instance of the generated lambda class will
+ * be returned each time with the captures passed into the
+ * factory method to be stored in the member fields.
+ */
+public final class LambdaBootstrap {
+
+    /**
+     * Metadata for a captured variable used during code generation.
+     */
+    private static final class Capture {
+        private final String name;
+        private final Type type;
+        private final String desc;
+
+        /**
+         * Converts incoming parameters into the name, type, and
+         * descriptor for the captured argument.
+         * @param count The captured argument count
+         * @param type The class type of the captured argument
+         */
+        private Capture(int count, Class<?> type) {
+            this.name = "arg$" + count;
+            this.type = Type.getType(type);
+            this.desc = this.type.getDescriptor();
+        }
+    }
+
+    /**
+     * A counter used to generate a unique name
+     * for each lambda function/reference class.
+     */
+    private static final AtomicLong COUNTER = new AtomicLong(0);
+
+    /**
+     * Generates a lambda class for a lambda function/method reference
+     * within a Painless script.  Variables with the prefix interface are considered
+     * to represent values for code generated for the lambda class. Variables with
+     * the prefix delegate are considered to represent values for code generated
+     * within the Painless script.  The interface method delegates (calls) to the
+     * delegate method.
+     * @param lookup Standard {@link MethodHandles#lookup}
+     * @param interfaceMethodName Name of functional interface method that is called
+     * @param factoryMethodType The type of method to be linked to this CallSite; note that
+     *                          captured types are based on the parameters for this method
+     * @param interfaceMethodType The type of method representing the functional interface method
+     * @param delegateClassName The name of the Painless script class
+     * @param delegateInvokeType The type of method call to be made
+     *                           (static, virtual, interface, or constructor)
+     * @param delegateMethodName The name of the method to be called in the Painless script class
+     * @param delegateMethodType The type of method call in the Painless script class without
+     *                           the captured types
+     * @return A {@link CallSite} linked to a factory method for creating a lambda class
+     * that implements the expected functional interface
+     * @throws LambdaConversionException Thrown when an illegal type conversion occurs at link time
+     */
+    public static CallSite lambdaBootstrap(
+            Lookup lookup,
+            String interfaceMethodName,
+            MethodType factoryMethodType,
+            MethodType interfaceMethodType,
+            String delegateClassName,
+            int delegateInvokeType,
+            String delegateMethodName,
+            MethodType delegateMethodType)
+            throws LambdaConversionException {
+        String factoryMethodName = "get$lambda";
+        String lambdaClassName = lookup.lookupClass().getName().replace('.', '/') +
+            "$$Lambda" + COUNTER.getAndIncrement();
+        Type lambdaClassType = Type.getType("L" + lambdaClassName + ";");
+
+        validateTypes(interfaceMethodType, delegateMethodType);
+
+        ClassWriter cw =
+            beginLambdaClass(lambdaClassName, factoryMethodType.returnType().getName());
+        Capture[] captures = generateCaptureFields(cw, factoryMethodType);
+        Method constructorMethod =
+            generateLambdaConstructor(cw, lambdaClassType, factoryMethodType, captures);
+
+        if (captures.length > 0) {
+            generateFactoryMethod(
+                cw, factoryMethodName, factoryMethodType, lambdaClassType, constructorMethod);
+        }
+
+        generateInterfaceMethod(cw, factoryMethodType, lambdaClassName, lambdaClassType,
+            interfaceMethodName, interfaceMethodType, delegateClassName, delegateInvokeType,
+            delegateMethodName, delegateMethodType, captures);
+        endLambdaClass(cw);
+
+        Class<?> lambdaClass =
+            createLambdaClass((Loader)lookup.lookupClass().getClassLoader(), cw, lambdaClassName);
+
+        if (captures.length > 0) {
+            return createCaptureCallSite(lookup, factoryMethodName, factoryMethodType, lambdaClass);
+        } else {
+            return createNoCaptureCallSite(factoryMethodType, lambdaClass);
+        }
+    }
+
+    /**
+     * Validates some conversions at link time.  Currently, only ensures that the lambda method
+     * with a return value cannot delegate to a delegate method with no return type.
+     */
+    private static void validateTypes(MethodType interfaceMethodType, MethodType delegateMethodType)
+            throws LambdaConversionException {
+
+        if (interfaceMethodType.returnType() != void.class &&
+            delegateMethodType.returnType() == void.class) {
+            throw new LambdaConversionException("lambda expects return type ["
+                + interfaceMethodType.returnType() + "], but found return type [void]");
+        }
+    }
+
+    /**
+     * Creates the {@link ClassWriter} to be used for the lambda class generation.
+     */
+    private static ClassWriter beginLambdaClass(String lambdaClassName, String lambdaInterface) {
+        String baseClass = Object.class.getName().replace('.', '/');
+        lambdaInterface = lambdaInterface.replace('.', '/');
+        int modifiers = ACC_PUBLIC | ACC_STATIC | ACC_SUPER | ACC_FINAL | ACC_SYNTHETIC;
+
+        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
+        cw.visit(CLASS_VERSION,
+            modifiers, lambdaClassName, null, baseClass, new String[] {lambdaInterface});
+
+        return cw;
+    }
+
+    /**
+     * Generates member fields for captured variables
+     * based on the parameters for the factory method.
+     * @return An array of captured variable metadata
+     * for generating method arguments later on
+     */
+    private static Capture[] generateCaptureFields(ClassWriter cw, MethodType factoryMethodType) {
+        int captureTotal = factoryMethodType.parameterCount();
+        Capture[] captures = new Capture[captureTotal];
+
+        for (int captureCount = 0; captureCount < captureTotal; ++captureCount) {
+            captures[captureCount] =
+                new Capture(captureCount, factoryMethodType.parameterType(captureCount));
+            int modifiers = ACC_PRIVATE + ACC_FINAL;
+
+            FieldVisitor fv = cw.visitField(
+                modifiers, captures[captureCount].name, captures[captureCount].desc, null, null);
+            fv.visitEnd();
+        }
+
+        return captures;
+    }
+
+    /**
+     * Generates a constructor that will take in captured
+     * arguments if any and store them in their respective
+     * member fields.
+     * @return The constructor {@link Method} used to
+     * call this method from a potential factory method
+     * if there are captured arguments
+     */
+    private static Method generateLambdaConstructor(
+            ClassWriter cw,
+            Type lambdaClassType,
+            MethodType factoryMethodType,
+            Capture[] captures) {
+
+        String conName = "<init>";
+        String conDesc = factoryMethodType.changeReturnType(void.class).toMethodDescriptorString();
+        Method conMeth = new Method(conName, conDesc);
+        Type baseConType = Type.getType(Object.class);
+        Method baseConMeth = new Method(conName,
+            MethodType.methodType(void.class).toMethodDescriptorString());
+        int modifiers = ACC_PUBLIC;
+
+        GeneratorAdapter constructor = new GeneratorAdapter(modifiers, conMeth,
+            cw.visitMethod(modifiers, conName, conDesc, null, null));
+        constructor.visitCode();
+        constructor.loadThis();
+        constructor.invokeConstructor(baseConType, baseConMeth);
+
+        for (int captureCount = 0; captureCount < captures.length; ++captureCount) {
+            constructor.loadThis();
+            constructor.loadArg(captureCount);
+            constructor.putField(
+                lambdaClassType, captures[captureCount].name, captures[captureCount].type);
+        }
+
+        constructor.returnValue();
+        constructor.endMethod();
+
+        return conMeth;
+    }
+
+    /**
+     * Generates a factory method that can be used to create the lambda class
+     * if there are captured variables.
+     */
+    private static void generateFactoryMethod(
+            ClassWriter cw,
+            String factoryMethodName,
+            MethodType factoryMethodType,
+            Type lambdaClassType,
+            Method constructorMethod) {
+
+        String facDesc = factoryMethodType.toMethodDescriptorString();
+        Method facMeth = new Method(factoryMethodName, facDesc);
+        int modifiers = ACC_PUBLIC | ACC_STATIC;
+
+        GeneratorAdapter factory = new GeneratorAdapter(modifiers, facMeth,
+            cw.visitMethod(modifiers, factoryMethodName, facDesc, null, null));
+        factory.visitCode();
+        factory.newInstance(lambdaClassType);
+        factory.dup();
+        factory.loadArgs();
+        factory.invokeConstructor(lambdaClassType, constructorMethod);
+        factory.returnValue();
+        factory.endMethod();
+    }
+
+    /**
+     * Generates the interface method that will delegate (call) to the delegate method.
+     */
+    private static void generateInterfaceMethod(
+            ClassWriter cw,
+            MethodType factoryMethodType,
+            String lambdaClassName,
+            Type lambdaClassType,
+            String interfaceMethodName,
+            MethodType interfaceMethodType,
+            String delegateClassName,
+            int delegateInvokeType,
+            String delegateMethodName,
+            MethodType delegateMethodType,
+            Capture[] captures)
+            throws LambdaConversionException {
+
+        String lamDesc = interfaceMethodType.toMethodDescriptorString();
+        Method lamMeth = new Method(lambdaClassName, lamDesc);
+        int modifiers = ACC_PUBLIC;
+
+        GeneratorAdapter iface = new GeneratorAdapter(modifiers, lamMeth,
+            cw.visitMethod(modifiers, interfaceMethodName, lamDesc, null, null));
+        iface.visitCode();
+
+        // Handles the case where a reference method refers to a constructor.
+        // A new instance of the requested type will be created and the
+        // constructor with no parameters will be called.
+        // Example: String::new
+        if (delegateInvokeType == H_NEWINVOKESPECIAL) {
+            String conName = "<init>";
+            String conDesc = MethodType.methodType(void.class).toMethodDescriptorString();
+            Method conMeth = new Method(conName, conDesc);
+            Type conType = Type.getType(delegateMethodType.returnType());
+
+            iface.newInstance(conType);
+            iface.dup();
+            iface.invokeConstructor(conType, conMeth);
+        } else {
+            // Loads any captured variables onto the stack.
+            for (int captureCount = 0; captureCount < captures.length; ++captureCount) {
+                iface.loadThis();
+                iface.getField(
+                    lambdaClassType, captures[captureCount].name, captures[captureCount].type);
+            }
+
+            // Loads any passed in arguments onto the stack.
+            iface.loadArgs();
+
+            // Handles the case for a lambda function or a static reference method.
+            // interfaceMethodType and delegateMethodType both have the captured types
+            // inserted into their type signatures.  This later allows the delegate
+            // method to be invoked dynamically and have the interface method types
+            // appropriately converted to the delegate method types.
+            // Example: Integer::parseInt
+            // Example: something.each(x -> x + 1)
+            if (delegateInvokeType == H_INVOKESTATIC) {
+                interfaceMethodType =
+                    interfaceMethodType.insertParameterTypes(0, factoryMethodType.parameterArray());
+                delegateMethodType =
+                    delegateMethodType.insertParameterTypes(0, factoryMethodType.parameterArray());
+            } else if (delegateInvokeType == H_INVOKEVIRTUAL ||
+                delegateInvokeType == H_INVOKEINTERFACE) {
+                // Handles the case for a virtual or interface reference method with no captures.
+                // delegateMethodType drops the 'this' parameter because it will be re-inserted
+                // when the method handle for the dynamically invoked delegate method is created.
+                // Example: Object::toString
+                if (captures.length == 0) {
+                    Class<?> clazz = delegateMethodType.parameterType(0);
+                    delegateClassName = clazz.getName();
+                    delegateMethodType = delegateMethodType.dropParameterTypes(0, 1);
+                // Handles the case for a virtual or interface reference method with 'this'
+                // captured. interfaceMethodType inserts the 'this' type into its
+                // method signature. This later allows the delegate
+                // method to be invoked dynamically and have the interface method types
+                // appropriately converted to the delegate method types.
+                // Example: something::toString
+                } else if (captures.length == 1) {
+                    Class<?> clazz = factoryMethodType.parameterType(0);
+                    delegateClassName = clazz.getName();
+                    interfaceMethodType = interfaceMethodType.insertParameterTypes(0, clazz);
+                } else {
+                    throw new LambdaConversionException(
+                        "unexpected number of captures [ " + captures.length + "]");
+                }
+            } else {
+                throw new IllegalStateException(
+                    "unexpected invocation type [" + delegateInvokeType + "]");
+            }
+
+            Handle delegateHandle =
+                new Handle(delegateInvokeType, delegateClassName.replace('.', '/'),
+                    delegateMethodName, delegateMethodType.toMethodDescriptorString(),
+                    delegateInvokeType == H_INVOKEINTERFACE);
+            iface.invokeDynamic(delegateMethodName, Type.getMethodType(interfaceMethodType
+                    .toMethodDescriptorString()).getDescriptor(), DELEGATE_BOOTSTRAP_HANDLE,
+                delegateHandle);
+        }
+
+        iface.returnValue();
+        iface.endMethod();
+    }
+
+    /**
+     * Closes the {@link ClassWriter}.
+     */
+    private static void endLambdaClass(ClassWriter cw) {
+        cw.visitEnd();
+    }
+
+    /**
+     * Defines the {@link Class} for the lambda class using the same {@link Loader}
+     * that originally defined the class for the Painless script.
+     */
+    private static Class<?> createLambdaClass(
+            Loader loader,
+            ClassWriter cw,
+            String lambdaClassName) {
+
+        byte[] classBytes = cw.toByteArray();
+        return AccessController.doPrivileged((PrivilegedAction<Class<?>>)() ->
+            loader.defineLambda(lambdaClassName.replace('/', '.'), classBytes));
+    }
+
+    /**
+     * Creates a {@link ConstantCallSite} that will return the same instance
+     * of the generated lambda class every time this linked factory method is called.
+     */
+    private static CallSite createNoCaptureCallSite(
+            MethodType factoryMethodType,
+            Class<?> lambdaClass) {
+
+        Constructor<?> constructor = AccessController.doPrivileged(
+            (PrivilegedAction<Constructor<?>>)() -> {
+                try {
+                    return lambdaClass.getConstructor();
+                } catch (NoSuchMethodException nsme) {
+                    throw new IllegalStateException("unable to create lambda class", nsme);
+                }
+            });
+
+        try {
+            return new ConstantCallSite(MethodHandles.constant(
+                factoryMethodType.returnType(), constructor.newInstance()));
+        } catch (InstantiationException |
+            IllegalAccessException |
+            InvocationTargetException exception) {
+            throw new IllegalStateException("unable to create lambda class", exception);
+        }
+    }
+
+    /**
+     * Creates a {@link ConstantCallSite} linked to the generated factory method.
+     */
+    private static CallSite createCaptureCallSite(
+            Lookup lookup,
+            String factoryMethodName,
+            MethodType factoryMethodType,
+            Class<?> lambdaClass) {
+
+        try {
+            return new ConstantCallSite(
+                lookup.findStatic(lambdaClass, factoryMethodName, factoryMethodType));
+        } catch (NoSuchMethodException | IllegalAccessException exception) {
+            throw new IllegalStateException("unable to create lambda factory class", exception);
+        }
+    }
+
+    /**
+     * Links the delegate method to the returned {@link CallSite}.  The linked
+     * delegate method will use converted types from the interface method.  Using
+     * invokedynamic to make the delegate method call allows
+     * {@link MethodHandle#asType} to be used to do the type conversion instead
+     * of either a lot more code or requiring many {@link Definition.Type}s to be looked
+     * up at link-time.
+     */
+    public static CallSite delegateBootstrap(Lookup lookup,
+                                             String delegateMethodName,
+                                             MethodType interfaceMethodType,
+                                             MethodHandle delegateMethodHandle) {
+        return new ConstantCallSite(delegateMethodHandle.asType(interfaceMethodType));
+    }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java
index f29afbb74e6..2772cdcf275 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java
@@ -84,11 +84,10 @@ public final class WriterConstants {
     public static final Type UTILITY_TYPE = Type.getType(Utility.class);
     public static final Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class);
     public static final Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class);
-    
-    public static final Type OBJECT_ARRAY_TYPE = Type.getType("[Ljava/lang/Object;");
+
 
     public static final Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class);
-    
+
     public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class);
 
     /**
@@ -110,7 +109,6 @@ public final class WriterConstants {
     public static final Method DEF_BOOTSTRAP_DELEGATE_METHOD = getAsmMethod(CallSite.class, "bootstrap", Definition.class,
             MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class);
 
-
     public static final Type DEF_UTIL_TYPE = Type.getType(Def.class);
     public static final Method DEF_TO_BOOLEAN         = getAsmMethod(boolean.class, "DefToboolean"       , Object.class);
     public static final Method DEF_TO_BYTE_IMPLICIT   = getAsmMethod(byte.class   , "DefTobyteImplicit"  , Object.class);
@@ -132,10 +130,15 @@ public final class WriterConstants {
     /** invokedynamic bootstrap for lambda expression/method references */
     public static final MethodType LAMBDA_BOOTSTRAP_TYPE =
             MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class,
-                                  MethodType.class, Object[].class);
+                                  MethodType.class, MethodType.class, String.class, int.class, String.class, MethodType.class);
     public static final Handle LAMBDA_BOOTSTRAP_HANDLE =
-            new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaMetafactory.class),
-                "altMetafactory", LAMBDA_BOOTSTRAP_TYPE.toMethodDescriptorString(), false);
+            new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaBootstrap.class),
+                "lambdaBootstrap", LAMBDA_BOOTSTRAP_TYPE.toMethodDescriptorString(), false);
+    public static final MethodType DELEGATE_BOOTSTRAP_TYPE =
+        MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, MethodHandle.class);
+    public static final Handle DELEGATE_BOOTSTRAP_HANDLE =
+        new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaBootstrap.class),
+            "delegateBootstrap", DELEGATE_BOOTSTRAP_TYPE.toMethodDescriptorString(), false);
 
     /** dynamic invokedynamic bootstrap for indy string concats (Java 9+) */
     public static final Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
index 51f37447821..0210b7ce151 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
@@ -1060,7 +1060,7 @@ public final class Walker extends PainlessParserBaseVisitor<ANode> {
 
         for (LamtypeContext lamtype : ctx.lamtype()) {
             if (lamtype.decltype() == null) {
-                paramTypes.add("def");
+                paramTypes.add(null);
             } else {
                 paramTypes.add(lamtype.decltype().getText());
             }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
index 717d2a43b1e..f40883186dd 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.FunctionRef;
@@ -30,10 +31,10 @@ import org.elasticsearch.painless.MethodWriter;
 import org.objectweb.asm.Opcodes;
 import org.objectweb.asm.Type;
 
-import java.lang.invoke.LambdaMetafactory;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.elasticsearch.painless.Definition.VOID_TYPE;
 import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
 
 /**
@@ -77,6 +78,17 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda
             if (captured.type.sort != Definition.Sort.DEF) {
                 try {
                     ref = new FunctionRef(locals.getDefinition(), expected, captured.type.name, call, 1);
+
+                    // check casts between the interface method and the delegate method are legal
+                    for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) {
+                        Definition.Type from = ref.interfaceMethod.arguments.get(i);
+                        Definition.Type to = ref.delegateMethod.arguments.get(i);
+                        AnalyzerCaster.getLegalCast(location, from, to, false, true);
+                    }
+
+                    if (ref.interfaceMethod.rtn != VOID_TYPE) {
+                        AnalyzerCaster.getLegalCast(location, ref.delegateMethod.rtn, ref.interfaceMethod.rtn, false, true);
+                    }
                 } catch (IllegalArgumentException e) {
                     throw createError(e);
                 }
@@ -101,29 +113,16 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda
         } else {
             // typed interface, typed implementation
             writer.visitVarInsn(captured.type.type.getOpcode(Opcodes.ILOAD), captured.getSlot());
-            // convert MethodTypes to asm Type for the constant pool.
-            String invokedType = ref.invokedType.toMethodDescriptorString();
-            Type samMethodType = Type.getMethodType(ref.samMethodType.toMethodDescriptorString());
-            Type interfaceType = Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString());
-            if (ref.needsBridges()) {
-                writer.invokeDynamic(ref.invokedName,
-                                     invokedType,
-                                     LAMBDA_BOOTSTRAP_HANDLE,
-                                     samMethodType,
-                                     ref.implMethodASM,
-                                     samMethodType,
-                                     LambdaMetafactory.FLAG_BRIDGES,
-                                     1,
-                                     interfaceType);
-            } else {
-                writer.invokeDynamic(ref.invokedName,
-                                     invokedType,
-                                     LAMBDA_BOOTSTRAP_HANDLE,
-                                     samMethodType,
-                                     ref.implMethodASM,
-                                     samMethodType,
-                                     0);
-            }
+            writer.invokeDynamic(
+                ref.interfaceMethodName,
+                ref.factoryDescriptor,
+                LAMBDA_BOOTSTRAP_HANDLE,
+                ref.interfaceType,
+                ref.delegateClassName,
+                ref.delegateInvokeType,
+                ref.delegateMethodName,
+                ref.delegateType
+            );
         }
     }
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java
index 0fe11400269..c9b2a5d91d2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Method;
 import org.elasticsearch.painless.Definition.MethodKey;
@@ -29,10 +30,10 @@ import org.elasticsearch.painless.Location;
 import org.elasticsearch.painless.MethodWriter;
 import org.objectweb.asm.Type;
 
-import java.lang.invoke.LambdaMetafactory;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.elasticsearch.painless.Definition.VOID_TYPE;
 import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
 
 /**
@@ -71,16 +72,28 @@ public final class EFunctionRef extends AExpression implements ILambda {
                         throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
                                                            "to [" + expected.name + "], not a functional interface");
                     }
-                    Method implMethod = locals.getMethod(new MethodKey(call, interfaceMethod.arguments.size()));
-                    if (implMethod == null) {
+                    Method delegateMethod = locals.getMethod(new MethodKey(call, interfaceMethod.arguments.size()));
+                    if (delegateMethod == null) {
                         throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
                                                            "to [" + expected.name + "], function not found");
                     }
-                    ref = new FunctionRef(expected, interfaceMethod, implMethod, 0);
+                    ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0);
+
+                    // check casts between the interface method and the delegate method are legal
+                    for (int i = 0; i < interfaceMethod.arguments.size(); ++i) {
+                        Definition.Type from = interfaceMethod.arguments.get(i);
+                        Definition.Type to = delegateMethod.arguments.get(i);
+                        AnalyzerCaster.getLegalCast(location, from, to, false, true);
+                    }
+
+                    if (interfaceMethod.rtn != VOID_TYPE) {
+                        AnalyzerCaster.getLegalCast(location, delegateMethod.rtn, interfaceMethod.rtn, false, true);
+                    }
                 } else {
                     // whitelist lookup
                     ref = new FunctionRef(locals.getDefinition(), expected, type, call, 0);
                 }
+
             } catch (IllegalArgumentException e) {
                 throw createError(e);
             }
@@ -92,29 +105,16 @@ public final class EFunctionRef extends AExpression implements ILambda {
     void write(MethodWriter writer, Globals globals) {
         if (ref != null) {
             writer.writeDebugInfo(location);
-            // convert MethodTypes to asm Type for the constant pool.
-            String invokedType = ref.invokedType.toMethodDescriptorString();
-            Type samMethodType = Type.getMethodType(ref.samMethodType.toMethodDescriptorString());
-            Type interfaceType = Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString());
-            if (ref.needsBridges()) {
-                writer.invokeDynamic(ref.invokedName,
-                                     invokedType,
-                                     LAMBDA_BOOTSTRAP_HANDLE,
-                                     samMethodType,
-                                     ref.implMethodASM,
-                                     samMethodType,
-                                     LambdaMetafactory.FLAG_BRIDGES,
-                                     1,
-                                     interfaceType);
-            } else {
-                writer.invokeDynamic(ref.invokedName,
-                                     invokedType,
-                                     LAMBDA_BOOTSTRAP_HANDLE,
-                                     samMethodType,
-                                     ref.implMethodASM,
-                                     samMethodType,
-                                     0);
-            }
+            writer.invokeDynamic(
+                ref.interfaceMethodName,
+                ref.factoryDescriptor,
+                LAMBDA_BOOTSTRAP_HANDLE,
+                ref.interfaceType,
+                ref.delegateClassName,
+                ref.delegateInvokeType,
+                ref.delegateMethodName,
+                ref.delegateType
+            );
         } else {
             // TODO: don't do this: its just to cutover :)
             writer.push((String)null);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
index ca086561d69..ad843f86af2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Method;
 import org.elasticsearch.painless.Definition.Type;
@@ -31,7 +32,6 @@ import org.elasticsearch.painless.MethodWriter;
 import org.elasticsearch.painless.node.SFunction.FunctionReserved;
 import org.objectweb.asm.Opcodes;
 
-import java.lang.invoke.LambdaMetafactory;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
@@ -39,7 +39,10 @@ import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.elasticsearch.painless.Definition.VOID_TYPE;
+import static org.elasticsearch.painless.WriterConstants.CLASS_NAME;
 import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
+import static org.objectweb.asm.Opcodes.H_INVOKESTATIC;
 
 /**
  * Lambda expression node.
@@ -109,8 +112,15 @@ public final class ELambda extends AExpression implements ILambda {
             interfaceMethod = null;
             // we don't know anything: treat as def
             returnType = Definition.DEF_TYPE;
-            // don't infer any types
-            actualParamTypeStrs = paramTypeStrs;
+            // don't infer any types, replace any null types with def
+            actualParamTypeStrs = new ArrayList<>();
+            for (String type : paramTypeStrs) {
+                if (type == null) {
+                    actualParamTypeStrs.add("def");
+                } else {
+                    actualParamTypeStrs.add(type);
+                }
+            }
         } else {
             // we know the method statically, infer return type and any unknown/def types
             interfaceMethod = expected.struct.getFunctionalMethod();
@@ -128,11 +138,11 @@ public final class ELambda extends AExpression implements ILambda {
             } else {
                 returnType = interfaceMethod.rtn;
             }
-            // replace any def types with the actual type (which could still be def)
-            actualParamTypeStrs = new ArrayList<String>();
+            // replace any null types with the actual type
+            actualParamTypeStrs = new ArrayList<>();
             for (int i = 0; i < paramTypeStrs.size(); i++) {
                 String paramType = paramTypeStrs.get(i);
-                if (paramType.equals(Definition.DEF_TYPE.name)) {
+                if (paramType == null) {
                     actualParamTypeStrs.add(interfaceMethod.arguments.get(i).name);
                 } else {
                     actualParamTypeStrs.add(paramType);
@@ -180,6 +190,18 @@ public final class ELambda extends AExpression implements ILambda {
             } catch (IllegalArgumentException e) {
                 throw createError(e);
             }
+
+            // check casts between the interface method and the delegate method are legal
+            for (int i = 0; i < interfaceMethod.arguments.size(); ++i) {
+                Type from = interfaceMethod.arguments.get(i);
+                Type to = desugared.parameters.get(i + captures.size()).type;
+                AnalyzerCaster.getLegalCast(location, from, to, false, true);
+            }
+
+            if (interfaceMethod.rtn != VOID_TYPE) {
+                AnalyzerCaster.getLegalCast(location, desugared.rtnType, interfaceMethod.rtn, false, true);
+            }
+
             actual = expected;
         }
     }
@@ -194,31 +216,17 @@ public final class ELambda extends AExpression implements ILambda {
             for (Variable capture : captures) {
                 writer.visitVarInsn(capture.type.type.getOpcode(Opcodes.ILOAD), capture.getSlot());
             }
-            // convert MethodTypes to asm Type for the constant pool.
-            String invokedType = ref.invokedType.toMethodDescriptorString();
-            org.objectweb.asm.Type samMethodType =
-                org.objectweb.asm.Type.getMethodType(ref.samMethodType.toMethodDescriptorString());
-            org.objectweb.asm.Type interfaceType =
-                org.objectweb.asm.Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString());
-            if (ref.needsBridges()) {
-                writer.invokeDynamic(ref.invokedName,
-                                     invokedType,
-                                     LAMBDA_BOOTSTRAP_HANDLE,
-                                     samMethodType,
-                                     ref.implMethodASM,
-                                     samMethodType,
-                                     LambdaMetafactory.FLAG_BRIDGES,
-                                     1,
-                                     interfaceType);
-            } else {
-                writer.invokeDynamic(ref.invokedName,
-                                     invokedType,
-                                     LAMBDA_BOOTSTRAP_HANDLE,
-                                     samMethodType,
-                                     ref.implMethodASM,
-                                     samMethodType,
-                                     0);
-            }
+
+            writer.invokeDynamic(
+                ref.interfaceMethodName,
+                ref.factoryDescriptor,
+                LAMBDA_BOOTSTRAP_HANDLE,
+                ref.interfaceType,
+                ref.delegateClassName,
+                ref.delegateInvokeType,
+                ref.delegateMethodName,
+                ref.delegateType
+            );
         } else {
             // placeholder
             writer.push((String)null);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java
index 3ef20b023ce..257f2975c93 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java
@@ -174,7 +174,7 @@ public final class SFunction extends AStatement {
 
     /** Writes the function to given ClassVisitor. */
     void write (ClassVisitor writer, CompilerSettings settings, Globals globals) {
-        int access = Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC;
+        int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC;
         if (synthetic) {
             access |= Opcodes.ACC_SYNTHETIC;
         }
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
index 8c4f104a0c1..68d86356e95 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
@@ -155,4 +155,5 @@ class org.elasticsearch.painless.FeatureTest -> org.elasticsearch.painless.Featu
   boolean overloadedStatic()
   boolean overloadedStatic(boolean)
   Object twoFunctionsOfX(Function,Function)
+  void listInput(List)
 }
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java
index 311a6b775dd..acf698e2fc7 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java
@@ -19,35 +19,29 @@
 
 package org.elasticsearch.painless;
 
-import org.apache.lucene.util.Constants;
-
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
 public class AugmentationTests extends ScriptTestCase {
-    
+
     public void testStatic() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("ArrayList l = new ArrayList(); l.add(1); return l.getLength();"));
         assertEquals(1, exec("ArrayList l = new ArrayList(); l.add(1); return l.length;"));
     }
-    
+
     public void testSubclass() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("List l = new ArrayList(); l.add(1); return l.getLength();"));
         assertEquals(1, exec("List l = new ArrayList(); l.add(1); return l.length;"));
     }
-    
+
     public void testDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("def l = new ArrayList(); l.add(1); return l.getLength();"));
         assertEquals(1, exec("def l = new ArrayList(); l.add(1); return l.length;"));
     }
-    
+
     public void testCapturingReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
                              "ArrayList l = new ArrayList(); l.add(1);" +
                              "return foo(l::getLength);"));
@@ -58,164 +52,140 @@ public class AugmentationTests extends ScriptTestCase {
                              "def l = new ArrayList(); l.add(1);" +
                              "return foo(l::getLength);"));
     }
-    
+
     public void testIterable_Any() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(true, 
+        assertEquals(true,
                 exec("List l = new ArrayList(); l.add(1); l.any(x -> x == 1)"));
     }
-    
+
     public void testIterable_AsCollection() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(true, 
+        assertEquals(true,
                 exec("List l = new ArrayList(); return l.asCollection() === l"));
     }
-    
+
     public void testIterable_AsList() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(true, 
+        assertEquals(true,
                 exec("List l = new ArrayList(); return l.asList() === l"));
-        assertEquals(5, 
+        assertEquals(5,
                 exec("Set l = new HashSet(); l.add(5); return l.asList()[0]"));
     }
-    
+
     public void testIterable_Each() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(1, 
+        assertEquals(1,
                 exec("List l = new ArrayList(); l.add(1); List l2 = new ArrayList(); l.each(l2::add); return l2.size()"));
     }
-    
+
     public void testIterable_EachWithIndex() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(0, 
+        assertEquals(0,
                 exec("List l = new ArrayList(); l.add(2); Map m = new HashMap(); l.eachWithIndex(m::put); return m.get(2)"));
     }
-    
+
     public void testIterable_Every() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(false, exec("List l = new ArrayList(); l.add(1); l.add(2); l.every(x -> x == 1)"));
     }
-    
+
     public void testIterable_FindResults() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(1, 
+        assertEquals(1,
                 exec("List l = new ArrayList(); l.add(1); l.add(2); l.findResults(x -> x == 1 ? x : null).size()"));
     }
-    
+
     public void testIterable_GroupBy() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(2, 
+        assertEquals(2,
                 exec("List l = new ArrayList(); l.add(1); l.add(-1); l.groupBy(x -> x < 0 ? 'negative' : 'positive').size()"));
     }
-    
+
     public void testIterable_Join() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals("test,ing", 
+        assertEquals("test,ing",
                 exec("List l = new ArrayList(); l.add('test'); l.add('ing'); l.join(',')"));
     }
-    
+
     public void testIterable_Sum() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(3.0D, exec("def l = [1,2]; return l.sum()"));
-        assertEquals(5.0D, 
+        assertEquals(5.0D,
                 exec("List l = new ArrayList(); l.add(1); l.add(2); l.sum(x -> x + 1)"));
     }
-    
+
     public void testCollection_Collect() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(Arrays.asList(2, 3), 
+        assertEquals(Arrays.asList(2, 3),
                 exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(x -> x + 1)"));
-        assertEquals(asSet(2, 3), 
+        assertEquals(asSet(2, 3),
                 exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(new HashSet(), x -> x + 1)"));
     }
-    
+
     public void testCollection_Find() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(2, 
+        assertEquals(2,
                 exec("List l = new ArrayList(); l.add(1); l.add(2); return l.find(x -> x == 2)"));
     }
-    
+
     public void testCollection_FindAll() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(Arrays.asList(2), 
+        assertEquals(Arrays.asList(2),
                 exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findAll(x -> x == 2)"));
     }
-    
+
     public void testCollection_FindResult() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals("found", 
+        assertEquals("found",
                 exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult(x -> x > 1 ? 'found' : null)"));
-        assertEquals("notfound", 
+        assertEquals("notfound",
                 exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult('notfound', x -> x > 10 ? 'found' : null)"));
     }
-    
+
     public void testCollection_Split() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(Arrays.asList(Arrays.asList(2), Arrays.asList(1)), 
+        assertEquals(Arrays.asList(Arrays.asList(2), Arrays.asList(1)),
                 exec("List l = new ArrayList(); l.add(1); l.add(2); return l.split(x -> x == 2)"));
     }
-    
+
     public void testMap_Collect() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(Arrays.asList("one1", "two2"), 
+        assertEquals(Arrays.asList("one1", "two2"),
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect((key,value) -> key + value)"));
-        assertEquals(asSet("one1", "two2"), 
+        assertEquals(asSet("one1", "two2"),
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect(new HashSet(), (key,value) -> key + value)"));
     }
-    
+
     public void testMap_Count() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(1, 
+        assertEquals(1,
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.count((key,value) -> value == 2)"));
     }
-    
+
     public void testMap_Each() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(2, 
+        assertEquals(2,
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; Map m2 = new TreeMap(); m.each(m2::put); return m2.size()"));
     }
-    
+
     public void testMap_Every() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(false, 
+        assertEquals(false,
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.every((key,value) -> value == 2)"));
     }
-    
+
     public void testMap_Find() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals("two", 
+        assertEquals("two",
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.find((key,value) -> value == 2).key"));
     }
-    
+
     public void testMap_FindAll() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(Collections.singletonMap("two", 2), 
+        assertEquals(Collections.singletonMap("two", 2),
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findAll((key,value) -> value == 2)"));
     }
-    
+
     public void testMap_FindResult() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals("found", 
+        assertEquals("found",
                 exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findResult((key,value) -> value == 2 ? 'found' : null)"));
-        assertEquals("notfound", 
-                exec("Map m = new TreeMap(); m.one = 1; m.two = 2; " + 
+        assertEquals("notfound",
+                exec("Map m = new TreeMap(); m.one = 1; m.two = 2; " +
                      "return m.findResult('notfound', (key,value) -> value == 10 ? 'found' : null)"));
     }
-    
+
     public void testMap_FindResults() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(Arrays.asList("negative", "positive"),
-                exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " + 
+                exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " +
                      "return m.findResults((key,value) -> value < 0 ? 'negative' : 'positive')"));
     }
-    
+
     public void testMap_GroupBy() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Map<String,Map<String,Integer>> expected = new HashMap<>();
         expected.put("negative", Collections.singletonMap("a", -1));
         expected.put("positive", Collections.singletonMap("b", 1));
         assertEquals(expected,
-                exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " + 
+                exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " +
                      "return m.groupBy((key,value) -> value < 0 ? 'negative' : 'positive')"));
     }
 }
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java
index 5dd2481c0f1..4bd687a7205 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.painless;
 
-import org.apache.lucene.util.Constants;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 
@@ -33,39 +32,32 @@ import static org.hamcrest.Matchers.startsWith;
 public class FunctionRefTests extends ScriptTestCase {
 
     public void testStaticMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(Integer::compare); return l.get(0);"));
     }
-    
+
     public void testStaticMethodReferenceDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(Integer::compare); return l.get(0);"));
     }
 
     public void testVirtualMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("List l = new ArrayList(); l.add(1); l.add(1); return l.stream().mapToInt(Integer::intValue).sum();"));
     }
-    
+
     public void testVirtualMethodReferenceDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("def l = new ArrayList(); l.add(1); l.add(1); return l.stream().mapToInt(Integer::intValue).sum();"));
     }
 
     public void testQualifiedStaticMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(true,
                 exec("List l = [true]; l.stream().map(org.elasticsearch.painless.FeatureTest::overloadedStatic).findFirst().get()"));
     }
 
     public void testQualifiedStaticMethodReferenceDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(true,
                 exec("def l = [true]; l.stream().map(org.elasticsearch.painless.FeatureTest::overloadedStatic).findFirst().get()"));
     }
 
     public void testQualifiedVirtualMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         long instant = randomLong();
         assertEquals(instant, exec(
                 "List l = [params.d]; return l.stream().mapToLong(org.joda.time.ReadableDateTime::getMillis).sum()",
@@ -73,7 +65,6 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testQualifiedVirtualMethodReferenceDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         long instant = randomLong();
         assertEquals(instant, exec(
                 "def l = [params.d]; return l.stream().mapToLong(org.joda.time.ReadableDateTime::getMillis).sum()",
@@ -81,129 +72,112 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testCtorMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(3.0D, 
-                exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " + 
-                        "DoubleStream doubleStream = l.stream().mapToDouble(Double::doubleValue);" + 
+        assertEquals(3.0D,
+                exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " +
+                        "DoubleStream doubleStream = l.stream().mapToDouble(Double::doubleValue);" +
                         "DoubleSummaryStatistics stats = doubleStream.collect(DoubleSummaryStatistics::new, " +
                         "DoubleSummaryStatistics::accept, " +
-                        "DoubleSummaryStatistics::combine); " + 
+                        "DoubleSummaryStatistics::combine); " +
                         "return stats.getSum()"));
     }
-    
+
     public void testCtorMethodReferenceDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(3.0D, 
-            exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " + 
-                 "def doubleStream = l.stream().mapToDouble(Double::doubleValue);" + 
+        assertEquals(3.0D,
+            exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " +
+                 "def doubleStream = l.stream().mapToDouble(Double::doubleValue);" +
                  "def stats = doubleStream.collect(DoubleSummaryStatistics::new, " +
                                                   "DoubleSummaryStatistics::accept, " +
-                                                  "DoubleSummaryStatistics::combine); " + 
+                                                  "DoubleSummaryStatistics::combine); " +
                  "return stats.getSum()"));
     }
 
     public void testArrayCtorMethodRef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(1.0D, 
-                exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " + 
-                     "def[] array = l.stream().toArray(Double[]::new);" + 
+        assertEquals(1.0D,
+                exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " +
+                     "def[] array = l.stream().toArray(Double[]::new);" +
                      "return array[0];"));
     }
 
     public void testArrayCtorMethodRefDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(1.0D, 
-                exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " + 
-                     "def[] array = l.stream().toArray(Double[]::new);" + 
+        assertEquals(1.0D,
+                exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " +
+                     "def[] array = l.stream().toArray(Double[]::new);" +
                      "return array[0];"));
     }
 
     public void testCapturingMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("5", exec("Integer x = Integer.valueOf(5); return Optional.empty().orElseGet(x::toString);"));
         assertEquals("[]", exec("List l = new ArrayList(); return Optional.empty().orElseGet(l::toString);"));
     }
-    
+
     public void testCapturingMethodReferenceDefImpl() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("5", exec("def x = Integer.valueOf(5); return Optional.empty().orElseGet(x::toString);"));
         assertEquals("[]", exec("def l = new ArrayList(); return Optional.empty().orElseGet(l::toString);"));
     }
-    
+
     public void testCapturingMethodReferenceDefInterface() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("5", exec("Integer x = Integer.valueOf(5); def opt = Optional.empty(); return opt.orElseGet(x::toString);"));
         assertEquals("[]", exec("List l = new ArrayList(); def opt = Optional.empty(); return opt.orElseGet(l::toString);"));
     }
-    
+
     public void testCapturingMethodReferenceDefEverywhere() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("5", exec("def x = Integer.valueOf(5); def opt = Optional.empty(); return opt.orElseGet(x::toString);"));
         assertEquals("[]", exec("def l = new ArrayList(); def opt = Optional.empty(); return opt.orElseGet(l::toString);"));
     }
-    
+
     public void testCapturingMethodReferenceMultipleLambdas() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("testingcdefg", exec(
                 "String x = 'testing';" +
-                "String y = 'abcdefg';" + 
-                "org.elasticsearch.painless.FeatureTest test = new org.elasticsearch.painless.FeatureTest(2,3);" + 
+                "String y = 'abcdefg';" +
+                "org.elasticsearch.painless.FeatureTest test = new org.elasticsearch.painless.FeatureTest(2,3);" +
                 "return test.twoFunctionsOfX(x::concat, y::substring);"));
     }
-    
+
     public void testCapturingMethodReferenceMultipleLambdasDefImpls() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("testingcdefg", exec(
                 "def x = 'testing';" +
-                "def y = 'abcdefg';" + 
-                "org.elasticsearch.painless.FeatureTest test = new org.elasticsearch.painless.FeatureTest(2,3);" + 
+                "def y = 'abcdefg';" +
+                "org.elasticsearch.painless.FeatureTest test = new org.elasticsearch.painless.FeatureTest(2,3);" +
                 "return test.twoFunctionsOfX(x::concat, y::substring);"));
     }
-    
+
     public void testCapturingMethodReferenceMultipleLambdasDefInterface() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("testingcdefg", exec(
                 "String x = 'testing';" +
-                "String y = 'abcdefg';" + 
-                "def test = new org.elasticsearch.painless.FeatureTest(2,3);" + 
+                "String y = 'abcdefg';" +
+                "def test = new org.elasticsearch.painless.FeatureTest(2,3);" +
                 "return test.twoFunctionsOfX(x::concat, y::substring);"));
     }
-    
+
     public void testCapturingMethodReferenceMultipleLambdasDefEverywhere() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("testingcdefg", exec(
                 "def x = 'testing';" +
-                "def y = 'abcdefg';" + 
-                "def test = new org.elasticsearch.painless.FeatureTest(2,3);" + 
+                "def y = 'abcdefg';" +
+                "def test = new org.elasticsearch.painless.FeatureTest(2,3);" +
                 "return test.twoFunctionsOfX(x::concat, y::substring);"));
     }
-    
+
     public void testOwnStaticMethodReference() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int mycompare(int i, int j) { j - i } " +
                              "List l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);"));
     }
-    
+
     public void testOwnStaticMethodReferenceDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int mycompare(int i, int j) { j - i } " +
                              "def l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);"));
     }
 
     public void testInterfaceDefaultMethod() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" + 
+        assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" +
                                  "Map map = new HashMap(); f(map::getOrDefault)"));
     }
-    
+
     public void testInterfaceDefaultMethodDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" + 
+        assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" +
                                  "def map = new HashMap(); f(map::getOrDefault)"));
     }
 
     public void testMethodMissing() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);");
         });
@@ -211,7 +185,6 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testQualifiedMethodMissing() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = [2, 1]; l.sort(org.joda.time.ReadableDateTime::bogus); return l.get(0);", false);
         });
@@ -219,7 +192,6 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testClassMissing() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = [2, 1]; l.sort(Bogus::bogus); return l.get(0);", false);
         });
@@ -227,7 +199,6 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testQualifiedClassMissing() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = [2, 1]; l.sort(org.joda.time.BogusDateTime::bogus); return l.get(0);", false);
         });
@@ -237,7 +208,6 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testNotFunctionalInterface() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);");
         });
@@ -245,38 +215,33 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testIncompatible() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         expectScriptThrows(BootstrapMethodError.class, () -> {
             exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);");
         });
     }
-    
+
     public void testWrongArity() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("Optional.empty().orElseGet(String::startsWith);");
         });
         assertThat(expected.getMessage(), containsString("Unknown reference"));
     }
-    
+
     public void testWrongArityNotEnough() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
         });
         assertTrue(expected.getMessage().contains("Unknown reference"));
     }
-    
+
     public void testWrongArityDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);");
         });
         assertThat(expected.getMessage(), containsString("Unknown reference"));
     }
-    
+
     public void testWrongArityNotEnoughDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
         });
@@ -284,29 +249,26 @@ public class FunctionRefTests extends ScriptTestCase {
     }
 
     public void testReturnVoid() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        Throwable expected = expectScriptThrows(BootstrapMethodError.class, () -> {
-            exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength);");
+        Throwable expected = expectScriptThrows(ClassCastException.class, () -> {
+            exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength).sum();");
         });
-        assertThat(expected.getCause().getMessage(),
-                containsString("Type mismatch for lambda expected return: void is not convertible to long"));
+        assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [long]."));
     }
 
     public void testReturnVoidDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Exception expected = expectScriptThrows(LambdaConversionException.class, () -> {
             exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);");
         });
-        assertThat(expected.getMessage(), containsString("Type mismatch for lambda expected return: void is not convertible to long"));
+        assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]"));
 
         expected = expectScriptThrows(LambdaConversionException.class, () -> {
             exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);");
         });
-        assertThat(expected.getMessage(), containsString("Type mismatch for lambda expected return: void is not convertible to long"));
+        assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]"));
 
         expected = expectScriptThrows(LambdaConversionException.class, () -> {
             exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength);");
         });
-        assertThat(expected.getMessage(), containsString("Type mismatch for lambda expected return: void is not convertible to long"));
+        assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]"));
     }
 }
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java
index bcb92a527d9..20e257e5747 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.painless;
 
-import org.apache.lucene.util.Constants;
-
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -30,87 +28,72 @@ import static org.hamcrest.Matchers.containsString;
 public class LambdaTests extends ScriptTestCase {
 
     public void testNoArgLambda() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("Optional.empty().orElseGet(() -> 1);"));
     }
 
     public void testNoArgLambdaDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("def x = Optional.empty(); x.orElseGet(() -> 1);"));
     }
 
     public void testLambdaWithArgs() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); "
                                  + "l.sort((a, b) -> a.length() - b.length()); return l.get(0)"));
 
     }
 
     public void testLambdaWithTypedArgs() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); "
                                  + "l.sort((String a, String b) -> a.length() - b.length()); return l.get(0)"));
 
     }
 
     public void testPrimitiveLambdas() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); "
                            + "return l.stream().mapToInt(x -> x + 1).sum();"));
     }
 
     public void testPrimitiveLambdasWithTypedArgs() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); "
                            + "return l.stream().mapToInt(int x -> x + 1).sum();"));
     }
 
     public void testPrimitiveLambdasDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); "
                            + "return l.stream().mapToInt(x -> x + 1).sum();"));
     }
 
     public void testPrimitiveLambdasWithTypedArgsDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); "
                            + "return l.stream().mapToInt(int x -> x + 1).sum();"));
     }
 
     public void testPrimitiveLambdasConvertible() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
-        assertEquals(2, exec("List l = new ArrayList(); l.add(1); l.add(1); "
-                           + "return l.stream().mapToInt(byte x -> x).sum();"));
+        assertEquals(2, exec("List l = new ArrayList(); l.add((short)1); l.add(1); "
+                           + "return l.stream().mapToInt(long x -> (int)1).sum();"));
     }
 
     public void testPrimitiveArgs() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> x + 1)"));
     }
 
     public void testPrimitiveArgsTyped() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(int x -> x + 1)"));
     }
 
     public void testPrimitiveArgsTypedOddly() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2L, exec("long applyOne(IntFunction arg) { arg.apply(1) } applyOne(long x -> x + 1)"));
     }
 
     public void testMultipleStatements() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { def y = x + 1; return y })"));
     }
 
     public void testUnneededCurlyStatements() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { x + 1 })"));
     }
 
     /** interface ignores return value */
     public void testVoidReturn() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("List list = new ArrayList(); "
                            + "list.add(2); "
                            + "List list2 = new ArrayList(); "
@@ -120,7 +103,6 @@ public class LambdaTests extends ScriptTestCase {
 
     /** interface ignores return value */
     public void testVoidReturnDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("def list = new ArrayList(); "
                            + "list.add(2); "
                            + "List list2 = new ArrayList(); "
@@ -129,19 +111,16 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testTwoLambdas() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("testingcdefg", exec(
                 "org.elasticsearch.painless.FeatureTest test = new org.elasticsearch.painless.FeatureTest(2,3);" +
                 "return test.twoFunctionsOfX(x -> 'testing'.concat(x), y -> 'abcdefg'.substring(y))"));
     }
 
     public void testNestedLambdas() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("Optional.empty().orElseGet(() -> Optional.empty().orElseGet(() -> 1));"));
     }
 
     public void testLambdaInLoop() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(100, exec("int sum = 0; " +
                                "for (int i = 0; i < 100; i++) {" +
                                "  sum += Optional.empty().orElseGet(() -> 1);" +
@@ -150,17 +129,14 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testCapture() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(5, exec("int x = 5; return Optional.empty().orElseGet(() -> x);"));
     }
 
     public void testTwoCaptures() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals("1test", exec("int x = 1; String y = 'test'; return Optional.empty().orElseGet(() -> x + y);"));
     }
 
     public void testCapturesAreReadOnly() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("List l = new ArrayList(); l.add(1); l.add(1); "
                     + "return l.stream().mapToInt(x -> { l = null; return x + 1 }).sum();");
@@ -170,14 +146,12 @@ public class LambdaTests extends ScriptTestCase {
 
     @AwaitsFix(bugUrl = "def type tracking")
     public void testOnlyCapturesAreReadOnly() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); "
                            + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();"));
     }
 
     /** Lambda parameters shouldn't be able to mask a variable already in scope */
     public void testNoParamMasking() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("int x = 0; List l = new ArrayList(); l.add(1); l.add(1); "
                     + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();");
@@ -186,24 +160,20 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testCaptureDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(5, exec("int x = 5; def y = Optional.empty(); y.orElseGet(() -> x);"));
     }
 
     public void testNestedCapture() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(1, exec("boolean x = false; int y = 1;" +
                              "return Optional.empty().orElseGet(() -> x ? 5 : Optional.empty().orElseGet(() -> y));"));
     }
 
     public void testNestedCaptureParams() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(2, exec("int foo(Function f) { return f.apply(1) }" +
                              "return foo(x -> foo(y -> x + 1))"));
     }
 
     public void testWrongArity() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> {
             exec("Optional.empty().orElseGet(x -> x);");
         });
@@ -211,7 +181,6 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testWrongArityDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("def y = Optional.empty(); return y.orElseGet(x -> x);");
         });
@@ -219,7 +188,6 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testWrongArityNotEnough() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> {
             exec("List l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(() -> 5).sum();");
@@ -228,7 +196,6 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testWrongArityNotEnoughDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("def l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(() -> 5).sum();");
@@ -237,17 +204,14 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testLambdaInFunction() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(5, exec("def foo() { Optional.empty().orElseGet(() -> 5) } return foo();"));
     }
 
     public void testLambdaCaptureFunctionParam() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         assertEquals(5, exec("def foo(int x) { Optional.empty().orElseGet(() -> x) } return foo(5);"));
     }
 
     public void testReservedCapture() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         String compare = "boolean compare(Supplier s, def v) {s.get() == v}";
         assertEquals(true, exec(compare + "compare(() -> new ArrayList(), new ArrayList())"));
         assertEquals(true, exec(compare + "compare(() -> { new ArrayList() }, new ArrayList())"));
@@ -272,7 +236,6 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testReturnVoid() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         Throwable expected = expectScriptThrows(ClassCastException.class, () -> {
             exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))");
         });
@@ -280,7 +243,6 @@ public class LambdaTests extends ScriptTestCase {
     }
 
     public void testReturnVoidDef() {
-        assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
         // If we can catch the error at compile time we do
         Exception expected = expectScriptThrows(ClassCastException.class, () -> {
             exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))");
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
index df9d0c0f4ea..13d48bd2a01 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
@@ -240,7 +240,7 @@ public class NodeToStringTests extends ESTestCase {
                 + "}).sum()");
         assertToString(
                   "(SSource (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) "
-                + "mapToInt (Args (ELambda (Pair def x)\n"
+                + "mapToInt (Args (ELambda (Pair null x)\n"
                 + "  (SReturn (EBinary (EVariable x) + (ENumeric 1)))))) sum)))",
                   "return [1, 2, 3].stream().mapToInt(x -> x + 1).sum()");
         assertToString(
@@ -250,7 +250,7 @@ public class NodeToStringTests extends ESTestCase {
                 + "  return a.length() - b.length()\n"
                 + "})");
         assertToString(
-                  "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n"
+                  "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair null a) (Pair null b)\n"
                 + "  (SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length))))))))",
                   "return ['a', 'b'].sort((a, b) -> a.length() - b.length())");
         assertToString(
@@ -371,14 +371,14 @@ public class NodeToStringTests extends ESTestCase {
         assertToString(
                   "(SSource\n"
                 + "  (SDeclBlock (SDeclaration int[] a (ENewArray int dims (Args (ENumeric 10)))))\n"
-                + "  (SReturn (PField (EVariable a) length)))", 
+                + "  (SReturn (PField (EVariable a) length)))",
                   "int[] a = new int[10];\n"
                 + "return a.length");
         assertToString(
                 "(SSource\n"
               + "  (SDeclBlock (SDeclaration org.elasticsearch.painless.FeatureTest a (ENewObj org.elasticsearch.painless.FeatureTest)))\n"
               + "  (SExpression (EAssignment (PField (EVariable a) x) = (ENumeric 10)))\n"
-              + "  (SReturn (PField (EVariable a) x)))", 
+              + "  (SReturn (PField (EVariable a) x)))",
                 "org.elasticsearch.painless.FeatureTest a = new org.elasticsearch.painless.FeatureTest();\n"
               + "a.x = 10;\n"
               + "return a.x");

From 5fbc86e2aa1cb84ad835a3dd9f13d6c03bd6b509 Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Mon, 24 Apr 2017 14:22:39 -0400
Subject: [PATCH 02/34] Allow painless to load stored fields (#24290)

We document that painless can load stored fields but it can't
because the classes that make that work aren't whitelisted.
---
 docs/reference/modules/scripting/fields.asciidoc         | 9 ++++-----
 .../org/elasticsearch/painless/org.elasticsearch.txt     | 6 ++++++
 2 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/docs/reference/modules/scripting/fields.asciidoc b/docs/reference/modules/scripting/fields.asciidoc
index 9b71866d989..0dbb5b6b42e 100644
--- a/docs/reference/modules/scripting/fields.asciidoc
+++ b/docs/reference/modules/scripting/fields.asciidoc
@@ -48,13 +48,13 @@ relevance `_score` of each document:
 
 [source,js]
 -------------------------------------
-PUT my_index/my_type/1
+PUT my_index/my_type/1?refresh
 {
   "text": "quick brown fox",
   "popularity": 1
 }
 
-PUT my_index/my_type/2
+PUT my_index/my_type/2?refresh
 {
   "text": "quick fox",
   "popularity": 5
@@ -93,7 +93,7 @@ store, enabled by default on all fields except for <<text,analyzed `text` fields
 
 [source,js]
 -------------------------------
-PUT my_index/my_type/1
+PUT my_index/my_type/1?refresh
 {
   "cost_price": 100
 }
@@ -186,7 +186,7 @@ PUT my_index
   }
 }
 
-PUT my_index/my_type/1
+PUT my_index/my_type/1?refresh
 {
   "title": "Mr",
   "first_name": "Barry",
@@ -229,4 +229,3 @@ The only time it really makes sense to use stored fields instead of the
 access a few small stored fields instead of the entire `_source`.
 
 =======================================================
-
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
index 68d86356e95..ce78f8a6315 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
@@ -157,3 +157,9 @@ class org.elasticsearch.painless.FeatureTest -> org.elasticsearch.painless.Featu
   Object twoFunctionsOfX(Function,Function)
   void listInput(List)
 }
+
+class org.elasticsearch.search.lookup.FieldLookup -> org.elasticsearch.search.lookup.FieldLookup extends Object {
+  def getValue()
+  List getValues()
+  boolean isEmpty()
+}

From e429d66956c88bc0b968eb39ec492a32ab61f316 Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Mon, 24 Apr 2017 16:08:19 -0400
Subject: [PATCH 03/34] CONSOLEify some more docs

Relates to #18160
---
 docs/build.gradle                             |  7 -----
 .../mapping/fields/all-field.asciidoc         |  2 ++
 .../mapping/params/analyzer.asciidoc          |  5 +---
 docs/reference/mapping/types/binary.asciidoc  |  3 +-
 docs/reference/mapping/types/ip.asciidoc      |  7 +++--
 docs/reference/mapping/types/nested.asciidoc  |  1 +
 docs/reference/mapping/types/object.asciidoc  |  2 +-
 .../mapping/types/percolator.asciidoc         | 30 +++++++++++++------
 8 files changed, 32 insertions(+), 25 deletions(-)

diff --git a/docs/build.gradle b/docs/build.gradle
index 63d9d12c2da..b75a88fc6ce 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -80,13 +80,6 @@ buildRestTests.expectedUnconvertedCandidates = [
   'reference/indices/shard-stores.asciidoc',
   'reference/ingest/ingest-node.asciidoc',
   'reference/mapping/dynamic/templates.asciidoc',
-  'reference/mapping/fields/all-field.asciidoc',
-  'reference/mapping/params/analyzer.asciidoc',
-  'reference/mapping/types/binary.asciidoc',
-  'reference/mapping/types/ip.asciidoc',
-  'reference/mapping/types/nested.asciidoc',
-  'reference/mapping/types/object.asciidoc',
-  'reference/mapping/types/percolator.asciidoc',
   'reference/modules/cross-cluster-search.asciidoc', // this is hard to test since we need 2 clusters -- maybe we can trick it into referencing itself...
   'reference/search/field-stats.asciidoc',
   'reference/search/profile.asciidoc',
diff --git a/docs/reference/mapping/fields/all-field.asciidoc b/docs/reference/mapping/fields/all-field.asciidoc
index 068fb45f7ec..f6cf8237e1c 100644
--- a/docs/reference/mapping/fields/all-field.asciidoc
+++ b/docs/reference/mapping/fields/all-field.asciidoc
@@ -101,6 +101,8 @@ requests>> (which is rewritten to a `query_string` query internally):
 --------------------------------
 GET _search?q=john+smith+new+york
 --------------------------------
+// TEST[skip:_all is no longer allowed]
+// CONSOLE
 
 Other queries, such as the <<query-dsl-match-query,`match`>> and
 <<query-dsl-term-query,`term`>> queries require you to specify the `_all` field
diff --git a/docs/reference/mapping/params/analyzer.asciidoc b/docs/reference/mapping/params/analyzer.asciidoc
index 0b60451e02a..358be142373 100644
--- a/docs/reference/mapping/params/analyzer.asciidoc
+++ b/docs/reference/mapping/params/analyzer.asciidoc
@@ -135,11 +135,7 @@ PUT my_index
       }
    }
 }
---------------------------------------------------
-// CONSOLE
 
-[source,js]
---------------------------------------------------
 PUT my_index/my_type/1
 {
    "title":"The Quick Brown Fox"
@@ -159,6 +155,7 @@ GET my_index/my_type/_search
    }
 }
 --------------------------------------------------
+// CONSOLE
 <1> `my_analyzer` analyzer which tokens all terms including stop words
 <2> `my_stop_analyzer` analyzer which removes stop words
 <3> `analyzer` setting that points to the `my_analyzer` analyzer which will be used at index time
diff --git a/docs/reference/mapping/types/binary.asciidoc b/docs/reference/mapping/types/binary.asciidoc
index 7f82523416f..556fc55b7f0 100644
--- a/docs/reference/mapping/types/binary.asciidoc
+++ b/docs/reference/mapping/types/binary.asciidoc
@@ -29,6 +29,7 @@ PUT my_index/my_type/1
   "blob": "U29tZSBiaW5hcnkgYmxvYg==" <1>
 }
 --------------------------------------------------
+// CONSOLE
 <1> The Base64 encoded binary value must not have embedded newlines `\n`.
 
 [[binary-params]]
@@ -49,5 +50,3 @@ The following parameters are accepted by `binary` fields:
     Whether the field value should be stored and retrievable separately from
     the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
     (default).
-
-
diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc
index 423b501cbcd..afb482c454b 100644
--- a/docs/reference/mapping/types/ip.asciidoc
+++ b/docs/reference/mapping/types/ip.asciidoc
@@ -34,7 +34,7 @@ GET my_index/_search
 }
 --------------------------------------------------
 // CONSOLE
-
+// TESTSETUP
 
 [[ip-params]]
 ==== Parameters for `ip` fields
@@ -86,6 +86,7 @@ GET my_index/_search
   }
 }
 --------------------------------------------------
+// CONSOLE
 
 or
 
@@ -100,6 +101,7 @@ GET my_index/_search
   }
 }
 --------------------------------------------------
+// CONSOLE
 
 Also beware that colons are special characters to the
 <<query-dsl-query-string-query,`query_string`>> query, so ipv6 addresses will
@@ -108,7 +110,7 @@ searched value:
 
 [source,js]
 --------------------------------------------------
-GET t/_search
+GET my_index/_search
 {
   "query": {
     "query_string" : {
@@ -117,3 +119,4 @@ GET t/_search
   }
 }
 --------------------------------------------------
+// CONSOLE
diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc
index 67896f07e22..8047193f934 100644
--- a/docs/reference/mapping/types/nested.asciidoc
+++ b/docs/reference/mapping/types/nested.asciidoc
@@ -42,6 +42,7 @@ would be transformed internally into a document that looks more like this:
   "user.last" :  [ "smith", "white" ]
 }
 --------------------------------------------------
+// NOTCONSOLE
 
 The `user.first` and `user.last` fields are flattened into multi-value fields,
 and the association between `alice` and `white` is lost.  This document would
diff --git a/docs/reference/mapping/types/object.asciidoc b/docs/reference/mapping/types/object.asciidoc
index e35f159712e..31f728d1ae0 100644
--- a/docs/reference/mapping/types/object.asciidoc
+++ b/docs/reference/mapping/types/object.asciidoc
@@ -35,6 +35,7 @@ pairs, something like this:
   "manager.name.last":  "Smith"
 }
 --------------------------------------------------
+// NOTCONSOLE
 
 An explicit mapping for the above document could look like this:
 
@@ -96,4 +97,3 @@ The following parameters are accepted by `object` fields:
 
 IMPORTANT: If you need to index arrays of objects instead of single objects,
 read <<nested>> first.
-
diff --git a/docs/reference/mapping/types/percolator.asciidoc b/docs/reference/mapping/types/percolator.asciidoc
index 6197c1b9d0b..1a5121ae307 100644
--- a/docs/reference/mapping/types/percolator.asciidoc
+++ b/docs/reference/mapping/types/percolator.asciidoc
@@ -15,27 +15,39 @@ If the following mapping configures the `percolator` field type for the
 
 [source,js]
 --------------------------------------------------
+PUT my_index
 {
-    "properties": {
-        "query": {
-            "type": "percolator"
+    "mappings": {
+        "doc": {
+            "properties": {
+                "query": {
+                    "type": "percolator"
+                },
+                "field": {
+                    "type": "text"
+                }
+            }
         }
     }
 }
 --------------------------------------------------
+// CONSOLE
+// TESTSETUP
 
-Then the following json snippet can be indexed as a native query:
+Then you can index a query:
 
 [source,js]
 --------------------------------------------------
+PUT my_index/doc/match_value
 {
     "query" : {
-		"match" : {
-			"field" : "value"
-		}
-	}
+        "match" : {
+            "field" : "value"
+        }
+    }
 }
 --------------------------------------------------
+// CONSOLE
 
 [IMPORTANT]
 =====================================
@@ -82,4 +94,4 @@ queries are indexed by the `percolator` field type then the get call is executed
 query evaluates these queries, the fetches terms, shapes etc. as the were upon index time will be used. Important to note
 is that fetching of terms that these queries do, happens both each time the percolator query gets indexed on both primary
 and replica shards, so the terms that are actually indexed can be different between shard copies, if the source index
-changed while indexing.
\ No newline at end of file
+changed while indexing.

From db93735321c3df5938c90d6575b2680c54677c9f Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Mon, 24 Apr 2017 17:06:54 -0400
Subject: [PATCH 04/34] CONSOLEify some of the docs documentation

delete, index, and update.

Relates to #18160
---
 docs/build.gradle                   |  3 --
 docs/reference/docs/delete.asciidoc | 33 ++++++++++++++++++----
 docs/reference/docs/index_.asciidoc | 44 +++++++++++++++--------------
 docs/reference/docs/update.asciidoc | 10 ++++---
 4 files changed, 57 insertions(+), 33 deletions(-)

diff --git a/docs/build.gradle b/docs/build.gradle
index b75a88fc6ce..a587cec658f 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -64,11 +64,8 @@ buildRestTests.expectedUnconvertedCandidates = [
   'reference/cluster/stats.asciidoc',
   'reference/cluster/tasks.asciidoc',
   'reference/docs/delete-by-query.asciidoc',
-  'reference/docs/delete.asciidoc',
-  'reference/docs/index_.asciidoc',
   'reference/docs/reindex.asciidoc',
   'reference/docs/update-by-query.asciidoc',
-  'reference/docs/update.asciidoc',
   'reference/index-modules/similarity.asciidoc',
   'reference/index-modules/store.asciidoc',
   'reference/index-modules/translog.asciidoc',
diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc
index dd5c1de1485..403c05679af 100644
--- a/docs/reference/docs/delete.asciidoc
+++ b/docs/reference/docs/delete.asciidoc
@@ -8,8 +8,10 @@ from an index called twitter, under a type called tweet, with id valued
 
 [source,js]
 --------------------------------------------------
-$ curl -XDELETE 'http://localhost:9200/twitter/tweet/1'
+DELETE /twitter/tweet/1
 --------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
 
 The result of the above delete operation is:
 
@@ -17,18 +19,21 @@ The result of the above delete operation is:
 --------------------------------------------------
 {
     "_shards" : {
-        "total" : 10,
+        "total" : 2,
         "failed" : 0,
-        "successful" : 10
+        "successful" : 2
     },
     "found" : true,
     "_index" : "twitter",
     "_type" : "tweet",
     "_id" : "1",
     "_version" : 2,
+    "_primary_term": 1,
+    "_seq_no": 5,
     "result": "deleted"
 }
 --------------------------------------------------
+// TESTRESPONSE[s/"successful" : 2/"successful" : 1/]
 
 [float]
 [[delete-versioning]]
@@ -48,10 +53,26 @@ When indexing using the ability to control the routing, in order to
 delete a document, the routing value should also be provided. For
 example:
 
+////
+Example to delete with routing
+
 [source,js]
 --------------------------------------------------
-$ curl -XDELETE 'http://localhost:9200/twitter/tweet/1?routing=kimchy'
+PUT /twitter/tweet/1?routing=kimchy
+{
+    "test": "test"
+}
 --------------------------------------------------
+// CONSOLE
+////
+
+
+[source,js]
+--------------------------------------------------
+DELETE /twitter/tweet/1?routing=kimchy
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
 
 The above will delete a tweet with id 1, but will be routed based on the
 user. Note, issuing a delete without the correct routing, will cause the
@@ -130,5 +151,7 @@ to 5 minutes:
 
 [source,js]
 --------------------------------------------------
-$ curl -XDELETE 'http://localhost:9200/twitter/tweet/1?timeout=5m'
+DELETE /twitter/tweet/1?timeout=5m
 --------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc
index 2af9cf0a0c5..bcbcee2f907 100644
--- a/docs/reference/docs/index_.asciidoc
+++ b/docs/reference/docs/index_.asciidoc
@@ -46,9 +46,9 @@ The `_shards` header provides information about the replication process of the i
 
 The index operation is successful in the case `successful` is at least 1.
 
-NOTE:   Replica shards may not all be started when an indexing operation successfully returns (by default, only the 
-        primary is required, but this behavior can be <<index-wait-for-active-shards,changed>>). In that case, 
-        `total` will be equal to the total shards based on the `number_of_replicas` setting and `successful` will be 
+NOTE:   Replica shards may not all be started when an indexing operation successfully returns (by default, only the
+        primary is required, but this behavior can be <<index-wait-for-active-shards,changed>>). In that case,
+        `total` will be equal to the total shards based on the `number_of_replicas` setting and `successful` will be
         equal to the number of shards started (primary plus replicas). If there were no failures, the `failed` will be 0.
 
 [float]
@@ -101,6 +101,7 @@ PUT twitter/tweet/1?version=2
 }
 --------------------------------------------------
 // CONSOLE
+// TEST[continued]
 // TEST[catch: conflict]
 
 *NOTE:* versioning is completely real time, and is not affected by the
@@ -312,46 +313,46 @@ if needed, the update is distributed to applicable replicas.
 [[index-wait-for-active-shards]]
 === Wait For Active Shards
 
-To improve the resiliency of writes to the system, indexing operations 
-can be configured to wait for a certain number of active shard copies 
+To improve the resiliency of writes to the system, indexing operations
+can be configured to wait for a certain number of active shard copies
 before proceeding with the operation. If the requisite number of active
-shard copies are not available, then the write operation must wait and 
-retry, until either the requisite shard copies have started or a timeout 
-occurs. By default, write operations only wait for the primary shards 
+shard copies are not available, then the write operation must wait and
+retry, until either the requisite shard copies have started or a timeout
+occurs. By default, write operations only wait for the primary shards
 to be active before proceeding (i.e. `wait_for_active_shards=1`).
 This default can be overridden in the index settings dynamically
-by setting `index.write.wait_for_active_shards`. To alter this behavior 
+by setting `index.write.wait_for_active_shards`. To alter this behavior
 per operation, the `wait_for_active_shards` request parameter can be used.
 
 Valid values are `all` or any positive integer up to the total number
 of configured copies per shard in the index (which is `number_of_replicas+1`).
-Specifying a negative value or a number greater than the number of 
+Specifying a negative value or a number greater than the number of
 shard copies will throw an error.
 
 For example, suppose we have a cluster of three nodes, `A`, `B`, and `C` and
-we create an index `index` with the number of replicas set to 3 (resulting in 
-4 shard copies, one more copy than there are nodes). If we 
+we create an index `index` with the number of replicas set to 3 (resulting in
+4 shard copies, one more copy than there are nodes). If we
 attempt an indexing operation, by default the operation will only ensure
 the primary copy of each shard is available before proceeding. This means
 that even if `B` and `C` went down, and `A` hosted the primary shard copies,
-the indexing operation would still proceed with only one copy of the data. 
+the indexing operation would still proceed with only one copy of the data.
 If `wait_for_active_shards` is set on the request to `3` (and all 3 nodes
-are up), then the indexing operation will require 3 active shard copies 
+are up), then the indexing operation will require 3 active shard copies
 before proceeding, a requirement which should be met because there are 3
 active nodes in the cluster, each one holding a copy of the shard. However,
-if we set `wait_for_active_shards` to `all` (or to `4`, which is the same), 
-the indexing operation will not proceed as we do not have all 4 copies of 
-each shard active in the index. The operation will timeout 
+if we set `wait_for_active_shards` to `all` (or to `4`, which is the same),
+the indexing operation will not proceed as we do not have all 4 copies of
+each shard active in the index. The operation will timeout
 unless a new node is brought up in the cluster to host the fourth copy of
 the shard.
 
-It is important to note that this setting greatly reduces the chances of 
-the write operation not writing to the requisite number of shard copies, 
+It is important to note that this setting greatly reduces the chances of
+the write operation not writing to the requisite number of shard copies,
 but it does not completely eliminate the possibility, because this check
 occurs before the write operation commences. Once the write operation
-is underway, it is still possible for replication to fail on any number of 
+is underway, it is still possible for replication to fail on any number of
 shard copies but still succeed on the primary. The `_shards` section of the
-write operation's response reveals the number of shard copies on which 
+write operation's response reveals the number of shard copies on which
 replication succeeded/failed.
 
 [source,js]
@@ -364,6 +365,7 @@ replication succeeded/failed.
     }
 }
 --------------------------------------------------
+// NOTCONSOLE
 
 [float]
 [[index-refresh]]
diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc
index c904326f2d4..47711830df6 100644
--- a/docs/reference/docs/update.asciidoc
+++ b/docs/reference/docs/update.asciidoc
@@ -75,7 +75,7 @@ We can also add a new field to the document:
 --------------------------------------------------
 POST test/type1/1/_update
 {
-    "script" : "ctx._source.new_field = \"value_of_new_field\""
+    "script" : "ctx._source.new_field = 'value_of_new_field'"
 }
 --------------------------------------------------
 // CONSOLE
@@ -87,7 +87,7 @@ Or remove a field from the document:
 --------------------------------------------------
 POST test/type1/1/_update
 {
-    "script" : "ctx._source.remove(\"new_field\")"
+    "script" : "ctx._source.remove('new_field')"
 }
 --------------------------------------------------
 // CONSOLE
@@ -102,7 +102,7 @@ the doc if the `tags` field contain `green`, otherwise it does nothing
 POST test/type1/1/_update
 {
     "script" : {
-        "inline": "if (ctx._source.tags.contains(params.tag)) { ctx.op = \"delete\" } else { ctx.op = \"none\" }",
+        "inline": "if (ctx._source.tags.contains(params.tag)) { ctx.op = 'delete' } else { ctx.op = 'none' }",
         "lang": "painless",
         "params" : {
             "tag" : "green"
@@ -242,6 +242,9 @@ POST sessions/session/dh3sgudg8gsrgl/_update
     "upsert" : {}
 }
 --------------------------------------------------
+// CONSOLE
+// TEST[s/"id": "my_web_session_summariser"/"inline": "ctx._source.page_view_event = params.pageViewEvent"/]
+// TEST[continued]
 
 [float]
 ==== `doc_as_upsert`
@@ -263,7 +266,6 @@ POST test/type1/1/_update
 // CONSOLE
 // TEST[continued]
 
-
 [float]
 === Parameters
 

From 3ae671aaf3dfdedf7871641c4a4617106b903999 Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Mon, 24 Apr 2017 17:22:06 -0400
Subject: [PATCH 05/34] Docs test: Be ok with different _seq_nos

The test wanted specific _primary_terms and _seq_nos but there is
no need to specify that.
---
 docs/reference/docs/delete.asciidoc | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc
index 403c05679af..6fbe5e33ce2 100644
--- a/docs/reference/docs/delete.asciidoc
+++ b/docs/reference/docs/delete.asciidoc
@@ -34,6 +34,8 @@ The result of the above delete operation is:
 }
 --------------------------------------------------
 // TESTRESPONSE[s/"successful" : 2/"successful" : 1/]
+// TESTRESPONSE[s/"_primary_term": 1/"_primary_term": $body._primary_term/]
+// TESTRESPONSE[s/"_seq_no": 5/"_seq_no": $body._seq_no/]
 
 [float]
 [[delete-versioning]]

From 6ebf08759bbb224401fb44a08665aa2c4c428f5e Mon Sep 17 00:00:00 2001
From: Ryan Ernst <ryan@iernst.net>
Date: Mon, 24 Apr 2017 15:45:20 -0700
Subject: [PATCH 06/34] Templates: Add compileTemplate method to ScriptService
 for template consumers (#24280)

This commit adds a compileTemplate method to the ScriptService.
Eventually this will be used to easily cutover all consumers to a new
TemplateService.

relates #16314
---
 .../index/query/QueryRewriteContext.java      |  6 ++--
 .../ingest/InternalTemplateService.java       | 19 ++++------
 .../elasticsearch/script/ScriptService.java   |  8 +++++
 .../template/CompiledTemplate.java            | 35 +++++++++++++++++++
 .../TransportSearchTemplateAction.java        |  8 ++---
 5 files changed, 57 insertions(+), 19 deletions(-)
 create mode 100644 core/src/main/java/org/elasticsearch/template/CompiledTemplate.java

diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java
index 183ab690ce2..03642115976 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java
@@ -30,6 +30,7 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.template.CompiledTemplate;
 
 import java.util.function.LongSupplier;
 
@@ -105,8 +106,7 @@ public class QueryRewriteContext {
     }
 
     public BytesReference getTemplateBytes(Script template) {
-        CompiledScript compiledTemplate = scriptService.compile(template, ScriptContext.Standard.SEARCH);
-        ExecutableScript executable = scriptService.executable(compiledTemplate, template.getParams());
-        return (BytesReference) executable.run();
+        CompiledTemplate compiledTemplate = scriptService.compileTemplate(template, ScriptContext.Standard.SEARCH);
+        return compiledTemplate.run(template.getParams());
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java
index 26d6737706b..b5aa2dbc51a 100644
--- a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java
+++ b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java
@@ -19,16 +19,16 @@
 
 package org.elasticsearch.ingest;
 
+import java.util.Collections;
+import java.util.Map;
+
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.script.CompiledScript;
-import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptType;
-
-import java.util.Collections;
-import java.util.Map;
+import org.elasticsearch.template.CompiledTemplate;
 
 public class InternalTemplateService implements TemplateService {
 
@@ -44,16 +44,11 @@ public class InternalTemplateService implements TemplateService {
         int mustacheEnd = template.indexOf("}}");
         if (mustacheStart != -1 && mustacheEnd != -1 && mustacheStart < mustacheEnd) {
             Script script = new Script(ScriptType.INLINE, "mustache", template, Collections.emptyMap());
-            CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.INGEST);
+            CompiledTemplate compiledTemplate = scriptService.compileTemplate(script, ScriptContext.Standard.INGEST);
             return new Template() {
                 @Override
                 public String execute(Map<String, Object> model) {
-                    ExecutableScript executableScript = scriptService.executable(compiledScript, model);
-                    Object result = executableScript.run();
-                    if (result instanceof BytesReference) {
-                        return ((BytesReference) result).utf8ToString();
-                    }
-                    return String.valueOf(result);
+                    return compiledTemplate.run(model).utf8ToString();
                 }
 
                 @Override
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java
index e0c7b3c63de..7a8691656a4 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptService.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java
@@ -38,6 +38,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
 import org.elasticsearch.common.cache.RemovalListener;
@@ -56,6 +57,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.template.CompiledTemplate;
 import org.elasticsearch.watcher.FileChangesListener;
 import org.elasticsearch.watcher.FileWatcher;
 import org.elasticsearch.watcher.ResourceWatcherService;
@@ -320,6 +322,12 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
         }
     }
 
+    /** Compiles a template. Note this will be moved to a separate TemplateService in the future. */
+    public CompiledTemplate compileTemplate(Script script, ScriptContext scriptContext) {
+        CompiledScript compiledScript = compile(script, scriptContext);
+        return params -> (BytesReference)executable(compiledScript, params).run();
+    }
+
     /**
      * Check whether there have been too many compilations within the last minute, throwing a circuit breaking exception if so.
      * This is a variant of the token bucket algorithm: https://en.wikipedia.org/wiki/Token_bucket
diff --git a/core/src/main/java/org/elasticsearch/template/CompiledTemplate.java b/core/src/main/java/org/elasticsearch/template/CompiledTemplate.java
new file mode 100644
index 00000000000..52f46202eb8
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/template/CompiledTemplate.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.template;
+
+import java.util.Map;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.script.CompiledScript;
+import org.elasticsearch.script.ScriptType;
+
+/**
+ * A template that may be executed.
+ */
+public interface CompiledTemplate {
+
+    /** Run a template and return the resulting string, encoded in utf8 bytes. */
+    BytesReference run(Map<String, Object> params);
+}
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java
index 61f099f6c24..60435e72a4b 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.script.mustache;
 
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
@@ -39,6 +40,7 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.template.CompiledTemplate;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -72,10 +74,8 @@ public class TransportSearchTemplateAction extends HandledTransportAction<Search
         try {
             Script script = new Script(request.getScriptType(), TEMPLATE_LANG, request.getScript(),
                 request.getScriptParams() == null ? Collections.emptyMap() : request.getScriptParams());
-            CompiledScript compiledScript = scriptService.compile(script, SEARCH);
-            ExecutableScript executable = scriptService.executable(compiledScript, script.getParams());
-
-            BytesReference source = (BytesReference) executable.run();
+            CompiledTemplate compiledScript = scriptService.compileTemplate(script, SEARCH);
+            BytesReference source = compiledScript.run(script.getParams());
             response.setSource(source);
 
             if (request.isSimulate()) {

From f2e31cdeefc4e03f5202951643b1c6e746817d10 Mon Sep 17 00:00:00 2001
From: Colin Goodheart-Smithe <colings86@users.noreply.github.com>
Date: Tue, 25 Apr 2017 10:30:08 +0100
Subject: [PATCH 07/34] [TEST] mute failing docs test

---
 docs/reference/docs/delete.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc
index 6fbe5e33ce2..73632557b62 100644
--- a/docs/reference/docs/delete.asciidoc
+++ b/docs/reference/docs/delete.asciidoc
@@ -11,7 +11,7 @@ from an index called twitter, under a type called tweet, with id valued
 DELETE /twitter/tweet/1
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:twitter]
+// TEST[skip:https://github.com/elastic/elasticsearch/issues/24303]
 
 The result of the above delete operation is:
 

From 508b774d76bc7e3787ac1545c8e6fae35a35fe4a Mon Sep 17 00:00:00 2001
From: Jason Tedor <jason@tedor.me>
Date: Tue, 25 Apr 2017 06:04:02 -0400
Subject: [PATCH 08/34] Revert "[TEST] mute failing docs test"

This reverts commit f2e31cdeefc4e03f5202951643b1c6e746817d10.
---
 docs/reference/docs/delete.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc
index 73632557b62..6fbe5e33ce2 100644
--- a/docs/reference/docs/delete.asciidoc
+++ b/docs/reference/docs/delete.asciidoc
@@ -11,7 +11,7 @@ from an index called twitter, under a type called tweet, with id valued
 DELETE /twitter/tweet/1
 --------------------------------------------------
 // CONSOLE
-// TEST[skip:https://github.com/elastic/elasticsearch/issues/24303]
+// TEST[setup:twitter]
 
 The result of the above delete operation is:
 

From 88de33d43d48dc4cc185306ebae72eedf1a97b4f Mon Sep 17 00:00:00 2001
From: Koen De Groote <kdg.private@gmail.com>
Date: Tue, 25 Apr 2017 13:13:55 +0200
Subject: [PATCH 09/34] Minor changes to collection creation from enums
 (#24274)

These changes are mainly cosmetic with minor perf advantages drawn from checkstyle.
---
 .../org/elasticsearch/cluster/node/DiscoveryNode.java    | 2 +-
 .../routing/allocation/AllocateUnassignedDecision.java   | 4 ++--
 .../org/elasticsearch/common/settings/SecureSetting.java | 9 ++-------
 .../java/org/elasticsearch/common/unit/TimeValue.java    | 3 ++-
 .../java/org/elasticsearch/ingest/IngestDocument.java    | 3 ++-
 .../java/org/elasticsearch/script/ScriptSettings.java    | 3 ++-
 .../org/elasticsearch/search/internal/SearchContext.java | 4 ++--
 .../java/org/elasticsearch/AnalysisFactoryTestCase.java  | 7 ++++---
 .../org/elasticsearch/cluster/ESAllocationTestCase.java  | 4 ++--
 .../java/org/elasticsearch/test/ClusterServiceUtils.java | 4 ++--
 .../org/elasticsearch/test/PosixPermissionsResetter.java | 6 +++---
 11 files changed, 24 insertions(+), 25 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
index 3eea37e2c89..0e8435d0f8e 100644
--- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
+++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
@@ -198,7 +198,7 @@ public class DiscoveryNode implements Writeable, ToXContent {
 
     /** extract node roles from the given settings */
     public static Set<Role> getRolesFromSettings(Settings settings) {
-        Set<Role> roles = new HashSet<>();
+        Set<Role> roles = EnumSet.noneOf(Role.class);
         if (Node.NODE_INGEST_SETTING.get(settings)) {
             roles.add(Role.INGEST);
         }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java
index 49b9604e345..decdafd724c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java
@@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 import java.util.Collections;
-import java.util.HashMap;
+import java.util.EnumMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -49,7 +49,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision {
      */
     private static final Map<AllocationStatus, AllocateUnassignedDecision> CACHED_DECISIONS;
     static {
-        Map<AllocationStatus, AllocateUnassignedDecision> cachedDecisions = new HashMap<>();
+        Map<AllocationStatus, AllocateUnassignedDecision> cachedDecisions = new EnumMap<>(AllocationStatus.class);
         cachedDecisions.put(AllocationStatus.FETCHING_SHARD_DATA,
             new AllocateUnassignedDecision(AllocationStatus.FETCHING_SHARD_DATA, null, null, null, false, 0L, 0L));
         cachedDecisions.put(AllocationStatus.NO_VALID_SHARD_COPY,
diff --git a/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java
index 2efb36696c5..e93240b9a32 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java
@@ -21,12 +21,9 @@ package org.elasticsearch.common.settings;
 
 import java.io.InputStream;
 import java.security.GeneralSecurityException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Objects;
+import java.util.EnumSet;
 import java.util.Set;
 
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.util.ArrayUtils;
 
 
@@ -36,9 +33,7 @@ import org.elasticsearch.common.util.ArrayUtils;
  * This class allows access to settings from the Elasticsearch keystore.
  */
 public abstract class SecureSetting<T> extends Setting<T> {
-    private static final Set<Property> ALLOWED_PROPERTIES = new HashSet<>(
-        Arrays.asList(Property.Deprecated, Property.Shared)
-    );
+    private static final Set<Property> ALLOWED_PROPERTIES = EnumSet.of(Property.Deprecated, Property.Shared);
 
     private static final Property[] FIXED_PROPERTIES = {
         Property.NodeScope
diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java
index 4ab91aac5b5..4c3344eb9d8 100644
--- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java
+++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java
@@ -31,6 +31,7 @@ import org.joda.time.format.PeriodFormatter;
 
 import java.io.IOException;
 import java.util.Collections;
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Locale;
@@ -48,7 +49,7 @@ public class TimeValue implements Writeable, Comparable<TimeValue> {
     private static Map<Byte, TimeUnit> BYTE_TIME_UNIT_MAP;
 
     static {
-        final Map<TimeUnit, Byte> timeUnitByteMap = new HashMap<>();
+        final Map<TimeUnit, Byte> timeUnitByteMap = new EnumMap<>(TimeUnit.class);
         timeUnitByteMap.put(TimeUnit.NANOSECONDS, (byte)0);
         timeUnitByteMap.put(TimeUnit.MICROSECONDS, (byte)1);
         timeUnitByteMap.put(TimeUnit.MILLISECONDS, (byte)2);
diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java
index 02d3988c4a8..fcf49ef6992 100644
--- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java
+++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java
@@ -31,6 +31,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Base64;
 import java.util.Date;
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -554,7 +555,7 @@ public final class IngestDocument {
      * Metadata fields that used to be accessible as ordinary top level fields will be removed as part of this call.
      */
     public Map<MetaData, String> extractMetadata() {
-        Map<MetaData, String> metadataMap = new HashMap<>();
+        Map<MetaData, String> metadataMap = new EnumMap<>(MetaData.class);
         for (MetaData metaData : MetaData.values()) {
             metadataMap.put(metaData, cast(metaData.getFieldName(), sourceAndMetadata.remove(metaData.getFieldName()), String.class));
         }
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java
index 447097a4884..e4387aa52dd 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Settings;
 
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -35,7 +36,7 @@ public class ScriptSettings {
     private static final Map<ScriptType, Setting<Boolean>> SCRIPT_TYPE_SETTING_MAP;
 
     static {
-        Map<ScriptType, Setting<Boolean>> scriptTypeSettingMap = new HashMap<>();
+        Map<ScriptType, Setting<Boolean>> scriptTypeSettingMap = new EnumMap<>(ScriptType.class);
         for (ScriptType scriptType : ScriptType.values()) {
             scriptTypeSettingMap.put(scriptType, Setting.boolSetting(
                 ScriptModes.sourceKey(scriptType),
diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 59d3646f18f..ebb2157d981 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -63,7 +63,7 @@ import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
 
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.EnumMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -336,7 +336,7 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
      */
     public void addReleasable(Releasable releasable, Lifetime lifetime) {
         if (clearables == null) {
-            clearables = new HashMap<>();
+            clearables = new EnumMap<>(Lifetime.class);
         }
         List<Releasable> releasables = clearables.get(lifetime);
         if (releasables == null) {
diff --git a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
index 7f60058788a..d49a1b4cae5 100644
--- a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
@@ -98,6 +98,7 @@ import org.elasticsearch.indices.analysis.PreBuiltTokenizers;
 import org.elasticsearch.test.ESTestCase;
 
 import java.util.Collection;
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -152,7 +153,7 @@ public class AnalysisFactoryTestCase extends ESTestCase {
 
     static final Map<PreBuiltTokenizers, Class<?>> PREBUILT_TOKENIZERS;
     static {
-        PREBUILT_TOKENIZERS = new HashMap<>();
+        PREBUILT_TOKENIZERS = new EnumMap<>(PreBuiltTokenizers.class);
         for (PreBuiltTokenizers tokenizer : PreBuiltTokenizers.values()) {
             Class<?> luceneFactoryClazz;
             switch (tokenizer) {
@@ -289,7 +290,7 @@ public class AnalysisFactoryTestCase extends ESTestCase {
 
     static final Map<PreBuiltTokenFilters, Class<?>> PREBUILT_TOKENFILTERS;
     static {
-        PREBUILT_TOKENFILTERS = new HashMap<>();
+        PREBUILT_TOKENFILTERS = new EnumMap<>(PreBuiltTokenFilters.class);
         for (PreBuiltTokenFilters tokenizer : PreBuiltTokenFilters.values()) {
             Class<?> luceneFactoryClazz;
             switch (tokenizer) {
@@ -335,7 +336,7 @@ public class AnalysisFactoryTestCase extends ESTestCase {
 
     static final Map<PreBuiltCharFilters, Class<?>> PREBUILT_CHARFILTERS;
     static {
-        PREBUILT_CHARFILTERS = new HashMap<>();
+        PREBUILT_CHARFILTERS = new EnumMap<>(PreBuiltCharFilters.class);
         for (PreBuiltCharFilters tokenizer : PreBuiltCharFilters.values()) {
             Class<?> luceneFactoryClazz;
             switch (tokenizer) {
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
index a0cc0130bfc..02f8896be4d 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
@@ -43,7 +43,7 @@ import org.elasticsearch.test.gateway.TestGatewayAllocator;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashSet;
+import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
@@ -95,7 +95,7 @@ public abstract class ESAllocationTestCase extends ESTestCase {
     }
 
     protected static Set<DiscoveryNode.Role> MASTER_DATA_ROLES =
-            Collections.unmodifiableSet(new HashSet<>(Arrays.asList(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA)));
+            Collections.unmodifiableSet(EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA));
 
     protected static DiscoveryNode newNode(String nodeId) {
         return newNode(nodeId, Version.CURRENT);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java
index 01f626a1e2b..b2eced6fd35 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java
@@ -33,7 +33,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashSet;
+import java.util.EnumSet;
 import java.util.concurrent.CountDownLatch;
 
 import static junit.framework.TestCase.fail;
@@ -42,7 +42,7 @@ public class ClusterServiceUtils {
 
     public static ClusterService createClusterService(ThreadPool threadPool) {
         DiscoveryNode discoveryNode = new DiscoveryNode("node", ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(),
-                                                           new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())),Version.CURRENT);
+                EnumSet.allOf(DiscoveryNode.Role.class), Version.CURRENT);
         return createClusterService(threadPool, discoveryNode);
     }
 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java b/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java
index a644205bad9..19bea3802f0 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java
@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.test;
 
+import java.util.EnumSet;
+import java.util.Set;
 import org.junit.Assert;
 
 import java.io.IOException;
@@ -25,8 +27,6 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.attribute.PosixFileAttributeView;
 import java.nio.file.attribute.PosixFilePermission;
-import java.util.HashSet;
-import java.util.Set;
 
 /** Stores the posix attributes for a path and resets them on close. */
 public class PosixPermissionsResetter implements AutoCloseable {
@@ -46,6 +46,6 @@ public class PosixPermissionsResetter implements AutoCloseable {
     }
 
     public Set<PosixFilePermission> getCopyPermissions() {
-        return new HashSet<>(permissions);
+        return EnumSet.copyOf(permissions);
     }
 }

From 739cb35d1be6509eeca5200857de6f377270d735 Mon Sep 17 00:00:00 2001
From: Guillaume Le Floch <glfloch@gmail.com>
Date: Tue, 25 Apr 2017 13:43:21 +0200
Subject: [PATCH 10/34] Allow passing single scrollID in clear scroll API body
 (#24242)

* Allow single scrollId in string format

Closes #24233
---
 .../action/search/RestClearScrollAction.java  | 13 ++++--
 docs/reference/search/request/scroll.asciidoc |  2 +-
 .../rest-api-spec/test/scroll/11_clear.yaml   | 46 ++++++++++++++++++-
 3 files changed, 56 insertions(+), 5 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
index 5f39db3a357..c7281da23f1 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
@@ -78,10 +78,17 @@ public class RestClearScrollAction extends BaseRestHandler {
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     currentFieldName = parser.currentName();
-                } else if ("scroll_id".equals(currentFieldName) && token == XContentParser.Token.START_ARRAY) {
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                } else if ("scroll_id".equals(currentFieldName)){
+                    if (token == XContentParser.Token.START_ARRAY) {
+                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                            if (token.isValue() == false) {
+                                throw new IllegalArgumentException("scroll_id array element should only contain scroll_id");
+                            }
+                            clearScrollRequest.addScrollId(parser.text());
+                        }
+                    } else {
                         if (token.isValue() == false) {
-                            throw new IllegalArgumentException("scroll_id array element should only contain scroll_id");
+                            throw new IllegalArgumentException("scroll_id element should only contain scroll_id");
                         }
                         clearScrollRequest.addScrollId(parser.text());
                     }
diff --git a/docs/reference/search/request/scroll.asciidoc b/docs/reference/search/request/scroll.asciidoc
index 82a27881720..1b78c0bc9ba 100644
--- a/docs/reference/search/request/scroll.asciidoc
+++ b/docs/reference/search/request/scroll.asciidoc
@@ -144,7 +144,7 @@ cleared as soon as the scroll is not being used anymore using the
 ---------------------------------------
 DELETE /_search/scroll
 {
-    "scroll_id" : ["DXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAD4WYm9laVYtZndUQlNsdDcwakFMNjU1QQ=="]
+    "scroll_id" : "DXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAD4WYm9laVYtZndUQlNsdDcwakFMNjU1QQ=="
 }
 ---------------------------------------
 // CONSOLE
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/11_clear.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/11_clear.yaml
index aa4885825d2..c2a026df1d7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/11_clear.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/11_clear.yaml
@@ -38,7 +38,7 @@
           scroll_id: $scroll_id1
 
 ---
-"Body params override query string":
+"Body params with array param override query string":
   - do:
       indices.create:
           index:  test_scroll
@@ -76,3 +76,47 @@
         catch: missing
         clear_scroll:
           scroll_id: $scroll_id1
+
+---
+"Body params with string param scroll id override query string":
+  - skip:
+      version: " - 5.99.99"
+      reason:  this uses a new API that has been added in 6.0
+
+  - do:
+      indices.create:
+          index:  test_scroll
+  - do:
+      index:
+          index:  test_scroll
+          type:   test
+          id:     42
+          body:   { foo: bar }
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        index: test_scroll
+        scroll: 1m
+        body:
+          query:
+            match_all: {}
+
+  - set: {_scroll_id: scroll_id1}
+
+  - do:
+      clear_scroll:
+        scroll_id: "invalid_scroll_id"
+        body: { "scroll_id": "$scroll_id1" }
+
+  - do:
+      catch: missing
+      scroll:
+        scroll_id: $scroll_id1
+
+  - do:
+        catch: missing
+        clear_scroll:
+          scroll_id: $scroll_id1

From 92d6b2b60d7f3bb397acbfdad865f5835a558fa9 Mon Sep 17 00:00:00 2001
From: Bodecker DellaMaria <bojdell@gmail.com>
Date: Tue, 25 Apr 2017 07:43:55 -0500
Subject: [PATCH 11/34] Add newly open-sourced Scala REST client "escalar"
 (#24174)

Workday recently open-sourced our internal Scala wrapper for the Elasticsearch REST API. We plan to continue maintaining the library and use it in our products. Thought it would be a good idea to link it here in case anyone else is interested in using it!
---
 docs/community-clients/index.asciidoc | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc
index 99d7e5f768c..fbb4a3ddce8 100644
--- a/docs/community-clients/index.asciidoc
+++ b/docs/community-clients/index.asciidoc
@@ -238,16 +238,19 @@ The following projects appear to be abandoned:
 * https://github.com/sksamuel/elastic4s[elastic4s]:
   Scala DSL.
 
-* https://github.com/scalastuff/esclient[esclient]:
-  Thin Scala client.
-
 * https://github.com/gphat/wabisabi[wabisabi]:
   Asynchronous REST API Scala client.
+  
+* https://github.com/workday/escalar[escalar]:
+  Type-safe Scala wrapper for the REST API.
 
 * https://github.com/SumoLogic/elasticsearch-client[elasticsearch-client]:
   Scala DSL that uses the REST API. Akka and AWS helpers included.
 
-The following project appears to be abandoned:
+The following projects appear to be abandoned:
+
+* https://github.com/scalastuff/esclient[esclient]:
+  Thin Scala client.
 
 * https://github.com/bsadeh/scalastic[scalastic]:
   Scala client.

From e69147a87098a7261586225c9d5dae7f51c01d9f Mon Sep 17 00:00:00 2001
From: Simon Willnauer <simonw@apache.org>
Date: Tue, 25 Apr 2017 17:34:25 +0200
Subject: [PATCH 12/34] Add support for `tests.enable_mock_modules` to
 ESIntegTestCase (#24309)

`tests.enable_mock_modules` is a documented but unrespected / unused
option to disable all mock modules / plugins during test runs. This
will basically side-step mock assertions like check-index on shard closing.
This can speed up test-execution dramatically on nodes with slow disks etc.

Relates to #24304
---
 .../src/main/java/org/elasticsearch/test/ESIntegTestCase.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
index 7c8d8cd1a55..0240a8c4315 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -299,6 +299,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
      */
     public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules";
 
+    private static final boolean MOCK_MODULES_ENABLED = "true".equals(System.getProperty(TESTS_ENABLE_MOCK_MODULES, "true"));
     /**
      * Threshold at which indexing switches from frequently async to frequently bulk.
      */
@@ -1882,7 +1883,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     /** Return the mock plugins the cluster should use */
     protected Collection<Class<? extends Plugin>> getMockPlugins() {
         final ArrayList<Class<? extends Plugin>> mocks = new ArrayList<>();
-        if (randomBoolean()) { // sometimes run without those completely
+        if (MOCK_MODULES_ENABLED && randomBoolean()) { // sometimes run without those completely
             if (randomBoolean() && addMockTransportService()) {
                 mocks.add(MockTransportService.TestPlugin.class);
             }

From 0adaf9fb4ce2daaefebd1d08a42a7f691d0c3b55 Mon Sep 17 00:00:00 2001
From: Danilo Akamine <danilowz@gmail.com>
Date: Tue, 25 Apr 2017 12:48:44 -0400
Subject: [PATCH 13/34] Drop `search_analyzer` parameter from keyword.asciidoc
 (#24221)

`search_analyzer` isn't supported by `keyword` fields so this removes
it from the documentation for them.
---
 docs/reference/mapping/types/keyword.asciidoc | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/docs/reference/mapping/types/keyword.asciidoc b/docs/reference/mapping/types/keyword.asciidoc
index aba8fb9ea50..e560f8ae1d0 100644
--- a/docs/reference/mapping/types/keyword.asciidoc
+++ b/docs/reference/mapping/types/keyword.asciidoc
@@ -91,11 +91,6 @@ The following parameters are accepted by `keyword` fields:
     the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
     (default).
 
-<<search-analyzer,`search_analyzer`>>::
-
-    The <<analyzer,`analyzer`>> that should be used at search time on
-    <<mapping-index,`analyzed`>> fields. Defaults to the `analyzer` setting.
-
 <<similarity,`similarity`>>::
 
     Which scoring algorithm or _similarity_ should be used. Defaults

From 1b660c51275972e01520becafafc82110c8e50a0 Mon Sep 17 00:00:00 2001
From: Jason Tedor <jason@tedor.me>
Date: Tue, 25 Apr 2017 16:25:52 -0400
Subject: [PATCH 14/34] Fix incorrect logger invocation

It looks like auto-complete gave us a nasty surprise here with
Logger#equals being invoked instead of Logger#error swallowing the
absolute worst-possible level of a log message. This commit fixes the
invocation.
---
 .../elasticsearch/transport/netty4/Netty4InternalESLogger.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
index aaa277e34b3..91bbe1c1a9b 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
@@ -161,7 +161,7 @@ class Netty4InternalESLogger extends AbstractInternalLogger {
 
     @Override
     public void error(String msg) {
-        logger.equals(msg);
+        logger.error(msg);
     }
 
     @Override

From b744dc3bccda4cf0644dd06661c7886c4b6a4696 Mon Sep 17 00:00:00 2001
From: Till Backhaus <tback@users.noreply.github.com>
Date: Tue, 25 Apr 2017 22:51:58 +0200
Subject: [PATCH 15/34] Link to minimum master nodes docs from Zen docs

This commit adds a link to the minimum master nodes section of the
important settings docs from the Zen discovery docs to clarify the
meaning and importance of setting minimum master nodes to a quorum of
master-eligible nodes.

Relates #24311
---
 docs/reference/modules/discovery/zen.asciidoc | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/docs/reference/modules/discovery/zen.asciidoc b/docs/reference/modules/discovery/zen.asciidoc
index d65f278d41a..349dd1800e9 100644
--- a/docs/reference/modules/discovery/zen.asciidoc
+++ b/docs/reference/modules/discovery/zen.asciidoc
@@ -77,9 +77,10 @@ complete and for the elected node to accept its mastership. The same setting con
 active master eligible nodes that should be a part of any active cluster. If this requirement is not met the
 active master node will step down and a new master election will begin.
 
-This setting must be set to a quorum of your master eligible nodes. It is recommended to avoid
-having only two master eligible nodes, since a quorum of two is two. Therefore, a loss
-of either master eligible node will result in an inoperable cluster.
+This setting must be set to a <<minimum_master_nodes,quorum>> of your master
+eligible nodes. It is recommended to avoid having only two master eligible
+nodes, since a quorum of two is two. Therefore, a loss of either master
+eligible node will result in an inoperable cluster.
 
 [float]
 [[fault-detection]]

From fc97e25b564011bdb1d272a15e5b6b1f22c0c633 Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Tue, 25 Apr 2017 21:11:47 -0400
Subject: [PATCH 16/34] Add task to look for tests in src/main (#24298)

Creates a new task `namingConventionsMain`, that runs on the
`buildSrc` and `test:framework` projects and fails the build if
any of the classes in the main artifacts are named like tests or
are non-abstract subclasses of ESTestCase.

It also fixes the three tests that would cause it to fail.
---
 buildSrc/build.gradle                         |   7 +
 .../precommit/NamingConventionsTask.groovy    |  50 ++--
 .../test/NamingConventionsCheck.java          | 232 ++++++++++++------
 .../test/NamingConventionsCheckInMainIT.java  |  26 ++
 .../NamingConventionsCheckInMainTests.java    |  26 ++
 test/framework/build.gradle                   |   5 +
 .../AnalysisFactoryTestCase.java              |   2 +-
 .../disruption/LongGCDisruptionTests.java}    |   2 +-
 .../test/disruption/NetworkDisruptionIT.java  |   0
 .../disruption/NetworkDisruptionTests.java    |   0
 10 files changed, 255 insertions(+), 95 deletions(-)
 create mode 100644 buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java
 create mode 100644 buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java
 rename test/framework/src/{main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java => test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java} (99%)
 rename test/framework/src/{main => test}/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java (100%)
 rename test/framework/src/{main => test}/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java (100%)

diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 6536c77e587..0839b8a22f8 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -156,4 +156,11 @@ if (project != rootProject) {
     testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase'
     integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase'
   }
+
+  task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
+    checkForTestsInMain = true
+    testClass = namingConventions.testClass
+    integTestClass = namingConventions.integTestClass
+  }
+  precommit.dependsOn namingConventionsMain
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy
index 52de7dac2d5..2711a0e38f2 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy
@@ -38,17 +38,7 @@ public class NamingConventionsTask extends LoggedExec {
      * inputs (ie the jars/class files).
      */
     @OutputFile
-    File successMarker = new File(project.buildDir, 'markers/namingConventions')
-
-    /**
-     * The classpath to run the naming conventions checks against. Must contain the files in the test
-     * output directory and everything required to load those classes.
-     *
-     * We don't declare the actual test files as a dependency or input because if they change then
-     * this will change.
-     */
-    @InputFiles
-    FileCollection classpath = project.sourceSets.test.runtimeClasspath
+    File successMarker = new File(project.buildDir, "markers/${this.name}")
 
     /**
      * Should we skip the integ tests in disguise tests? Defaults to true because only core names its
@@ -69,18 +59,35 @@ public class NamingConventionsTask extends LoggedExec {
     @Input
     String integTestClass = 'org.elasticsearch.test.ESIntegTestCase'
 
+    /**
+     * Should the test also check the main classpath for test classes instead of
+     * doing the usual checks to the test classpath.
+     */
+    @Input
+    boolean checkForTestsInMain = false;
+
     public NamingConventionsTask() {
         // Extra classpath contains the actual test
-        project.configurations.create('namingConventions')
-        Dependency buildToolsDep = project.dependencies.add('namingConventions',
-                "org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}")
-        buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts.
+        if (false == project.configurations.names.contains('namingConventions')) {
+            project.configurations.create('namingConventions')
+            Dependency buildToolsDep = project.dependencies.add('namingConventions',
+                    "org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}")
+            buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts.
+        }
         FileCollection extraClasspath = project.configurations.namingConventions
         dependsOn(extraClasspath)
 
-        description = "Runs NamingConventionsCheck on ${classpath}"
+        FileCollection classpath = project.sourceSets.test.runtimeClasspath
+        inputs.files(classpath)
+        description = "Tests that test classes aren't misnamed or misplaced"
         executable = new File(project.javaHome, 'bin/java')
-        onlyIf { project.sourceSets.test.output.classesDir.exists() }
+        if (false == checkForTestsInMain) {
+            /* This task is created by default for all subprojects with this
+             * setting and there is no point in running it if the files don't
+             * exist. */
+            onlyIf { project.sourceSets.test.output.classesDir.exists() }
+        }
+
         /*
          * We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
          * ready for us. Strangely neither one on their own are good enough.
@@ -104,7 +111,14 @@ public class NamingConventionsTask extends LoggedExec {
                 if (':build-tools'.equals(project.path)) {
                     args('--self-test')
                 }
-                args('--', project.sourceSets.test.output.classesDir.absolutePath)
+                if (checkForTestsInMain) {
+                    args('--main')
+                    args('--')
+                    args(project.sourceSets.main.output.classesDir.absolutePath)
+                } else {
+                    args('--')
+                    args(project.sourceSets.test.output.classesDir.absolutePath)
+                }
             }
         }
         doLast { successMarker.setText("", 'UTF-8') }
diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java
index cbfa31d1aaf..9bd14675d34 100644
--- a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java
+++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java
@@ -28,6 +28,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.util.HashSet;
+import java.util.Objects;
 import java.util.Set;
 
 /**
@@ -49,6 +50,7 @@ public class NamingConventionsCheck {
         Path rootPath = null;
         boolean skipIntegTestsInDisguise = false;
         boolean selfTest = false;
+        boolean checkMainClasses = false;
         for (int i = 0; i < args.length; i++) {
             String arg = args[i];
             switch (arg) {
@@ -64,6 +66,9 @@ public class NamingConventionsCheck {
                 case "--self-test":
                     selfTest = true;
                     break;
+                case "--main":
+                    checkMainClasses = true;
+                    break;
                 case "--":
                     rootPath = Paths.get(args[++i]);
                     break;
@@ -73,28 +78,43 @@ public class NamingConventionsCheck {
         }
 
         NamingConventionsCheck check = new NamingConventionsCheck(testClass, integTestClass);
-        check.check(rootPath, skipIntegTestsInDisguise);
+        if (checkMainClasses) {
+            check.checkMain(rootPath);
+        } else {
+            check.checkTests(rootPath, skipIntegTestsInDisguise);
+        }
 
         if (selfTest) {
-            assertViolation("WrongName", check.missingSuffix);
-            assertViolation("WrongNameTheSecond", check.missingSuffix);
-            assertViolation("DummyAbstractTests", check.notRunnable);
-            assertViolation("DummyInterfaceTests", check.notRunnable);
-            assertViolation("InnerTests", check.innerClasses);
-            assertViolation("NotImplementingTests", check.notImplementing);
-            assertViolation("PlainUnit", check.pureUnitTest);
+            if (checkMainClasses) {
+                assertViolation(NamingConventionsCheckInMainTests.class.getName(), check.testsInMain);
+                assertViolation(NamingConventionsCheckInMainIT.class.getName(), check.testsInMain);
+            } else {
+                assertViolation("WrongName", check.missingSuffix);
+                assertViolation("WrongNameTheSecond", check.missingSuffix);
+                assertViolation("DummyAbstractTests", check.notRunnable);
+                assertViolation("DummyInterfaceTests", check.notRunnable);
+                assertViolation("InnerTests", check.innerClasses);
+                assertViolation("NotImplementingTests", check.notImplementing);
+                assertViolation("PlainUnit", check.pureUnitTest);
+            }
         }
 
         // Now we should have no violations
-        assertNoViolations("Not all subclasses of " + check.testClass.getSimpleName()
-                + " match the naming convention. Concrete classes must end with [Tests]", check.missingSuffix);
+        assertNoViolations(
+                "Not all subclasses of " + check.testClass.getSimpleName()
+                    + " match the naming convention. Concrete classes must end with [Tests]",
+                check.missingSuffix);
         assertNoViolations("Classes ending with [Tests] are abstract or interfaces", check.notRunnable);
         assertNoViolations("Found inner classes that are tests, which are excluded from the test runner", check.innerClasses);
         assertNoViolations("Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]", check.pureUnitTest);
         assertNoViolations("Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]", check.notImplementing);
+        assertNoViolations(
+                "Classes ending with [Tests] or [IT] or extending [" + check.testClass.getSimpleName() + "] must be in src/test/java",
+                check.testsInMain);
         if (skipIntegTestsInDisguise == false) {
-            assertNoViolations("Subclasses of " + check.integTestClass.getSimpleName() +
-                    " should end with IT as they are integration tests", check.integTestsInDisguise);
+            assertNoViolations(
+                    "Subclasses of " + check.integTestClass.getSimpleName() + " should end with IT as they are integration tests",
+                    check.integTestsInDisguise);
         }
     }
 
@@ -104,86 +124,78 @@ public class NamingConventionsCheck {
     private final Set<Class<?>> integTestsInDisguise = new HashSet<>();
     private final Set<Class<?>> notRunnable = new HashSet<>();
     private final Set<Class<?>> innerClasses = new HashSet<>();
+    private final Set<Class<?>> testsInMain = new HashSet<>();
 
     private final Class<?> testClass;
     private final Class<?> integTestClass;
 
     public NamingConventionsCheck(Class<?> testClass, Class<?> integTestClass) {
-        this.testClass = testClass;
+        this.testClass = Objects.requireNonNull(testClass, "--test-class is required");
         this.integTestClass = integTestClass;
     }
 
-    public void check(Path rootPath, boolean skipTestsInDisguised) throws IOException {
-        Files.walkFileTree(rootPath, new FileVisitor<Path>() {
-            /**
-             * The package name of the directory we are currently visiting. Kept as a string rather than something fancy because we load
-             * just about every class and doing so requires building a string out of it anyway. At least this way we don't need to build the
-             * first part of the string over and over and over again.
-             */
-            private String packageName;
-
+    public void checkTests(Path rootPath, boolean skipTestsInDisguised) throws IOException {
+        Files.walkFileTree(rootPath, new TestClassVisitor() {
             @Override
-            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
-                // First we visit the root directory
-                if (packageName == null) {
-                    // And it package is empty string regardless of the directory name
-                    packageName = "";
-                } else {
-                    packageName += dir.getFileName() + ".";
+            protected void visitTestClass(Class<?> clazz) {
+                if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) {
+                    integTestsInDisguise.add(clazz);
                 }
-                return FileVisitResult.CONTINUE;
-            }
-
-            @Override
-            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
-                // Go up one package by jumping back to the second to last '.'
-                packageName = packageName.substring(0, 1 + packageName.lastIndexOf('.', packageName.length() - 2));
-                return FileVisitResult.CONTINUE;
-            }
-
-            @Override
-            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-                String filename = file.getFileName().toString();
-                if (filename.endsWith(".class")) {
-                    String className = filename.substring(0, filename.length() - ".class".length());
-                    Class<?> clazz = loadClassWithoutInitializing(packageName + className);
-                    if (clazz.getName().endsWith("Tests")) {
-                        if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) {
-                            integTestsInDisguise.add(clazz);
-                        }
-                        if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
-                            notRunnable.add(clazz);
-                        } else if (isTestCase(clazz) == false) {
-                            notImplementing.add(clazz);
-                        } else if (Modifier.isStatic(clazz.getModifiers())) {
-                            innerClasses.add(clazz);
-                        }
-                    } else if (clazz.getName().endsWith("IT")) {
-                        if (isTestCase(clazz) == false) {
-                            notImplementing.add(clazz);
-                        }
-                    } else if (Modifier.isAbstract(clazz.getModifiers()) == false && Modifier.isInterface(clazz.getModifiers()) == false) {
-                        if (isTestCase(clazz)) {
-                            missingSuffix.add(clazz);
-                        } else if (junit.framework.Test.class.isAssignableFrom(clazz)) {
-                            pureUnitTest.add(clazz);
-                        }
-                    }
+                if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+                    notRunnable.add(clazz);
+                } else if (isTestCase(clazz) == false) {
+                    notImplementing.add(clazz);
+                } else if (Modifier.isStatic(clazz.getModifiers())) {
+                    innerClasses.add(clazz);
                 }
-                return FileVisitResult.CONTINUE;
-            }
-
-            private boolean isTestCase(Class<?> clazz) {
-                return testClass.isAssignableFrom(clazz);
             }
 
             @Override
-            public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
-                throw exc;
+            protected void visitIntegrationTestClass(Class<?> clazz) {
+                if (isTestCase(clazz) == false) {
+                    notImplementing.add(clazz);
+                }
+            }
+
+            @Override
+            protected void visitOtherClass(Class<?> clazz) {
+                if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+                    return;
+                }
+                if (isTestCase(clazz)) {
+                    missingSuffix.add(clazz);
+                } else if (junit.framework.Test.class.isAssignableFrom(clazz)) {
+                    pureUnitTest.add(clazz);
+                }
             }
         });
     }
 
+    public void checkMain(Path rootPath) throws IOException {
+        Files.walkFileTree(rootPath, new TestClassVisitor() {
+            @Override
+            protected void visitTestClass(Class<?> clazz) {
+                testsInMain.add(clazz);
+            }
+
+            @Override
+            protected void visitIntegrationTestClass(Class<?> clazz) {
+                testsInMain.add(clazz);
+            }
+
+            @Override
+            protected void visitOtherClass(Class<?> clazz) {
+                if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+                    return;
+                }
+                if (isTestCase(clazz)) {
+                    testsInMain.add(clazz);
+                }
+            }
+        });
+
+    }
+
     /**
      * Fail the process if there are any violations in the set. Named to look like a junit assertion even though it isn't because it is
      * similar enough.
@@ -203,7 +215,7 @@ public class NamingConventionsCheck {
      * similar enough.
      */
     private static void assertViolation(String className, Set<Class<?>> set) {
-        className = "org.elasticsearch.test.NamingConventionsCheckBadClasses$" + className;
+        className = className.startsWith("org") ? className : "org.elasticsearch.test.NamingConventionsCheckBadClasses$" + className;
         if (false == set.remove(loadClassWithoutInitializing(className))) {
             System.err.println("Error in NamingConventionsCheck! Expected [" + className + "] to be a violation but wasn't.");
             System.exit(1);
@@ -229,4 +241,74 @@ public class NamingConventionsCheck {
             throw new RuntimeException(e);
         }
     }
+
+    abstract class TestClassVisitor implements FileVisitor<Path> {
+        /**
+         * The package name of the directory we are currently visiting. Kept as a string rather than something fancy because we load
+         * just about every class and doing so requires building a string out of it anyway. At least this way we don't need to build the
+         * first part of the string over and over and over again.
+         */
+        private String packageName;
+
+        /**
+         * Visit classes named like a test.
+         */
+        protected abstract void visitTestClass(Class<?> clazz);
+        /**
+         * Visit classes named like an integration test.
+         */
+        protected abstract void visitIntegrationTestClass(Class<?> clazz);
+        /**
+         * Visit classes not named like a test at all.
+         */
+        protected abstract void visitOtherClass(Class<?> clazz);
+
+        @Override
+        public final FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
+            // First we visit the root directory
+            if (packageName == null) {
+                // And its package is the empty string regardless of the directory name
+                packageName = "";
+            } else {
+                packageName += dir.getFileName() + ".";
+            }
+            return FileVisitResult.CONTINUE;
+        }
+
+        @Override
+        public final FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
+            // Go up one package by jumping back to the second to last '.'
+            packageName = packageName.substring(0, 1 + packageName.lastIndexOf('.', packageName.length() - 2));
+            return FileVisitResult.CONTINUE;
+        }
+
+        @Override
+        public final FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+            String filename = file.getFileName().toString();
+            if (filename.endsWith(".class")) {
+                String className = filename.substring(0, filename.length() - ".class".length());
+                Class<?> clazz = loadClassWithoutInitializing(packageName + className);
+                if (clazz.getName().endsWith("Tests")) {
+                    visitTestClass(clazz);
+                } else if (clazz.getName().endsWith("IT")) {
+                    visitIntegrationTestClass(clazz);
+                } else {
+                    visitOtherClass(clazz);
+                }
+            }
+            return FileVisitResult.CONTINUE;
+        }
+
+        /**
+         * Is this class a test case?
+         */
+        protected boolean isTestCase(Class<?> clazz) {
+            return testClass.isAssignableFrom(clazz);
+        }
+
+        @Override
+        public final FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
+            throw exc;
+        }
+    }
 }
diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java
new file mode 100644
index 00000000000..46adc7f065b
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.test;
+
+/**
+ * This class should fail the naming conventions self test.
+ */
+public class NamingConventionsCheckInMainIT {
+}
diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java
new file mode 100644
index 00000000000..27c0b41eb3f
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.test;
+
+/**
+ * This class should fail the naming conventions self test.
+ */
+public class NamingConventionsCheckInMainTests {
+}
diff --git a/test/framework/build.gradle b/test/framework/build.gradle
index 6756495e0a1..13a5ef11ce2 100644
--- a/test/framework/build.gradle
+++ b/test/framework/build.gradle
@@ -63,3 +63,8 @@ thirdPartyAudit.excludes = [
   'org.easymock.IArgumentMatcher',
   'org.jmock.core.Constraint',
 ]
+
+task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
+  checkForTestsInMain = true
+}
+precommit.dependsOn namingConventionsMain
diff --git a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
index d49a1b4cae5..cbabdeef4af 100644
--- a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
@@ -113,7 +113,7 @@ import java.util.regex.Pattern;
  * If we don't want to expose one for a specific reason, just map it to Void.
  * The deprecated ones can be mapped to Deprecated.class.
  */
-public class AnalysisFactoryTestCase extends ESTestCase {
+public abstract class AnalysisFactoryTestCase extends ESTestCase {
 
     private static final Pattern UNDERSCORE_THEN_ANYTHING = Pattern.compile("_(.)");
 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java b/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java
similarity index 99%
rename from test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java
rename to test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java
index a5cd7c30723..48bd18986c2 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java
@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 
-public class LongGCDisruptionTest extends ESTestCase {
+public class LongGCDisruptionTests extends ESTestCase {
 
     static class LockedExecutor {
         ReentrantLock lock = new ReentrantLock();
diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java b/test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java
similarity index 100%
rename from test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java
rename to test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionIT.java
diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java b/test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java
similarity index 100%
rename from test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java
rename to test/framework/src/test/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java

From 3c845727f80c609275b68eabd1c2d5d2c1fff4ec Mon Sep 17 00:00:00 2001
From: Koen De Groote <kdg.private@gmail.com>
Date: Wed, 26 Apr 2017 04:15:00 +0200
Subject: [PATCH 17/34] Replace alternating regex with character classes

This commit replaces two alternating regular expressions (that is,
regular expressions of the form a|b where a and b are single
characters) with the equivalent regular expression rewritten as a
character class (that is, [ab]). This is an improvement because a|b
involves backtracking while [ab] does not.

Relates #24316
---
 core/src/main/java/org/elasticsearch/Version.java               | 2 +-
 .../org/elasticsearch/common/logging/DeprecationLogger.java     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
index 199f67bcb0b..a7f01ef5528 100644
--- a/core/src/main/java/org/elasticsearch/Version.java
+++ b/core/src/main/java/org/elasticsearch/Version.java
@@ -196,7 +196,7 @@ public class Version implements Comparable<Version> {
         if (snapshot = version.endsWith("-SNAPSHOT")) {
             version = version.substring(0, version.length() - 9);
         }
-        String[] parts = version.split("\\.|\\-");
+        String[] parts = version.split("[.-]");
         if (parts.length < 3 || parts.length > 4) {
             throw new IllegalArgumentException(
                     "the version needs to contain major, minor, and revision, and optionally the build: " + version);
diff --git a/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
index 7594f96e2df..ee3e3c74905 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
@@ -311,7 +311,7 @@ public class DeprecationLogger {
      * @return the escaped string
      */
     public static String escape(String s) {
-        return s.replaceAll("(\\\\|\")", "\\\\$1");
+        return s.replaceAll("([\"\\\\])", "\\\\$1");
     }
 
 }

From 51b33f1fd549b474e901e70a460440d73a1751ce Mon Sep 17 00:00:00 2001
From: Ryan Ernst <ryan@iernst.net>
Date: Tue, 25 Apr 2017 23:43:20 -0700
Subject: [PATCH 18/34] S3 Repository: Deprecate remaining `repositories.s3.*`
 settings (#24144)

Most of these settings should always be pulled from the repository
settings. A couple of leftover settings should be moved to client
settings. The path style access setting should be removed altogether.
This commit adds deprecations for all of these existing settings, as
well as adding new client specific settings for max retries and
throttling.

relates #24143
---
 .../repositories/s3/InternalAwsS3Service.java | 26 ++++---
 .../repositories/s3/S3ClientSettings.java     | 37 +++++++---
 .../repositories/s3/S3Repository.java         | 29 ++++----
 .../repositories/s3/S3RepositoryPlugin.java   |  2 +
 .../s3/AwsS3ServiceImplTests.java             | 68 +++++++++++++++----
 .../repositories/s3/S3RepositoryTests.java    |  3 +-
 .../org/elasticsearch/test/ESTestCase.java    |  1 +
 7 files changed, 115 insertions(+), 51 deletions(-)

diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java
index 1ba0414afe2..95b746a0a2b 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java
@@ -75,12 +75,6 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
                 Strings.collectionToDelimitedString(clientsSettings.keySet(), ","));
         }
 
-        Integer maxRetries = getValue(repositorySettings, settings,
-            S3Repository.Repository.MAX_RETRIES_SETTING,
-            S3Repository.Repositories.MAX_RETRIES_SETTING);
-        boolean useThrottleRetries = getValue(repositorySettings, settings,
-            S3Repository.Repository.USE_THROTTLE_RETRIES_SETTING,
-            S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING);
         // If the user defined a path style access setting, we rely on it,
         // otherwise we use the default value set by the SDK
         Boolean pathStyleAccess = null;
@@ -91,12 +85,11 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
                 S3Repository.Repositories.PATH_STYLE_ACCESS_SETTING);
         }
 
-        logger.debug("creating S3 client with client_name [{}], endpoint [{}], max_retries [{}], " +
-                "use_throttle_retries [{}], path_style_access [{}]",
-            clientName, clientSettings.endpoint, maxRetries, useThrottleRetries, pathStyleAccess);
+        logger.debug("creating S3 client with client_name [{}], endpoint [{}], path_style_access [{}]",
+            clientName, clientSettings.endpoint, pathStyleAccess);
 
         AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings);
-        ClientConfiguration configuration = buildConfiguration(clientSettings, maxRetries, useThrottleRetries);
+        ClientConfiguration configuration = buildConfiguration(clientSettings, repositorySettings);
 
         client = new AmazonS3Client(credentials, configuration);
 
@@ -113,7 +106,7 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
     }
 
     // pkg private for tests
-    static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings, Integer maxRetries, boolean useThrottleRetries) {
+    static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings, Settings repositorySettings) {
         ClientConfiguration clientConfiguration = new ClientConfiguration();
         // the response metadata cache is only there for diagnostics purposes,
         // but can force objects from every response to the old generation.
@@ -128,10 +121,13 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
             clientConfiguration.setProxyPassword(clientSettings.proxyPassword);
         }
 
+        Integer maxRetries = getRepoValue(repositorySettings, S3Repository.Repository.MAX_RETRIES_SETTING, clientSettings.maxRetries);
         if (maxRetries != null) {
             // If not explicitly set, default to 3 with exponential backoff policy
             clientConfiguration.setMaxErrorRetry(maxRetries);
         }
+        boolean useThrottleRetries = getRepoValue(repositorySettings,
+            S3Repository.Repository.USE_THROTTLE_RETRIES_SETTING, clientSettings.throttleRetries);
         clientConfiguration.setUseThrottleRetries(useThrottleRetries);
         clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis);
 
@@ -149,6 +145,14 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
         }
     }
 
+    /** Returns the value for a given setting from the repository, or returns the fallback value. */
+    private static <T> T getRepoValue(Settings repositorySettings, Setting<T> repositorySetting, T fallback) {
+        if (repositorySetting.exists(repositorySettings)) {
+            return repositorySetting.get(repositorySettings);
+        }
+        return fallback;
+    }
+
     @Override
     protected void doStart() throws ElasticsearchException {
     }
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java
index ece4a5d29ec..8be6aaff74f 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java
@@ -31,6 +31,7 @@ import com.amazonaws.auth.BasicAWSCredentials;
 import org.elasticsearch.common.settings.SecureSetting;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 
@@ -52,20 +53,19 @@ class S3ClientSettings {
 
     /** An override for the s3 endpoint to connect to. */
     static final Setting.AffixSetting<String> ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint",
-        key -> new Setting<>(key, "", s -> s.toLowerCase(Locale.ROOT),
-            Setting.Property.NodeScope));
+        key -> new Setting<>(key, "", s -> s.toLowerCase(Locale.ROOT), Property.NodeScope));
 
     /** The protocol to use to connect to s3. */
     static final Setting.AffixSetting<Protocol> PROTOCOL_SETTING = Setting.affixKeySetting(PREFIX, "protocol",
-        key -> new Setting<>(key, "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Setting.Property.NodeScope));
+        key -> new Setting<>(key, "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope));
 
     /** The host name of a proxy to connect to s3 through. */
     static final Setting.AffixSetting<String> PROXY_HOST_SETTING = Setting.affixKeySetting(PREFIX, "proxy.host",
-        key -> Setting.simpleString(key, Setting.Property.NodeScope));
+        key -> Setting.simpleString(key, Property.NodeScope));
 
     /** The port of a proxy to connect to s3 through. */
     static final Setting.AffixSetting<Integer> PROXY_PORT_SETTING = Setting.affixKeySetting(PREFIX, "proxy.port",
-        key -> Setting.intSetting(key, 80, 0, 1<<16, Setting.Property.NodeScope));
+        key -> Setting.intSetting(key, 80, 0, 1<<16, Property.NodeScope));
 
     /** The username of a proxy to connect to s3 through. */
     static final Setting.AffixSetting<SecureString> PROXY_USERNAME_SETTING = Setting.affixKeySetting(PREFIX, "proxy.username",
@@ -77,8 +77,15 @@ class S3ClientSettings {
 
     /** The socket timeout for connecting to s3. */
     static final Setting.AffixSetting<TimeValue> READ_TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "read_timeout",
-        key -> Setting.timeSetting(key, TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT),
-            Setting.Property.NodeScope));
+        key -> Setting.timeSetting(key, TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope));
+
+    /** The number of retries to use when an s3 request fails. */
+    static final Setting.AffixSetting<Integer> MAX_RETRIES_SETTING = Setting.affixKeySetting(PREFIX, "max_retries",
+        key -> Setting.intSetting(key, S3Repository.Repositories.MAX_RETRIES_SETTING, 0, Property.NodeScope));
+
+    /** Whether retries should be throttled (i.e. use backoff). */
+    static final Setting.AffixSetting<Boolean> USE_THROTTLE_RETRIES_SETTING = Setting.affixKeySetting(PREFIX, "use_throttle_retries",
+        key -> Setting.boolSetting(key, S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING, Property.NodeScope));
 
     /** Credentials to authenticate with s3. */
     final BasicAWSCredentials credentials;
@@ -106,9 +113,15 @@ class S3ClientSettings {
     /** The read timeout for the s3 client. */
     final int readTimeoutMillis;
 
+    /** The number of retries to use for the s3 client. */
+    final int maxRetries;
+
+    /** Whether the s3 client should use an exponential backoff retry policy. */
+    final boolean throttleRetries;
+
     private S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol,
-                             String proxyHost, int proxyPort, String proxyUsername,
-                             String proxyPassword, int readTimeoutMillis) {
+                             String proxyHost, int proxyPort, String proxyUsername, String proxyPassword,
+                             int readTimeoutMillis, int maxRetries, boolean throttleRetries) {
         this.credentials = credentials;
         this.endpoint = endpoint;
         this.protocol = protocol;
@@ -117,6 +130,8 @@ class S3ClientSettings {
         this.proxyUsername = proxyUsername;
         this.proxyPassword = proxyPassword;
         this.readTimeoutMillis = readTimeoutMillis;
+        this.maxRetries = maxRetries;
+        this.throttleRetries = throttleRetries;
     }
 
     /**
@@ -163,7 +178,9 @@ class S3ClientSettings {
                 getConfigValue(settings, clientName, PROXY_PORT_SETTING),
                 proxyUsername.toString(),
                 proxyPassword.toString(),
-                (int)getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis()
+                (int)getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis(),
+                getConfigValue(settings, clientName, MAX_RETRIES_SETTING),
+                getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING)
             );
         }
     }
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
index c9f37f24ded..b183fd3b814 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
@@ -61,13 +61,13 @@ class S3Repository extends BlobStoreRepository {
         /**
          * repositories.s3.bucket: The name of the bucket to be used for snapshots.
          */
-        Setting<String> BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", Property.NodeScope);
+        Setting<String> BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.server_side_encryption: When set to true files are encrypted on server side using AES256 algorithm.
          * Defaults to false.
          */
         Setting<Boolean> SERVER_SIDE_ENCRYPTION_SETTING =
-            Setting.boolSetting("repositories.s3.server_side_encryption", false, Property.NodeScope);
+            Setting.boolSetting("repositories.s3.server_side_encryption", false, Property.NodeScope, Property.Deprecated);
 
         /**
          * Default is to use 100MB (S3 defaults) for heaps above 2GB and 5% of
@@ -89,41 +89,41 @@ class S3Repository extends BlobStoreRepository {
          */
         Setting<ByteSizeValue> BUFFER_SIZE_SETTING =
             Setting.byteSizeSetting("repositories.s3.buffer_size", DEFAULT_BUFFER_SIZE,
-                new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB), Property.NodeScope);
+                new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB), Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.max_retries: Number of retries in case of S3 errors. Defaults to 3.
          */
-        Setting<Integer> MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, Property.NodeScope);
+        Setting<Integer> MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.use_throttle_retries: Set to `true` if you want to throttle retries. Defaults to AWS SDK default value (`false`).
          */
         Setting<Boolean> USE_THROTTLE_RETRIES_SETTING = Setting.boolSetting("repositories.s3.use_throttle_retries",
-            ClientConfiguration.DEFAULT_THROTTLE_RETRIES, Property.NodeScope);
+            ClientConfiguration.DEFAULT_THROTTLE_RETRIES, Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.chunk_size: Big files can be broken down into chunks during snapshotting if needed. Defaults to 1g.
          */
         Setting<ByteSizeValue> CHUNK_SIZE_SETTING =
             Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(1, ByteSizeUnit.GB),
-                new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB), Property.NodeScope);
+                new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB), Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.compress: When set to true metadata files are stored in compressed format. This setting doesn’t affect index
          * files that are already compressed by default. Defaults to false.
          */
-        Setting<Boolean> COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, Property.NodeScope);
+        Setting<Boolean> COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.storage_class: Sets the S3 storage class type for the backup files. Values may be standard, reduced_redundancy,
          * standard_ia. Defaults to standard.
          */
-        Setting<String> STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", Property.NodeScope);
+        Setting<String> STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.canned_acl: The S3 repository supports all S3 canned ACLs : private, public-read, public-read-write,
          * authenticated-read, log-delivery-write, bucket-owner-read, bucket-owner-full-control. Defaults to private.
          */
-        Setting<String> CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", Property.NodeScope);
+        Setting<String> CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.base_path: Specifies the path within bucket to repository data. Defaults to root directory.
          */
-        Setting<String> BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", Property.NodeScope);
+        Setting<String> BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", Property.NodeScope, Property.Deprecated);
         /**
          * repositories.s3.path_style_access: When set to true configures the client to use path-style access for all requests.
          Amazon S3 supports virtual-hosted-style and path-style access in all Regions. The path-style syntax, however,
@@ -132,7 +132,8 @@ class S3Repository extends BlobStoreRepository {
          in path-style access) and the bucket being accessed (some buckets are not valid DNS names). Setting this flag
          will result in path-style access being used for all requests.
          */
-        Setting<Boolean> PATH_STYLE_ACCESS_SETTING = Setting.boolSetting("repositories.s3.path_style_access", false, Property.NodeScope);
+        Setting<Boolean> PATH_STYLE_ACCESS_SETTING = Setting.boolSetting("repositories.s3.path_style_access", false,
+            Property.NodeScope, Property.Deprecated);
     }
 
     /**
@@ -160,13 +161,13 @@ class S3Repository extends BlobStoreRepository {
          * max_retries
          * @see  Repositories#MAX_RETRIES_SETTING
          */
-        Setting<Integer> MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3);
+        Setting<Integer> MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, Property.Deprecated);
         /**
          * use_throttle_retries
          * @see  Repositories#USE_THROTTLE_RETRIES_SETTING
          */
         Setting<Boolean> USE_THROTTLE_RETRIES_SETTING = Setting.boolSetting("use_throttle_retries",
-            ClientConfiguration.DEFAULT_THROTTLE_RETRIES);
+            ClientConfiguration.DEFAULT_THROTTLE_RETRIES, Property.Deprecated);
         /**
          * chunk_size
          * @see  Repositories#CHUNK_SIZE_SETTING
@@ -198,7 +199,7 @@ class S3Repository extends BlobStoreRepository {
          * path_style_access
          * @see  Repositories#PATH_STYLE_ACCESS_SETTING
          */
-        Setting<Boolean> PATH_STYLE_ACCESS_SETTING = Setting.boolSetting("path_style_access", false);
+        Setting<Boolean> PATH_STYLE_ACCESS_SETTING = Setting.boolSetting("path_style_access", false, Property.Deprecated);
     }
 
     private final S3BlobStore blobStore;
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java
index 1ab0ca35441..a4512ab815d 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java
@@ -90,6 +90,8 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin {
             S3ClientSettings.PROXY_USERNAME_SETTING,
             S3ClientSettings.PROXY_PASSWORD_SETTING,
             S3ClientSettings.READ_TIMEOUT_SETTING,
+            S3ClientSettings.MAX_RETRIES_SETTING,
+            S3ClientSettings.USE_THROTTLE_RETRIES_SETTING,
 
             // Register S3 repositories settings: repositories.s3
             S3Repository.Repositories.BUCKET_SETTING,
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java
index 18608b83627..017e7fbeb3f 100644
--- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java
@@ -24,6 +24,7 @@ import com.amazonaws.Protocol;
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import org.elasticsearch.common.settings.MockSecureSettings;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 
@@ -86,21 +87,70 @@ public class AwsS3ServiceImplTests extends ESTestCase {
             "aws_proxy_password", 3, false, 10000);
     }
 
-    public void testGlobalMaxRetries() {
+    public void testGlobalMaxRetriesBackcompat() {
         Settings settings = Settings.builder()
             .put(S3Repository.Repositories.MAX_RETRIES_SETTING.getKey(), 10)
             .build();
         launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTPS, null, -1, null,
             null, 10, false, 50000);
+        assertSettingDeprecationsAndWarnings(new Setting<?>[]{
+            S3Repository.Repositories.MAX_RETRIES_SETTING
+        });
     }
 
     public void testRepositoryMaxRetries() {
-        Settings repositorySettings = generateRepositorySettings(20);
+        Settings settings = Settings.builder()
+            .put("s3.client.default.max_retries", 5)
+            .build();
+        launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTPS, null, -1, null,
+            null, 5, false, 50000);
+    }
+
+    public void testRepositoryMaxRetriesBackcompat() {
+        Settings repositorySettings = Settings.builder()
+            .put(S3Repository.Repository.MAX_RETRIES_SETTING.getKey(), 20).build();
         Settings settings = Settings.builder()
             .put(S3Repository.Repositories.MAX_RETRIES_SETTING.getKey(), 10)
             .build();
         launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTPS, null, -1, null,
             null, 20, false, 50000);
+        assertSettingDeprecationsAndWarnings(new Setting<?>[]{
+            S3Repository.Repositories.MAX_RETRIES_SETTING,
+            S3Repository.Repository.MAX_RETRIES_SETTING
+        });
+    }
+
+    public void testGlobalThrottleRetriesBackcompat() {
+        Settings settings = Settings.builder()
+            .put(S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING.getKey(), true)
+            .build();
+        launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTPS, null, -1, null,
+            null, 3, true, 50000);
+        assertSettingDeprecationsAndWarnings(new Setting<?>[]{
+            S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING
+        });
+    }
+
+    public void testRepositoryThrottleRetries() {
+        Settings settings = Settings.builder()
+            .put("s3.client.default.use_throttle_retries", true)
+            .build();
+        launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTPS, null, -1, null,
+            null, 3, true, 50000);
+    }
+
+    public void testRepositoryThrottleRetriesBackcompat() {
+        Settings repositorySettings = Settings.builder()
+            .put(S3Repository.Repository.USE_THROTTLE_RETRIES_SETTING.getKey(), true).build();
+        Settings settings = Settings.builder()
+            .put(S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING.getKey(), false)
+            .build();
+        launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTPS, null, -1, null,
+            null, 3, true, 50000);
+        assertSettingDeprecationsAndWarnings(new Setting<?>[]{
+            S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING,
+            S3Repository.Repository.USE_THROTTLE_RETRIES_SETTING
+        });
     }
 
     private void launchAWSConfigurationTest(Settings settings,
@@ -113,13 +163,9 @@ public class AwsS3ServiceImplTests extends ESTestCase {
                                               Integer expectedMaxRetries,
                                               boolean expectedUseThrottleRetries,
                                               int expectedReadTimeout) {
-        Integer maxRetries = S3Repository.getValue(singleRepositorySettings, settings,
-            S3Repository.Repository.MAX_RETRIES_SETTING, S3Repository.Repositories.MAX_RETRIES_SETTING);
-        Boolean useThrottleRetries = S3Repository.getValue(singleRepositorySettings, settings,
-            S3Repository.Repository.USE_THROTTLE_RETRIES_SETTING, S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING);
 
         S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default");
-        ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings, maxRetries, useThrottleRetries);
+        ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings, singleRepositorySettings);
 
         assertThat(configuration.getResponseMetadataCacheSize(), is(0));
         assertThat(configuration.getProtocol(), is(expectedProtocol));
@@ -132,14 +178,6 @@ public class AwsS3ServiceImplTests extends ESTestCase {
         assertThat(configuration.getSocketTimeout(), is(expectedReadTimeout));
     }
 
-    private static Settings generateRepositorySettings(Integer maxRetries) {
-        Settings.Builder builder = Settings.builder();
-        if (maxRetries != null) {
-            builder.put(S3Repository.Repository.MAX_RETRIES_SETTING.getKey(), maxRetries);
-        }
-        return builder.build();
-    }
-
     public void testEndpointSetting() {
         Settings settings = Settings.builder()
             .put("s3.client.default.endpoint", "s3.endpoint")
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
index 11daf7b18ff..ce20d05a37e 100644
--- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
@@ -108,7 +108,8 @@ public class S3RepositoryTests extends ESTestCase {
         Settings settings = Settings.builder().put(Repositories.BASE_PATH_SETTING.getKey(), "/foo/bar").build();
         s3repo = new S3Repository(metadata, settings, NamedXContentRegistry.EMPTY, new DummyS3Service());
         assertEquals("foo/bar/", s3repo.basePath().buildAsString()); // make sure leading `/` is removed and trailing is added
-        assertWarnings("S3 repository base_path" +
+        assertSettingDeprecationsAndWarnings(new Setting<?>[] { Repositories.BASE_PATH_SETTING },
+            "S3 repository base_path" +
                 " trimming the leading `/`, and leading `/` will not be supported for the S3 repository in future releases");
     }
 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index fab12d2606b..29241b4b19d 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -327,6 +327,7 @@ public abstract class ESTestCase extends LuceneTestCase {
         }
         try {
             final List<String> actualWarnings = threadContext.getResponseHeaders().get("Warning");
+            assertNotNull(actualWarnings);
             final Set<String> actualWarningValues =
                     actualWarnings.stream().map(DeprecationLogger::extractWarningValueFromWarningHeader).collect(Collectors.toSet());
             for (String msg : expectedWarnings) {

From c17de49a6dc1d54fcfee3754211ae67a06bdcec7 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen <martijn.v.groningen@gmail.com>
Date: Wed, 19 Apr 2017 20:37:20 +0200
Subject: [PATCH 19/34] [percolator] Fix memory leak when percolator uses
 bitset or field data cache.

The percolator doesn't close the IndexReader of the memory index any more.
Prior to 2.x the percolator had its own SearchContext (PercolatorContext) that did this,
but that was removed when the percolator was refactored as part of the 5.0 release.

I think an alternative way to fix this is to let the percolator not use the bitset and fielddata caches;
that way we prevent the memory leak.

Closes #24108
---
 .../index/cache/bitset/BitsetFilterCache.java |   3 +-
 .../aggregations/AggregatorTestCase.java      |   8 ++
 .../bucket/nested/NestedAggregatorTests.java  |  10 +-
 .../nested/ReverseNestedAggregatorTests.java  |   4 +-
 .../percolator/PercolateQueryBuilder.java     |  47 ++++++-
 .../percolator/PercolatorQuerySearchIT.java   | 122 ++++++++++++++++++
 6 files changed, 181 insertions(+), 13 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
index 04d2ac47d18..7d3e75f6f5d 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
@@ -121,8 +121,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements I
         }
         final IndexReader.CacheKey coreCacheReader = cacheHelper.getKey();
         final ShardId shardId = ShardUtils.extractShardId(context.reader());
-        if (shardId != null // can't require it because of the percolator
-                && indexSettings.getIndex().equals(shardId.getIndex()) == false) {
+        if (indexSettings.getIndex().equals(shardId.getIndex()) == false) {
             // insanity
             throw new IllegalStateException("Trying to load bit set for index " + shardId.getIndex()
                     + " with cache of index " + indexSettings.getIndex());
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 3dc530204bc..fd36a03ddf6 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.search.aggregations;
 
 import org.apache.lucene.index.CompositeReaderContext;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Collector;
@@ -31,8 +32,10 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
+import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener;
@@ -48,6 +51,7 @@ import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.ObjectMapper.Nested;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.support.NestedScope;
+import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
@@ -289,4 +293,8 @@ public abstract class AggregatorTestCase extends ESTestCase {
             return "ShardSearcher(" + ctx.get(0) + ")";
         }
     }
+
+    protected static DirectoryReader wrap(DirectoryReader directoryReader) throws IOException {
+        return ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("_index", "_na_"), 0));
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
index cdd2251fb6e..304b7f03c59 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
@@ -65,7 +65,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
             try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                 // intentionally not writing any docs
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                     NESTED_OBJECT);
                 MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
@@ -112,7 +112,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 }
                 iw.commit();
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                     NESTED_OBJECT);
                 MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
@@ -160,7 +160,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 }
                 iw.commit();
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                     NESTED_OBJECT + "." + NESTED_OBJECT2);
                 MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
@@ -213,7 +213,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 iw.addDocuments(documents);
                 iw.commit();
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                     NESTED_OBJECT);
                 SumAggregationBuilder sumAgg = new SumAggregationBuilder(SUM_AGG_NAME)
@@ -292,7 +292,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 iw.commit();
                 iw.close();
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
 
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                     "nested_field");
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
index ce6ec7794b2..74fb7ca9ca4 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
@@ -54,7 +54,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
             try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                 // intentionally not writing any docs
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                         NESTED_OBJECT);
                 ReverseNestedAggregationBuilder reverseNestedBuilder
@@ -117,7 +117,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
                 }
                 iw.commit();
             }
-            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                 NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                         NESTED_OBJECT);
                 ReverseNestedAggregationBuilder reverseNestedBuilder
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
index 9ebb65c3275..0cd58365bf4 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
@@ -23,16 +23,22 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.memory.MemoryIndex;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ResourceNotFoundException;
@@ -51,6 +57,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.analysis.FieldNameAnalyzer;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperForType;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -62,6 +70,8 @@ import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryRewriteContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
+import org.elasticsearch.indices.breaker.CircuitBreakerService;
+import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 
 import java.io.IOException;
 import java.util.Objects;
@@ -412,12 +422,9 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
             docSearcher.setQueryCache(null);
         }
 
-        Version indexVersionCreated = context.getIndexSettings().getIndexVersionCreated();
         boolean mapUnmappedFieldsAsString = context.getIndexSettings()
                 .getValue(PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING);
-        // We have to make a copy of the QueryShardContext here so we can have a unfrozen version for parsing the legacy
-        // percolator queries
-        QueryShardContext percolateShardContext = new QueryShardContext(context);
+        QueryShardContext percolateShardContext = wrap(context);
         MappedFieldType fieldType = context.fieldMapper(field);
         if (fieldType == null) {
             throw new QueryShardException(context, "field [" + field + "] does not exist");
@@ -503,4 +510,36 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         };
     }
 
+    static QueryShardContext wrap(QueryShardContext shardContext) {
+        return new QueryShardContext(shardContext) {
+
+            @Override
+            public BitSetProducer bitsetFilter(Query query) {
+                return context -> {
+                    final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
+                    final IndexSearcher searcher = new IndexSearcher(topLevelContext);
+                    searcher.setQueryCache(null);
+                    final Weight weight = searcher.createNormalizedWeight(query, false);
+                    final Scorer s = weight.scorer(context);
+
+                    if (s != null) {
+                        return new BitDocIdSet(BitSet.of(s.iterator(), context.reader().maxDoc())).bits();
+                    } else {
+                        return null;
+                    }
+                };
+            }
+
+            @Override
+            @SuppressWarnings("unchecked")
+            public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
+                IndexFieldData.Builder builder = fieldType.fielddataBuilder();
+                IndexFieldDataCache cache = new IndexFieldDataCache.None();
+                CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService();
+                return (IFD) builder.build(shardContext.getIndexSettings(), fieldType, cache, circuitBreaker,
+                        shardContext.getMapperService());
+            }
+        };
+    }
+
 }
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
index ebf0c13b20a..5760b260b01 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
@@ -26,9 +26,12 @@ import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
 import org.elasticsearch.index.query.MultiMatchQueryBuilder;
@@ -39,6 +42,7 @@ import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
+import org.elasticsearch.search.lookup.LeafDocLookup;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
@@ -83,6 +87,11 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
         protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
             Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
             scripts.put("1==1", vars -> Boolean.TRUE);
+            scripts.put("use_fielddata_please", vars -> {
+                LeafDocLookup leafDocLookup = (LeafDocLookup) vars.get("_doc");
+                ScriptDocValues scriptDocValues = leafDocLookup.get("employees.name");
+                return "virginia_potts".equals(scriptDocValues.get(0));
+            });
             return scripts;
         }
     }
@@ -606,6 +615,119 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
         assertHitCount(response, 0);
     }
 
+    public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder();
+        mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject()
+            .startObject("employee").field("type", "nested").startObject("properties")
+            .startObject("name").field("type", "text").endObject().endObject().endObject().endObject()
+            .endObject();
+        createIndex("test", client().admin().indices().prepareCreate("test")
+            // to prevent the normal document from being cached by BitsetFilterCache
+            .setSettings(Settings.builder().put(BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), false))
+            .addMapping("employee", mapping)
+            .addMapping("queries", "query", "type=percolator")
+        );
+        client().prepareIndex("test", "queries", "q1").setSource(jsonBuilder().startObject()
+            .field("query", QueryBuilders.nestedQuery("employee",
+                QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg)
+            ).endObject())
+            .get();
+        client().admin().indices().prepareRefresh().get();
+
+        for (int i = 0; i < 32; i++) {
+            SearchResponse response = client().prepareSearch()
+                .setQuery(new PercolateQueryBuilder("query", "employee",
+                    XContentFactory.jsonBuilder()
+                        .startObject().field("companyname", "stark")
+                        .startArray("employee")
+                        .startObject().field("name", "virginia potts").endObject()
+                        .startObject().field("name", "tony stark").endObject()
+                        .endArray()
+                        .endObject().bytes(), XContentType.JSON))
+                .addSort("_doc", SortOrder.ASC)
+                // size 0, because otherwise we load bitsets for the normal document in FetchPhase#findRootDocumentIfNested(...)
+                .setSize(0)
+                .get();
+            assertHitCount(response, 1);
+        }
+
+        // We can't check this via the stats api, because BitsetCacheListener requires that it can extract a shardId
+        // from the index reader, and for the percolator it can't do that; this also means we don't keep track of
+        // memory for the BitsetCache in the case of the percolator.
+        long bitsetSize = client().admin().cluster().prepareClusterStats().get()
+            .getIndicesStats().getSegments().getBitsetMemoryInBytes();
+        assertEquals("The percolator works with in-memory index and therefor shouldn't use bitset cache", 0L, bitsetSize);
+    }
+
+    public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder();
+        mapping.startObject();
+        {
+            mapping.startObject("properties");
+            {
+                mapping.startObject("companyname");
+                mapping.field("type", "text");
+                mapping.endObject();
+            }
+            {
+                mapping.startObject("employees");
+                mapping.field("type", "nested");
+                {
+                    mapping.startObject("properties");
+                    {
+                        mapping.startObject("name");
+                        mapping.field("type", "text");
+                        mapping.field("fielddata", true);
+                        mapping.endObject();
+                    }
+                    mapping.endObject();
+                }
+                mapping.endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        createIndex("test", client().admin().indices().prepareCreate("test")
+            .addMapping("employee", mapping)
+            .addMapping("queries", "query", "type=percolator")
+        );
+        Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "use_fielddata_please", Collections.emptyMap());
+        client().prepareIndex("test", "queries", "q1").setSource(jsonBuilder().startObject()
+            .field("query", QueryBuilders.nestedQuery("employees",
+                QueryBuilders.scriptQuery(script), ScoreMode.Avg)
+            ).endObject()).get();
+        client().admin().indices().prepareRefresh().get();
+        XContentBuilder doc = jsonBuilder();
+        doc.startObject();
+        {
+            doc.field("companyname", "stark");
+            doc.startArray("employees");
+            {
+                doc.startObject();
+                doc.field("name", "virginia_potts");
+                doc.endObject();
+            }
+            {
+                doc.startObject();
+                doc.field("name", "tony_stark");
+                doc.endObject();
+            }
+            doc.endArray();
+        }
+        doc.endObject();
+        for (int i = 0; i < 32; i++) {
+            SearchResponse response = client().prepareSearch()
+                .setQuery(new PercolateQueryBuilder("query", "employee", doc.bytes(), XContentType.JSON))
+                .addSort("_doc", SortOrder.ASC)
+                .get();
+            assertHitCount(response, 1);
+        }
+
+        long fieldDataSize = client().admin().cluster().prepareClusterStats().get()
+            .getIndicesStats().getFieldData().getMemorySizeInBytes();
+        assertEquals("The percolator works with in-memory index and therefor shouldn't use field-data cache", 0L, fieldDataSize);
+    }
+
     public void testPercolatorQueryViaMultiSearch() throws Exception {
         createIndex("test", client().admin().indices().prepareCreate("test")
             .addMapping("type", "field1", "type=text")

From 7f8fe8b81db5f90f5449638ba8a817f9d3dd5918 Mon Sep 17 00:00:00 2001
From: Jay Modi <jaymode@users.noreply.github.com>
Date: Wed, 26 Apr 2017 07:23:07 -0400
Subject: [PATCH 20/34] StreamInput throws exceptions instead of using
 assertions (#24294)

StreamInput has methods such as readVInt that perform sanity checks on the data using assertions,
which will catch bad data in tests but provide no safety when running as a node without assertions
enabled. The use of assertions also makes testing with invalid data difficult, since we would need
to handle assertion errors in the code using the stream input, and errors like this should not be
something we try to catch. This commit introduces a flag that will throw an IOException instead of
using an assertion.
---
 .../org/elasticsearch/common/io/stream/StreamInput.java   | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index e33c3ed840a..7d175916c8e 100644
--- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -202,7 +202,9 @@ public abstract class StreamInput extends InputStream {
             return i;
         }
         b = readByte();
-        assert (b & 0x80) == 0;
+        if ((b & 0x80) != 0) {
+            throw new IOException("Invalid vInt ((" + Integer.toHexString(b) + " & 0x7f) << 28) | " + Integer.toHexString(i));
+        }
         return i | ((b & 0x7F) << 28);
     }
 
@@ -367,7 +369,7 @@ public abstract class StreamInput extends InputStream {
                     buffer[i] = ((char) ((c & 0x0F) << 12 | (readByte() & 0x3F) << 6 | (readByte() & 0x3F) << 0));
                     break;
                 default:
-                    new AssertionError("unexpected character: " + c + " hex: " + Integer.toHexString(c));
+                    throw new IOException("Invalid string; unexpected character: " + c + " hex: " + Integer.toHexString(c));
             }
         }
         return spare.toString();
@@ -808,7 +810,7 @@ public abstract class StreamInput extends InputStream {
                 case 17:
                     return (T) readStackTrace(new IOException(readOptionalString(), readException()), this);
                 default:
-                    assert false : "no such exception for id: " + key;
+                    throw new IOException("no such exception for id: " + key);
             }
         }
         return null;

From 91b61ce569e943edb4fcdb539c53c5b7562d440f Mon Sep 17 00:00:00 2001
From: Yannick Welsch <yannick@welsch.lu>
Date: Wed, 26 Apr 2017 16:08:16 +0200
Subject: [PATCH 21/34] [TEST] Do a reroute with retry_failed after a bridge
 partition on testAckedIndexing

In case of a bridge partition, shard allocation can fail "index.allocation.max_retries" times if the master is the super-connected node and recovery
source and target are on opposite sides of the bridge. This commit adds a reroute with retry_failed after healing the network partition so that the
ensureGreen check succeeds.
---
 .../discovery/DiscoveryWithServiceDisruptionsIT.java        | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
index 595d9a6d3f1..021e2be85ed 100644
--- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
@@ -585,6 +585,12 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
                     ensureStableCluster(nodes.size(), TimeValue.timeValueMillis(disruptionScheme.expectedTimeToHeal().millis() +
                         DISRUPTION_HEALING_OVERHEAD.millis()), true, node);
                 }
+                // in case of a bridge partition, shard allocation can fail "index.allocation.max_retries" times if the master
+                // is the super-connected node and recovery source and target are on opposite sides of the bridge
+                if (disruptionScheme instanceof NetworkDisruption &&
+                    ((NetworkDisruption) disruptionScheme).getDisruptedLinks() instanceof Bridge) {
+                    assertAcked(client().admin().cluster().prepareReroute().setRetryFailed(true));
+                }
                 ensureGreen("test");
 
                 logger.info("validating successful docs");

From 0c12d0ce37932b174dc2b2a5a91824e05caac9de Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Wed, 26 Apr 2017 10:40:35 -0400
Subject: [PATCH 22/34] Make bats tests refuse to start on non-VMs (#24315)

The bats tests are destructive and must be run as root. This is a
horrible combination on any sane system but perfectly fine to do
in a VM. This change modifies the tests so they refuse to start
unless they are in an environment with an `/etc/is_vagrant_vm`
file. The Vagrantfile creates it on startup.

Closes #24137
---
 Vagrantfile                                       |  8 +++++++-
 .../resources/packaging/tests/20_tar_package.bats | 11 ++++++++---
 .../resources/packaging/tests/30_deb_package.bats | 11 ++++++++---
 .../resources/packaging/tests/40_rpm_package.bats | 11 ++++++++---
 .../resources/packaging/tests/60_systemd.bats     | 11 ++++++++---
 .../resources/packaging/tests/70_sysv_initd.bats  | 15 ++++++++++-----
 .../packaging/tests/75_bad_data_paths.bats        | 11 ++++++++---
 .../resources/packaging/tests/80_upgrade.bats     | 11 ++++++++---
 .../resources/packaging/tests/90_reinstall.bats   | 11 ++++++++---
 .../tests/module_and_plugin_test_cases.bash       | 11 ++++++++---
 .../test/resources/packaging/utils/modules.bash   | 13 +++++++++----
 .../test/resources/packaging/utils/packages.bash  | 11 ++++++++---
 .../test/resources/packaging/utils/plugins.bash   | 15 ++++++++++-----
 .../src/test/resources/packaging/utils/tar.bash   | 15 ++++++++++-----
 .../src/test/resources/packaging/utils/utils.bash | 11 ++++++++---
 15 files changed, 126 insertions(+), 50 deletions(-)

diff --git a/Vagrantfile b/Vagrantfile
index f008b339c3f..a818d666655 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -104,6 +104,12 @@ SOURCE_PROMPT
 source /etc/profile.d/elasticsearch_prompt.sh
 SOURCE_PROMPT
       SHELL
+      # Creates a file to mark the machine as created by vagrant. Tests check
+      # for this file and refuse to run if it is not present so that they can't
+      # be run unexpectedly.
+      config.vm.provision "markerfile", type: "shell", inline: <<-SHELL
+        touch /etc/is_vagrant_vm
+      SHELL
     end
     config.config_procs.push ['2', set_prompt]
   end
@@ -263,7 +269,7 @@ def provision(config,
       echo "==> Installing Gradle"
       curl -sS -o /tmp/gradle.zip -L https://services.gradle.org/distributions/gradle-3.3-bin.zip
       unzip /tmp/gradle.zip -d /opt
-      rm -rf /tmp/gradle.zip 
+      rm -rf /tmp/gradle.zip
       ln -s /opt/gradle-3.3/bin/gradle /usr/bin/gradle
       # make nfs mounted gradle home dir writeable
       chown vagrant:vagrant /home/vagrant/.gradle
diff --git a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats
index 2d502084a4a..9c5ca24d0a0 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats
@@ -3,9 +3,14 @@
 # This file is used to test the tar gz package.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats
index b7e925f2899..b07825d7f2f 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats
@@ -4,9 +4,14 @@
 # of a Debian package.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats
index effd7e27ae9..b92b692401e 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats
@@ -3,9 +3,14 @@
 # This file is used to test the installation of a RPM package.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats
index 10897753ab3..b46fd6786bb 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats
@@ -3,9 +3,14 @@
 # This file is used to test the elasticsearch Systemd setup.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats b/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats
index 70db8744456..fa6f91498ca 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats
@@ -3,9 +3,14 @@
 # This file is used to test the elasticsearch init.d scripts.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
@@ -60,11 +65,11 @@ setup() {
 @test "[INIT.D] elasticsearch fails if startup script is not executable" {
     local INIT="/etc/init.d/elasticsearch"
     local DAEMON="$ESHOME/bin/elasticsearch"
-    
+
     sudo chmod -x "$DAEMON"
     run "$INIT"
     sudo chmod +x "$DAEMON"
-    
+
     [ "$status" -eq 1 ]
     [[ "$output" == *"The elasticsearch startup script does not exists or it is not executable, tried: $DAEMON"* ]]
 }
diff --git a/qa/vagrant/src/test/resources/packaging/tests/75_bad_data_paths.bats b/qa/vagrant/src/test/resources/packaging/tests/75_bad_data_paths.bats
index 0f802a439b7..59747bd6837 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/75_bad_data_paths.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/75_bad_data_paths.bats
@@ -4,9 +4,14 @@
 # default.data.path setting into the data.path even when it doesn't belong.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats b/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats
index 9f5bf9c5024..f402305156b 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats
@@ -5,9 +5,14 @@
 # fancy rolling restarts.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats b/qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats
index 4dd682efbdd..816247f95f9 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/90_reinstall.bats
@@ -5,9 +5,14 @@
 # fancy rolling restarts.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash
index 64797a33f57..31b9ca4f3fd 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash
+++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash
@@ -5,9 +5,14 @@
 # rpm, and deb.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # The test case can be executed with the Bash Automated
 # Testing System tool available at https://github.com/sstephenson/bats
diff --git a/qa/vagrant/src/test/resources/packaging/utils/modules.bash b/qa/vagrant/src/test/resources/packaging/utils/modules.bash
index 2e80fd648f3..9691fd74701 100644
--- a/qa/vagrant/src/test/resources/packaging/utils/modules.bash
+++ b/qa/vagrant/src/test/resources/packaging/utils/modules.bash
@@ -4,9 +4,14 @@
 # the .deb/.rpm packages and the SysV/Systemd scripts.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # Licensed to Elasticsearch under one or more contributor
 # license agreements. See the NOTICE file distributed with
@@ -41,4 +46,4 @@ check_module() {
 
 check_secure_module() {
     check_module "$@" plugin-security.policy
-}
\ No newline at end of file
+}
diff --git a/qa/vagrant/src/test/resources/packaging/utils/packages.bash b/qa/vagrant/src/test/resources/packaging/utils/packages.bash
index 700c1c66185..dc1842c76f7 100644
--- a/qa/vagrant/src/test/resources/packaging/utils/packages.bash
+++ b/qa/vagrant/src/test/resources/packaging/utils/packages.bash
@@ -4,9 +4,14 @@
 # the .deb/.rpm packages.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # Licensed to Elasticsearch under one or more contributor
 # license agreements. See the NOTICE file distributed with
diff --git a/qa/vagrant/src/test/resources/packaging/utils/plugins.bash b/qa/vagrant/src/test/resources/packaging/utils/plugins.bash
index 55e7fdfc484..d6a4f389fd0 100644
--- a/qa/vagrant/src/test/resources/packaging/utils/plugins.bash
+++ b/qa/vagrant/src/test/resources/packaging/utils/plugins.bash
@@ -1,12 +1,17 @@
 #!/bin/bash
 
-# This file contains some utilities to test the elasticsearch scripts,
-# the .deb/.rpm packages and the SysV/Systemd scripts.
+# This file contains some utilities to test the elasticsearch
+# plugin installation and uninstallation process.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # Licensed to Elasticsearch under one or more contributor
 # license agreements. See the NOTICE file distributed with
diff --git a/qa/vagrant/src/test/resources/packaging/utils/tar.bash b/qa/vagrant/src/test/resources/packaging/utils/tar.bash
index b5edebaf41c..5a34cb5b9fd 100644
--- a/qa/vagrant/src/test/resources/packaging/utils/tar.bash
+++ b/qa/vagrant/src/test/resources/packaging/utils/tar.bash
@@ -1,12 +1,17 @@
 #!/bin/bash
 
-# This file contains some utilities to test the elasticsearch scripts,
-# the .deb/.rpm packages and the SysV/Systemd scripts.
+# This file contains some utilities to test the elasticsearch
+# tar distribution.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # Licensed to Elasticsearch under one or more contributor
 # license agreements. See the NOTICE file distributed with
diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash
index 877f49b576d..bb0ebbf1814 100644
--- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash
+++ b/qa/vagrant/src/test/resources/packaging/utils/utils.bash
@@ -4,9 +4,14 @@
 # the .deb/.rpm packages and the SysV/Systemd scripts.
 
 # WARNING: This testing file must be executed as root and can
-# dramatically change your system. It removes the 'elasticsearch'
-# user/group and also many directories. Do not execute this file
-# unless you know exactly what you are doing.
+# dramatically change your system. It should only be executed
+# in a throw-away VM like those made by the Vagrantfile at
+# the root of the Elasticsearch source code. This should
+# cause the script to fail if it is executed any other way:
+[ -f /etc/is_vagrant_vm ] || {
+  >&2 echo "must be run on a vagrant VM"
+  exit 1
+}
 
 # Licensed to Elasticsearch under one or more contributor
 # license agreements. See the NOTICE file distributed with

From db1b243343422b63f1ab01f2a4a6f7f0e95496ef Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?= <christoph@elastic.co>
Date: Wed, 26 Apr 2017 19:15:48 +0200
Subject: [PATCH 23/34] InternalPercentilesBucket should not rely on ordered
 percents array (#24336)

Currently InternalPercentilesBucket#percentile() relies on the percent array passed in
to be in sorted order. This changes the aggregation to store an internal lookup table that
is constructed from the percent/percentiles arrays passed in that can be used to look up
the percentile values.

Closes #24331
---
 .../percentile/InternalPercentilesBucket.java | 32 ++++++-
 .../InternalPercentilesTestCase.java          |  2 +-
 .../InternalPercentilesBucketTests.java       | 92 +++++++++++++++++++
 3 files changed, 122 insertions(+), 4 deletions(-)
 create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java

diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java
index 375011c4e8e..a250769f685 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java
@@ -31,21 +31,35 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket {
     private double[] percentiles;
     private double[] percents;
+    private final transient Map<Double, Double> percentileLookups = new HashMap<>();
 
     public InternalPercentilesBucket(String name, double[] percents, double[] percentiles,
                                      DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
                                      Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
+        if ((percentiles.length == percents.length) == false) {
+            throw new IllegalArgumentException("The number of provided percents and percentiles didn't match. percents: "
+                    + Arrays.toString(percents) + ", percentiles: " + Arrays.toString(percentiles));
+        }
         this.format = formatter;
         this.percentiles = percentiles;
         this.percents = percents;
+        computeLookup();
+    }
+
+    private void computeLookup() {
+        for (int i = 0; i < percents.length; i++) {
+            percentileLookups.put(percents[i], percentiles[i]);
+        }
     }
 
     /**
@@ -56,6 +70,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
         format = in.readNamedWriteable(DocValueFormat.class);
         percentiles = in.readDoubleArray();
         percents = in.readDoubleArray();
+        computeLookup();
     }
 
     @Override
@@ -72,12 +87,12 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
 
     @Override
     public double percentile(double percent) throws IllegalArgumentException {
-        int index = Arrays.binarySearch(percents, percent);
-        if (index < 0) {
+        Double percentile = percentileLookups.get(percent);
+        if (percentile == null) {
             throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" +
                     " one of the computed percentiles.  Available keys are: " + Arrays.toString(percents));
         }
-        return percentiles[index];
+        return percentile;
     }
 
     @Override
@@ -116,6 +131,17 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
         return builder;
     }
 
+    @Override
+    protected boolean doEquals(Object obj) {
+        InternalPercentilesBucket that = (InternalPercentilesBucket) obj;
+        return Arrays.equals(percents, that.percents) && Arrays.equals(percentiles, that.percentiles);
+    }
+
+    @Override
+    protected int doHashCode() {
+        return Objects.hash(Arrays.hashCode(percents), Arrays.hashCode(percentiles));
+    }
+
     public static class Iter implements Iterator<Percentile> {
 
         private final double[] percents;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java
index eb26c792eb8..e94cf753205 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java
@@ -50,7 +50,7 @@ public abstract class InternalPercentilesTestCase<T extends InternalAggregation>
     protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData,
                                                boolean keyed, DocValueFormat format, double[] percents, double[] values);
 
-    protected static double[] randomPercents() {
+    public static double[] randomPercents() {
         List<Double> randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d);
         double[] percents = new double[randomCdfValues.size()];
         for (int i = 0; i < randomCdfValues.size(); i++) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java
new file mode 100644
index 00000000000..0ea28f98384
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
+import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase.randomPercents;
+
+public class InternalPercentilesBucketTests extends InternalAggregationTestCase<InternalPercentilesBucket> {
+
+    @Override
+    protected InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) {
+        return createTestInstance(name, pipelineAggregators, metaData, randomPercents());
+    }
+
+    private static InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData, double[] percents) {
+        DocValueFormat format = randomNumericDocValueFormat();
+        final double[] percentiles = new double[percents.length];
+        for (int i = 0; i < percents.length; ++i) {
+            percentiles[i] = frequently() ? randomDouble() : Double.NaN;
+        }
+        return new InternalPercentilesBucket(name, percents, percentiles, format, pipelineAggregators, metaData);
+    }
+
+    @Override
+    public void testReduceRandom() {
+        expectThrows(UnsupportedOperationException.class,
+                () -> createTestInstance("name", Collections.emptyList(), null).reduce(null, null));
+    }
+
+    @Override
+    protected void assertReduced(InternalPercentilesBucket reduced, List<InternalPercentilesBucket> inputs) {
+        // no test since reduce operation is unsupported
+    }
+
+    @Override
+    protected Writeable.Reader<InternalPercentilesBucket> instanceReader() {
+        return InternalPercentilesBucket::new;
+    }
+
+    /**
+     * check that we don't rely on the percent array order and that the iterator returns the values in the original order
+     */
+    public void testPercentOrder() {
+        final double[] percents =  new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 };
+        InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), percents);
+        Iterator<Percentile> iterator = aggregation.iterator();
+        for (double percent : percents) {
+            assertTrue(iterator.hasNext());
+            Percentile percentile = iterator.next();
+            assertEquals(percent, percentile.getPercent(), 0.0d);
+            assertEquals(aggregation.percentile(percent), percentile.getValue(), 0.0d);
+        }
+    }
+
+    public void testErrorOnDifferentArgumentSize() {
+        final double[] percents =  new double[]{ 0.1, 0.2, 0.3};
+        final double[] percentiles =  new double[]{ 0.10, 0.2};
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents,
+                percentiles, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()));
+        assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]",
+                e.getMessage());
+    }
+}

From 7c3efb829bf73625a8a63b3a122db0b067360ed1 Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Wed, 26 Apr 2017 13:25:34 -0400
Subject: [PATCH 24/34] Move char filters into analysis-common (#24261)

Another step down the road to dropping the
lucene-analyzers-common dependency from core.

Note that this removes some tests that no longer compile from
core. I played around with adding them to the analysis-common
module where they would compile but we already test these in
the tests generated from the example usage in the documentation.

I'm not super happy with the way that `requriesAnalysisSettings`
works with regards to plugins. I think it'd be fairly bug-prone
for plugin authors to use. But I'm making it visible as is for
now and I'll rethink later.

A part of #23658
---
 .../index/analysis/CharFilterFactory.java     |   2 +-
 .../indices/analysis/AnalysisModule.java      |  21 +-
 .../elasticsearch/plugins/AnalysisPlugin.java |  33 +++-
 .../index/analysis/AnalysisRegistryTests.java |   2 -
 .../index/analysis/CharFilterTests.java       |  86 --------
 .../index/analysis/CustomNormalizerTests.java |  52 ++++-
 .../indices/analysis/AnalysisModuleTests.java |  29 +--
 .../indices/analyze/AnalyzeActionIT.java      | 183 ------------------
 .../elasticsearch/index/analysis/test1.json   |  28 ---
 .../elasticsearch/index/analysis/test1.yml    |  21 --
 .../analysis/common/CommonAnalysisPlugin.java |  14 ++
 .../common}/MappingCharFilterFactory.java     |   5 +-
 .../PatternReplaceCharFilterFactory.java      |   4 +-
 .../analysis/HtmlStripCharFilterFactory.java  |   0
 .../common/CommonAnalysisFactoryTests.java    |  11 +-
 .../test/analysis-common/20_analyzers.yaml    |  18 ++
 .../analysis-common/40_token_filters.yaml     |  54 ++++++
 .../test/analysis-common/50_char_filters.yaml |  48 ++++-
 .../AnalysisFactoryTestCase.java              |   9 +-
 19 files changed, 241 insertions(+), 379 deletions(-)
 delete mode 100644 core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
 rename {core/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/MappingCharFilterFactory.java (95%)
 rename {core/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/PatternReplaceCharFilterFactory.java (92%)
 rename {core => modules/analysis-common}/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java (100%)

diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CharFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/CharFilterFactory.java
index 68692c89469..6f85615c95e 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CharFilterFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CharFilterFactory.java
@@ -25,5 +25,5 @@ public interface CharFilterFactory {
 
     String name();
 
-    Reader create(Reader tokenStream);
+    Reader create(Reader reader);
 }
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java
index c494c4cae9c..26a4e4c1c5c 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java
@@ -69,7 +69,6 @@ import org.elasticsearch.index.analysis.GermanStemTokenFilterFactory;
 import org.elasticsearch.index.analysis.GreekAnalyzerProvider;
 import org.elasticsearch.index.analysis.HindiAnalyzerProvider;
 import org.elasticsearch.index.analysis.HindiNormalizationFilterFactory;
-import org.elasticsearch.index.analysis.HtmlStripCharFilterFactory;
 import org.elasticsearch.index.analysis.HungarianAnalyzerProvider;
 import org.elasticsearch.index.analysis.HunspellTokenFilterFactory;
 import org.elasticsearch.index.analysis.IndicNormalizationFilterFactory;
@@ -89,7 +88,6 @@ import org.elasticsearch.index.analysis.LimitTokenCountFilterFactory;
 import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider;
 import org.elasticsearch.index.analysis.LowerCaseTokenFilterFactory;
 import org.elasticsearch.index.analysis.LowerCaseTokenizerFactory;
-import org.elasticsearch.index.analysis.MappingCharFilterFactory;
 import org.elasticsearch.index.analysis.MinHashTokenFilterFactory;
 import org.elasticsearch.index.analysis.NGramTokenFilterFactory;
 import org.elasticsearch.index.analysis.NGramTokenizerFactory;
@@ -97,7 +95,6 @@ import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider;
 import org.elasticsearch.index.analysis.PathHierarchyTokenizerFactory;
 import org.elasticsearch.index.analysis.PatternAnalyzerProvider;
 import org.elasticsearch.index.analysis.PatternCaptureGroupTokenFilterFactory;
-import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
 import org.elasticsearch.index.analysis.PatternReplaceTokenFilterFactory;
 import org.elasticsearch.index.analysis.PatternTokenizerFactory;
 import org.elasticsearch.index.analysis.PersianAnalyzerProvider;
@@ -146,6 +143,8 @@ import org.elasticsearch.plugins.AnalysisPlugin;
 import java.io.IOException;
 import java.util.List;
 
+import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
+
 /**
  * Sets up {@link AnalysisRegistry}.
  */
@@ -184,9 +183,6 @@ public final class AnalysisModule {
 
     private NamedRegistry<AnalysisProvider<CharFilterFactory>> setupCharFilters(List<AnalysisPlugin> plugins) {
         NamedRegistry<AnalysisProvider<CharFilterFactory>> charFilters = new NamedRegistry<>("char_filter");
-        charFilters.register("html_strip", HtmlStripCharFilterFactory::new);
-        charFilters.register("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new));
-        charFilters.register("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new));
         charFilters.extractAndRegister(plugins, AnalysisPlugin::getCharFilters);
         return charFilters;
     }
@@ -340,19 +336,6 @@ public final class AnalysisModule {
         return normalizers;
     }
 
-    private static <T> AnalysisModule.AnalysisProvider<T> requriesAnalysisSettings(AnalysisModule.AnalysisProvider<T> provider) {
-        return new AnalysisModule.AnalysisProvider<T>() {
-            @Override
-            public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
-                return provider.get(indexSettings, environment, name, settings);
-            }
-
-            @Override
-            public boolean requiresAnalysisSettings() {
-                return true;
-            }
-        };
-    }
 
     /**
      * The basic factory interface for analysis components.
diff --git a/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java b/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java
index 8c23e530e49..5e7e1053add 100644
--- a/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java
+++ b/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java
@@ -23,12 +23,16 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharFilter;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.Tokenizer;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalyzerProvider;
 import org.elasticsearch.index.analysis.CharFilterFactory;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.analysis.TokenizerFactory;
 import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
 
+import java.io.IOException;
 import java.util.Map;
 
 import static java.util.Collections.emptyMap;
@@ -52,28 +56,32 @@ import static java.util.Collections.emptyMap;
  */
 public interface AnalysisPlugin {
     /**
-     * Override to add additional {@link CharFilter}s.
+     * Override to add additional {@link CharFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * for how to get the configuration from the index.
      */
     default Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
         return emptyMap();
     }
 
     /**
-     * Override to add additional {@link TokenFilter}s.
+     * Override to add additional {@link TokenFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * for how to get the configuration from the index.
      */
     default Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         return emptyMap();
     }
 
     /**
-     * Override to add additional {@link Tokenizer}s.
+     * Override to add additional {@link Tokenizer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * for how to get the configuration from the index.
      */
     default Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
         return emptyMap();
     }
 
     /**
-     * Override to add additional {@link Analyzer}s.
+     * Override to add additional {@link Analyzer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * for how to get the configuration from the index.
      */
     default Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
         return emptyMap();
@@ -85,4 +93,21 @@ public interface AnalysisPlugin {
     default Map<String, org.apache.lucene.analysis.hunspell.Dictionary> getHunspellDictionaries() {
         return emptyMap();
     }
+
+    /**
+     * Mark an {@link AnalysisProvider} as requiring the index's settings.
+     */
+    static <T> AnalysisProvider<T> requriesAnalysisSettings(AnalysisProvider<T> provider) {
+        return new AnalysisProvider<T>() {
+            @Override
+            public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
+                return provider.get(indexSettings, environment, name, settings);
+            }
+
+            @Override
+            public boolean requiresAnalysisSettings() {
+                return true;
+            }
+        };
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
index 0edd2fbe2c0..6033186c812 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
@@ -40,8 +40,6 @@ import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.Map;
 
 import static java.util.Collections.emptyMap;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
deleted file mode 100644
index f993cc1490c..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.test.ESTokenStreamTestCase;
-import org.elasticsearch.test.IndexSettingsModule;
-
-import static org.elasticsearch.test.ESTestCase.createTestAnalysis;
-
-public class CharFilterTests extends ESTokenStreamTestCase {
-    public void testMappingCharFilter() throws Exception {
-        Settings settings = Settings.builder()
-                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
-                .put("index.analysis.char_filter.my_mapping.type", "mapping")
-                .putArray("index.analysis.char_filter.my_mapping.mappings", "ph=>f", "qu=>q")
-                .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
-                .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping")
-                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
-                .build();
-        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
-        IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
-        NamedAnalyzer analyzer1 = indexAnalyzers.get("custom_with_char_filter");
-
-        assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"});
-
-        // Repeat one more time to make sure that char filter is reinitialized correctly
-        assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"});
-    }
-
-    public void testHtmlStripCharFilter() throws Exception {
-        Settings settings = Settings.builder()
-                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
-                .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
-                .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip")
-                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
-                .build();
-        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
-
-        IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
-        NamedAnalyzer analyzer1 = indexAnalyzers.get("custom_with_char_filter");
-
-        assertTokenStreamContents(analyzer1.tokenStream("test", "<b>hello</b>!"), new String[]{"hello"});
-
-        // Repeat one more time to make sure that char filter is reinitialized correctly
-        assertTokenStreamContents(analyzer1.tokenStream("test", "<b>hello</b>!"), new String[]{"hello"});
-    }
-
-    public void testPatternReplaceCharFilter() throws Exception {
-        Settings settings = Settings.builder()
-            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
-            .put("index.analysis.char_filter.my_mapping.type", "pattern_replace")
-            .put("index.analysis.char_filter.my_mapping.pattern", "ab*")
-            .put("index.analysis.char_filter.my_mapping.replacement", "oo")
-            .put("index.analysis.char_filter.my_mapping.flags", "CASE_INSENSITIVE")
-            .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
-            .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping")
-            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
-            .build();
-        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
-        IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
-        NamedAnalyzer analyzer1 = indexAnalyzers.get("custom_with_char_filter");
-
-        assertTokenStreamContents(analyzer1.tokenStream("test", "faBBbBB aBbbbBf"), new String[]{"foo", "oof"});
-    }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
index 3e71a609737..c6b58060996 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
@@ -22,13 +22,18 @@ package org.elasticsearch.index.analysis;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
+import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.ESTokenStreamTestCase;
 
 import java.io.IOException;
+import java.io.Reader;
+import java.util.Map;
+
+import static java.util.Collections.singletonMap;
 
 public class CustomNormalizerTests extends ESTokenStreamTestCase {
-
     public void testBasics() throws IOException {
         Settings settings = Settings.builder()
                 .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
@@ -66,12 +71,11 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
 
     public void testCharFilters() throws IOException {
         Settings settings = Settings.builder()
-                .put("index.analysis.char_filter.my_mapping.type", "mapping")
-                .putArray("index.analysis.char_filter.my_mapping.mappings", "a => z")
+                .put("index.analysis.char_filter.my_mapping.type", "mock_char_filter")
                 .putArray("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .build();
-        ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+        ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new MockCharFilterPlugin());
         assertNull(analysis.indexAnalyzers.get("my_normalizer"));
         NamedAnalyzer normalizer = analysis.indexAnalyzers.getNormalizer("my_normalizer");
         assertNotNull(normalizer);
@@ -99,4 +103,44 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
                 () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings));
         assertEquals("Custom normalizer [my_normalizer] may not use char filter [html_strip]", e.getMessage());
     }
+
+    private class MockCharFilterPlugin implements AnalysisPlugin {
+        @Override
+        public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
+            return singletonMap("mock_char_filter", (indexSettings, env, name, settings) -> {
+                class Factory implements CharFilterFactory, MultiTermAwareComponent {
+                    @Override
+                    public String name() {
+                        return name;
+                    }
+                    @Override
+                    public Reader create(Reader reader) {
+                        return new Reader() {
+
+                         @Override
+                         public int read(char[] cbuf, int off, int len) throws IOException {
+                             int result = reader.read(cbuf, off, len);
+                             for (int i = off; i < result; i++) {
+                                 if (cbuf[i] == 'a') {
+                                     cbuf[i] = 'z';
+                                 }
+                             }
+                             return result;
+                         }
+
+                         @Override
+                         public void close() throws IOException {
+                             reader.close();
+                         }
+                        };
+                    }
+                    @Override
+                    public Object getMultiTermComponent() {
+                        return this;
+                    }
+                }
+                return new Factory();
+            });
+        }
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
index 960d135371c..3d479ca2da2 100644
--- a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
@@ -32,7 +32,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.SimpleFSDirectory;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.inject.ModuleTestCase;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.env.Environment;
@@ -40,17 +39,17 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.Analysis;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.analysis.AnalysisTestsHelper;
+import org.elasticsearch.index.analysis.CharFilterFactory;
 import org.elasticsearch.index.analysis.CustomAnalyzer;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
-import org.elasticsearch.index.analysis.MappingCharFilterFactory;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
 import org.elasticsearch.index.analysis.StandardTokenizerFactory;
 import org.elasticsearch.index.analysis.StopTokenFilterFactory;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
 import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
 import org.elasticsearch.plugins.AnalysisPlugin;
+import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.test.VersionUtils;
 import org.hamcrest.MatcherAssert;
@@ -72,7 +71,7 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;
 
-public class AnalysisModuleTests extends ModuleTestCase {
+public class AnalysisModuleTests extends ESTestCase {
 
     public IndexAnalyzers getIndexAnalyzers(Settings settings) throws IOException {
         return getIndexAnalyzers(getNewRegistry(settings), settings);
@@ -90,6 +89,11 @@ public class AnalysisModuleTests extends ModuleTestCase {
                 public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
                     return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
                 }
+
+                @Override
+                public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
+                    return AnalysisPlugin.super.getCharFilters();
+                }
             })).getAnalysisRegistry();
         } catch (IOException e) {
             throw new RuntimeException(e);
@@ -184,29 +188,12 @@ public class AnalysisModuleTests extends ModuleTestCase {
         StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0];
         assertThat(stop1.stopWords().size(), equalTo(1));
 
-        analyzer = indexAnalyzers.get("custom2").analyzer();
-        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
-
         // verify position increment gap
         analyzer = indexAnalyzers.get("custom6").analyzer();
         assertThat(analyzer, instanceOf(CustomAnalyzer.class));
         CustomAnalyzer custom6 = (CustomAnalyzer) analyzer;
         assertThat(custom6.getPositionIncrementGap("any_string"), equalTo(256));
 
-        // verify characters  mapping
-        analyzer = indexAnalyzers.get("custom5").analyzer();
-        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
-        CustomAnalyzer custom5 = (CustomAnalyzer) analyzer;
-        assertThat(custom5.charFilters()[0], instanceOf(MappingCharFilterFactory.class));
-
-        // check custom pattern replace filter
-        analyzer = indexAnalyzers.get("custom3").analyzer();
-        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
-        CustomAnalyzer custom3 = (CustomAnalyzer) analyzer;
-        PatternReplaceCharFilterFactory patternReplaceCharFilterFactory = (PatternReplaceCharFilterFactory) custom3.charFilters()[0];
-        assertThat(patternReplaceCharFilterFactory.getPattern().pattern(), equalTo("sample(.*)"));
-        assertThat(patternReplaceCharFilterFactory.getReplacement(), equalTo("replacedSample $1"));
-
         // check custom class name (my)
         analyzer = indexAnalyzers.get("custom4").analyzer();
         assertThat(analyzer, instanceOf(CustomAnalyzer.class));
diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
index 4f9b13d91ce..ebfeb5f92d1 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
@@ -111,36 +111,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         assertThat(analyzeResponse.getTokens().get(0).getPositionLength(), equalTo(1));
     }
 
-    public void testAnalyzeWithCharFilters() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-                .setSettings(Settings.builder().put(indexSettings())
-                        .put("index.analysis.char_filter.custom_mapping.type", "mapping")
-                        .putArray("index.analysis.char_filter.custom_mapping.mappings", "ph=>f", "qu=>q")
-                        .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
-                        .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "custom_mapping")));
-        ensureGreen();
-
-        AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("<h2><b>THIS</b> IS A</h2> <a href=\"#\">TEST</a>").setTokenizer("standard").addCharFilter("html_strip").get();
-        assertThat(analyzeResponse.getTokens().size(), equalTo(4));
-
-        analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A <b>TEST</b>").setTokenizer("keyword").addTokenFilter("lowercase").addCharFilter("html_strip").get();
-        assertThat(analyzeResponse.getTokens().size(), equalTo(1));
-        assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("this is a test"));
-
-        analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "jeff quit phish").setTokenizer("keyword").addTokenFilter("lowercase").addCharFilter("custom_mapping").get();
-        assertThat(analyzeResponse.getTokens().size(), equalTo(1));
-        assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("jeff qit fish"));
-
-        analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "<a href=\"#\">jeff quit fish</a>").setTokenizer("standard").addCharFilter("html_strip").addCharFilter("custom_mapping").get();
-        assertThat(analyzeResponse.getTokens().size(), equalTo(3));
-        AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
-        assertThat(token.getTerm(), equalTo("jeff"));
-        token = analyzeResponse.getTokens().get(1);
-        assertThat(token.getTerm(), equalTo("qit"));
-        token = analyzeResponse.getTokens().get(2);
-        assertThat(token.getTerm(), equalTo("fish"));
-    }
-
     public void testAnalyzeWithNonDefaultPostionLength() throws Exception {
         assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
             .setSettings(Settings.builder().put(indexSettings())
@@ -263,46 +233,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         assertThat(token.getPositionLength(), equalTo(1));
     }
 
-    public void testDetailAnalyze() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-            .setSettings(
-                    Settings.builder()
-                    .put("index.analysis.char_filter.my_mapping.type", "mapping")
-                    .putArray("index.analysis.char_filter.my_mapping.mappings", "PH=>F")
-                    .put("index.analysis.analyzer.test_analyzer.type", "custom")
-                    .put("index.analysis.analyzer.test_analyzer.position_increment_gap", "100")
-                    .put("index.analysis.analyzer.test_analyzer.tokenizer", "standard")
-                    .putArray("index.analysis.analyzer.test_analyzer.char_filter", "my_mapping")
-                    .putArray("index.analysis.analyzer.test_analyzer.filter", "snowball")));
-        ensureGreen();
-
-        for (int i = 0; i < 10; i++) {
-            AnalyzeResponse analyzeResponse = admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText("THIS IS A PHISH")
-                .setExplain(true).addCharFilter("my_mapping").setTokenizer("keyword").addTokenFilter("lowercase").get();
-
-            assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue());
-            //charfilters
-            assertThat(analyzeResponse.detail().charfilters().length, equalTo(1));
-            assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("my_mapping"));
-            assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1));
-            assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("THIS IS A FISH"));
-            //tokenizer
-            assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("keyword"));
-            assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(1));
-            assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("THIS IS A FISH"));
-            assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0));
-            assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(15));
-            //tokenfilters
-            assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1));
-            assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("lowercase"));
-            assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(1));
-            assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getTerm(), equalTo("this is a fish"));
-            assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getPosition(), equalTo(0));
-            assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getStartOffset(), equalTo(0));
-            assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getEndOffset(), equalTo(15));
-        }
-    }
-
     public void testDetailAnalyzeWithNoIndex() throws Exception {
         //analyzer only
         AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST")
@@ -414,90 +344,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         assertThat(token.getPositionLength(), equalTo(1));
     }
 
-    public void testDetailAnalyzeWithMultiValuesWithCustomAnalyzer() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-            .setSettings(
-                    Settings.builder()
-                    .put("index.analysis.char_filter.my_mapping.type", "mapping")
-                    .putArray("index.analysis.char_filter.my_mapping.mappings", "PH=>F")
-                    .put("index.analysis.analyzer.test_analyzer.type", "custom")
-                    .put("index.analysis.analyzer.test_analyzer.position_increment_gap", "100")
-                    .put("index.analysis.analyzer.test_analyzer.tokenizer", "standard")
-                    .putArray("index.analysis.analyzer.test_analyzer.char_filter", "my_mapping")
-                    .putArray("index.analysis.analyzer.test_analyzer.filter", "snowball", "lowercase")));
-        ensureGreen();
-
-        client().admin().indices().preparePutMapping("test")
-            .setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get();
-
-        //only analyzer =
-        String[] texts = new String[]{"this is a PHISH", "the troubled text"};
-        AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts)
-            .setExplain(true).setAnalyzer("test_analyzer").setText(texts).execute().get();
-
-        // charfilter
-        assertThat(analyzeResponse.detail().charfilters().length, equalTo(1));
-        assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("my_mapping"));
-        assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(2));
-        assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("this is a FISH"));
-        assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[1], equalTo("the troubled text"));
-
-        // tokenizer
-        assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("standard"));
-        assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(7));
-        AnalyzeResponse.AnalyzeToken token = analyzeResponse.detail().tokenizer().getTokens()[3];
-
-        assertThat(token.getTerm(), equalTo("FISH"));
-        assertThat(token.getPosition(), equalTo(3));
-        assertThat(token.getStartOffset(), equalTo(10));
-        assertThat(token.getEndOffset(), equalTo(15));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.detail().tokenizer().getTokens()[5];
-        assertThat(token.getTerm(), equalTo("troubled"));
-        assertThat(token.getPosition(), equalTo(105));
-        assertThat(token.getStartOffset(), equalTo(20));
-        assertThat(token.getEndOffset(), equalTo(28));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        // tokenfilter(snowball)
-        assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(2));
-        assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball"));
-        assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(7));
-        token = analyzeResponse.detail().tokenfilters()[0].getTokens()[3];
-
-        assertThat(token.getTerm(), equalTo("FISH"));
-        assertThat(token.getPosition(), equalTo(3));
-        assertThat(token.getStartOffset(), equalTo(10));
-        assertThat(token.getEndOffset(), equalTo(15));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.detail().tokenfilters()[0].getTokens()[5];
-        assertThat(token.getTerm(), equalTo("troubl"));
-        assertThat(token.getPosition(), equalTo(105));
-        assertThat(token.getStartOffset(), equalTo(20));
-        assertThat(token.getEndOffset(), equalTo(28));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        // tokenfilter(lowercase)
-        assertThat(analyzeResponse.detail().tokenfilters()[1].getName(), equalTo("lowercase"));
-        assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens().length, equalTo(7));
-        token = analyzeResponse.detail().tokenfilters()[1].getTokens()[3];
-
-        assertThat(token.getTerm(), equalTo("fish"));
-        assertThat(token.getPosition(), equalTo(3));
-        assertThat(token.getStartOffset(), equalTo(10));
-        assertThat(token.getEndOffset(), equalTo(15));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.detail().tokenfilters()[0].getTokens()[5];
-        assertThat(token.getTerm(), equalTo("troubl"));
-        assertThat(token.getPosition(), equalTo(105));
-        assertThat(token.getStartOffset(), equalTo(20));
-        assertThat(token.getEndOffset(), equalTo(28));
-        assertThat(token.getPositionLength(), equalTo(1));
-    }
-
     public void testNonExistTokenizer() {
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
             () -> client().admin().indices()
@@ -575,35 +421,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens()[0].getPositionLength(), equalTo(1));
     }
 
-
-    public void testCustomCharFilterInRequest() throws Exception {
-        Map<String, Object> charFilterSettings = new HashMap<>();
-        charFilterSettings.put("type", "mapping");
-        charFilterSettings.put("mappings", new String[]{"ph => f", "qu => q"});
-        AnalyzeResponse analyzeResponse = client().admin().indices()
-            .prepareAnalyze()
-            .setText("jeff quit phish")
-            .setTokenizer("keyword")
-            .addCharFilter(charFilterSettings)
-            .setExplain(true)
-            .get();
-
-        assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue());
-        //charfilters
-        assertThat(analyzeResponse.detail().charfilters().length, equalTo(1));
-        assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("_anonymous_charfilter_[0]"));
-        assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1));
-        assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("jeff qit fish"));
-        //tokenizer
-        assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("keyword"));
-        assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(1));
-        assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("jeff qit fish"));
-        assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0));
-        assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(15));
-        assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getPositionLength(), equalTo(1));
-    }
-
-
     public void testCustomTokenizerInRequest() throws Exception {
         Map<String, Object> tokenizerSettings = new HashMap<>();
         tokenizerSettings.put("type", "nGram");
diff --git a/core/src/test/resources/org/elasticsearch/index/analysis/test1.json b/core/src/test/resources/org/elasticsearch/index/analysis/test1.json
index 2244ea4361c..38937a9b5af 100644
--- a/core/src/test/resources/org/elasticsearch/index/analysis/test1.json
+++ b/core/src/test/resources/org/elasticsearch/index/analysis/test1.json
@@ -6,22 +6,6 @@
                     "type":"standard"
                 }
             },
-            "char_filter":{
-                "my_html":{
-                    "type":"html_strip",
-                    "escaped_tags":["xxx", "yyy"],
-                    "read_ahead":1024
-                },
-                "my_pattern":{
-                    "type":"pattern_replace",
-                    "pattern":"sample(.*)",
-                    "replacement":"replacedSample $1"
-                },
-                "my_mapping":{
-                    "type":"mapping",
-                    "mappings":["ph=>f", "qu=>q"]
-                }
-            },
             "filter":{
                 "stop":{
                     "type":"stop",
@@ -48,22 +32,10 @@
                     "tokenizer":"standard",
                     "filter":["stop", "stop2"]
                 },
-                "custom2":{
-                    "tokenizer":"standard",
-                    "char_filter":["html_strip", "my_html"]
-                },
-                "custom3":{
-                    "tokenizer":"standard",
-                    "char_filter":["my_pattern"]
-                },
                 "custom4":{
                     "tokenizer":"standard",
                     "filter":["my"]
                 },
-                "custom5":{
-                    "tokenizer":"standard",
-                    "char_filter":["my_mapping"]
-                },
                 "custom6":{
                     "tokenizer":"standard",
                     "position_increment_gap": 256
diff --git a/core/src/test/resources/org/elasticsearch/index/analysis/test1.yml b/core/src/test/resources/org/elasticsearch/index/analysis/test1.yml
index afcdb9a88ef..f7a57d14dbe 100644
--- a/core/src/test/resources/org/elasticsearch/index/analysis/test1.yml
+++ b/core/src/test/resources/org/elasticsearch/index/analysis/test1.yml
@@ -3,18 +3,6 @@ index :
     tokenizer :
       standard :
         type : standard
-    char_filter :
-      my_html :
-        type : html_strip
-        escaped_tags : [xxx, yyy]
-        read_ahead : 1024
-      my_pattern :
-        type: pattern_replace
-        pattern: sample(.*)
-        replacement: replacedSample $1
-      my_mapping :
-        type : mapping
-        mappings : [ph=>f, qu=>q]
     filter :
       stop :
         type : stop
@@ -34,18 +22,9 @@ index :
       custom1 :
         tokenizer : standard
         filter : [stop, stop2]
-      custom2 :
-        tokenizer : standard
-        char_filter : [html_strip, my_html]
-      custom3 :
-        tokenizer : standard
-        char_filter : [my_pattern]
       custom4 :
         tokenizer : standard
         filter : [my]
-      custom5 :
-        tokenizer : standard
-        char_filter : [my_mapping]
       custom6 :
         tokenizer : standard
         position_increment_gap: 256
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
index bfd1bbdcc97..e17df4b4463 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
@@ -19,6 +19,8 @@
 
 package org.elasticsearch.analysis.common;
 
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.HtmlStripCharFilterFactory;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
 import org.elasticsearch.plugins.AnalysisPlugin;
@@ -26,6 +28,9 @@ import org.elasticsearch.plugins.Plugin;
 
 import java.util.HashMap;
 import java.util.Map;
+import java.util.TreeMap;
+
+import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
 
 public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
     @Override
@@ -36,4 +41,13 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
         filters.put("word_delimiter_graph", WordDelimiterGraphTokenFilterFactory::new);
         return filters;
     }
+
+    @Override
+    public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
+        Map<String, AnalysisProvider<CharFilterFactory>> filters = new TreeMap<>();
+        filters.put("html_strip", HtmlStripCharFilterFactory::new);
+        filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new));
+        filters.put("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new));
+        return filters;
+    }
 }
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/MappingCharFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MappingCharFilterFactory.java
similarity index 95%
rename from core/src/main/java/org/elasticsearch/index/analysis/MappingCharFilterFactory.java
rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MappingCharFilterFactory.java
index c5a4e4bbdcc..a4ba9ece547 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/MappingCharFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MappingCharFilterFactory.java
@@ -17,13 +17,16 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.charfilter.MappingCharFilter;
 import org.apache.lucene.analysis.charfilter.NormalizeCharMap;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractCharFilterFactory;
+import org.elasticsearch.index.analysis.Analysis;
+import org.elasticsearch.index.analysis.MultiTermAwareComponent;
 
 import java.io.Reader;
 import java.util.List;
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceCharFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceCharFilterFactory.java
similarity index 92%
rename from core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceCharFilterFactory.java
rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceCharFilterFactory.java
index 2562f20373b..b243618b53f 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/PatternReplaceCharFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceCharFilterFactory.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import java.io.Reader;
 import java.util.regex.Pattern;
@@ -27,6 +27,8 @@ import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractCharFilterFactory;
+import org.elasticsearch.index.analysis.MultiTermAwareComponent;
 
 public class PatternReplaceCharFilterFactory extends AbstractCharFilterFactory implements MultiTermAwareComponent {
 
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java
similarity index 100%
rename from core/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java
rename to modules/analysis-common/src/main/java/org/elasticsearch/index/analysis/HtmlStripCharFilterFactory.java
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
index 886dad37b56..78522f3b6f3 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
@@ -20,10 +20,12 @@
 package org.elasticsearch.analysis.common;
 
 import org.elasticsearch.AnalysisFactoryTestCase;
+import org.elasticsearch.index.analysis.HtmlStripCharFilterFactory;
 
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 
 import static java.util.Collections.emptyList;
 import static java.util.stream.Collectors.toList;
@@ -46,7 +48,14 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
 
     @Override
     protected Map<String, Class<?>> getCharFilters() {
-        Map<String, Class<?>> filters = new HashMap<>(super.getCharFilters());
+        Map<String, Class<?>> filters = new TreeMap<>(super.getCharFilters());
+        filters.put("htmlstrip",      HtmlStripCharFilterFactory.class);
+        filters.put("mapping",        MappingCharFilterFactory.class);
+        filters.put("patternreplace", PatternReplaceCharFilterFactory.class);
+
+        // TODO: these charfilters are not yet exposed: useful?
+        // handling of zwnj for persian
+        filters.put("persian",        Void.class);
         return filters;
     }
 
diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yaml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yaml
index 9fb34e7a821..abd9a3f5ae0 100644
--- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yaml
+++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yaml
@@ -9,3 +9,21 @@
     - length: { tokens: 2 }
     - match:  { tokens.0.token: Foo }
     - match:  { tokens.1.token: Bar! }
+
+    - do:
+        indices.analyze:
+          body:
+            text:     Foo Bar!
+            explain:  true
+            analyzer: whitespace
+    - match:  { detail.custom_analyzer: false }
+    - match:  { detail.analyzer.name: whitespace }
+    - length: { detail.analyzer.tokens: 2 }
+    - match:  { detail.analyzer.tokens.0.token: Foo }
+    - match:  { detail.analyzer.tokens.0.start_offset: 0 }
+    - match:  { detail.analyzer.tokens.0.end_offset: 3 }
+    - match:  { detail.analyzer.tokens.0.position: 0 }
+    - match:  { detail.analyzer.tokens.1.token: Bar! }
+    - match:  { detail.analyzer.tokens.1.start_offset: 4 }
+    - match:  { detail.analyzer.tokens.1.end_offset: 8 }
+    - match:  { detail.analyzer.tokens.1.position: 1 }
diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yaml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yaml
index ac5bcb82e57..0666a31623b 100644
--- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yaml
+++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yaml
@@ -80,3 +80,57 @@
     - match:  { tokens.1.token: qu1ck }
     - match:  { tokens.2.token: brown }
     - match:  { tokens.3.token: fox }
+
+    - do:
+        indices.analyze:
+          body:
+            text:      the qu1ck brown fox
+            explain:   true
+            tokenizer: standard
+            filter:    [word_delimiter_graph]
+    - match:  { detail.custom_analyzer: true }
+    - match:  { detail.tokenizer.name: standard }
+    - length: { detail.tokenizer.tokens: 4 }
+    - match:  { detail.tokenizer.tokens.0.token: the }
+    - match:  { detail.tokenizer.tokens.0.start_offset: 0 }
+    - match:  { detail.tokenizer.tokens.0.end_offset: 3 }
+    - match:  { detail.tokenizer.tokens.0.position: 0 }
+    - match:  { detail.tokenizer.tokens.1.token: qu1ck }
+    - match:  { detail.tokenizer.tokens.1.start_offset: 4 }
+    - match:  { detail.tokenizer.tokens.1.end_offset: 9 }
+    - match:  { detail.tokenizer.tokens.1.position: 1 }
+    - match:  { detail.tokenizer.tokens.2.token: brown }
+    - match:  { detail.tokenizer.tokens.2.start_offset: 10 }
+    - match:  { detail.tokenizer.tokens.2.end_offset: 15 }
+    - match:  { detail.tokenizer.tokens.2.position: 2 }
+    - match:  { detail.tokenizer.tokens.3.token: fox }
+    - match:  { detail.tokenizer.tokens.3.start_offset: 16 }
+    - match:  { detail.tokenizer.tokens.3.end_offset: 19 }
+    - match:  { detail.tokenizer.tokens.3.position: 3 }
+    - length: { detail.tokenfilters: 1 }
+    - match:  { detail.tokenfilters.0.name: word_delimiter_graph }
+    - length: { detail.tokenfilters.0.tokens: 6 }
+    - match:  { detail.tokenfilters.0.tokens.0.token: the }
+    - match:  { detail.tokenfilters.0.tokens.0.start_offset: 0 }
+    - match:  { detail.tokenfilters.0.tokens.0.end_offset: 3 }
+    - match:  { detail.tokenfilters.0.tokens.0.position: 0 }
+    - match:  { detail.tokenfilters.0.tokens.1.token: qu }
+    - match:  { detail.tokenfilters.0.tokens.1.start_offset: 4 }
+    - match:  { detail.tokenfilters.0.tokens.1.end_offset: 6 }
+    - match:  { detail.tokenfilters.0.tokens.1.position: 1 }
+    - match:  { detail.tokenfilters.0.tokens.2.token: "1" }
+    - match:  { detail.tokenfilters.0.tokens.2.start_offset: 6 }
+    - match:  { detail.tokenfilters.0.tokens.2.end_offset: 7 }
+    - match:  { detail.tokenfilters.0.tokens.2.position: 2 }
+    - match:  { detail.tokenfilters.0.tokens.3.token: ck }
+    - match:  { detail.tokenfilters.0.tokens.3.start_offset: 7 }
+    - match:  { detail.tokenfilters.0.tokens.3.end_offset: 9 }
+    - match:  { detail.tokenfilters.0.tokens.3.position: 3 }
+    - match:  { detail.tokenfilters.0.tokens.4.token: brown }
+    - match:  { detail.tokenfilters.0.tokens.4.start_offset: 10 }
+    - match:  { detail.tokenfilters.0.tokens.4.end_offset: 15 }
+    - match:  { detail.tokenfilters.0.tokens.4.position: 4 }
+    - match:  { detail.tokenfilters.0.tokens.5.token: fox }
+    - match:  { detail.tokenfilters.0.tokens.5.start_offset: 16 }
+    - match:  { detail.tokenfilters.0.tokens.5.end_offset: 19 }
+    - match:  { detail.tokenfilters.0.tokens.5.position: 5 }
diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/50_char_filters.yaml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/50_char_filters.yaml
index 06775a2a722..67e68428c07 100644
--- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/50_char_filters.yaml
+++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/50_char_filters.yaml
@@ -1,5 +1,33 @@
-## Smoke tests for analyzers included in the analysis-common module
+## Smoke tests for char filters included in the analysis-common module
 
+"html_strip":
+    - do:
+        indices.analyze:
+          body:
+            text: <html>test<yyy>foo</yyy></html>
+            tokenizer: keyword
+            char_filter:
+              - type: html_strip
+                escaped_tags: ["xxx", "yyy"]
+                read_ahead: 1024
+    - length: { tokens: 1 }
+    - match:  { tokens.0.token: "\ntest<yyy>foo</yyy>\n" }
+
+---
+"pattern_replace":
+    - do:
+        indices.analyze:
+          body:
+            text: sample6 sample1
+            tokenizer: keyword
+            char_filter:
+              - type: pattern_replace
+                pattern: sample(.*)
+                replacement: replacedSample $1
+    - length: { tokens: 1 }
+    - match:  { tokens.0.token: "replacedSample 6 sample1" }
+
+---
 "mapping":
     - do:
         indices.analyze:
@@ -11,3 +39,21 @@
                 mappings: ["ph => f", "qu => q"]
     - length: { tokens: 1 }
     - match:  { tokens.0.token: "jeff qit fish" }
+
+    - do:
+        indices.analyze:
+          body:
+            text: jeff quit phish
+            explain: true
+            tokenizer: keyword
+            char_filter:
+              - type: mapping
+                mappings: ["ph => f", "qu => q"]
+    - match:  { detail.custom_analyzer: true }
+    - length: { detail.charfilters.0.filtered_text: 1 }
+    - match:  { detail.charfilters.0.filtered_text.0: "jeff qit fish" }
+    - length: { detail.tokenizer.tokens: 1 }
+    - match:  { detail.tokenizer.tokens.0.token: "jeff qit fish" }
+    - match:  { detail.tokenizer.tokens.0.start_offset: 0 }
+    - match:  { detail.tokenizer.tokens.0.end_offset: 15 }
+    - match:  { detail.tokenizer.tokens.0.position: 0 }
diff --git a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
index cbabdeef4af..d14f81c61df 100644
--- a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
@@ -45,7 +45,6 @@ import org.elasticsearch.index.analysis.FlattenGraphTokenFilterFactory;
 import org.elasticsearch.index.analysis.GermanNormalizationFilterFactory;
 import org.elasticsearch.index.analysis.GermanStemTokenFilterFactory;
 import org.elasticsearch.index.analysis.HindiNormalizationFilterFactory;
-import org.elasticsearch.index.analysis.HtmlStripCharFilterFactory;
 import org.elasticsearch.index.analysis.HunspellTokenFilterFactory;
 import org.elasticsearch.index.analysis.IndicNormalizationFilterFactory;
 import org.elasticsearch.index.analysis.KStemTokenFilterFactory;
@@ -58,14 +57,12 @@ import org.elasticsearch.index.analysis.LetterTokenizerFactory;
 import org.elasticsearch.index.analysis.LimitTokenCountFilterFactory;
 import org.elasticsearch.index.analysis.LowerCaseTokenFilterFactory;
 import org.elasticsearch.index.analysis.LowerCaseTokenizerFactory;
-import org.elasticsearch.index.analysis.MappingCharFilterFactory;
 import org.elasticsearch.index.analysis.MinHashTokenFilterFactory;
 import org.elasticsearch.index.analysis.MultiTermAwareComponent;
 import org.elasticsearch.index.analysis.NGramTokenFilterFactory;
 import org.elasticsearch.index.analysis.NGramTokenizerFactory;
 import org.elasticsearch.index.analysis.PathHierarchyTokenizerFactory;
 import org.elasticsearch.index.analysis.PatternCaptureGroupTokenFilterFactory;
-import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
 import org.elasticsearch.index.analysis.PatternReplaceTokenFilterFactory;
 import org.elasticsearch.index.analysis.PatternTokenizerFactory;
 import org.elasticsearch.index.analysis.PersianNormalizationFilterFactory;
@@ -325,9 +322,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase {
 
     static final Map<String,Class<?>> KNOWN_CHARFILTERS = new MapBuilder<String,Class<?>>()
         // exposed in ES
-        .put("htmlstrip",      HtmlStripCharFilterFactory.class)
-        .put("mapping",        MappingCharFilterFactory.class)
-        .put("patternreplace", PatternReplaceCharFilterFactory.class)
+        .put("htmlstrip",      MovedToAnalysisCommon.class)
+        .put("mapping",        MovedToAnalysisCommon.class)
+        .put("patternreplace", MovedToAnalysisCommon.class)
 
         // TODO: these charfilters are not yet exposed: useful?
         // handling of zwnj for persian

From 416feeb7f925abd09b5a7059ffaf90f70068c9cd Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Wed, 26 Apr 2017 14:09:26 -0400
Subject: [PATCH 25/34] Rewrite description of `bool`'s `should` (#24342)

Docs: rewrite description of `bool`'s `should`

Rewrites the description of the `bool` query's `should`
clauses so it is (hopefully) more clear what the defaults
for `minimum_should_match` are.

There is still an `[IMPORTANT]` section about `minimum_should_match`
in a filter context. I think it is worth keeping because it is, well,
important.

Closes #23831
---
 docs/reference/query-dsl/bool-query.asciidoc | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc
index 4d66e5e7f64..a7092aaaab1 100644
--- a/docs/reference/query-dsl/bool-query.asciidoc
+++ b/docs/reference/query-dsl/bool-query.asciidoc
@@ -17,12 +17,15 @@ contribute to the score.
 in <<query-filter-context,filter context>>, meaning that scoring is ignored
 and clauses are considered for caching.
 
-|`should` |The clause (query) should appear in the matching document. In
-a boolean query with no `must` or `filter` clauses, one or more `should` clauses
-must match a document. The minimum number of should clauses to match can
-be set using the
-<<query-dsl-minimum-should-match,`minimum_should_match`>>
-parameter.
+|`should` |The clause (query) should appear in the matching document. If the
+`bool` query is in a <<query-filter-context,query context>> and has a `must` or
+`filter` clause then a document will match the `bool` query even if none of the
+`should` queries match. In this case these clauses are only used to influence
+the score. If the `bool` query is in a <<query-filter-context,filter context>>
+or has neither `must` nor `filter` then at least one of the `should` queries
+must match a document for it to match the `bool` query. This behavior may be
+explicitly controlled by setting the
+<<query-dsl-minimum-should-match,`minimum_should_match`>> parameter.
 
 |`must_not` |The clause (query) must not appear in the matching
 documents.  Clauses are executed in <<query-filter-context,filter context>> meaning

From 4c0eb35c22306fa5518c6bd432df59e58ef5c3a2 Mon Sep 17 00:00:00 2001
From: Koen De Groote <kdg.private@gmail.com>
Date: Wed, 26 Apr 2017 20:32:35 +0200
Subject: [PATCH 26/34] Removal of dead code from SnapshotsService (#24347)

This code removes a few lines of dead code from SnapshotsService.
Looks like a forgotten remnant of a past implementation.
---
 .../main/java/org/elasticsearch/snapshots/SnapshotsService.java | 2 --
 1 file changed, 2 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
index 1b829d700d4..16f865e2fb9 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
@@ -913,13 +913,11 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
                 try {
                     final Repository repository = repositoriesService.repository(snapshot.getRepository());
                     logger.trace("[{}] finalizing snapshot in repository, state: [{}], failure[{}]", snapshot, entry.state(), failure);
-                    ArrayList<ShardSearchFailure> failures = new ArrayList<>();
                     ArrayList<SnapshotShardFailure> shardFailures = new ArrayList<>();
                     for (ObjectObjectCursor<ShardId, ShardSnapshotStatus> shardStatus : entry.shards()) {
                         ShardId shardId = shardStatus.key;
                         ShardSnapshotStatus status = shardStatus.value;
                         if (status.state().failed()) {
-                            failures.add(new ShardSearchFailure(status.reason(), new SearchShardTarget(status.nodeId(), shardId)));
                             shardFailures.add(new SnapshotShardFailure(status.nodeId(), shardId, status.reason()));
                         }
                     }

From 3187ed73fc4fd8527c695e18b7186abbd6d29453 Mon Sep 17 00:00:00 2001
From: Koen De Groote <kdg.private@gmail.com>
Date: Wed, 26 Apr 2017 20:44:03 +0200
Subject: [PATCH 27/34] Removal of dead code in
 ScriptedMetricAggregationBuilder (#24346)

This code removes a few lines of dead code from ScriptedMetricAggregationBuilder.
Just completely dead code, it adds things to a Set that is then not used in any way.
---
 .../metrics/scripted/ScriptedMetricAggregationBuilder.java | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java
index ea246feda4d..2d6398ee31e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java
@@ -37,10 +37,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Set;
 import java.util.function.Function;
 
 public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder<ScriptedMetricAggregationBuilder> {
@@ -239,11 +237,6 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
         Map<String, Object> params = null;
         XContentParser.Token token;
         String currentFieldName = null;
-        Set<String> scriptParameters = new HashSet<>();
-        scriptParameters.add(INIT_SCRIPT_FIELD.getPreferredName());
-        scriptParameters.add(MAP_SCRIPT_FIELD.getPreferredName());
-        scriptParameters.add(COMBINE_SCRIPT_FIELD.getPreferredName());
-        scriptParameters.add(REDUCE_SCRIPT_FIELD.getPreferredName());
 
         XContentParser parser = context.parser();
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {

From 0e52e3420e7222e46dfb15b79f0c4615a14ca0d3 Mon Sep 17 00:00:00 2001
From: Ali Beyad <ali@elastic.co>
Date: Wed, 26 Apr 2017 14:48:10 -0400
Subject: [PATCH 28/34] Fixes restore of a shrunken index when initial recovery
 node is gone (#24322)

When an index is shrunk using the shrink APIs, the shrink operation adds
some internal index settings to the shrink index, for example
`index.shrink.source.name|uuid` to denote the source index, as well as
`index.routing.allocation.initial_recovery._id` to denote the node on
which all shards for the source index resided when the shrunken index
was created.  However, this presents a problem when taking a snapshot of
the shrunken index and restoring it to a cluster where the initial
recovery node is not present, or restoring to the same cluster where the
initial recovery node is offline or decommissioned.  The restore
operation fails to allocate the shard in the shrunken index to a node
when the initial recovery node is not present, and a restore type of
recovery will *not* go through the PrimaryShardAllocator, meaning that
it will not have the chance to force allocate the primary to a node in
the cluster.  Rather, restore initiated shard allocation goes through
the BalancedShardAllocator which does not attempt to force allocate a
primary.

This commit fixes the aforementioned problem by not requiring allocation
to occur on the initial recovery node when the recovery type is a
restore of a snapshot.  This commit also ensures that the internal
shrink index settings are recognized and not archived (which can trip an
assertion in the restore scenario).

Closes #24257
---
 .../cluster/metadata/IndexMetaData.java       | 11 ++-
 .../metadata/MetaDataCreateIndexService.java  |  2 +-
 .../cluster/routing/IndexRoutingTable.java    |  4 +-
 .../cluster/routing/RecoverySource.java       | 12 ---
 .../decider/FilterAllocationDecider.java      | 15 +++-
 .../common/settings/IndexScopedSettings.java  |  4 +-
 .../cluster/routing/RoutingTableTests.java    |  3 +-
 .../FilterAllocationDeciderTests.java         | 24 ++---
 .../DedicatedClusterSnapshotRestoreIT.java    | 88 +++++++++++++------
 9 files changed, 93 insertions(+), 70 deletions(-)
 rename core/src/test/java/org/elasticsearch/cluster/routing/allocation/{ => decider}/FilterAllocationDeciderTests.java (89%)

diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index 67f4d71bd4e..f1f6f8aee22 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -41,8 +41,6 @@ import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
@@ -62,7 +60,6 @@ import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
-import java.text.ParseException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -440,12 +437,14 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContent {
         return mappings.get(mappingType);
     }
 
-    public static final Setting<String> INDEX_SHRINK_SOURCE_UUID = Setting.simpleString("index.shrink.source.uuid");
-    public static final Setting<String> INDEX_SHRINK_SOURCE_NAME = Setting.simpleString("index.shrink.source.name");
+    public static final String INDEX_SHRINK_SOURCE_UUID_KEY = "index.shrink.source.uuid";
+    public static final String INDEX_SHRINK_SOURCE_NAME_KEY = "index.shrink.source.name";
+    public static final Setting<String> INDEX_SHRINK_SOURCE_UUID = Setting.simpleString(INDEX_SHRINK_SOURCE_UUID_KEY);
+    public static final Setting<String> INDEX_SHRINK_SOURCE_NAME = Setting.simpleString(INDEX_SHRINK_SOURCE_NAME_KEY);
 
 
     public Index getMergeSourceIndex() {
-        return INDEX_SHRINK_SOURCE_UUID.exists(settings) ? new Index(INDEX_SHRINK_SOURCE_NAME.get(settings),  INDEX_SHRINK_SOURCE_UUID.get(settings)) : null;
+        return INDEX_SHRINK_SOURCE_UUID.exists(settings) ? new Index(INDEX_SHRINK_SOURCE_NAME.get(settings), INDEX_SHRINK_SOURCE_UUID.get(settings)) : null;
     }
 
     /**
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
index a3292e2cfd4..fc9ee533090 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
@@ -598,7 +598,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
         indexSettingsBuilder
             // we use "i.r.a.initial_recovery" rather than "i.r.a.require|include" since we want the replica to allocate right away
             // once we are allocated.
-            .put("index.routing.allocation.initial_recovery._id",
+            .put(IndexMetaData.INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING.getKey() + "_id",
                 Strings.arrayToCommaDelimitedString(nodesToAllocateOn.toArray()))
             // we only try once and then give up with a shrink index
             .put("index.allocation.max_retries", 1)
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
index d80a1c326cf..c587629ef0c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
@@ -139,8 +139,8 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
                         "allocation set " + inSyncAllocationIds);
                 }
 
-                if (shardRouting.primary() && shardRouting.initializing() && shardRouting.relocating() == false &&
-                    RecoverySource.isInitialRecovery(shardRouting.recoverySource().getType()) == false &&
+                if (shardRouting.primary() && shardRouting.initializing() &&
+                    shardRouting.recoverySource().getType() == RecoverySource.Type.EXISTING_STORE &&
                     inSyncAllocationIds.contains(shardRouting.allocationId().getId()) == false)
                     throw new IllegalStateException("a primary shard routing " + shardRouting + " is a primary that is recovering from " +
                         "a known allocation id but has no corresponding entry in the in-sync " +
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/core/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java
index f613cdbbada..32afad99f27 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster.routing;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -29,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.snapshots.Snapshot;
 
 import java.io.IOException;
-import java.util.EnumSet;
 import java.util.Objects;
 
 /**
@@ -249,14 +247,4 @@ public abstract class RecoverySource implements Writeable, ToXContent {
             return "peer recovery";
         }
     }
-
-    private static EnumSet<RecoverySource.Type> INITIAL_RECOVERY_TYPES = EnumSet.of(Type.EMPTY_STORE, Type.LOCAL_SHARDS, Type.SNAPSHOT);
-
-    /**
-     * returns true for recovery types that indicate that a primary is being allocated for the very first time.
-     * This recoveries can be controlled by {@link IndexMetaData#INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING}
-     */
-    public static boolean isInitialRecovery(RecoverySource.Type type) {
-           return INITIAL_RECOVERY_TYPES.contains(type);
-    }
 }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java
index 85069392eb6..933b0a829d5 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java
@@ -30,6 +30,8 @@ import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 
+import java.util.EnumSet;
+
 import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.IP_VALIDATOR;
 import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND;
 import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR;
@@ -75,6 +77,17 @@ public class FilterAllocationDecider extends AllocationDecider {
     public static final Setting<Settings> CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING =
         Setting.groupSetting(CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX + ".", IP_VALIDATOR, Property.Dynamic, Property.NodeScope);
 
+    /**
+     * The set of {@link RecoverySource.Type} values for which the
+     * {@link IndexMetaData#INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING} should apply.
+     * Note that we do not include the {@link RecoverySource.Type#SNAPSHOT} type here
+     * because if the snapshot is restored to a different cluster that does not contain
+     * the initial recovery node id, or to the same cluster where the initial recovery node
+     * id has been decommissioned, then the primary shards will never be allocated.
+     */
+    static EnumSet<RecoverySource.Type> INITIAL_RECOVERY_TYPES =
+        EnumSet.of(RecoverySource.Type.EMPTY_STORE, RecoverySource.Type.LOCAL_SHARDS);
+
     private volatile DiscoveryNodeFilters clusterRequireFilters;
     private volatile DiscoveryNodeFilters clusterIncludeFilters;
     private volatile DiscoveryNodeFilters clusterExcludeFilters;
@@ -98,7 +111,7 @@ public class FilterAllocationDecider extends AllocationDecider {
             IndexMetaData indexMd = allocation.metaData().getIndexSafe(shardRouting.index());
             DiscoveryNodeFilters initialRecoveryFilters = indexMd.getInitialRecoveryFilters();
             if (initialRecoveryFilters != null  &&
-                RecoverySource.isInitialRecovery(shardRouting.recoverySource().getType()) &&
+                INITIAL_RECOVERY_TYPES.contains(shardRouting.recoverySource().getType()) &&
                 initialRecoveryFilters.match(node.node()) == false) {
                 String explanation = (shardRouting.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) ?
                     "initial allocation of the shrunken index is only allowed on nodes [%s] that hold a copy of every shard in the index" :
diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
index 4094d69edde..1568ac288d9 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
@@ -191,9 +191,11 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
             case IndexMetaData.SETTING_VERSION_UPGRADED:
             case IndexMetaData.SETTING_INDEX_PROVIDED_NAME:
             case MergePolicyConfig.INDEX_MERGE_ENABLED:
+            case IndexMetaData.INDEX_SHRINK_SOURCE_UUID_KEY:
+            case IndexMetaData.INDEX_SHRINK_SOURCE_NAME_KEY:
                 return true;
             default:
-                return false;
+                return IndexMetaData.INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING.getRawKey().match(key);
         }
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java
index e26fece7c6d..a76ad4657c6 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java
@@ -352,8 +352,7 @@ public class RoutingTableTests extends ESAllocationTestCase {
                 Set<String> insyncAids = shardTable.activeShards().stream().map(
                     shr -> shr.allocationId().getId()).collect(Collectors.toSet());
                 final ShardRouting primaryShard = shardTable.primaryShard();
-                if (primaryShard.initializing() && primaryShard.relocating() == false &&
-                    RecoverySource.isInitialRecovery(primaryShard.recoverySource().getType()) == false ) {
+                if (primaryShard.initializing() && shardRouting.recoverySource().getType() == RecoverySource.Type.EXISTING_STORE) {
                     // simulate a primary was initialized based on aid
                     insyncAids.add(primaryShard.allocationId().getId());
                 }
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java
similarity index 89%
rename from core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java
rename to core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java
index c201736c51c..c857a3f30ed 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.cluster.routing.allocation;
+package org.elasticsearch.cluster.routing.allocation.decider;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState;
@@ -29,19 +29,14 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.routing.RecoverySource;
 import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.allocation.AllocationService;
+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
-import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
-import org.elasticsearch.cluster.routing.allocation.decider.Decision;
 import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type;
-import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
-import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
-import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.snapshots.Snapshot;
-import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.test.gateway.TestGatewayAllocator;
 
 import java.util.Arrays;
@@ -144,8 +139,7 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase {
     }
 
     private ClusterState createInitialClusterState(AllocationService service, Settings settings) {
-        RecoverySource.Type recoveryType = randomFrom(RecoverySource.Type.EMPTY_STORE,
-            RecoverySource.Type.LOCAL_SHARDS, RecoverySource.Type.SNAPSHOT);
+        RecoverySource.Type recoveryType = randomFrom(FilterAllocationDecider.INITIAL_RECOVERY_TYPES);
         MetaData.Builder metaData = MetaData.builder();
         final Settings.Builder indexSettings = settings(Version.CURRENT).put(settings);
         final IndexMetaData sourceIndex;
@@ -164,9 +158,6 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase {
         }
         final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder("idx").settings(indexSettings)
             .numberOfShards(1).numberOfReplicas(1);
-        if (recoveryType == RecoverySource.Type.SNAPSHOT) {
-            indexMetaDataBuilder.putInSyncAllocationIds(0, Collections.singleton("_snapshot_restore"));
-        }
         final IndexMetaData indexMetaData = indexMetaDataBuilder.build();
         metaData.put(indexMetaData, false);
         RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
@@ -174,11 +165,6 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase {
             case EMPTY_STORE:
                 routingTableBuilder.addAsNew(indexMetaData);
                 break;
-            case SNAPSHOT:
-                routingTableBuilder.addAsRestore(indexMetaData, new RecoverySource.SnapshotRecoverySource(
-                    new Snapshot("repository", new SnapshotId("snapshot_name", "snapshot_uuid")),
-                    Version.CURRENT, indexMetaData.getIndex().getName()));
-                break;
             case LOCAL_SHARDS:
                 routingTableBuilder.addAsFromCloseToOpen(sourceIndex);
                 routingTableBuilder.addAsNew(indexMetaData);
@@ -192,7 +178,7 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase {
             .getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build();
         clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2")))
             .build();
-        return service.reroute(clusterState, "reroute", false);
+        return service.reroute(clusterState, "reroute");
     }
 
     public void testInvalidIPFilter() {
diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
index 08a3308172b..10c49b431a5 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
@@ -54,6 +54,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.discovery.zen.ElectMasterService;
+import org.elasticsearch.env.Environment;
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.plugins.Plugin;
@@ -127,7 +128,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
                 NonSnapshottableGatewayMetadata::readDiffFrom, NonSnapshottableGatewayMetadata::fromXContent);
             registerMetaDataCustom(SnapshotableGatewayNoApiMetadata.TYPE, SnapshotableGatewayNoApiMetadata::readFrom,
                 NonSnapshottableGatewayMetadata::readDiffFrom, SnapshotableGatewayNoApiMetadata::fromXContent);
-
         }
 
         @Override
@@ -154,8 +154,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         logger.info("--> wait for the second node to join the cluster");
         assertThat(client.admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut(), equalTo(false));
 
-        int random = randomIntBetween(10, 42);
-
         logger.info("--> set test persistent setting");
         client.admin().cluster().prepareUpdateSettings().setPersistentSettings(
                 Settings.builder()
@@ -723,7 +721,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         if (clusterStateError.get() != null) {
             throw clusterStateError.get();
         }
-
     }
 
     public void testMasterShutdownDuringSnapshot() throws Exception {
@@ -801,33 +798,72 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         assertEquals(0, snapshotInfo.failedShards());
     }
 
+    /**
+     * Tests that a shrunken index (created via the shrink APIs) and subsequently snapshotted
+     * can be restored when the node the shrunken index was created on is no longer part of
+     * the cluster.
+     */
+    public void testRestoreShrinkIndex() throws Exception {
+        logger.info("-->  starting a master node and a data node");
+        internalCluster().startMasterOnlyNode();
+        internalCluster().startDataOnlyNode();
 
-    private boolean snapshotIsDone(String repository, String snapshot) {
-        try {
-            SnapshotsStatusResponse snapshotsStatusResponse = client().admin().cluster().prepareSnapshotStatus(repository).setSnapshots(snapshot).get();
-            if (snapshotsStatusResponse.getSnapshots().isEmpty()) {
-                return false;
-            }
-            for (SnapshotStatus snapshotStatus : snapshotsStatusResponse.getSnapshots()) {
-                if (snapshotStatus.getState().completed()) {
-                    return true;
-                }
-            }
-            return false;
-        } catch (SnapshotMissingException ex) {
-            return false;
+        final Client client = client();
+        final String repo = "test-repo";
+        final String snapshot = "test-snap";
+        final String sourceIdx = "test-idx";
+        final String shrunkIdx = "test-idx-shrunk";
+
+        logger.info("-->  creating repository");
+        assertAcked(client.admin().cluster().preparePutRepository(repo).setType("fs")
+            .setSettings(Settings.builder().put("location", randomRepoPath())
+                             .put("compress", randomBoolean())));
+
+        assertAcked(prepareCreate(sourceIdx, 0, Settings.builder()
+            .put("number_of_shards", between(1, 20)).put("number_of_replicas", 0)));
+        ensureGreen();
+
+        logger.info("--> indexing some data");
+        IndexRequestBuilder[] builders = new IndexRequestBuilder[randomIntBetween(10, 100)];
+        for (int i = 0; i < builders.length; i++) {
+            builders[i] = client().prepareIndex(sourceIdx, "type1",
+                Integer.toString(i)).setSource("field1", "bar " + i);
         }
-    }
+        indexRandom(true, builders);
+        flushAndRefresh();
 
-    private void createTestIndex(String name) {
-        assertAcked(prepareCreate(name, 0, Settings.builder().put("number_of_shards", between(1, 6))
-                .put("number_of_replicas", between(1, 6))));
+        logger.info("--> shrink the index");
+        assertAcked(client.admin().indices().prepareUpdateSettings(sourceIdx)
+            .setSettings(Settings.builder().put("index.blocks.write", true)).get());
+        assertAcked(client.admin().indices().prepareShrinkIndex(sourceIdx, shrunkIdx).get());
 
-        logger.info("--> indexing some data into {}", name);
-        for (int i = 0; i < between(10, 500); i++) {
-            index(name, "doc", Integer.toString(i), "foo", "bar" + i);
-        }
+        logger.info("--> snapshot the shrunk index");
+        CreateSnapshotResponse createResponse = client.admin().cluster()
+            .prepareCreateSnapshot(repo, snapshot)
+            .setWaitForCompletion(true).setIndices(shrunkIdx).get();
+        assertEquals(SnapshotState.SUCCESS, createResponse.getSnapshotInfo().state());
 
+        logger.info("--> delete index and stop the data node");
+        assertAcked(client.admin().indices().prepareDelete(sourceIdx).get());
+        assertAcked(client.admin().indices().prepareDelete(shrunkIdx).get());
+        internalCluster().stopRandomDataNode();
+        client().admin().cluster().prepareHealth().setTimeout("30s").setWaitForNodes("1");
+
+        logger.info("--> start a new data node");
+        final Settings dataSettings = Settings.builder()
+            .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLength(5))
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // to get a new node id
+            .build();
+        internalCluster().startDataOnlyNode(dataSettings);
+        client().admin().cluster().prepareHealth().setTimeout("30s").setWaitForNodes("2");
+
+        logger.info("--> restore the shrunk index and ensure all shards are allocated");
+        RestoreSnapshotResponse restoreResponse = client().admin().cluster()
+            .prepareRestoreSnapshot(repo, snapshot).setWaitForCompletion(true)
+            .setIndices(shrunkIdx).get();
+        assertEquals(restoreResponse.getRestoreInfo().totalShards(),
+            restoreResponse.getRestoreInfo().successfulShards());
+        ensureYellow();
     }
 
     public static class SnapshottableMetadata extends TestCustomMetaData {

From ebe98f9d621291717a68b1719aef48cfd4a4e469 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen <martijn.v.groningen@gmail.com>
Date: Wed, 26 Apr 2017 21:07:33 +0200
Subject: [PATCH 29/34] test: don't randomly wrap index reader

---
 .../bucket/nested/NestedAggregatorTests.java           | 10 +++++-----
 .../bucket/nested/ReverseNestedAggregatorTests.java    |  4 ++--
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
index 304b7f03c59..7000924001f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
@@ -75,7 +75,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                     NumberFieldMapper.NumberType.LONG);
                 fieldType.setName(VALUE_FIELD_NAME);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
 
                 assertEquals(NESTED_AGG, nested.getName());
@@ -122,7 +122,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                     NumberFieldMapper.NumberType.LONG);
                 fieldType.setName(VALUE_FIELD_NAME);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -171,7 +171,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                     NumberFieldMapper.NumberType.LONG);
                 fieldType.setName(VALUE_FIELD_NAME);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -223,7 +223,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                     NumberFieldMapper.NumberType.LONG);
                 fieldType.setName(VALUE_FIELD_NAME);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -304,7 +304,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
                 bq.add(new TermQuery(new Term(UidFieldMapper.NAME, "type#2")), BooleanClause.Occur.MUST_NOT);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                     new ConstantScoreQuery(bq.build()), nestedBuilder, fieldType);
 
                 assertEquals(NESTED_AGG, nested.getName());
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
index 74fb7ca9ca4..70e6c355940 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
@@ -67,7 +67,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
                         NumberFieldMapper.NumberType.LONG);
                 fieldType.setName(VALUE_FIELD_NAME);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                         new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 ReverseNested reverseNested = (ReverseNested)
                         ((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME);
@@ -130,7 +130,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
                         NumberFieldMapper.NumberType.LONG);
                 fieldType.setName(VALUE_FIELD_NAME);
 
-                Nested nested = search(newSearcher(indexReader, true, true),
+                Nested nested = search(newSearcher(indexReader, false, true),
                         new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 

From b7bf651738c11fb7a35e07319e8eeffaea1d96ff Mon Sep 17 00:00:00 2001
From: Yannick Welsch <yannick@welsch.lu>
Date: Wed, 26 Apr 2017 21:06:20 +0200
Subject: [PATCH 30/34] [TEST] Fix cluster forming in
 testDynamicUpdateMinimumMasterNodes

This test can run into a split-brain situation as minimum_master_nodes is not properly set. To prevent this, make sure that at least one of the two
master nodes that are initially started has minimum_master_nodes correctly set.
---
 .../cluster/MinimumMasterNodesIT.java         | 21 +++++++++----------
 1 file changed, 10 insertions(+), 11 deletions(-)

diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
index c9057f4373f..3fc67f3eb0e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
@@ -263,18 +263,17 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
     }
 
     public void testDynamicUpdateMinimumMasterNodes() throws Exception {
-        Settings settings = Settings.builder()
-                .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms")
-                .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), "1")
-                .build();
+        Settings settingsWithMinMaster1 = Settings.builder()
+            .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms")
+            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1)
+            .build();
 
-        logger.info("--> start first node and wait for it to be a master");
-        internalCluster().startNode(settings);
-        ensureClusterSizeConsistency();
+        Settings settingsWithMinMaster2 = Settings.builder()
+            .put(settingsWithMinMaster1).put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)
+            .build();
 
-        // wait until second node join the cluster
-        logger.info("--> start second node and wait for it to join");
-        internalCluster().startNode(settings);
+        logger.info("--> start two nodes and wait for them to form a cluster");
+        internalCluster().startNodes(settingsWithMinMaster1, settingsWithMinMaster2);
         ensureClusterSizeConsistency();
 
         logger.info("--> setting minimum master node to 2");
@@ -292,7 +291,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
         assertNoMasterBlockOnAllNodes();
 
         logger.info("--> bringing another node up");
-        internalCluster().startNode(Settings.builder().put(settings).put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2).build());
+        internalCluster().startNode(settingsWithMinMaster2);
         ensureClusterSizeConsistency();
     }
 

From 2ed1f7a3390420835f9b270d20c5d512ba197827 Mon Sep 17 00:00:00 2001
From: Jason Tedor <jason@tedor.me>
Date: Wed, 26 Apr 2017 15:26:36 -0400
Subject: [PATCH 31/34] Avoid leaks in Long GC disruption tests

Disrupted threads can be leaked here because we never wait for them to
complete after freeing them from their loops. This commit addresses this
by joining on the disrupted threads, and also addresses the fallout from
introducing those joins.

Relates #24338
---
 .../test/disruption/LongGCDisruption.java     | 55 ++++++++++++++-----
 .../disruption/LongGCDisruptionTests.java     | 20 ++++++-
 2 files changed, 57 insertions(+), 18 deletions(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java
index 98349086df5..45acde09325 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java
@@ -106,6 +106,15 @@ public class LongGCDisruption extends SingleNodeDisruption {
                     logger.warn("failed to stop node [{}]'s threads within [{}] millis. Stopping thread stack trace:\n {}"
                         , disruptedNode, getStoppingTimeoutInMillis(), stackTrace(stoppingThread.getStackTrace()));
                     stoppingThread.interrupt(); // best effort;
+                    try {
+                        /*
+                         * We need to join on the stopping thread in case it has stopped a thread that is in a critical section and needs to
+                         * be resumed.
+                         */
+                        stoppingThread.join();
+                    } catch (InterruptedException e) {
+                        throw new RuntimeException(e);
+                    }
                     throw new RuntimeException("stopping node threads took too long");
                 }
                 // block detection checks if other threads are blocked waiting on an object that is held by one
@@ -228,23 +237,39 @@ public class LongGCDisruption extends SingleNodeDisruption {
                 if (thread.isAlive() && nodeThreads.add(thread)) {
                     liveThreadsFound = true;
                     logger.trace("stopping thread [{}]", threadName);
-                    thread.suspend();
-                    // double check the thread is not in a shared resource like logging. If so, let it go and come back..
-                    boolean safe = true;
-                    safe:
-                    for (StackTraceElement stackElement : thread.getStackTrace()) {
-                        String className = stackElement.getClassName();
-                        for (Pattern unsafePattern : getUnsafeClasses()) {
-                            if (unsafePattern.matcher(className).find()) {
-                                safe = false;
-                                break safe;
+                    // we assume it is not safe to suspend the thread
+                    boolean safe = false;
+                    try {
+                        /*
+                         * At the bottom of this try-block we will know whether or not it is safe to suspend this thread; we start by
+                         * assuming that it is safe.
+                         */
+                        boolean definitelySafe = true;
+                        thread.suspend();
+                        // double check the thread is not in a shared resource like logging; if so, let it go and come back
+                        safe:
+                        for (StackTraceElement stackElement : thread.getStackTrace()) {
+                            String className = stackElement.getClassName();
+                            for (Pattern unsafePattern : getUnsafeClasses()) {
+                                if (unsafePattern.matcher(className).find()) {
+                                    // it is definitely not safe to suspend the thread
+                                    definitelySafe = false;
+                                    break safe;
+                                }
                             }
                         }
-                    }
-                    if (!safe) {
-                        logger.trace("resuming thread [{}] as it is in a critical section", threadName);
-                        thread.resume();
-                        nodeThreads.remove(thread);
+                        safe = definitelySafe;
+                    } finally {
+                        if (!safe) {
+                            /*
+                             * Do not log before resuming as we might be interrupted while logging in which case we will throw an
+                             * interrupted exception and never resume the stopped thread that is in a critical section. Also, logging before
+                             * resuming makes for confusing log messages if we never hit the resume.
+                             */
+                            thread.resume();
+                            logger.trace("resumed thread [{}] as it is in a critical section", threadName);
+                            nodeThreads.remove(thread);
+                        }
                     }
                 }
             }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java b/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java
index 48bd18986c2..147a6df608a 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/disruption/LongGCDisruptionTests.java
@@ -22,6 +22,8 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.test.ESTestCase;
 
 import java.lang.management.ThreadInfo;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -69,8 +71,8 @@ public class LongGCDisruptionTests extends ESTestCase {
         final CountDownLatch pauseUnderLock = new CountDownLatch(1);
         final LockedExecutor lockedExecutor = new LockedExecutor();
         final AtomicLong ops = new AtomicLong();
+        final Thread[] threads = new Thread[10];
         try {
-            Thread[] threads = new Thread[10];
             for (int i = 0; i < 10; i++) {
                 // at least one locked and one none lock thread
                 final boolean lockedExec = (i < 9 && randomBoolean()) || i == 0;
@@ -101,6 +103,9 @@ public class LongGCDisruptionTests extends ESTestCase {
         } finally {
             stop.set(true);
             pauseUnderLock.countDown();
+            for (final Thread thread : threads) {
+                thread.join();
+            }
         }
     }
 
@@ -121,8 +126,8 @@ public class LongGCDisruptionTests extends ESTestCase {
         final AtomicBoolean stop = new AtomicBoolean();
         final LockedExecutor lockedExecutor = new LockedExecutor();
         final AtomicLong ops = new AtomicLong();
+        final Thread[] threads = new Thread[10];
         try {
-            Thread[] threads = new Thread[10];
             for (int i = 0; i < 10; i++) {
                 threads[i] = new Thread(() -> {
                     for (int iter = 0; stop.get() == false; iter++) {
@@ -150,6 +155,9 @@ public class LongGCDisruptionTests extends ESTestCase {
             assertBusy(() -> assertThat(ops.get(), greaterThan(first)));
         } finally {
             stop.set(true);
+            for (final Thread thread : threads) {
+                thread.join();
+            }
         }
     }
 
@@ -183,6 +191,7 @@ public class LongGCDisruptionTests extends ESTestCase {
         final CountDownLatch pauseUnderLock = new CountDownLatch(1);
         final LockedExecutor lockedExecutor = new LockedExecutor();
         final AtomicLong ops = new AtomicLong();
+        final List<Thread> threads = new ArrayList<>();
         try {
             for (int i = 0; i < 5; i++) {
                 // at least one locked and one none lock thread
@@ -206,6 +215,7 @@ public class LongGCDisruptionTests extends ESTestCase {
                 });
 
                 thread.setName("[" + disruptedNodeName + "][" + i + "]");
+                threads.add(thread);
                 thread.start();
             }
 
@@ -224,12 +234,13 @@ public class LongGCDisruptionTests extends ESTestCase {
                     }
                 });
                 thread.setName("[" + blockedNodeName + "][" + i + "]");
+                threads.add(thread);
                 thread.start();
             }
             // make sure some threads of test_node are under lock
             underLock.await();
             disruption.startDisrupting();
-            waitForBlockDetectionResult.await(30, TimeUnit.SECONDS);
+            assertTrue(waitForBlockDetectionResult.await(30, TimeUnit.SECONDS));
             disruption.stopDisrupting();
 
             ThreadInfo threadInfo = blockDetectionResult.get();
@@ -240,6 +251,9 @@ public class LongGCDisruptionTests extends ESTestCase {
         } finally {
             stop.set(true);
             pauseUnderLock.countDown();
+            for (final Thread thread : threads) {
+                thread.join();
+            }
         }
     }
 }

From 149629fec657e9948488195087f30583cd561f11 Mon Sep 17 00:00:00 2001
From: Luca Cavanna <javanna@users.noreply.github.com>
Date: Wed, 26 Apr 2017 21:45:49 +0200
Subject: [PATCH 32/34] Cross Cluster Search: propagate original indices per
 cluster (#24328)

In case of a Cross Cluster Search, the coordinating node should split the original indices per cluster, and send over to each cluster only its own set of original indices, rather than the set taken from the original search request which contains all the indices.

In fact, each remote cluster should not be aware of the indices belonging to other remote clusters.
---
 .../elasticsearch/action/OriginalIndices.java |   8 +-
 .../TransportClusterSearchShardsAction.java   |   2 +-
 .../search/AbstractSearchAsyncAction.java     |  15 ++-
 .../action/search/DfsQueryPhase.java          |   5 +-
 .../action/search/FetchSearchPhase.java       |  20 +--
 .../action/search/InitialSearchPhase.java     |  17 +--
 .../search/RemoteClusterConnection.java       |   6 +-
 .../action/search/RemoteClusterService.java   |  27 ++--
 .../action/search/SearchActionListener.java   |   1 -
 .../SearchDfsQueryThenFetchAsyncAction.java   |   5 +-
 .../action/search/SearchPhaseContext.java     |   8 +-
 .../SearchQueryThenFetchAsyncAction.java      |   8 +-
 .../action/search/SearchShardIterator.java    |  55 ++++++++
 .../action/search/SearchTransportService.java |   8 +-
 .../action/search/ShardSearchFailure.java     |   4 +-
 .../action/search/TransportSearchAction.java  |  60 +++++----
 .../broadcast/TransportBroadcastAction.java   |   4 +-
 .../node/TransportBroadcastByNodeAction.java  |   2 +-
 .../TransportTermVectorsAction.java           |   2 +-
 .../cluster/routing/GroupShardsIterator.java  |  10 +-
 .../cluster/routing/IndexRoutingTable.java    |  32 -----
 .../cluster/routing/OperationRouting.java     |   4 +-
 .../cluster/routing/PlainShardIterator.java   |   1 -
 .../cluster/routing/PlainShardsIterator.java  |  11 +-
 .../cluster/routing/RoutingTable.java         |  17 ++-
 .../cluster/routing/ShardsIterator.java       |   9 +-
 .../org/elasticsearch/search/SearchHit.java   |   3 +-
 .../elasticsearch/search/SearchService.java   |   6 +-
 .../search/SearchShardTarget.java             |  15 ++-
 .../search/fetch/ShardFetchSearchRequest.java |   5 +-
 .../internal/ShardSearchTransportRequest.java |   5 +-
 .../search/query/QuerySearchRequest.java      |   5 +-
 .../snapshots/SnapshotsService.java           |   1 +
 .../ElasticsearchExceptionTests.java          |   5 +-
 .../AbstractSearchAsyncActionTookTests.java   |  50 +------
 .../action/search/MockSearchPhaseContext.java |   9 +-
 .../search/RemoteClusterConnectionTests.java  |   2 +-
 .../search/RemoteClusterServiceTests.java     |  41 +++++-
 .../action/search/SearchAsyncActionTests.java |  24 ++--
 .../search/ShardSearchFailureTests.java       |   5 +-
 .../search/TransportSearchActionTests.java    | 122 ++++++++++++++++++
 .../TransportBroadcastByNodeActionTests.java  |  10 +-
 ...rdFailedClusterStateTaskExecutorTests.java |   5 +-
 .../routing/GroupShardsIteratorTests.java     |   4 +-
 .../structure/RoutingIteratorTests.java       |   4 +-
 .../index/store/CorruptedFileIT.java          |   7 +-
 .../index/suggest/stats/SuggestStatsIT.java   |   4 +-
 .../elasticsearch/search/SearchHitTests.java  |   4 +-
 .../search/SearchServiceTests.java            |  42 +++---
 .../ShardSearchTransportRequestTests.java     |   3 +-
 .../search/stats/SearchStatsIT.java           |   4 +-
 51 files changed, 443 insertions(+), 283 deletions(-)
 create mode 100644 core/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java
 create mode 100644 core/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java

diff --git a/core/src/main/java/org/elasticsearch/action/OriginalIndices.java b/core/src/main/java/org/elasticsearch/action/OriginalIndices.java
index cc299f544b3..39cf5c63242 100644
--- a/core/src/main/java/org/elasticsearch/action/OriginalIndices.java
+++ b/core/src/main/java/org/elasticsearch/action/OriginalIndices.java
@@ -28,7 +28,10 @@ import java.io.IOException;
 /**
  * Used to keep track of original indices within internal (e.g. shard level) requests
  */
-public class OriginalIndices implements IndicesRequest {
+public final class OriginalIndices implements IndicesRequest {
+
+    //constant to use when original indices are not applicable and will not be serialized across the wire
+    public static final OriginalIndices NONE = new OriginalIndices(null, null);
 
     private final String[] indices;
     private final IndicesOptions indicesOptions;
@@ -39,7 +42,6 @@ public class OriginalIndices implements IndicesRequest {
 
     public OriginalIndices(String[] indices, IndicesOptions indicesOptions) {
         this.indices = indices;
-        assert indicesOptions != null;
         this.indicesOptions = indicesOptions;
     }
 
@@ -57,8 +59,8 @@ public class OriginalIndices implements IndicesRequest {
         return new OriginalIndices(in.readStringArray(), IndicesOptions.readIndicesOptions(in));
     }
 
-
     public static void writeOriginalIndices(OriginalIndices originalIndices, StreamOutput out) throws IOException {
+        assert originalIndices != NONE;
         out.writeStringArrayNullable(originalIndices.indices);
         originalIndices.indicesOptions.writeIndicesOptions(out);
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
index 01aafc0b0a9..8825a426768 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
@@ -88,7 +88,7 @@ public class TransportClusterSearchShardsAction extends
         }
 
         Set<String> nodeIds = new HashSet<>();
-        GroupShardsIterator groupShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices,
+        GroupShardsIterator<ShardIterator> groupShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices,
                 routingMap, request.preference());
         ShardRouting shard;
         ClusterSearchShardsGroup[] groupResponses = new ClusterSearchShardsGroup[groupShardsIterator.size()];
diff --git a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index c2137803411..0abebebdb18 100644
--- a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -29,7 +29,6 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
@@ -75,8 +74,9 @@ abstract class AbstractSearchAsyncAction<Result extends SearchPhaseResult> exten
                                         Function<String, Transport.Connection> nodeIdToConnection,
                                         Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
                                         Executor executor, SearchRequest request,
-                                        ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, TransportSearchAction.SearchTimeProvider timeProvider,
-                                        long clusterStateVersion, SearchTask task, SearchPhaseResults<Result> resultConsumer) {
+                                        ActionListener<SearchResponse> listener, GroupShardsIterator<SearchShardIterator> shardsIts,
+                                        TransportSearchAction.SearchTimeProvider timeProvider, long clusterStateVersion,
+                                        SearchTask task, SearchPhaseResults<Result> resultConsumer) {
         super(name, request, shardsIts, logger);
         this.timeProvider = timeProvider;
         this.logger = logger;
@@ -209,8 +209,9 @@ abstract class AbstractSearchAsyncAction<Result extends SearchPhaseResult> exten
     private void raisePhaseFailure(SearchPhaseExecutionException exception) {
         results.getSuccessfulResults().forEach((entry) -> {
             try {
-                Transport.Connection connection = nodeIdToConnection.apply(entry.getSearchShardTarget().getNodeId());
-                sendReleaseSearchContext(entry.getRequestId(), connection);
+                SearchShardTarget searchShardTarget = entry.getSearchShardTarget();
+                Transport.Connection connection = nodeIdToConnection.apply(searchShardTarget.getNodeId());
+                sendReleaseSearchContext(entry.getRequestId(), connection, searchShardTarget.getOriginalIndices());
             } catch (Exception inner) {
                 inner.addSuppressed(exception);
                 logger.trace("failed to release context", inner);
@@ -296,11 +297,11 @@ abstract class AbstractSearchAsyncAction<Result extends SearchPhaseResult> exten
         listener.onFailure(e);
     }
 
-    public final ShardSearchTransportRequest buildShardSearchRequest(ShardIterator shardIt, ShardRouting shard) {
+    public final ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt, ShardRouting shard) {
         AliasFilter filter = aliasFilter.get(shard.index().getUUID());
         assert filter != null;
         float indexBoost = concreteIndexBoosts.getOrDefault(shard.index().getUUID(), DEFAULT_INDEX_BOOST);
-        return new ShardSearchTransportRequest(request, shardIt.shardId(), getNumShards(),
+        return new ShardSearchTransportRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(),
             filter, indexBoost, timeProvider.getAbsoluteStartMillis());
     }
 
diff --git a/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
index 353baf11750..66a88ce2fee 100644
--- a/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
+++ b/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
@@ -73,7 +73,8 @@ final class DfsQueryPhase extends SearchPhase {
         for (final DfsSearchResult dfsResult : resultList) {
             final SearchShardTarget searchShardTarget = dfsResult.getSearchShardTarget();
             Transport.Connection connection = context.getConnection(searchShardTarget.getNodeId());
-            QuerySearchRequest querySearchRequest = new QuerySearchRequest(context.getRequest(), dfsResult.getRequestId(), dfs);
+            QuerySearchRequest querySearchRequest = new QuerySearchRequest(searchShardTarget.getOriginalIndices(),
+                    dfsResult.getRequestId(), dfs);
             final int shardIndex = dfsResult.getShardIndex();
             searchTransportService.sendExecuteQuery(connection, querySearchRequest, context.getTask(),
                 new SearchActionListener<QuerySearchResult>(searchShardTarget, shardIndex) {
@@ -95,7 +96,7 @@ final class DfsQueryPhase extends SearchPhase {
                             // the query might not have been executed at all (for example because thread pool rejected
                             // execution) and the search context that was created in dfs phase might not be released.
                             // release it again to be in the safe side
-                            context.sendReleaseSearchContext(querySearchRequest.id(), connection);
+                            context.sendReleaseSearchContext(querySearchRequest.id(), connection, searchShardTarget.getOriginalIndices());
                         }
                     }
                 });
diff --git a/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java
index a0e313f1d73..25231efe49b 100644
--- a/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java
+++ b/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java
@@ -24,6 +24,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
 import org.apache.lucene.search.ScoreDoc;
 import org.elasticsearch.action.ActionRunnable;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
@@ -73,7 +74,6 @@ final class FetchSearchPhase extends SearchPhase {
         this.context = context;
         this.logger = context.getLogger();
         this.resultConsumer = resultConsumer;
-
     }
 
     @Override
@@ -112,7 +112,7 @@ final class FetchSearchPhase extends SearchPhase {
             final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, reducedQueryPhase.scoreDocs);
             if (reducedQueryPhase.scoreDocs.length == 0) { // no docs to fetch -- sidestep everything and return
                 phaseResults.stream()
-                    .map(e -> e.queryResult())
+                    .map(SearchPhaseResult::queryResult)
                     .forEach(this::releaseIrrelevantSearchContext); // we have to release contexts here to free up resources
                 finishPhase.run();
             } else {
@@ -135,10 +135,11 @@ final class FetchSearchPhase extends SearchPhase {
                         // in any case we count down this result since we don't talk to this shard anymore
                         counter.countDown();
                     } else {
-                        Transport.Connection connection = context.getConnection(queryResult.getSearchShardTarget().getNodeId());
+                        SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget();
+                        Transport.Connection connection = context.getConnection(searchShardTarget.getNodeId());
                         ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getRequestId(), i, entry,
-                            lastEmittedDocPerShard);
-                        executeFetch(i, queryResult.getSearchShardTarget(), counter, fetchSearchRequest, queryResult.queryResult(),
+                            lastEmittedDocPerShard, searchShardTarget.getOriginalIndices());
+                        executeFetch(i, searchShardTarget, counter, fetchSearchRequest, queryResult.queryResult(),
                             connection);
                     }
                 }
@@ -147,9 +148,9 @@ final class FetchSearchPhase extends SearchPhase {
     }
 
     protected ShardFetchSearchRequest createFetchRequest(long queryId, int index, IntArrayList entry,
-                                                               ScoreDoc[] lastEmittedDocPerShard) {
+                                                               ScoreDoc[] lastEmittedDocPerShard, OriginalIndices originalIndices) {
         final ScoreDoc lastEmittedDoc = (lastEmittedDocPerShard != null) ? lastEmittedDocPerShard[index] : null;
-        return new ShardFetchSearchRequest(context.getRequest(), queryId, entry, lastEmittedDoc);
+        return new ShardFetchSearchRequest(originalIndices, queryId, entry, lastEmittedDoc);
     }
 
     private void executeFetch(final int shardIndex, final SearchShardTarget shardTarget,
@@ -189,8 +190,9 @@ final class FetchSearchPhase extends SearchPhase {
         // and if it has at lease one hit that didn't make it to the global topDocs
         if (context.getRequest().scroll() == null && queryResult.hasSearchContext()) {
             try {
-                Transport.Connection connection = context.getConnection(queryResult.getSearchShardTarget().getNodeId());
-                context.sendReleaseSearchContext(queryResult.getRequestId(), connection);
+                SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget();
+                Transport.Connection connection = context.getConnection(searchShardTarget.getNodeId());
+                context.sendReleaseSearchContext(queryResult.getRequestId(), connection, searchShardTarget.getOriginalIndices());
             } catch (Exception e) {
                 context.getLogger().trace("failed to release context", e);
             }
diff --git a/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java b/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java
index be91cebe501..2453e2b80b5 100644
--- a/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java
+++ b/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java
@@ -46,12 +46,12 @@ import java.util.stream.Stream;
  */
 abstract class InitialSearchPhase<FirstResult extends SearchPhaseResult> extends SearchPhase {
     private final SearchRequest request;
-    private final GroupShardsIterator shardsIts;
+    private final GroupShardsIterator<SearchShardIterator> shardsIts;
     private final Logger logger;
     private final int expectedTotalOps;
     private final AtomicInteger totalOps = new AtomicInteger();
 
-    InitialSearchPhase(String name, SearchRequest request, GroupShardsIterator shardsIts, Logger logger) {
+    InitialSearchPhase(String name, SearchRequest request, GroupShardsIterator<SearchShardIterator> shardsIts, Logger logger) {
         super(name);
         this.request = request;
         this.shardsIts = shardsIts;
@@ -64,10 +64,10 @@ abstract class InitialSearchPhase<FirstResult extends SearchPhaseResult> extends
     }
 
     private void onShardFailure(final int shardIndex, @Nullable ShardRouting shard, @Nullable String nodeId,
-                                final ShardIterator shardIt, Exception e) {
+                                final SearchShardIterator shardIt, Exception e) {
         // we always add the shard failure for a specific shard instance
         // we do make sure to clean it on a successful response from a shard
-        SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId());
+        SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId(), shardIt.getOriginalIndices());
         onShardFailure(shardIndex, shardTarget, e);
 
         if (totalOps.incrementAndGet() == expectedTotalOps) {
@@ -124,7 +124,7 @@ abstract class InitialSearchPhase<FirstResult extends SearchPhaseResult> extends
     @Override
     public final void run() throws IOException {
         int shardIndex = -1;
-        for (final ShardIterator shardIt : shardsIts) {
+        for (final SearchShardIterator shardIt : shardsIts) {
             shardIndex++;
             final ShardRouting shard = shardIt.nextOrNull();
             if (shard != null) {
@@ -136,7 +136,7 @@ abstract class InitialSearchPhase<FirstResult extends SearchPhaseResult> extends
         }
     }
 
-    private void performPhaseOnShard(final int shardIndex, final ShardIterator shardIt, final ShardRouting shard) {
+    private void performPhaseOnShard(final int shardIndex, final SearchShardIterator shardIt, final ShardRouting shard) {
         if (shard == null) {
             // TODO upgrade this to an assert...
             // no more active shards... (we should not really get here, but just for safety)
@@ -144,7 +144,7 @@ abstract class InitialSearchPhase<FirstResult extends SearchPhaseResult> extends
         } else {
             try {
                 executePhaseOnShard(shardIt, shard, new SearchActionListener<FirstResult>(new SearchShardTarget(shard.currentNodeId(),
-                    shardIt.shardId()), shardIndex) {
+                    shardIt.shardId(), shardIt.getOriginalIndices()), shardIndex) {
                     @Override
                     public void innerOnResponse(FirstResult result) {
                         onShardResult(result, shardIt);
@@ -213,7 +213,8 @@ abstract class InitialSearchPhase<FirstResult extends SearchPhaseResult> extends
      * @param shard the shard routing to send the request for
      * @param listener the listener to notify on response
      */
-    protected abstract void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, SearchActionListener<FirstResult> listener);
+    protected abstract void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard,
+                                                SearchActionListener<FirstResult> listener);
 
     /**
      * This class acts as a basic result collection that can be extended to do on-the-fly reduction or result processing
diff --git a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java
index a9739cfe21a..a3f3f3a9612 100644
--- a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java
+++ b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java
@@ -162,7 +162,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
     /**
      * Fetches all shards for the search request from this remote connection. This is used to later run the search on the remote end.
      */
-    public void fetchSearchShards(SearchRequest searchRequest, final List<String> indices,
+    public void fetchSearchShards(SearchRequest searchRequest, final String[] indices,
                                   ActionListener<ClusterSearchShardsResponse> listener) {
         if (connectedNodes.isEmpty()) {
             // just in case if we are not connected for some reason we try to connect and if we fail we have to notify the listener
@@ -176,10 +176,10 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
         }
     }
 
-    private void fetchShardsInternal(SearchRequest searchRequest, List<String> indices,
+    private void fetchShardsInternal(SearchRequest searchRequest, String[] indices,
                                      final ActionListener<ClusterSearchShardsResponse> listener) {
         final DiscoveryNode node = nodeSupplier.get();
-        ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest(indices.toArray(new String[indices.size()]))
+        ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest(indices)
             .indicesOptions(searchRequest.indicesOptions()).local(true).preference(searchRequest.preference())
             .routing(searchRequest.routing());
         transportService.sendRequest(node, ClusterSearchShardsAction.NAME, searchShardsRequest,
diff --git a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java
index 34cb5a84da7..40fed0299b3 100644
--- a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java
+++ b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java
@@ -22,14 +22,13 @@ import org.apache.logging.log4j.util.Supplier;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
 import org.elasticsearch.action.support.GroupedActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.cluster.metadata.ClusterNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.PlainShardIterator;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.component.AbstractComponent;
@@ -243,18 +242,18 @@ public final class RemoteClusterService extends AbstractComponent implements Clo
         return remoteClusters.containsKey(clusterName);
     }
 
-    void collectSearchShards(SearchRequest searchRequest, Map<String, List<String>> remoteIndicesByCluster,
+    void collectSearchShards(SearchRequest searchRequest, Map<String, OriginalIndices> remoteIndicesByCluster,
                              ActionListener<Map<String, ClusterSearchShardsResponse>> listener) {
         final CountDown responsesCountDown = new CountDown(remoteIndicesByCluster.size());
         final Map<String, ClusterSearchShardsResponse> searchShardsResponses = new ConcurrentHashMap<>();
         final AtomicReference<TransportException> transportException = new AtomicReference<>();
-        for (Map.Entry<String, List<String>> entry : remoteIndicesByCluster.entrySet()) {
+        for (Map.Entry<String, OriginalIndices> entry : remoteIndicesByCluster.entrySet()) {
             final String clusterName = entry.getKey();
             RemoteClusterConnection remoteClusterConnection = remoteClusters.get(clusterName);
             if (remoteClusterConnection == null) {
                 throw new IllegalArgumentException("no such remote cluster: " + clusterName);
             }
-            final List<String> indices = entry.getValue();
+            final String[] indices = entry.getValue().indices();
             remoteClusterConnection.fetchSearchShards(searchRequest, indices,
                 new ActionListener<ClusterSearchShardsResponse>() {
                     @Override
@@ -288,16 +287,16 @@ public final class RemoteClusterService extends AbstractComponent implements Clo
         }
     }
 
-
     Function<String, Transport.Connection> processRemoteShards(Map<String, ClusterSearchShardsResponse> searchShardsResponses,
-                                                                       List<ShardIterator> remoteShardIterators,
-                                                                       Map<String, AliasFilter> aliasFilterMap) {
+                                                               Map<String, OriginalIndices> remoteIndicesByCluster,
+                                                               List<SearchShardIterator> remoteShardIterators,
+                                                               Map<String, AliasFilter> aliasFilterMap) {
         Map<String, Supplier<Transport.Connection>> nodeToCluster = new HashMap<>();
         for (Map.Entry<String, ClusterSearchShardsResponse> entry : searchShardsResponses.entrySet()) {
-            String clusterName = entry.getKey();
+            String clusterAlias = entry.getKey();
             ClusterSearchShardsResponse searchShardsResponse = entry.getValue();
             for (DiscoveryNode remoteNode : searchShardsResponse.getNodes()) {
-                nodeToCluster.put(remoteNode.getId(), () -> getConnection(remoteNode, clusterName));
+                nodeToCluster.put(remoteNode.getId(), () -> getConnection(remoteNode, clusterAlias));
             }
             Map<String, AliasFilter> indicesAndFilters = searchShardsResponse.getIndicesAndFilters();
             for (ClusterSearchShardsGroup clusterSearchShardsGroup : searchShardsResponse.getGroups()) {
@@ -305,9 +304,11 @@ public final class RemoteClusterService extends AbstractComponent implements Clo
                 //this ends up in the hits returned with the search response
                 ShardId shardId = clusterSearchShardsGroup.getShardId();
                 Index remoteIndex = shardId.getIndex();
-                Index index = new Index(clusterName + REMOTE_CLUSTER_INDEX_SEPARATOR + remoteIndex.getName(), remoteIndex.getUUID());
-                ShardIterator shardIterator = new PlainShardIterator(new ShardId(index, shardId.getId()),
-                    Arrays.asList(clusterSearchShardsGroup.getShards()));
+                Index index = new Index(clusterAlias + REMOTE_CLUSTER_INDEX_SEPARATOR + remoteIndex.getName(), remoteIndex.getUUID());
+                OriginalIndices originalIndices = remoteIndicesByCluster.get(clusterAlias);
+                assert originalIndices != null;
+                SearchShardIterator shardIterator = new SearchShardIterator(new ShardId(index, shardId.getId()),
+                    Arrays.asList(clusterSearchShardsGroup.getShards()), originalIndices);
                 remoteShardIterators.add(shardIterator);
                 AliasFilter aliasFilter;
                 if (indicesAndFilters == null) {
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java b/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java
index 709d1e5e237..67de87b1bb1 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java
@@ -49,5 +49,4 @@ abstract class SearchActionListener<T extends SearchPhaseResult> implements Acti
     }
 
     protected abstract void innerOnResponse(T response);
-
 }
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
index 7151c8712ed..be8cb0cff01 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
@@ -22,7 +22,6 @@ package org.elasticsearch.action.search;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.internal.AliasFilter;
@@ -46,7 +45,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
             final Executor executor,
             final SearchRequest request,
             final ActionListener<SearchResponse> listener,
-            final GroupShardsIterator shardsIts,
+            final GroupShardsIterator<SearchShardIterator> shardsIts,
             final TransportSearchAction.SearchTimeProvider timeProvider,
             final long clusterStateVersion,
             final SearchTask task) {
@@ -70,7 +69,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
 
     @Override
     protected void executePhaseOnShard(
-            final ShardIterator shardIt,
+            final SearchShardIterator shardIt,
             final ShardRouting shard,
             final SearchActionListener<DfsSearchResult> listener) {
         getSearchTransport().sendExecuteDfs(getConnection(shard.currentNodeId()),
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java
index 26c5403f4ab..a109ab96397 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java
@@ -20,7 +20,7 @@ package org.elasticsearch.action.search;
 
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.search.SearchShardTarget;
@@ -97,16 +97,16 @@ interface SearchPhaseContext extends ActionListener<SearchResponse>, Executor {
      * @see org.elasticsearch.search.fetch.FetchSearchResult#getRequestId()
      *
      */
-    default void sendReleaseSearchContext(long contextId, Transport.Connection connection) {
+    default void sendReleaseSearchContext(long contextId, Transport.Connection connection, OriginalIndices originalIndices) {
         if (connection != null) {
-            getSearchTransport().sendFreeContext(connection, contextId, getRequest());
+            getSearchTransport().sendFreeContext(connection, contextId, originalIndices);
         }
     }
 
     /**
      * Builds an request for the initial search phase.
      */
-    ShardSearchTransportRequest buildShardSearchRequest(ShardIterator shardIt, ShardRouting shard);
+    ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt, ShardRouting shard);
 
     /**
      * Processes the phase transition from on phase to another. This method handles all errors that happen during the initial run execution
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
index fd1d1977029..855e0216284 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
@@ -22,7 +22,6 @@ package org.elasticsearch.action.search;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.internal.AliasFilter;
@@ -32,8 +31,7 @@ import java.util.Map;
 import java.util.concurrent.Executor;
 import java.util.function.Function;
 
-final class SearchQueryThenFetchAsyncAction
-        extends AbstractSearchAsyncAction<SearchPhaseResult> {
+final class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<SearchPhaseResult> {
 
     private final SearchPhaseController searchPhaseController;
 
@@ -47,7 +45,7 @@ final class SearchQueryThenFetchAsyncAction
             final Executor executor,
             final SearchRequest request,
             final ActionListener<SearchResponse> listener,
-            final GroupShardsIterator shardsIts,
+            final GroupShardsIterator<SearchShardIterator> shardsIts,
             final TransportSearchAction.SearchTimeProvider timeProvider,
             long clusterStateVersion,
             SearchTask task) {
@@ -70,7 +68,7 @@ final class SearchQueryThenFetchAsyncAction
     }
 
     protected void executePhaseOnShard(
-            final ShardIterator shardIt,
+            final SearchShardIterator shardIt,
             final ShardRouting shard,
             final SearchActionListener<SearchPhaseResult> listener) {
         getSearchTransport().sendExecuteQuery(
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java b/core/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java
new file mode 100644
index 00000000000..ca78945a299
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.cluster.routing.PlainShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.index.shard.ShardId;
+
+import java.util.List;
+
+/**
+ * Extension of {@link PlainShardIterator} used in the search api, which also holds the {@link OriginalIndices}
+ * of the search request. Useful especially with cross cluster search, as each cluster has its own set of original indices.
+ */
+public final class SearchShardIterator extends PlainShardIterator {
+
+    private final OriginalIndices originalIndices;
+
+    /**
+     * Creates a {@link SearchShardIterator} instance that iterates over a subset of the given shards
+     * for the given <code>shardId</code>.
+     * @param shardId shard id of the group
+     * @param shards  shards to iterate
+     * @param originalIndices the indices that the search request originally related to
+     */
+    public SearchShardIterator(ShardId shardId, List<ShardRouting> shards, OriginalIndices originalIndices) {
+        super(shardId, shards);
+        this.originalIndices = originalIndices;
+    }
+
+    /**
+     * Returns the original indices associated with this shard iterator, specifically with the cluster that this shard belongs to.
+     */
+    public OriginalIndices getOriginalIndices() {
+        return originalIndices;
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
index 80583e24c9c..436d8da95eb 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
@@ -92,8 +92,8 @@ public class SearchTransportService extends AbstractLifecycleComponent {
         }
     }
 
-    public void sendFreeContext(Transport.Connection connection, final long contextId, SearchRequest request) {
-        transportService.sendRequest(connection, FREE_CONTEXT_ACTION_NAME, new SearchFreeContextRequest(request, contextId),
+    public void sendFreeContext(Transport.Connection connection, final long contextId, OriginalIndices originalIndices) {
+        transportService.sendRequest(connection, FREE_CONTEXT_ACTION_NAME, new SearchFreeContextRequest(originalIndices, contextId),
             TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(new ActionListener<SearchFreeContextResponse>() {
                 @Override
                 public void onResponse(SearchFreeContextResponse response) {
@@ -219,9 +219,9 @@ public class SearchTransportService extends AbstractLifecycleComponent {
         SearchFreeContextRequest() {
         }
 
-        SearchFreeContextRequest(SearchRequest request, long id) {
+        SearchFreeContextRequest(OriginalIndices originalIndices, long id) {
             super(id);
-            this.originalIndices = new OriginalIndices(request);
+            this.originalIndices = originalIndices;
         }
 
         @Override
diff --git a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java
index 2aa0ad3c7be..6d5b30fd9bd 100644
--- a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java
+++ b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java
@@ -21,6 +21,7 @@ package org.elasticsearch.action.search;
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Nullable;
@@ -212,7 +213,8 @@ public class ShardSearchFailure implements ShardOperationFailedException {
             }
         }
         return new ShardSearchFailure(exception,
-                new SearchShardTarget(nodeId, new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId)));
+                new SearchShardTarget(nodeId,
+                        new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), OriginalIndices.NONE));
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
index 63a3ad0b62d..6f7cc26e59e 100644
--- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.search;
 
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.cluster.ClusterState;
@@ -162,10 +163,8 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         long getRelativeCurrentNanos() {
             return relativeCurrentNanosProvider.getAsLong();
         }
-
     }
 
-
     @Override
     protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
         final long absoluteStartMillis = System.currentTimeMillis();
@@ -173,17 +172,27 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         final SearchTimeProvider timeProvider =
                 new SearchTimeProvider(absoluteStartMillis, relativeStartNanos, System::nanoTime);
 
-        final String[] localIndices;
-        final Map<String, List<String>> remoteClusterIndices;
+        final OriginalIndices localIndices;
+        final Map<String, OriginalIndices> remoteClusterIndices;
         final ClusterState clusterState = clusterService.state();
         if (remoteClusterService.isCrossClusterSearchEnabled()) {
-            remoteClusterIndices = remoteClusterService.groupClusterIndices( searchRequest.indices(), // empty string is not allowed
+            final Map<String, List<String>> groupedIndices = remoteClusterService.groupClusterIndices(searchRequest.indices(),
+                // empty string is not allowed
                 idx -> indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState));
-            List<String> remove = remoteClusterIndices.remove(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY);
-            localIndices = remove == null ? Strings.EMPTY_ARRAY : remove.toArray(new String[remove.size()]);
+            List<String> remove = groupedIndices.remove(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY);
+            String[] indices = remove == null ? Strings.EMPTY_ARRAY : remove.toArray(new String[remove.size()]);
+            localIndices = new OriginalIndices(indices, searchRequest.indicesOptions());
+            Map<String, OriginalIndices> originalIndicesMap = new HashMap<>();
+            for (Map.Entry<String, List<String>> entry : groupedIndices.entrySet()) {
+                String clusterAlias = entry.getKey();
+                List<String> originalIndices = entry.getValue();
+                originalIndicesMap.put(clusterAlias,
+                        new OriginalIndices(originalIndices.toArray(new String[originalIndices.size()]), searchRequest.indicesOptions()));
+            }
+            remoteClusterIndices = Collections.unmodifiableMap(originalIndicesMap);
         } else {
             remoteClusterIndices = Collections.emptyMap();
-            localIndices = searchRequest.indices();
+            localIndices = new OriginalIndices(searchRequest);
         }
 
         if (remoteClusterIndices.isEmpty()) {
@@ -192,18 +201,18 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         } else {
             remoteClusterService.collectSearchShards(searchRequest, remoteClusterIndices,
                 ActionListener.wrap((searchShardsResponses) -> {
-                    List<ShardIterator> remoteShardIterators = new ArrayList<>();
+                    List<SearchShardIterator> remoteShardIterators = new ArrayList<>();
                     Map<String, AliasFilter> remoteAliasFilters = new HashMap<>();
                     Function<String, Transport.Connection> connectionFunction = remoteClusterService.processRemoteShards(
-                        searchShardsResponses, remoteShardIterators, remoteAliasFilters);
+                        searchShardsResponses, remoteClusterIndices, remoteShardIterators, remoteAliasFilters);
                     executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, remoteShardIterators,
                         connectionFunction, clusterState, remoteAliasFilters, listener);
                 }, listener::onFailure));
         }
     }
 
-    private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest, String[] localIndices,
-                               List<ShardIterator> remoteShardIterators, Function<String, Transport.Connection> remoteConnections,
+    private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest, OriginalIndices localIndices,
+                               List<SearchShardIterator> remoteShardIterators, Function<String, Transport.Connection> remoteConnections,
                                ClusterState clusterState, Map<String, AliasFilter> remoteAliasMap,
                                ActionListener<SearchResponse> listener) {
 
@@ -212,11 +221,11 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         // date math expressions and $now in scripts. This way all apis will deal with now in the same way instead
         // of just for the _search api
         final Index[] indices;
-        if (localIndices.length == 0 && remoteShardIterators.size() > 0) {
+        if (localIndices.indices().length == 0 && remoteShardIterators.size() > 0) {
             indices = Index.EMPTY_ARRAY; // don't search on _all if only remote indices were specified
         } else {
             indices = indexNameExpressionResolver.concreteIndices(clusterState, searchRequest.indicesOptions(),
-                timeProvider.getAbsoluteStartMillis(), localIndices);
+                timeProvider.getAbsoluteStartMillis(), localIndices.indices());
         }
         Map<String, AliasFilter> aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap);
         Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(),
@@ -225,9 +234,9 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         for (int i = 0; i < indices.length; i++) {
             concreteIndices[i] = indices[i].getName();
         }
-        GroupShardsIterator localShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap,
+        GroupShardsIterator<ShardIterator> localShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap,
             searchRequest.preference());
-        GroupShardsIterator shardIterators = mergeShardsIterators(localShardsIterator, remoteShardIterators);
+        GroupShardsIterator<SearchShardIterator> shardIterators = mergeShardsIterators(localShardsIterator, localIndices, remoteShardIterators);
 
         failIfOverShardCountLimit(clusterService, shardIterators.size());
 
@@ -268,19 +277,17 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
             Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, listener).start();
     }
 
-    private static GroupShardsIterator mergeShardsIterators(GroupShardsIterator localShardsIterator,
-                                                            List<ShardIterator> remoteShardIterators) {
-        if (remoteShardIterators.isEmpty()) {
-            return localShardsIterator;
-        }
-        List<ShardIterator> shards = new ArrayList<>();
-        for (ShardIterator shardIterator : remoteShardIterators) {
+    static GroupShardsIterator<SearchShardIterator> mergeShardsIterators(GroupShardsIterator<ShardIterator> localShardsIterator,
+                                                             OriginalIndices localIndices,
+                                                             List<SearchShardIterator> remoteShardIterators) {
+        List<SearchShardIterator> shards = new ArrayList<>();
+        for (SearchShardIterator shardIterator : remoteShardIterators) {
             shards.add(shardIterator);
         }
         for (ShardIterator shardIterator : localShardsIterator) {
-            shards.add(shardIterator);
+            shards.add(new SearchShardIterator(shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices));
         }
-        return new GroupShardsIterator(shards);
+        return new GroupShardsIterator<>(shards);
     }
 
     @Override
@@ -288,7 +295,8 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         throw new UnsupportedOperationException("the task parameter is required");
     }
 
-    private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators,
+    private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest,
+                                                        GroupShardsIterator<SearchShardIterator> shardIterators,
                                                         SearchTimeProvider timeProvider, Function<String, Transport.Connection> connectionLookup,
                                                         long clusterStateVersion, Map<String, AliasFilter> aliasFilter,
                                                         Map<String, Float> concreteIndexBoosts,
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index 0408b04cc83..53764f4ee88 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -94,7 +94,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
      * Determines the shards this operation will be executed on. The operation is executed once per shard iterator, typically
      * on the first shard in it. If the operation fails, it will be retried on the next shard in the iterator.
      */
-    protected abstract GroupShardsIterator shards(ClusterState clusterState, Request request, String[] concreteIndices);
+    protected abstract GroupShardsIterator<ShardIterator> shards(ClusterState clusterState, Request request, String[] concreteIndices);
 
     protected abstract ClusterBlockException checkGlobalBlock(ClusterState state, Request request);
 
@@ -107,7 +107,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
         private final ActionListener<Response> listener;
         private final ClusterState clusterState;
         private final DiscoveryNodes nodes;
-        private final GroupShardsIterator shardsIts;
+        private final GroupShardsIterator<ShardIterator> shardsIts;
         private final int expectedOps;
         private final AtomicInteger counterOps = new AtomicInteger();
         private final AtomicReferenceArray shardsResponses;
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
index 412f54f4354..3ef967472a5 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
@@ -270,7 +270,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
             ShardsIterator shardIt = shards(clusterState, request, concreteIndices);
             nodeIds = new HashMap<>();
 
-            for (ShardRouting shard : shardIt.asUnordered()) {
+            for (ShardRouting shard : shardIt) {
                 // send a request to the shard only if it is assigned to a node that is in the local node's cluster state
                 // a scenario in which a shard can be assigned but to a node that is not in the local node's cluster state
                 // is when the shard is assigned to the master node, the local node has detected the master as failed
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java
index bb1e776f2e9..5ff55a6fa55 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java
@@ -58,7 +58,7 @@ public class TransportTermVectorsAction extends TransportSingleShardAction<TermV
     protected ShardIterator shards(ClusterState state, InternalRequest request) {
         if (request.request().doc() != null && request.request().routing() == null) {
             // artificial document without routing specified, ignore its "id" and use either random shard or according to preference
-            GroupShardsIterator groupShardsIter = clusterService.operationRouting().searchShards(state,
+            GroupShardsIterator<ShardIterator> groupShardsIter = clusterService.operationRouting().searchShards(state,
                     new String[] { request.concreteIndex() }, null, request.request().preference());
             return groupShardsIter.iterator().next();
         }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java b/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java
index e8e752fda12..7b33c24d15f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java
@@ -30,14 +30,14 @@ import java.util.List;
  * ShardsIterators are always returned in ascending order independently of their order at construction
  * time. The incoming iterators are sorted to ensure consistent iteration behavior across Nodes / JVMs.
 */
-public final class GroupShardsIterator implements Iterable<ShardIterator> {
+public final class GroupShardsIterator<ShardIt extends ShardIterator> implements Iterable<ShardIt> {
 
-    private final List<ShardIterator> iterators;
+    private final List<ShardIt> iterators;
 
     /**
      * Constructs a enw GroupShardsIterator from the given list.
      */
-    public GroupShardsIterator(List<ShardIterator> iterators) {
+    public GroupShardsIterator(List<ShardIt> iterators) {
         CollectionUtil.timSort(iterators);
         this.iterators = iterators;
     }
@@ -60,7 +60,7 @@ public final class GroupShardsIterator implements Iterable<ShardIterator> {
      */
     public int totalSizeWith1ForEmpty() {
         int size = 0;
-        for (ShardIterator shard : iterators) {
+        for (ShardIt shard : iterators) {
             size += Math.max(1, shard.size());
         }
         return size;
@@ -75,7 +75,7 @@ public final class GroupShardsIterator implements Iterable<ShardIterator> {
     }
 
     @Override
-    public Iterator<ShardIterator> iterator() {
+    public Iterator<ShardIt> iterator() {
         return iterators.iterator();
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
index c587629ef0c..5a0bd0d4263 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.IntSet;
 import com.carrotsearch.hppc.cursors.IntCursor;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import org.apache.lucene.util.CollectionUtil;
-import org.elasticsearch.Version;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -265,37 +264,6 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
         return new PlainShardsIterator(shuffler.shuffle(allActiveShards));
     }
 
-    /**
-     * A group shards iterator where each group ({@link ShardIterator}
-     * is an iterator across shard replication group.
-     */
-    public GroupShardsIterator groupByShardsIt() {
-        // use list here since we need to maintain identity across shards
-        ArrayList<ShardIterator> set = new ArrayList<>(shards.size());
-        for (IndexShardRoutingTable indexShard : this) {
-            set.add(indexShard.shardsIt());
-        }
-        return new GroupShardsIterator(set);
-    }
-
-    /**
-     * A groups shards iterator where each groups is a single {@link ShardRouting} and a group
-     * is created for each shard routing.
-     * <p>
-     * This basically means that components that use the {@link GroupShardsIterator} will iterate
-     * over *all* the shards (all the replicas) within the index.</p>
-     */
-    public GroupShardsIterator groupByAllIt() {
-        // use list here since we need to maintain identity across shards
-        ArrayList<ShardIterator> set = new ArrayList<>();
-        for (IndexShardRoutingTable indexShard : this) {
-            for (ShardRouting shardRouting : indexShard) {
-                set.add(shardRouting.shardsIt());
-            }
-        }
-        return new GroupShardsIterator(set);
-    }
-
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
index 6881cc75657..52807251699 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
@@ -68,7 +68,7 @@ public class OperationRouting extends AbstractComponent {
         return preferenceActiveShardIterator(indexShard, clusterState.nodes().getLocalNodeId(), clusterState.nodes(), preference);
     }
 
-    public GroupShardsIterator searchShards(ClusterState clusterState, String[] concreteIndices, @Nullable Map<String, Set<String>> routing, @Nullable String preference) {
+    public GroupShardsIterator<ShardIterator> searchShards(ClusterState clusterState, String[] concreteIndices, @Nullable Map<String, Set<String>> routing, @Nullable String preference) {
         final Set<IndexShardRoutingTable> shards = computeTargetedShards(clusterState, concreteIndices, routing);
         final Set<ShardIterator> set = new HashSet<>(shards.size());
         for (IndexShardRoutingTable shard : shards) {
@@ -77,7 +77,7 @@ public class OperationRouting extends AbstractComponent {
                 set.add(iterator);
             }
         }
-        return new GroupShardsIterator(new ArrayList<>(set));
+        return new GroupShardsIterator<>(new ArrayList<>(set));
     }
 
     private static final Map<String, Set<String>> EMPTY_ROUTING = Collections.emptyMap();
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java b/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java
index 5950bd35d37..bb45ca66956 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java
@@ -43,7 +43,6 @@ public class PlainShardIterator extends PlainShardsIterator implements ShardIter
         this.shardId = shardId;
     }
 
-
     @Override
     public ShardId shardId() {
         return this.shardId;
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardsIterator.java b/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardsIterator.java
index c2ac9416079..6cb1989a8dd 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardsIterator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/PlainShardsIterator.java
@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.cluster.routing;
 
+import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 
 /**
@@ -74,7 +76,12 @@ public class PlainShardsIterator implements ShardsIterator {
     }
 
     @Override
-    public Iterable<ShardRouting> asUnordered() {
-        return shards;
+    public List<ShardRouting> getShardRoutings() {
+        return Collections.unmodifiableList(shards);
+    }
+
+    @Override
+    public Iterator<ShardRouting> iterator() {
+        return shards.iterator();
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java
index 0b1a0044567..a248d6a939a 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java
@@ -238,7 +238,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
         return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ACTIVE_PREDICATE);
     }
 
-    public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty) {
+    public GroupShardsIterator<ShardIterator> allAssignedShardsGrouped(String[] indices, boolean includeEmpty) {
         return allAssignedShardsGrouped(indices, includeEmpty, false);
     }
 
@@ -249,14 +249,14 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
      * @param includeRelocationTargets if true, an <b>extra</b> shard iterator will be added for relocating shards. The extra
      *                                 iterator contains a single ShardRouting pointing at the relocating target
      */
-    public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) {
+    public GroupShardsIterator<ShardIterator> allAssignedShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) {
         return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ASSIGNED_PREDICATE);
     }
 
-    private static Predicate<ShardRouting> ACTIVE_PREDICATE = shardRouting -> shardRouting.active();
-    private static Predicate<ShardRouting> ASSIGNED_PREDICATE = shardRouting -> shardRouting.assignedToNode();
+    private static Predicate<ShardRouting> ACTIVE_PREDICATE = ShardRouting::active;
+    private static Predicate<ShardRouting> ASSIGNED_PREDICATE = ShardRouting::assignedToNode;
 
-    private GroupShardsIterator allSatisfyingPredicateShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets, Predicate<ShardRouting> predicate) {
+    private GroupShardsIterator<ShardIterator> allSatisfyingPredicateShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets, Predicate<ShardRouting> predicate) {
         // use list here since we need to maintain identity across shards
         ArrayList<ShardIterator> set = new ArrayList<>();
         for (String index : indices) {
@@ -278,7 +278,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
                 }
             }
         }
-        return new GroupShardsIterator(set);
+        return new GroupShardsIterator<>(set);
     }
 
     public ShardsIterator allShards(String[] indices) {
@@ -320,9 +320,8 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
      * @param indices The indices to return all the shards (replicas)
      * @return All the primary shards grouped into a single shard element group each
      * @throws IndexNotFoundException If an index passed does not exists
-     * @see IndexRoutingTable#groupByAllIt()
      */
-    public GroupShardsIterator activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) {
+    public GroupShardsIterator<ShardIterator> activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) {
         // use list here since we need to maintain identity across shards
         ArrayList<ShardIterator> set = new ArrayList<>();
         for (String index : indices) {
@@ -339,7 +338,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
                 }
             }
         }
-        return new GroupShardsIterator(set);
+        return new GroupShardsIterator<>(set);
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java
index 024138e4db6..638875ea071 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java
@@ -18,10 +18,12 @@
  */
 package org.elasticsearch.cluster.routing;
 
+import java.util.List;
+
 /**
  * Allows to iterate over unrelated shards.
  */
-public interface ShardsIterator {
+public interface ShardsIterator extends Iterable<ShardRouting> {
 
     /**
      * Resets the iterator to its initial state.
@@ -60,6 +62,9 @@ public interface ShardsIterator {
     @Override
     boolean equals(Object other);
 
-    Iterable<ShardRouting> asUnordered();
+    /**
+     * Returns the {@link ShardRouting}s that this shards iterator holds.
+     */
+    List<ShardRouting> getShardRoutings();
 }
 
diff --git a/core/src/main/java/org/elasticsearch/search/SearchHit.java b/core/src/main/java/org/elasticsearch/search/SearchHit.java
index 71b0b9127b2..d0d5047863f 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchHit.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchHit.java
@@ -21,6 +21,7 @@ package org.elasticsearch.search;
 
 import org.apache.lucene.search.Explanation;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
@@ -544,7 +545,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         ShardId shardId = get(Fields._SHARD, values, null);
         String nodeId = get(Fields._NODE, values, null);
         if (shardId != null && nodeId != null) {
-            searchHit.shard(new SearchShardTarget(nodeId, shardId));
+            searchHit.shard(new SearchShardTarget(nodeId, shardId, OriginalIndices.NONE));
         }
         searchHit.fields(fields);
         return searchHit;
diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java
index e601cec0fea..b1192c59e4c 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchService.java
@@ -24,6 +24,7 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchTask;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -40,7 +41,6 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.search.collapse.CollapseContext;
 import org.elasticsearch.index.query.InnerHitBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexEventListener;
@@ -55,6 +55,7 @@ import org.elasticsearch.search.aggregations.AggregationInitializationException;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.collapse.CollapseContext;
 import org.elasticsearch.search.dfs.DfsPhase;
 import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.fetch.FetchPhase;
@@ -498,7 +499,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
         throws IOException {
         IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
         IndexShard indexShard = indexService.getShard(request.shardId().getId());
-        SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), indexShard.shardId());
+        SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(),
+                indexShard.shardId(), OriginalIndices.NONE);
         Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
 
         final DefaultSearchContext searchContext = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget,
diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
index 5fd20555f81..88045cd3618 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.search;
 
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -36,6 +37,9 @@ public class SearchShardTarget implements Writeable, Comparable<SearchShardTarge
 
     private final Text nodeId;
     private final ShardId shardId;
+    //original indices are only needed in the coordinating node throughout the search request execution.
+    //no need to serialize them as part of SearchShardTarget.
+    private final transient OriginalIndices originalIndices;
 
     public SearchShardTarget(StreamInput in) throws IOException {
         if (in.readBoolean()) {
@@ -44,15 +48,18 @@ public class SearchShardTarget implements Writeable, Comparable<SearchShardTarge
             nodeId = null;
         }
         shardId = ShardId.readShardId(in);
+        this.originalIndices = null;
     }
 
-    public SearchShardTarget(String nodeId, ShardId shardId) {
+    public SearchShardTarget(String nodeId, ShardId shardId, OriginalIndices originalIndices) {
         this.nodeId = nodeId == null ? null : new Text(nodeId);
         this.shardId = shardId;
+        this.originalIndices = originalIndices;
     }
 
+    //this constructor is only used in tests
     public SearchShardTarget(String nodeId, Index index, int shardId) {
-        this(nodeId,  new ShardId(index, shardId));
+        this(nodeId,  new ShardId(index, shardId), OriginalIndices.NONE);
     }
 
     @Nullable
@@ -72,6 +79,10 @@ public class SearchShardTarget implements Writeable, Comparable<SearchShardTarge
         return shardId;
     }
 
+    public OriginalIndices getOriginalIndices() {
+        return originalIndices;
+    }
+
     @Override
     public int compareTo(SearchShardTarget o) {
         int i = shardId.getIndexName().compareTo(o.getIndex());
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java
index f6738f99725..fdfc582c952 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.IntArrayList;
 import org.apache.lucene.search.ScoreDoc;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.OriginalIndices;
-import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -42,9 +41,9 @@ public class ShardFetchSearchRequest extends ShardFetchRequest implements Indice
 
     }
 
-    public ShardFetchSearchRequest(SearchRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
+    public ShardFetchSearchRequest(OriginalIndices originalIndices, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
         super(id, list, lastEmittedDoc);
-        this.originalIndices = new OriginalIndices(request);
+        this.originalIndices = originalIndices;
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java
index 62b99a425dc..b9b78fca54a 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java
@@ -53,11 +53,11 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
     public ShardSearchTransportRequest(){
     }
 
-    public ShardSearchTransportRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards,
+    public ShardSearchTransportRequest(OriginalIndices originalIndices, SearchRequest searchRequest, ShardId shardId, int numberOfShards,
                                        AliasFilter aliasFilter, float indexBoost, long nowInMillis) {
         this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, indexBoost,
             nowInMillis);
-        this.originalIndices = new OriginalIndices(searchRequest);
+        this.originalIndices = originalIndices;
     }
 
     @Override
@@ -76,7 +76,6 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
         return originalIndices.indicesOptions();
     }
 
-
     @Override
     public ShardId shardId() {
         return shardSearchLocalRequest.shardId();
diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java
index 3a92b72dd49..86a9c70dc0b 100644
--- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java
@@ -21,7 +21,6 @@ package org.elasticsearch.search.query;
 
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.OriginalIndices;
-import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchTask;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.common.Strings;
@@ -47,10 +46,10 @@ public class QuerySearchRequest extends TransportRequest implements IndicesReque
     public QuerySearchRequest() {
     }
 
-    public QuerySearchRequest(SearchRequest request, long id, AggregatedDfs dfs) {
+    public QuerySearchRequest(OriginalIndices originalIndices, long id, AggregatedDfs dfs) {
         this.id = id;
         this.dfs = dfs;
-        this.originalIndices = new OriginalIndices(request);
+        this.originalIndices = originalIndices;
     }
 
     public long id() {
diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
index 16f865e2fb9..392127ac30b 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
@@ -26,6 +26,7 @@ import org.apache.logging.log4j.util.Supplier;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.ClusterChangedEvent;
diff --git a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
index a64acbf9a36..1c02a9d3090 100644
--- a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
+++ b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch;
 
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.action.NoShardAvailableActionException;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.RoutingMissingException;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.ShardSearchFailure;
@@ -758,9 +759,9 @@ public class ElasticsearchExceptionTests extends ESTestCase {
                 failureCause = new NoShardAvailableActionException(new ShardId("_index_g", "_uuid_g", 6), "node_g", failureCause);
                 ShardSearchFailure[] shardFailures = new ShardSearchFailure[]{
                         new ShardSearchFailure(new ParsingException(0, 0, "Parsing g", null),
-                                new SearchShardTarget("node_g", new ShardId(new Index("_index_g", "_uuid_g"), 61))),
+                                new SearchShardTarget("node_g", new ShardId(new Index("_index_g", "_uuid_g"), 61), OriginalIndices.NONE)),
                         new ShardSearchFailure(new RepositoryException("repository_g", "Repo"),
-                                new SearchShardTarget("node_g", new ShardId(new Index("_index_g", "_uuid_g"), 62))),
+                                new SearchShardTarget("node_g", new ShardId(new Index("_index_g", "_uuid_g"), 62), OriginalIndices.NONE)),
                         new ShardSearchFailure(new SearchContextMissingException(0L), null)
                 };
                 failure = new SearchPhaseExecutionException("phase_g", "G", failureCause, shardFailures);
diff --git a/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java b/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java
index beec582b13f..508fcc69d64 100644
--- a/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java
@@ -19,11 +19,8 @@
 
 package org.elasticsearch.action.search;
 
-import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
-import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.test.ESTestCase;
 
@@ -56,48 +53,6 @@ public class AbstractSearchAsyncActionTookTests extends ESTestCase {
                     System::nanoTime);
         }
 
-        final ShardIterator it = new ShardIterator() {
-            @Override
-            public ShardId shardId() {
-                return null;
-            }
-
-            @Override
-            public void reset() {
-
-            }
-
-            @Override
-            public int compareTo(ShardIterator o) {
-                return 0;
-            }
-
-            @Override
-            public int size() {
-                return 0;
-            }
-
-            @Override
-            public int sizeActive() {
-                return 0;
-            }
-
-            @Override
-            public ShardRouting nextOrNull() {
-                return null;
-            }
-
-            @Override
-            public int remaining() {
-                return 0;
-            }
-
-            @Override
-            public Iterable<ShardRouting> asUnordered() {
-                return null;
-            }
-        };
-
         return new AbstractSearchAsyncAction<SearchPhaseResult>(
                 "test",
                 null,
@@ -108,7 +63,7 @@ public class AbstractSearchAsyncActionTookTests extends ESTestCase {
                 null,
                 null,
                 null,
-                new GroupShardsIterator(Collections.singletonList(it)),
+                new GroupShardsIterator<>(Collections.singletonList(new SearchShardIterator(null, Collections.emptyList(), null))),
                 timeProvider,
                 0,
                 null,
@@ -123,7 +78,7 @@ public class AbstractSearchAsyncActionTookTests extends ESTestCase {
 
             @Override
             protected void executePhaseOnShard(
-                    final ShardIterator shardIt,
+                    final SearchShardIterator shardIt,
                     final ShardRouting shard,
                     final SearchActionListener<SearchPhaseResult> listener) {
 
@@ -157,5 +112,4 @@ public class AbstractSearchAsyncActionTookTests extends ESTestCase {
             assertThat(actual, greaterThanOrEqualTo(TimeUnit.NANOSECONDS.toMillis(expected.get())));
         }
     }
-
 }
diff --git a/core/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/core/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
index 4a5b65c0a0a..98b6d2e7527 100644
--- a/core/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
+++ b/core/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
@@ -19,7 +19,7 @@
 package org.elasticsearch.action.search;
 
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.logging.Loggers;
@@ -29,14 +29,11 @@ import org.elasticsearch.search.internal.ShardSearchTransportRequest;
 import org.elasticsearch.transport.Transport;
 import org.junit.Assert;
 
-import java.io.IOException;
-import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.concurrent.Executor;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
@@ -114,7 +111,7 @@ public final class MockSearchPhaseContext implements SearchPhaseContext {
     }
 
     @Override
-    public ShardSearchTransportRequest buildShardSearchRequest(ShardIterator shardIt, ShardRouting shard) {
+    public ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt, ShardRouting shard) {
         Assert.fail("should not be called");
         return null;
     }
@@ -145,7 +142,7 @@ public final class MockSearchPhaseContext implements SearchPhaseContext {
     }
 
     @Override
-    public void sendReleaseSearchContext(long contextId, Transport.Connection connection) {
+    public void sendReleaseSearchContext(long contextId, Transport.Connection connection, OriginalIndices originalIndices) {
         releasedSearchContexts.add(contextId);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java
index d73b6709121..8cf6d7d48c7 100644
--- a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java
@@ -382,7 +382,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
                             failReference.set(x);
                             responseLatch.countDown();
                         });
-                    connection.fetchSearchShards(request, Arrays.asList("test-index"), shardsListener);
+                    connection.fetchSearchShards(request, new String[]{"test-index"}, shardsListener);
                     responseLatch.await();
                     assertNull(failReference.get());
                     assertNotNull(reference.get());
diff --git a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java
index 81ee9141e2b..63f6e8aa5a6 100644
--- a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java
@@ -20,10 +20,11 @@ package org.elasticsearch.action.search;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
@@ -204,7 +205,7 @@ public class RemoteClusterServiceTests extends ESTestCase {
     public void testProcessRemoteShards() throws IOException {
         try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, null)) {
             assertFalse(service.isCrossClusterSearchEnabled());
-            List<ShardIterator> iteratorList = new ArrayList<>();
+            List<SearchShardIterator> iteratorList = new ArrayList<>();
             Map<String, ClusterSearchShardsResponse> searchShardsResponseMap = new HashMap<>();
             DiscoveryNode[] nodes = new DiscoveryNode[] {
                 new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT),
@@ -225,11 +226,26 @@ public class RemoteClusterServiceTests extends ESTestCase {
                         TestShardRouting.newShardRouting("bar", 0, "node1", false, ShardRoutingState.STARTED)})
             };
             searchShardsResponseMap.put("test_cluster_1", new ClusterSearchShardsResponse(groups, nodes, indicesAndAliases));
+            DiscoveryNode[] nodes2 = new DiscoveryNode[] {
+                    new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)
+            };
+            ClusterSearchShardsGroup[] groups2 = new ClusterSearchShardsGroup[] {
+                    new ClusterSearchShardsGroup(new ShardId("xyz", "xyz_id", 0),
+                            new ShardRouting[] {TestShardRouting.newShardRouting("xyz", 0, "node3", true, ShardRoutingState.STARTED)})
+            };
+            searchShardsResponseMap.put("test_cluster_2", new ClusterSearchShardsResponse(groups2, nodes2, null));
+
+            Map<String, OriginalIndices> remoteIndicesByCluster = new HashMap<>();
+            remoteIndicesByCluster.put("test_cluster_1",
+                    new OriginalIndices(new String[]{"fo*", "ba*"}, IndicesOptions.strictExpandOpenAndForbidClosed()));
+            remoteIndicesByCluster.put("test_cluster_2",
+                    new OriginalIndices(new String[]{"x*"}, IndicesOptions.strictExpandOpenAndForbidClosed()));
             Map<String, AliasFilter> remoteAliases = new HashMap<>();
-            service.processRemoteShards(searchShardsResponseMap, iteratorList, remoteAliases);
-            assertEquals(3, iteratorList.size());
-            for (ShardIterator iterator : iteratorList) {
+            service.processRemoteShards(searchShardsResponseMap, remoteIndicesByCluster, iteratorList, remoteAliases);
+            assertEquals(4, iteratorList.size());
+            for (SearchShardIterator iterator : iteratorList) {
                 if (iterator.shardId().getIndexName().endsWith("foo")) {
+                    assertArrayEquals(new String[]{"fo*", "ba*"}, iterator.getOriginalIndices().indices());
                     assertTrue(iterator.shardId().getId() == 0 || iterator.shardId().getId() == 1);
                     assertEquals("test_cluster_1:foo", iterator.shardId().getIndexName());
                     ShardRouting shardRouting = iterator.nextOrNull();
@@ -239,7 +255,8 @@ public class RemoteClusterServiceTests extends ESTestCase {
                     assertNotNull(shardRouting);
                     assertEquals(shardRouting.getIndexName(), "foo");
                     assertNull(iterator.nextOrNull());
-                } else {
+                } else if (iterator.shardId().getIndexName().endsWith("bar")) {
+                    assertArrayEquals(new String[]{"fo*", "ba*"}, iterator.getOriginalIndices().indices());
                     assertEquals(0, iterator.shardId().getId());
                     assertEquals("test_cluster_1:bar", iterator.shardId().getIndexName());
                     ShardRouting shardRouting = iterator.nextOrNull();
@@ -249,13 +266,23 @@ public class RemoteClusterServiceTests extends ESTestCase {
                     assertNotNull(shardRouting);
                     assertEquals(shardRouting.getIndexName(), "bar");
                     assertNull(iterator.nextOrNull());
+                } else if (iterator.shardId().getIndexName().endsWith("xyz")) {
+                    assertArrayEquals(new String[]{"x*"}, iterator.getOriginalIndices().indices());
+                    assertEquals(0, iterator.shardId().getId());
+                    assertEquals("test_cluster_2:xyz", iterator.shardId().getIndexName());
+                    ShardRouting shardRouting = iterator.nextOrNull();
+                    assertNotNull(shardRouting);
+                    assertEquals(shardRouting.getIndexName(), "xyz");
+                    assertNull(iterator.nextOrNull());
                 }
             }
-            assertEquals(2, remoteAliases.size());
+            assertEquals(3, remoteAliases.size());
             assertTrue(remoteAliases.toString(), remoteAliases.containsKey("foo_id"));
             assertTrue(remoteAliases.toString(), remoteAliases.containsKey("bar_id"));
+            assertTrue(remoteAliases.toString(), remoteAliases.containsKey("xyz_id"));
             assertEquals(new TermsQueryBuilder("foo", "bar"), remoteAliases.get("foo_id").getQueryBuilder());
             assertEquals(new MatchAllQueryBuilder(), remoteAliases.get("bar_id").getQueryBuilder());
+            assertNull(remoteAliases.get("xyz_id").getQueryBuilder());
         }
     }
 
diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
index 4813dc8ae7d..2d94fe2edd0 100644
--- a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
@@ -20,11 +20,11 @@ package org.elasticsearch.action.search;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
-import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.RecoverySource;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.Strings;
@@ -76,12 +76,14 @@ public class SearchAsyncActionTests extends ESTestCase {
 
         Map<DiscoveryNode, Set<Long>> nodeToContextMap = new HashMap<>();
         AtomicInteger contextIdGenerator = new AtomicInteger(0);
-        GroupShardsIterator shardsIter = getShardsIter("idx", randomIntBetween(1, 10), randomBoolean(), primaryNode, replicaNode);
+        GroupShardsIterator<SearchShardIterator> shardsIter = getShardsIter("idx",
+                new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()),
+                randomIntBetween(1, 10), randomBoolean(), primaryNode, replicaNode);
         AtomicInteger numFreedContext = new AtomicInteger();
         SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
                 Collections.singleton(RemoteClusterService.REMOTE_CLUSTERS_SEEDS)), null) {
             @Override
-            public void sendFreeContext(Transport.Connection connection, long contextId, SearchRequest request) {
+            public void sendFreeContext(Transport.Connection connection, long contextId, OriginalIndices originalIndices) {
                 numFreedContext.incrementAndGet();
                 assertTrue(nodeToContextMap.containsKey(connection.getNode()));
                 assertTrue(nodeToContextMap.get(connection.getNode()).remove(contextId));
@@ -110,7 +112,7 @@ public class SearchAsyncActionTests extends ESTestCase {
             TestSearchResponse response = new TestSearchResponse();
 
             @Override
-            protected void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, SearchActionListener<TestSearchPhaseResult>
+            protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard, SearchActionListener<TestSearchPhaseResult>
                 listener) {
                 assertTrue("shard: " + shard.shardId() + " has been queried twice", response.queried.add(shard.shardId()));
                 Transport.Connection connection = getConnection(shard.currentNodeId());
@@ -133,7 +135,7 @@ public class SearchAsyncActionTests extends ESTestCase {
                         for (int i = 0; i < results.getNumShards(); i++) {
                             TestSearchPhaseResult result = results.results.get(i);
                             assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId());
-                            sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node));
+                            sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node), OriginalIndices.NONE);
                         }
                         responseListener.onResponse(response);
                         latch.countDown();
@@ -154,9 +156,9 @@ public class SearchAsyncActionTests extends ESTestCase {
         }
     }
 
-    private GroupShardsIterator getShardsIter(String index, int numShards, boolean doReplicas, DiscoveryNode primaryNode,
-                                              DiscoveryNode replicaNode) {
-        ArrayList<ShardIterator> list = new ArrayList<>();
+    private static GroupShardsIterator<SearchShardIterator> getShardsIter(String index, OriginalIndices originalIndices, int numShards,
+                                                     boolean doReplicas, DiscoveryNode primaryNode, DiscoveryNode replicaNode) {
+        ArrayList<SearchShardIterator> list = new ArrayList<>();
         for (int i = 0; i < numShards; i++) {
             ArrayList<ShardRouting> started = new ArrayList<>();
             ArrayList<ShardRouting> initializing = new ArrayList<>();
@@ -184,9 +186,9 @@ public class SearchAsyncActionTests extends ESTestCase {
             }
             Collections.shuffle(started, random());
             started.addAll(initializing);
-            list.add(new PlainShardIterator(new ShardId(new Index(index, "_na_"), i), started));
+            list.add(new SearchShardIterator(new ShardId(new Index(index, "_na_"), i), started, originalIndices));
         }
-        return new GroupShardsIterator(list);
+        return new GroupShardsIterator<>(list);
     }
 
     public static class TestSearchResponse extends SearchResponse {
diff --git a/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java b/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java
index 96afbb276d5..eac949c7753 100644
--- a/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.action.search;
 
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -42,7 +43,7 @@ public class ShardSearchFailureTests extends ESTestCase {
         String indexUuid = randomAlphaOfLengthBetween(5, 10);
         int shardId = randomInt();
         return new ShardSearchFailure(ex,
-                new SearchShardTarget(nodeId, new ShardId(new Index(indexName, indexUuid), shardId)));
+                new SearchShardTarget(nodeId, new ShardId(new Index(indexName, indexUuid), shardId), null));
     }
 
     public void testFromXContent() throws IOException {
@@ -73,7 +74,7 @@ public class ShardSearchFailureTests extends ESTestCase {
 
     public void testToXContent() throws IOException {
         ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(0, 0, "some message", null),
-                new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123)));
+                new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), OriginalIndices.NONE));
         BytesReference xContent = toXContent(failure, XContentType.JSON, randomBoolean());
         assertEquals(
                 "{\"shard\":123,"
diff --git a/core/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/core/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
new file mode 100644
index 00000000000..696e25de75e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.cluster.routing.PlainShardIterator;
+import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+
+public class TransportSearchActionTests extends ESTestCase {
+
+    public void testMergeShardsIterators() throws IOException {
+        List<ShardIterator> localShardIterators = new ArrayList<>();
+        {
+            ShardId shardId = new ShardId("local_index", "local_index_uuid", 0);
+            ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, "local_node", true, STARTED);
+            ShardIterator shardIterator = new PlainShardIterator(shardId, Collections.singletonList(shardRouting));
+            localShardIterators.add(shardIterator);
+        }
+        {
+            ShardId shardId2 = new ShardId("local_index_2", "local_index_2_uuid", 1);
+            ShardRouting shardRouting2 = TestShardRouting.newShardRouting(shardId2, "local_node", true, STARTED);
+            ShardIterator shardIterator2 = new PlainShardIterator(shardId2, Collections.singletonList(shardRouting2));
+            localShardIterators.add(shardIterator2);
+        }
+        GroupShardsIterator<ShardIterator> localShardsIterator = new GroupShardsIterator<>(localShardIterators);
+
+        OriginalIndices localIndices = new OriginalIndices(new String[]{"local_alias", "local_index_2"},
+                IndicesOptions.strictExpandOpenAndForbidClosed());
+
+        OriginalIndices remoteIndices = new OriginalIndices(new String[]{"remote_alias", "remote_index_2"},
+                IndicesOptions.strictExpandOpen());
+        List<SearchShardIterator> remoteShardIterators = new ArrayList<>();
+        {
+            ShardId remoteShardId = new ShardId("remote_index", "remote_index_uuid", 2);
+            ShardRouting remoteShardRouting = TestShardRouting.newShardRouting(remoteShardId, "remote_node", true, STARTED);
+            SearchShardIterator remoteShardIterator = new SearchShardIterator(remoteShardId,
+                    Collections.singletonList(remoteShardRouting), remoteIndices);
+            remoteShardIterators.add(remoteShardIterator);
+        }
+        {
+            ShardId remoteShardId2 = new ShardId("remote_index_2", "remote_index_2_uuid", 3);
+            ShardRouting remoteShardRouting2 = TestShardRouting.newShardRouting(remoteShardId2, "remote_node", true, STARTED);
+            SearchShardIterator remoteShardIterator2 = new SearchShardIterator(remoteShardId2,
+                    Collections.singletonList(remoteShardRouting2), remoteIndices);
+            remoteShardIterators.add(remoteShardIterator2);
+        }
+        OriginalIndices remoteIndices2 = new OriginalIndices(new String[]{"remote_index_3"}, IndicesOptions.strictExpand());
+
+        {
+            ShardId remoteShardId3 = new ShardId("remote_index_3", "remote_index_3_uuid", 4);
+            ShardRouting remoteShardRouting3 = TestShardRouting.newShardRouting(remoteShardId3, "remote_node", true, STARTED);
+            SearchShardIterator remoteShardIterator3 = new SearchShardIterator(remoteShardId3,
+                    Collections.singletonList(remoteShardRouting3), remoteIndices2);
+            remoteShardIterators.add(remoteShardIterator3);
+        }
+
+        GroupShardsIterator<SearchShardIterator> searchShardIterators = TransportSearchAction.mergeShardsIterators(localShardsIterator,
+                localIndices, remoteShardIterators);
+
+        assertEquals(searchShardIterators.size(), 5);
+        int i = 0;
+        for (SearchShardIterator searchShardIterator : searchShardIterators) {
+            switch(i++) {
+                case 0:
+                    assertEquals("local_index", searchShardIterator.shardId().getIndexName());
+                    assertEquals(0, searchShardIterator.shardId().getId());
+                    assertSame(localIndices, searchShardIterator.getOriginalIndices());
+                    break;
+                case 1:
+                    assertEquals("local_index_2", searchShardIterator.shardId().getIndexName());
+                    assertEquals(1, searchShardIterator.shardId().getId());
+                    assertSame(localIndices, searchShardIterator.getOriginalIndices());
+                    break;
+                case 2:
+                    assertEquals("remote_index", searchShardIterator.shardId().getIndexName());
+                    assertEquals(2, searchShardIterator.shardId().getId());
+                    assertSame(remoteIndices, searchShardIterator.getOriginalIndices());
+                    break;
+                case 3:
+                    assertEquals("remote_index_2", searchShardIterator.shardId().getIndexName());
+                    assertEquals(3, searchShardIterator.shardId().getId());
+                    assertSame(remoteIndices, searchShardIterator.getOriginalIndices());
+                    break;
+                case 4:
+                    assertEquals("remote_index_3", searchShardIterator.shardId().getIndexName());
+                    assertEquals(4, searchShardIterator.shardId().getId());
+                    assertSame(remoteIndices2, searchShardIterator.getOriginalIndices());
+                    break;
+            }
+        }
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
index ef025e3c37b..93d8be990de 100644
--- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
@@ -296,7 +296,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
 
         ShardsIterator shardIt = clusterService.state().routingTable().allShards(new String[]{TEST_INDEX});
         Set<String> set = new HashSet<>();
-        for (ShardRouting shard : shardIt.asUnordered()) {
+        for (ShardRouting shard : shardIt) {
             set.add(shard.currentNodeId());
         }
 
@@ -332,7 +332,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
         // the master should not be in the list of nodes that requests were sent to
         ShardsIterator shardIt = clusterService.state().routingTable().allShards(new String[]{TEST_INDEX});
         Set<String> set = new HashSet<>();
-        for (ShardRouting shard : shardIt.asUnordered()) {
+        for (ShardRouting shard : shardIt) {
             if (!shard.currentNodeId().equals(masterNode.getId())) {
                 set.add(shard.currentNodeId());
             }
@@ -352,8 +352,8 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
     public void testOperationExecution() throws Exception {
         ShardsIterator shardIt = clusterService.state().routingTable().allShards(new String[]{TEST_INDEX});
         Set<ShardRouting> shards = new HashSet<>();
-        String nodeId = shardIt.asUnordered().iterator().next().currentNodeId();
-        for (ShardRouting shard : shardIt.asUnordered()) {
+        String nodeId = shardIt.iterator().next().currentNodeId();
+        for (ShardRouting shard : shardIt) {
             if (nodeId.equals(shard.currentNodeId())) {
                 shards.add(shard);
             }
@@ -417,7 +417,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
 
         ShardsIterator shardIt = clusterService.state().getRoutingTable().allShards(new String[]{TEST_INDEX});
         Map<String, List<ShardRouting>> map = new HashMap<>();
-        for (ShardRouting shard : shardIt.asUnordered()) {
+        for (ShardRouting shard : shardIt) {
             if (!map.containsKey(shard.currentNodeId())) {
                 map.put(shard.currentNodeId(), new ArrayList<>());
             }
diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java
index 91420fa227a..fe11cc9cd5f 100644
--- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java
@@ -165,10 +165,9 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCa
 
     private List<ShardStateAction.ShardEntry> createExistingShards(ClusterState currentState, String reason) {
         List<ShardRouting> shards = new ArrayList<>();
-        GroupShardsIterator shardGroups =
-            currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true);
+        GroupShardsIterator<ShardIterator> shardGroups = currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true);
         for (ShardIterator shardIt : shardGroups) {
-            for (ShardRouting shard : shardIt.asUnordered()) {
+            for (ShardRouting shard : shardIt) {
                 shards.add(shard);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java
index 72cf0391fd4..f2571fce339 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java
@@ -43,7 +43,7 @@ public class GroupShardsIteratorTests extends ESTestCase {
 
         list.add(new PlainShardIterator(new ShardId(index, 0), Arrays.asList(newRouting(index, 0, true))));
         list.add(new PlainShardIterator(new ShardId(index, 1), Arrays.asList(newRouting(index, 1, true))));
-        GroupShardsIterator iter = new GroupShardsIterator(list);
+        GroupShardsIterator iter = new GroupShardsIterator<>(list);
         assertEquals(7, iter.totalSizeWith1ForEmpty());
         assertEquals(5, iter.size());
         assertEquals(6, iter.totalSize());
@@ -67,7 +67,7 @@ public class GroupShardsIteratorTests extends ESTestCase {
 
         Collections.shuffle(list, random());
         ArrayList<ShardIterator> actualIterators = new ArrayList<>();
-        GroupShardsIterator iter = new GroupShardsIterator(list);
+        GroupShardsIterator<ShardIterator> iter = new GroupShardsIterator<>(list);
         for (ShardIterator shardsIterator : iter) {
             actualIterators.add(shardsIterator);
         }
diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
index 86fa25872e0..172bcd6bd55 100644
--- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
@@ -376,7 +376,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase {
         OperationRouting operationRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
             ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
 
-        GroupShardsIterator shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_shards:0");
+        GroupShardsIterator<ShardIterator> shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_shards:0");
         assertThat(shardIterators.size(), equalTo(1));
         assertThat(shardIterators.iterator().next().shardId().id(), equalTo(0));
 
@@ -443,7 +443,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase {
         clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
 
         // When replicas haven't initialized, it comes back with the primary first, then initializing replicas
-        GroupShardsIterator shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
+        GroupShardsIterator<ShardIterator> shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
         assertThat(shardIterators.size(), equalTo(2)); // two potential shards
         ShardIterator iter = shardIterators.iterator().next();
         assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard
diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
index 91cfba0c70d..9bb180c9818 100644
--- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
@@ -35,7 +35,6 @@ import org.elasticsearch.client.Requests;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.ShardIterator;
@@ -52,7 +51,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.gateway.PrimaryShardAllocator;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.MergePolicyConfig;
@@ -73,9 +71,6 @@ import org.elasticsearch.test.MockIndexEventListener;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.store.MockFSIndexStore;
 import org.elasticsearch.test.transport.MockTransportService;
-import org.elasticsearch.transport.ConnectionProfile;
-import org.elasticsearch.transport.Transport;
-import org.elasticsearch.transport.TransportException;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportRequestOptions;
 import org.elasticsearch.transport.TransportService;
@@ -292,7 +287,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
         }
         assertThat(response.getStatus(), is(ClusterHealthStatus.RED));
         ClusterState state = client().admin().cluster().prepareState().get().getState();
-        GroupShardsIterator shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
+        GroupShardsIterator<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
         for (ShardIterator iterator : shardIterators) {
             ShardRouting routing;
             while ((routing = iterator.nextOrNull()) != null) {
diff --git a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java
index 925bf56fe70..25eb6df4c27 100644
--- a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java
@@ -150,10 +150,10 @@ public class SuggestStatsIT extends ESIntegTestCase {
 
     private Set<String> nodeIdsWithIndex(String... indices) {
         ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState();
-        GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true);
+        GroupShardsIterator<ShardIterator> allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true);
         Set<String> nodes = new HashSet<>();
         for (ShardIterator shardIterator : allAssignedShardsGrouped) {
-            for (ShardRouting routing : shardIterator.asUnordered()) {
+            for (ShardRouting routing : shardIterator) {
                 if (routing.active()) {
                     nodes.add(routing.currentNodeId());
                 }
diff --git a/core/src/test/java/org/elasticsearch/search/SearchHitTests.java b/core/src/test/java/org/elasticsearch/search/SearchHitTests.java
index 51fffc3e95f..a2c11e8a641 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchHitTests.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchHitTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.search;
 
 import org.apache.lucene.search.Explanation;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
@@ -128,7 +129,8 @@ public class SearchHitTests extends ESTestCase {
         }
         if (randomBoolean()) {
             hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10),
-                    new ShardId(new Index(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)), randomInt())));
+                    new ShardId(new Index(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)), randomInt()),
+                    OriginalIndices.NONE));
         }
         return hit;
     }
diff --git a/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java
index 6fc795a8825..31edc3ac808 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java
@@ -184,8 +184,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
             for (int i = 0; i < rounds; i++) {
                 try {
                     SearchPhaseResult searchPhaseResult = service.executeQueryPhase(
-                        new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
-                            new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f),
+                            new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
+                                    new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f),
                         new SearchTask(123L, "", "", "", null));
                     IntArrayList intCursors = new IntArrayList(1);
                     intCursors.add(0);
@@ -213,16 +213,16 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
         final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
         final IndexShard indexShard = indexService.getShard(0);
         final SearchContext contextWithDefaultTimeout = service.createContext(
-            new ShardSearchLocalRequest(
-                indexShard.shardId(),
-                1,
-                SearchType.DEFAULT,
-                new SearchSourceBuilder(),
-                new String[0],
-                false,
-                new AliasFilter(null, Strings.EMPTY_ARRAY),
-                1.0f),
-            null);
+                new ShardSearchLocalRequest(
+                        indexShard.shardId(),
+                        1,
+                        SearchType.DEFAULT,
+                        new SearchSourceBuilder(),
+                        new String[0],
+                        false,
+                        new AliasFilter(null, Strings.EMPTY_ARRAY),
+                        1.0f),
+                null);
         try {
             // the search context should inherit the default timeout
             assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
@@ -233,15 +233,15 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
 
         final long seconds = randomIntBetween(6, 10);
         final SearchContext context = service.createContext(
-            new ShardSearchLocalRequest(
-                indexShard.shardId(),
-                1,
-                SearchType.DEFAULT,
-                new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds)),
-                new String[0],
-                false,
-                new AliasFilter(null, Strings.EMPTY_ARRAY),
-                1.0f),
+                new ShardSearchLocalRequest(
+                        indexShard.shardId(),
+                        1,
+                        SearchType.DEFAULT,
+                        new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds)),
+                        new String[0],
+                        false,
+                        new AliasFilter(null, Strings.EMPTY_ARRAY),
+                        1.0f),
             null);
         try {
             // the search context should inherit the query timeout
diff --git a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java
index b93b3795cb5..7a0e10af99c 100644
--- a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.search.internal;
 
 import org.elasticsearch.Version;
+import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.cluster.metadata.AliasMetaData;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -95,7 +96,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
         } else {
             filteringAliases = new AliasFilter(null, Strings.EMPTY_ARRAY);
         }
-        return new ShardSearchTransportRequest(searchRequest, shardId,
+        return new ShardSearchTransportRequest(new OriginalIndices(searchRequest), searchRequest, shardId,
                 randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), Math.abs(randomLong()));
     }
 
diff --git a/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java
index 83fb38f18a2..11806a1cea9 100644
--- a/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java
@@ -165,10 +165,10 @@ public class SearchStatsIT extends ESIntegTestCase {
 
     private Set<String> nodeIdsWithIndex(String... indices) {
         ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState();
-        GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true);
+        GroupShardsIterator<ShardIterator> allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true);
         Set<String> nodes = new HashSet<>();
         for (ShardIterator shardIterator : allAssignedShardsGrouped) {
-            for (ShardRouting routing : shardIterator.asUnordered()) {
+            for (ShardRouting routing : shardIterator) {
                 if (routing.active()) {
                     nodes.add(routing.currentNodeId());
                 }

From bc45d10e82e169e93d331866c7e2a7f3cae51db7 Mon Sep 17 00:00:00 2001
From: Nik Everett <nik9000@gmail.com>
Date: Wed, 26 Apr 2017 16:04:38 -0400
Subject: [PATCH 33/34] Remove most usages of 1-arg Script ctor (#24325)

The one argument ctor for `Script` creates a script with the
default language but most usages of it are for testing and either
don't care about the language or are for use with
`MockScriptEngine`. This replaces most usages of the one argument
ctor on `Script` with calls to `ESTestCase#mockScript` to make
it clear that the tests don't need the default scripting language.

I've also factored out some copy and pasted script generation
code into a single place. I would have had to change that code
to use `mockScript` anyway, so it was easier to perform the
refactor.

Relates to #16314
---
 .../elasticsearch/client/RequestTests.java    |  5 ++---
 .../functionscore/ScoreFunctionBuilders.java  |  5 ++++-
 .../function/ScriptScoreFunctionTests.java    |  5 ++---
 .../aggregations/BaseAggregationTestCase.java | 19 +++++++++++++++++
 .../bucket/SignificantTermsTests.java         | 21 ++++---------------
 .../aggregations/bucket/TermsTests.java       | 18 ++--------------
 .../DiversifiedAggregationBuilderTests.java   | 15 +------------
 .../AbstractNumericMetricTestCase.java        | 15 +------------
 .../metrics/GeoCentroidTests.java             | 15 +------------
 .../aggregations/metrics/MissingTests.java    | 15 +------------
 .../metrics/PercentileRanksTests.java         | 14 +------------
 .../metrics/PercentilesTests.java             | 15 +------------
 .../metrics/ScriptedMetricTests.java          |  2 +-
 .../aggregations/metrics/TopHitsTests.java    |  6 +++---
 .../aggregations/metrics/ValueCountTests.java | 15 +------------
 .../metrics/cardinality/CardinalityTests.java | 15 +------------
 .../pipeline/BucketScriptTests.java           |  2 +-
 .../pipeline/BucketSelectorTests.java         |  2 +-
 .../search/sort/ScriptSortBuilderTests.java   |  6 +++---
 .../ingest/common/ScriptProcessorTests.java   |  2 +-
 ...AsyncBulkByScrollActionScriptTestCase.java |  5 +----
 .../index/reindex/ReindexRequestTests.java    |  3 +--
 .../reindex/UpdateByQueryRequestTests.java    |  3 +--
 .../search/RandomSearchRequestGenerator.java  | 14 +++++++++----
 .../org/elasticsearch/test/ESTestCase.java    | 12 +++++++++++
 25 files changed, 76 insertions(+), 173 deletions(-)

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java
index 58b7df92272..c527125e10b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java
@@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.RandomObjects;
@@ -287,7 +286,7 @@ public class RequestTests extends ESTestCase {
                 expectedParams.put("doc_as_upsert", "true");
             }
         } else {
-            updateRequest.script(new Script("_value + 1"));
+            updateRequest.script(mockScript("_value + 1"));
             updateRequest.scriptedUpsert(randomBoolean());
         }
         if (randomBoolean()) {
@@ -520,7 +519,7 @@ public class RequestTests extends ESTestCase {
         {
             BulkRequest bulkRequest = new BulkRequest();
             bulkRequest.add(new DeleteRequest("index", "type", "0"));
-            bulkRequest.add(new UpdateRequest("index", "type", "1").script(new Script("test")));
+            bulkRequest.add(new UpdateRequest("index", "type", "1").script(mockScript("test")));
             bulkRequest.add(new DeleteRequest("index", "type", "2"));
 
             Request request = Request.bulk(bulkRequest);
diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java
index e6fb632f5aa..100ff29dfeb 100644
--- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java
+++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java
@@ -20,6 +20,9 @@
 package org.elasticsearch.index.query.functionscore;
 
 import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
+
+import static java.util.Collections.emptyMap;
 
 /**
  * Static method aliases for constructors of known {@link ScoreFunctionBuilder}s.
@@ -69,7 +72,7 @@ public class ScoreFunctionBuilders {
     }
 
     public static ScriptScoreFunctionBuilder scriptFunction(String script) {
-        return (new ScriptScoreFunctionBuilder(new Script(script)));
+        return (new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, script, emptyMap())));
     }
 
     public static RandomScoreFunctionBuilder randomFunction(int seed) {
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java
index 21fbf43a133..d7ee7629c92 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java
@@ -21,9 +21,8 @@ package org.elasticsearch.common.lucene.search.function;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.script.AbstractDoubleSearchScript;
-import org.elasticsearch.script.LeafSearchScript;
-import org.elasticsearch.script.Script;
 import org.elasticsearch.script.GeneralScriptException;
+import org.elasticsearch.script.LeafSearchScript;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.test.ESTestCase;
 
@@ -35,7 +34,7 @@ public class ScriptScoreFunctionTests extends ESTestCase {
      */
     public void testScriptScoresReturnsNaN() throws IOException {
         // script that always returns NaN
-        ScoreFunction scoreFunction = new ScriptScoreFunction(new Script("Double.NaN"), new SearchScript() {
+        ScoreFunction scoreFunction = new ScriptScoreFunction(mockScript("Double.NaN"), new SearchScript() {
             @Override
             public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
                 return new AbstractDoubleSearchScript() {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
index 651c261aa81..c76d1a5f0dd 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
@@ -34,6 +34,7 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.search.SearchModule;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.test.AbstractQueryTestCase;
 import org.elasticsearch.test.ESTestCase;
 
@@ -176,4 +177,22 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
                 return INT_FIELD_NAME;
         }
     }
+
+    protected void randomFieldOrScript(ValuesSourceAggregationBuilder<?, ?> factory, String field) {
+        int choice = randomInt(2);
+        switch (choice) {
+        case 0:
+            factory.field(field);
+            break;
+        case 1:
+            factory.field(field);
+            factory.script(mockScript("_value + 1"));
+            break;
+        case 2:
+            factory.script(mockScript("doc[" + field + "] + 1"));
+            break;
+        default:
+            throw new AssertionError("Unknow random operation [" + choice + "]");
+        }
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java
index f545ae500a2..9fe1c0ea479 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
@@ -34,6 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Scrip
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+
 import java.util.SortedSet;
 import java.util.TreeSet;
 
@@ -54,21 +54,8 @@ public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTe
         String name = randomAlphaOfLengthBetween(3, 20);
         SignificantTermsAggregationBuilder factory = new SignificantTermsAggregationBuilder(name, null);
         String field = randomAlphaOfLengthBetween(3, 20);
-        int randomFieldBranch = randomInt(2);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        default:
-            fail();
-        }
+        randomFieldOrScript(factory, field);
+
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
@@ -179,7 +166,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTe
                 significanceHeuristic = new MutualInformation(randomBoolean(), randomBoolean());
                 break;
             case 4:
-                significanceHeuristic = new ScriptHeuristic(new Script("foo"));
+                significanceHeuristic = new ScriptHeuristic(mockScript("foo"));
                 break;
             case 5:
                 significanceHeuristic = new JLHScore();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java
index d0a38d0890a..42f6ef78f4b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java
@@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.bucket;
 
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.RegExp;
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.SortedSet;
@@ -50,21 +50,7 @@ public class TermsTests extends BaseAggregationTestCase<TermsAggregationBuilder>
         String name = randomAlphaOfLengthBetween(3, 20);
         TermsAggregationBuilder factory = new TermsAggregationBuilder(name, null);
         String field = randomAlphaOfLengthBetween(3, 20);
-        int randomFieldBranch = randomInt(2);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        default:
-            fail();
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilderTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilderTests.java
index eed258bb788..635cedcb097 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilderTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.bucket.sampler;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator.ExecutionMode;
 
@@ -29,19 +28,7 @@ public class DiversifiedAggregationBuilderTests extends BaseAggregationTestCase<
     protected final DiversifiedAggregationBuilder createTestAggregatorBuilder() {
         DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder("foo");
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java
index f1ccf344a7c..975a1610a61 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.metrics;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
@@ -31,19 +30,7 @@ public abstract class AbstractNumericMetricTestCase<AF extends ValuesSourceAggre
     protected final AF createTestAggregatorBuilder() {
         AF factory = doCreateTestAggregatorFactory();
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java
index c61336134fa..90067df6013 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.metrics;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
 
@@ -29,19 +28,7 @@ public class GeoCentroidTests extends BaseAggregationTestCase<GeoCentroidAggrega
     protected GeoCentroidAggregationBuilder createTestAggregatorBuilder() {
         GeoCentroidAggregationBuilder factory = new GeoCentroidAggregationBuilder(randomAlphaOfLengthBetween(1, 20));
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("0,0");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java
index 979747ade2e..168e01a269f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.metrics;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
 
@@ -29,19 +28,7 @@ public class MissingTests extends BaseAggregationTestCase<MissingAggregationBuil
     protected final MissingAggregationBuilder createTestAggregatorBuilder() {
         MissingAggregationBuilder factory = new MissingAggregationBuilder("foo", null);
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         return factory;
     }
 
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java
index c4a8a2837ac..b78a67e9511 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java
@@ -44,19 +44,7 @@ public class PercentileRanksTests extends BaseAggregationTestCase<PercentileRank
             factory.compression(randomIntBetween(1, 50000));
         }
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
index 3c9b5dfeb48..679bf324172 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.metrics;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
 
@@ -46,19 +45,7 @@ public class PercentilesTests extends BaseAggregationTestCase<PercentilesAggrega
             factory.compression(randomIntBetween(1, 50000));
         }
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java
index 15a78a5d4fd..3ca4dfd253d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java
@@ -53,7 +53,7 @@ public class ScriptedMetricTests extends BaseAggregationTestCase<ScriptedMetricA
 
     private Script randomScript(String script) {
         if (randomBoolean()) {
-            return new Script(script);
+            return mockScript(script);
         } else {
             ScriptType type = randomFrom(ScriptType.values());
             return new Script(
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java
index 3d8a64ee00b..9e7ae836176 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java
@@ -90,9 +90,9 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuil
             int scriptFieldsSize = randomInt(25);
             for (int i = 0; i < scriptFieldsSize; i++) {
                 if (randomBoolean()) {
-                    factory.scriptField(randomAlphaOfLengthBetween(5, 50), new Script("foo"), randomBoolean());
+                    factory.scriptField(randomAlphaOfLengthBetween(5, 50), mockScript("foo"), randomBoolean());
                 } else {
-                    factory.scriptField(randomAlphaOfLengthBetween(5, 50), new Script("foo"));
+                    factory.scriptField(randomAlphaOfLengthBetween(5, 50), mockScript("foo"));
                 }
             }
         }
@@ -148,7 +148,7 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuil
                     factory.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values())));
                     break;
                 case 3:
-                    factory.sort(SortBuilders.scriptSort(new Script("foo"), ScriptSortType.NUMBER).order(randomFrom(SortOrder.values())));
+                    factory.sort(SortBuilders.scriptSort(mockScript("foo"), ScriptSortType.NUMBER).order(randomFrom(SortOrder.values())));
                     break;
                 case 4:
                     factory.sort(randomAlphaOfLengthBetween(5, 20));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java
index 99d4d41839c..9a3ed326044 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.metrics;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
 
@@ -29,19 +28,7 @@ public class ValueCountTests extends BaseAggregationTestCase<ValueCountAggregati
     protected final ValueCountAggregationBuilder createTestAggregatorBuilder() {
         ValueCountAggregationBuilder factory = new ValueCountAggregationBuilder("foo", null);
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java
index ab0377c6331..1b3a1858176 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.metrics.cardinality;
 
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 
 public class CardinalityTests extends BaseAggregationTestCase<CardinalityAggregationBuilder> {
@@ -28,19 +27,7 @@ public class CardinalityTests extends BaseAggregationTestCase<CardinalityAggrega
     protected final CardinalityAggregationBuilder createTestAggregatorBuilder() {
         CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder("foo", null);
         String field = randomNumericField();
-        int randomFieldBranch = randomInt(3);
-        switch (randomFieldBranch) {
-        case 0:
-            factory.field(field);
-            break;
-        case 1:
-            factory.field(field);
-            factory.script(new Script("_value + 1"));
-            break;
-        case 2:
-            factory.script(new Script("doc[" + field + "] + 1"));
-            break;
-        }
+        randomFieldOrScript(factory, field);
         if (randomBoolean()) {
             factory.missing("MISSING");
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java
index b6d133f1711..c213619183b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java
@@ -40,7 +40,7 @@ public class BucketScriptTests extends BasePipelineAggregationTestCase<BucketScr
         }
         Script script;
         if (randomBoolean()) {
-            script = new Script("script");
+            script = mockScript("script");
         } else {
             Map<String, Object> params = new HashMap<>();
             if (randomBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java
index c3e477a3a50..8dd63942d86 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java
@@ -40,7 +40,7 @@ public class BucketSelectorTests extends BasePipelineAggregationTestCase<BucketS
         }
         Script script;
         if (randomBoolean()) {
-            script = new Script("script");
+            script = mockScript("script");
         } else {
             Map<String, Object> params = new HashMap<>();
             if (randomBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java
index 526be491f3d..0b9d250832b 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java
@@ -43,7 +43,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
 
     public static ScriptSortBuilder randomScriptSortBuilder() {
         ScriptSortType type = randomBoolean() ? ScriptSortType.NUMBER : ScriptSortType.STRING;
-        ScriptSortBuilder builder = new ScriptSortBuilder(new Script(randomAlphaOfLengthBetween(5, 10)),
+        ScriptSortBuilder builder = new ScriptSortBuilder(mockScript(randomAlphaOfLengthBetween(5, 10)),
                 type);
         if (randomBoolean()) {
                 builder.order(randomFrom(SortOrder.values()));
@@ -76,7 +76,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
             Script script = original.script();
             ScriptSortType type = original.type();
             if (randomBoolean()) {
-                result = new ScriptSortBuilder(new Script(script.getIdOrCode() + "_suffix"), type);
+                result = new ScriptSortBuilder(mockScript(script.getIdOrCode() + "_suffix"), type);
             } else {
                 result = new ScriptSortBuilder(script, type.equals(ScriptSortType.NUMBER) ? ScriptSortType.STRING : ScriptSortType.NUMBER);
             }
@@ -251,7 +251,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
      * script sort of type {@link ScriptSortType} does not work with {@link SortMode#AVG}, {@link SortMode#MEDIAN} or {@link SortMode#SUM}
      */
     public void testBadSortMode() throws IOException {
-        ScriptSortBuilder builder = new ScriptSortBuilder(new Script("something"), ScriptSortType.STRING);
+        ScriptSortBuilder builder = new ScriptSortBuilder(mockScript("something"), ScriptSortType.STRING);
         String sortMode = randomFrom(new String[] { "avg", "median", "sum" });
         Exception e = expectThrows(IllegalArgumentException.class, () -> builder.sortMode(SortMode.fromString(sortMode)));
         assertEquals("script sort of type [string] doesn't support mode [" + sortMode + "]", e.getMessage());
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
index e76f3016dda..5356d9c9e0b 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
@@ -45,7 +45,7 @@ public class ScriptProcessorTests extends ESTestCase {
         int randomBytesTotal = randomBytesIn + randomBytesOut;
 
         ScriptService scriptService = mock(ScriptService.class);
-        Script script = new Script("_script");
+        Script script = mockScript("_script");
         ExecutableScript executableScript = mock(ExecutableScript.class);
         when(scriptService.executable(any(CompiledScript.class), any())).thenReturn(executableScript);
 
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java
index b565f0749f1..fd41a6d25f3 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java
@@ -30,7 +30,6 @@ import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
-import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService;
 import org.junit.Before;
 import org.mockito.Matchers;
@@ -49,8 +48,6 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
                 Response extends BulkByScrollResponse>
         extends AbstractAsyncBulkByScrollActionTestCase<Request, Response> {
 
-    private static final Script EMPTY_SCRIPT = new Script("");
-
     protected ScriptService scriptService;
 
     @Before
@@ -66,7 +63,7 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
 
         when(scriptService.executable(any(CompiledScript.class), Matchers.<Map<String, Object>>any()))
                 .thenReturn(executableScript);
-        AbstractAsyncBulkByScrollAction<Request> action = action(scriptService, request().setScript(EMPTY_SCRIPT));
+        AbstractAsyncBulkByScrollAction<Request> action = action(scriptService, request().setScript(mockScript("")));
         RequestWrapper<?> result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc);
         return (result != null) ? (T) result.self() : null;
     }
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
index c2b07239671..d1bb6f6096c 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
@@ -25,7 +25,6 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.index.reindex.remote.RemoteInfo;
-import org.elasticsearch.script.Script;
 import org.elasticsearch.search.slice.SliceBuilder;
 
 import static java.util.Collections.emptyMap;
@@ -71,7 +70,7 @@ public class ReindexRequestTests extends AbstractBulkByScrollRequestTestCase<Rei
     @Override
     protected void extraRandomizationForSlice(ReindexRequest original) {
         if (randomBoolean()) {
-            original.setScript(new Script(randomAlphaOfLength(5)));
+            original.setScript(mockScript(randomAlphaOfLength(5)));
         }
         if (randomBoolean()) {
             original.setRemoteInfo(new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), between(1, 10000),
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
index c932e83ce1c..700f45b42c5 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.reindex;
 import org.elasticsearch.action.bulk.byscroll.AbstractBulkByScrollRequestTestCase;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.script.Script;
 
 import static org.apache.lucene.util.TestUtil.randomSimpleString;
 
@@ -68,7 +67,7 @@ public class UpdateByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
     @Override
     protected void extraRandomizationForSlice(UpdateByQueryRequest original) {
         if (randomBoolean()) {
-            original.setScript(new Script(randomAlphaOfLength(5)));
+            original.setScript(mockScript(randomAlphaOfLength(5)));
         }
         if (randomBoolean()) {
             original.setPipeline(randomAlphaOfLength(5));
diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java
index 2a072a1d3be..18448b5829c 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.collapse.CollapseBuilder;
@@ -50,8 +51,10 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.function.Supplier;
 
+import static java.util.Collections.emptyMap;
 import static org.elasticsearch.test.ESTestCase.between;
 import static org.elasticsearch.test.ESTestCase.generateRandomStringArray;
+import static org.elasticsearch.test.ESTestCase.mockScript;
 import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween;
 import static org.elasticsearch.test.ESTestCase.randomBoolean;
 import static org.elasticsearch.test.ESTestCase.randomByte;
@@ -164,9 +167,9 @@ public class RandomSearchRequestGenerator {
             int scriptFieldsSize = randomInt(25);
             for (int i = 0; i < scriptFieldsSize; i++) {
                 if (randomBoolean()) {
-                    builder.scriptField(randomAlphaOfLengthBetween(5, 50), new Script("foo"), randomBoolean());
+                    builder.scriptField(randomAlphaOfLengthBetween(5, 50), mockScript("foo"), randomBoolean());
                 } else {
-                    builder.scriptField(randomAlphaOfLengthBetween(5, 50), new Script("foo"));
+                    builder.scriptField(randomAlphaOfLengthBetween(5, 50), mockScript("foo"));
                 }
             }
         }
@@ -242,8 +245,11 @@ public class RandomSearchRequestGenerator {
                         builder.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values())));
                         break;
                     case 3:
-                        builder.sort(SortBuilders.scriptSort(new Script("foo"),
-                                ScriptSortBuilder.ScriptSortType.NUMBER).order(randomFrom(SortOrder.values())));
+                        builder.sort(SortBuilders
+                                .scriptSort(
+                                        new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "foo", emptyMap()),
+                                        ScriptSortBuilder.ScriptSortType.NUMBER)
+                                .order(randomFrom(SortOrder.values())));
                         break;
                     case 4:
                         builder.sort(randomAlphaOfLengthBetween(5, 20));
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 29241b4b19d..58d67ea3b98 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -29,6 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
+
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -90,8 +91,10 @@ import org.elasticsearch.indices.analysis.AnalysisModule;
 import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.script.MockScriptEngine;
+import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.MockSearchService;
 import org.elasticsearch.test.junit.listeners.LoggingListener;
 import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
@@ -133,6 +136,7 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonList;
 import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
 import static org.hamcrest.Matchers.empty;
@@ -1083,6 +1087,14 @@ public abstract class ESTestCase extends LuceneTestCase {
         return new NamedXContentRegistry(ClusterModule.getNamedXWriteables());
     }
 
+    /**
+     * Create a "mock" script for use either with {@link MockScriptEngine} or anywhere where you need a script but don't really care about
+     * its contents.
+     */
+    public static final Script mockScript(String id) {
+        return new Script(ScriptType.INLINE, MockScriptEngine.NAME, id, emptyMap());
+    }
+
     /** Returns the suite failure marker: internal use only! */
     public static TestRuleMarkFailure getSuiteFailureMarker() {
         return suiteFailureMarker;

From 0e74f5ddb16dbd5377cea304b4075c048ff1eb22 Mon Sep 17 00:00:00 2001
From: Ali Beyad <ali@elastic.co>
Date: Wed, 26 Apr 2017 16:34:53 -0400
Subject: [PATCH 34/34] [TEST] fixes shard count of source shard index in a
 restore shrink index test

---
 .../snapshots/DedicatedClusterSnapshotRestoreIT.java            | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
index 10c49b431a5..2c1dfc899b6 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
@@ -820,7 +820,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
                              .put("compress", randomBoolean())));
 
         assertAcked(prepareCreate(sourceIdx, 0, Settings.builder()
-            .put("number_of_shards", between(1, 20)).put("number_of_replicas", 0)));
+            .put("number_of_shards", between(2, 10)).put("number_of_replicas", 0)));
         ensureGreen();
 
         logger.info("--> indexing some data");