diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index f994bd528e1..f8d80679504 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -1,3 +1,5 @@
+import java.nio.file.Files
+
 /*
  * Licensed to Elasticsearch under one or more contributor
  * license agreements. See the NOTICE file distributed with
@@ -40,6 +42,15 @@ archivesBaseName = 'build-tools'
 Properties props = new Properties()
 props.load(project.file('version.properties').newDataInputStream())
 version = props.getProperty('elasticsearch')
+boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true"));
+if (snapshot) {
+  // update the version property to reflect whether this is a snapshot or a release build;
+  // the value is written back out below and loaded by Build.java, which the REST main action
+  // uses to report whether the running build is a snapshot or a release.
+  version += "-SNAPSHOT"
+  props.put("elasticsearch", version);
+}
+
 
 repositories {
   mavenCentral()
@@ -66,9 +77,22 @@ dependencies {
   compile 'org.apache.rat:apache-rat:0.11'
 }
 
+File tempPropertiesFile = new File(project.buildDir, "version.properties")
+task writeVersionProperties {
+  inputs.properties(props)
+  doLast {
+    OutputStream stream = Files.newOutputStream(tempPropertiesFile.toPath());
+    try {
+      props.store(stream, "version properties");
+    } finally {
+      stream.close();
+    }
+  }
+}
+
 processResources {
-  inputs.file('version.properties')
-  from 'version.properties'
+  dependsOn writeVersionProperties
+  from tempPropertiesFile
 }
 
 extraArchive {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 8d31720de91..f43481f9af4 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -354,11 +354,17 @@ class BuildPlugin implements Plugin<Project> {
     static void configureJarManifest(Project project) {
         project.tasks.withType(Jar) { Jar jarTask ->
             jarTask.doFirst {
+                boolean isSnapshot = VersionProperties.elasticsearch.endsWith("-SNAPSHOT");
+                String version = VersionProperties.elasticsearch;
+                if (isSnapshot) {
+                    version = version.substring(0, version.length() - 9)
+                }
                 // this doFirst is added before the info plugin, therefore it will run
                 // after the doFirst added by the info plugin, and we can override attributes
                 jarTask.manifest.attributes(
-                        'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch,
+                        'X-Compile-Elasticsearch-Version': version,
                         'X-Compile-Lucene-Version': VersionProperties.lucene,
+                        'X-Compile-Elasticsearch-Snapshot': isSnapshot,
                         'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
                         'Build-Java-Version': project.javaVersion)
                 if (jarTask.manifest.attributes.containsKey('Change') == false) {
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 2a5f25364be..6c04effc58c 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -1566,13 +1566,6 @@
   <suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]JavaScriptScriptMultiThreadedTests.java" checks="LineLength" />
   <suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]JavaScriptSecurityTests.java" checks="LineLength" />
   <suppress files="plugins[/\\]lang-javascript[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]javascript[/\\]SimpleBench.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]Definition.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]PainlessPlugin.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ConditionalTests.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]FieldTests.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]FloatOverflowEnabledTests.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]IntegerOverflowEnabledTests.java" checks="LineLength" />
-  <suppress files="plugins[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ScriptEngineTests.java" checks="LineLength" />
   <suppress files="plugins[/\\]lang-python[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugin[/\\]python[/\\]PythonPlugin.java" checks="LineLength" />
   <suppress files="plugins[/\\]lang-python[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]python[/\\]PythonScriptEngineTests.java" checks="LineLength" />
   <suppress files="plugins[/\\]lang-python[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]python[/\\]PythonScriptMultiThreadedTests.java" checks="LineLength" />
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 110765d3535..3ca58b7bee1 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
-elasticsearch     = 3.0.0-SNAPSHOT
-lucene            = 5.5.0-snapshot-850c6c2
+elasticsearch     = 3.0.0
+lucene            = 5.5.0
 
 # optional dependencies
 spatial4j         = 0.5
diff --git a/core/src/main/java/org/elasticsearch/Build.java b/core/src/main/java/org/elasticsearch/Build.java
index 248040de0a8..a3b6cb0885a 100644
--- a/core/src/main/java/org/elasticsearch/Build.java
+++ b/core/src/main/java/org/elasticsearch/Build.java
@@ -45,6 +45,7 @@ public class Build {
     static {
         final String shortHash;
         final String date;
+        final boolean isSnapshot;
 
         Path path = getElasticsearchCodebase();
         if (path.toString().endsWith(".jar")) {
@@ -52,6 +53,7 @@ public class Build {
                 Manifest manifest = jar.getManifest();
                 shortHash = manifest.getMainAttributes().getValue("Change");
                 date = manifest.getMainAttributes().getValue("Build-Date");
+                isSnapshot = "true".equals(manifest.getMainAttributes().getValue("X-Compile-Elasticsearch-Snapshot"));
             } catch (IOException e) {
                 throw new RuntimeException(e);
             }
@@ -59,6 +61,7 @@ public class Build {
             // not running from a jar (unit tests, IDE)
             shortHash = "Unknown";
             date = "Unknown";
+            isSnapshot = true;
         }
         if (shortHash == null) {
             throw new IllegalStateException("Error finding the build shortHash. " +
@@ -69,9 +72,11 @@ public class Build {
                 "Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
         }
 
-        CURRENT = new Build(shortHash, date);
+        CURRENT = new Build(shortHash, date, isSnapshot);
     }
 
+    private final boolean isSnapshot;
+
     /**
      * Returns path to elasticsearch codebase path
      */
@@ -88,9 +93,10 @@ public class Build {
     private String shortHash;
     private String date;
 
-    Build(String shortHash, String date) {
+    Build(String shortHash, String date, boolean isSnapshot) {
         this.shortHash = shortHash;
         this.date = date;
+        this.isSnapshot = isSnapshot;
     }
 
     public String shortHash() {
@@ -104,12 +110,18 @@ public class Build {
     public static Build readBuild(StreamInput in) throws IOException {
         String hash = in.readString();
         String date = in.readString();
-        return new Build(hash, date);
+        boolean snapshot = in.readBoolean();
+        return new Build(hash, date, snapshot);
     }
 
     public static void writeBuild(Build build, StreamOutput out) throws IOException {
         out.writeString(build.shortHash());
         out.writeString(build.date());
+        out.writeBoolean(build.isSnapshot());
+    }
+
+    public boolean isSnapshot() {
+        return isSnapshot;
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
index bcd59d5c35a..07fb07a2dfa 100644
--- a/core/src/main/java/org/elasticsearch/Version.java
+++ b/core/src/main/java/org/elasticsearch/Version.java
@@ -43,253 +43,229 @@ public class Version {
     public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_4_0_0;
 
     public static final int V_0_18_0_ID = /*00*/180099;
-    public static final Version V_0_18_0 = new Version(V_0_18_0_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_0 = new Version(V_0_18_0_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_1_ID = /*00*/180199;
-    public static final Version V_0_18_1 = new Version(V_0_18_1_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_1 = new Version(V_0_18_1_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_2_ID = /*00*/180299;
-    public static final Version V_0_18_2 = new Version(V_0_18_2_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_2 = new Version(V_0_18_2_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_3_ID = /*00*/180399;
-    public static final Version V_0_18_3 = new Version(V_0_18_3_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_3 = new Version(V_0_18_3_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_4_ID = /*00*/180499;
-    public static final Version V_0_18_4 = new Version(V_0_18_4_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_4 = new Version(V_0_18_4_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_5_ID = /*00*/180599;
-    public static final Version V_0_18_5 = new Version(V_0_18_5_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_5 = new Version(V_0_18_5_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_6_ID = /*00*/180699;
-    public static final Version V_0_18_6 = new Version(V_0_18_6_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_6 = new Version(V_0_18_6_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_7_ID = /*00*/180799;
-    public static final Version V_0_18_7 = new Version(V_0_18_7_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_7 = new Version(V_0_18_7_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_18_8_ID = /*00*/180899;
-    public static final Version V_0_18_8 = new Version(V_0_18_8_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_18_8 = new Version(V_0_18_8_ID, LUCENE_3_EMULATION_VERSION);
 
     public static final int V_0_19_0_RC1_ID = /*00*/190051;
-    public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
 
     public static final int V_0_19_0_RC2_ID = /*00*/190052;
-    public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
 
     public static final int V_0_19_0_RC3_ID = /*00*/190053;
-    public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, LUCENE_3_EMULATION_VERSION);
 
     public static final int V_0_19_0_ID = /*00*/190099;
-    public static final Version V_0_19_0 = new Version(V_0_19_0_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_0 = new Version(V_0_19_0_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_1_ID = /*00*/190199;
-    public static final Version V_0_19_1 = new Version(V_0_19_1_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_1 = new Version(V_0_19_1_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_2_ID = /*00*/190299;
-    public static final Version V_0_19_2 = new Version(V_0_19_2_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_2 = new Version(V_0_19_2_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_3_ID = /*00*/190399;
-    public static final Version V_0_19_3 = new Version(V_0_19_3_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_3 = new Version(V_0_19_3_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_4_ID = /*00*/190499;
-    public static final Version V_0_19_4 = new Version(V_0_19_4_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_4 = new Version(V_0_19_4_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_5_ID = /*00*/190599;
-    public static final Version V_0_19_5 = new Version(V_0_19_5_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_5 = new Version(V_0_19_5_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_6_ID = /*00*/190699;
-    public static final Version V_0_19_6 = new Version(V_0_19_6_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_6 = new Version(V_0_19_6_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_7_ID = /*00*/190799;
-    public static final Version V_0_19_7 = new Version(V_0_19_7_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_7 = new Version(V_0_19_7_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_8_ID = /*00*/190899;
-    public static final Version V_0_19_8 = new Version(V_0_19_8_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_8 = new Version(V_0_19_8_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_9_ID = /*00*/190999;
-    public static final Version V_0_19_9 = new Version(V_0_19_9_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_9 = new Version(V_0_19_9_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_10_ID = /*00*/191099;
-    public static final Version V_0_19_10 = new Version(V_0_19_10_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_10 = new Version(V_0_19_10_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_11_ID = /*00*/191199;
-    public static final Version V_0_19_11 = new Version(V_0_19_11_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_11 = new Version(V_0_19_11_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_12_ID = /*00*/191299;
-    public static final Version V_0_19_12 = new Version(V_0_19_12_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_12 = new Version(V_0_19_12_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_19_13_ID = /*00*/191399;
-    public static final Version V_0_19_13 = new Version(V_0_19_13_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_19_13 = new Version(V_0_19_13_ID, LUCENE_3_EMULATION_VERSION);
 
     public static final int V_0_20_0_RC1_ID = /*00*/200051;
-    public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_0_ID = /*00*/200099;
-    public static final Version V_0_20_0 = new Version(V_0_20_0_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_0 = new Version(V_0_20_0_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_1_ID = /*00*/200199;
-    public static final Version V_0_20_1 = new Version(V_0_20_1_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_1 = new Version(V_0_20_1_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_2_ID = /*00*/200299;
-    public static final Version V_0_20_2 = new Version(V_0_20_2_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_2 = new Version(V_0_20_2_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_3_ID = /*00*/200399;
-    public static final Version V_0_20_3 = new Version(V_0_20_3_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_3 = new Version(V_0_20_3_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_4_ID = /*00*/200499;
-    public static final Version V_0_20_4 = new Version(V_0_20_4_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_4 = new Version(V_0_20_4_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_5_ID = /*00*/200599;
-    public static final Version V_0_20_5 = new Version(V_0_20_5_ID, false, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_5 = new Version(V_0_20_5_ID, LUCENE_3_EMULATION_VERSION);
     public static final int V_0_20_6_ID = /*00*/200699;
-    public static final Version V_0_20_6 = new Version(V_0_20_6_ID, false, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_7_ID = /*00*/200799;
-    public static final Version V_0_20_7 = new Version(V_0_20_7_ID, true, LUCENE_3_EMULATION_VERSION);
+    public static final Version V_0_20_6 = new Version(V_0_20_6_ID, LUCENE_3_EMULATION_VERSION);
 
     public static final int V_0_90_0_Beta1_ID = /*00*/900001;
-    public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, false, org.apache.lucene.util.Version.LUCENE_4_1);
+    public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
     public static final int V_0_90_0_RC1_ID = /*00*/900051;
-    public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, false, org.apache.lucene.util.Version.LUCENE_4_1);
+    public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
     public static final int V_0_90_0_RC2_ID = /*00*/900052;
-    public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, false, org.apache.lucene.util.Version.LUCENE_4_2);
+    public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_2);
     public static final int V_0_90_0_ID = /*00*/900099;
-    public static final Version V_0_90_0 = new Version(V_0_90_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_2);
+    public static final Version V_0_90_0 = new Version(V_0_90_0_ID, org.apache.lucene.util.Version.LUCENE_4_2);
     public static final int V_0_90_1_ID = /*00*/900199;
-    public static final Version V_0_90_1 = new Version(V_0_90_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_3);
+    public static final Version V_0_90_1 = new Version(V_0_90_1_ID, org.apache.lucene.util.Version.LUCENE_4_3);
     public static final int V_0_90_2_ID = /*00*/900299;
-    public static final Version V_0_90_2 = new Version(V_0_90_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_3);
+    public static final Version V_0_90_2 = new Version(V_0_90_2_ID, org.apache.lucene.util.Version.LUCENE_4_3);
     public static final int V_0_90_3_ID = /*00*/900399;
-    public static final Version V_0_90_3 = new Version(V_0_90_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_4);
+    public static final Version V_0_90_3 = new Version(V_0_90_3_ID, org.apache.lucene.util.Version.LUCENE_4_4);
     public static final int V_0_90_4_ID = /*00*/900499;
-    public static final Version V_0_90_4 = new Version(V_0_90_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_4);
+    public static final Version V_0_90_4 = new Version(V_0_90_4_ID, org.apache.lucene.util.Version.LUCENE_4_4);
     public static final int V_0_90_5_ID = /*00*/900599;
-    public static final Version V_0_90_5 = new Version(V_0_90_5_ID, false, org.apache.lucene.util.Version.LUCENE_4_4);
+    public static final Version V_0_90_5 = new Version(V_0_90_5_ID, org.apache.lucene.util.Version.LUCENE_4_4);
     public static final int V_0_90_6_ID = /*00*/900699;
-    public static final Version V_0_90_6 = new Version(V_0_90_6_ID, false, org.apache.lucene.util.Version.LUCENE_4_5);
+    public static final Version V_0_90_6 = new Version(V_0_90_6_ID, org.apache.lucene.util.Version.LUCENE_4_5);
     public static final int V_0_90_7_ID = /*00*/900799;
-    public static final Version V_0_90_7 = new Version(V_0_90_7_ID, false, org.apache.lucene.util.Version.LUCENE_4_5);
+    public static final Version V_0_90_7 = new Version(V_0_90_7_ID, org.apache.lucene.util.Version.LUCENE_4_5);
     public static final int V_0_90_8_ID = /*00*/900899;
-    public static final Version V_0_90_8 = new Version(V_0_90_8_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_0_90_8 = new Version(V_0_90_8_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_0_90_9_ID = /*00*/900999;
-    public static final Version V_0_90_9 = new Version(V_0_90_9_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_0_90_9 = new Version(V_0_90_9_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_0_90_10_ID = /*00*/901099;
-    public static final Version V_0_90_10 = new Version(V_0_90_10_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_0_90_10 = new Version(V_0_90_10_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_0_90_11_ID = /*00*/901199;
-    public static final Version V_0_90_11 = new Version(V_0_90_11_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_0_90_11 = new Version(V_0_90_11_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_0_90_12_ID = /*00*/901299;
-    public static final Version V_0_90_12 = new Version(V_0_90_12_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_0_90_12 = new Version(V_0_90_12_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_0_90_13_ID = /*00*/901399;
-    public static final Version V_0_90_13 = new Version(V_0_90_13_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_0_90_14_ID = /*00*/901499;
-    public static final Version V_0_90_14 = new Version(V_0_90_14_ID, true, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_0_90_13 = new Version(V_0_90_13_ID, org.apache.lucene.util.Version.LUCENE_4_6);
 
     public static final int V_1_0_0_Beta1_ID = 1000001;
-    public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, false, org.apache.lucene.util.Version.LUCENE_4_5);
+    public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_5);
     public static final int V_1_0_0_Beta2_ID = 1000002;
-    public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_0_0_RC1_ID = 1000051;
-    public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_0_0_RC2_ID = 1000052;
-    public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_0_0_ID = 1000099;
-    public static final Version V_1_0_0 = new Version(V_1_0_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_0 = new Version(V_1_0_0_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_0_1_ID = 1000199;
-    public static final Version V_1_0_1 = new Version(V_1_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_1 = new Version(V_1_0_1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_0_2_ID = 1000299;
-    public static final Version V_1_0_2 = new Version(V_1_0_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_2 = new Version(V_1_0_2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_0_3_ID = 1000399;
-    public static final Version V_1_0_3 = new Version(V_1_0_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_4_ID = 1000499;
-    public static final Version V_1_0_4 = new Version(V_1_0_4_ID, true, org.apache.lucene.util.Version.LUCENE_4_6);
+    public static final Version V_1_0_3 = new Version(V_1_0_3_ID, org.apache.lucene.util.Version.LUCENE_4_6);
     public static final int V_1_1_0_ID = 1010099;
-    public static final Version V_1_1_0 = new Version(V_1_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_7);
+    public static final Version V_1_1_0 = new Version(V_1_1_0_ID, org.apache.lucene.util.Version.LUCENE_4_7);
     public static final int V_1_1_1_ID = 1010199;
-    public static final Version V_1_1_1 = new Version(V_1_1_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_7);
+    public static final Version V_1_1_1 = new Version(V_1_1_1_ID, org.apache.lucene.util.Version.LUCENE_4_7);
     public static final int V_1_1_2_ID = 1010299;
-    public static final Version V_1_1_2 = new Version(V_1_1_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_7);
+    public static final Version V_1_1_2 = new Version(V_1_1_2_ID, org.apache.lucene.util.Version.LUCENE_4_7);
     public static final int V_1_2_0_ID = 1020099;
-    public static final Version V_1_2_0 = new Version(V_1_2_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
+    public static final Version V_1_2_0 = new Version(V_1_2_0_ID, org.apache.lucene.util.Version.LUCENE_4_8);
     public static final int V_1_2_1_ID = 1020199;
-    public static final Version V_1_2_1 = new Version(V_1_2_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
+    public static final Version V_1_2_1 = new Version(V_1_2_1_ID, org.apache.lucene.util.Version.LUCENE_4_8);
     public static final int V_1_2_2_ID = 1020299;
-    public static final Version V_1_2_2 = new Version(V_1_2_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
+    public static final Version V_1_2_2 = new Version(V_1_2_2_ID, org.apache.lucene.util.Version.LUCENE_4_8);
     public static final int V_1_2_3_ID = 1020399;
-    public static final Version V_1_2_3 = new Version(V_1_2_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
+    public static final Version V_1_2_3 = new Version(V_1_2_3_ID, org.apache.lucene.util.Version.LUCENE_4_8);
     public static final int V_1_2_4_ID = 1020499;
-    public static final Version V_1_2_4 = new Version(V_1_2_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_8);
-    public static final int V_1_2_5_ID = 1020599;
-    public static final Version V_1_2_5 = new Version(V_1_2_5_ID, true, org.apache.lucene.util.Version.LUCENE_4_8);
+    public static final Version V_1_2_4 = new Version(V_1_2_4_ID, org.apache.lucene.util.Version.LUCENE_4_8);
     public static final int V_1_3_0_ID = 1030099;
-    public static final Version V_1_3_0 = new Version(V_1_3_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_0 = new Version(V_1_3_0_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_1_ID = 1030199;
-    public static final Version V_1_3_1 = new Version(V_1_3_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_1 = new Version(V_1_3_1_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_2_ID = 1030299;
-    public static final Version V_1_3_2 = new Version(V_1_3_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_2 = new Version(V_1_3_2_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_3_ID = 1030399;
-    public static final Version V_1_3_3 = new Version(V_1_3_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_3 = new Version(V_1_3_3_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_4_ID = 1030499;
-    public static final Version V_1_3_4 = new Version(V_1_3_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_4 = new Version(V_1_3_4_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_5_ID = 1030599;
-    public static final Version V_1_3_5 = new Version(V_1_3_5_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_5 = new Version(V_1_3_5_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_6_ID = 1030699;
-    public static final Version V_1_3_6 = new Version(V_1_3_6_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_6 = new Version(V_1_3_6_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_7_ID = 1030799;
-    public static final Version V_1_3_7 = new Version(V_1_3_7_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_7 = new Version(V_1_3_7_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_8_ID = 1030899;
-    public static final Version V_1_3_8 = new Version(V_1_3_8_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_8 = new Version(V_1_3_8_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_3_9_ID = 1030999;
-    public static final Version V_1_3_9 = new Version(V_1_3_9_ID, false, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_10_ID = /*00*/1031099;
-    public static final Version V_1_3_10 = new Version(V_1_3_10_ID, true, org.apache.lucene.util.Version.LUCENE_4_9);
+    public static final Version V_1_3_9 = new Version(V_1_3_9_ID, org.apache.lucene.util.Version.LUCENE_4_9);
     public static final int V_1_4_0_Beta1_ID = 1040001;
-    public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_1);
+    public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_10_1);
     public static final int V_1_4_0_ID = 1040099;
-    public static final Version V_1_4_0 = new Version(V_1_4_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_2);
+    public static final Version V_1_4_0 = new Version(V_1_4_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
     public static final int V_1_4_1_ID = 1040199;
-    public static final Version V_1_4_1 = new Version(V_1_4_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_2);
+    public static final Version V_1_4_1 = new Version(V_1_4_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
     public static final int V_1_4_2_ID = 1040299;
-    public static final Version V_1_4_2 = new Version(V_1_4_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_2);
+    public static final Version V_1_4_2 = new Version(V_1_4_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
     public static final int V_1_4_3_ID = 1040399;
-    public static final Version V_1_4_3 = new Version(V_1_4_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_3);
+    public static final Version V_1_4_3 = new Version(V_1_4_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
     public static final int V_1_4_4_ID = 1040499;
-    public static final Version V_1_4_4 = new Version(V_1_4_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_3);
+    public static final Version V_1_4_4 = new Version(V_1_4_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
     public static final int V_1_4_5_ID = 1040599;
-    public static final Version V_1_4_5 = new Version(V_1_4_5_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_4_6_ID = 1040699;
-    public static final Version V_1_4_6 = new Version(V_1_4_6_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_4_5 = new Version(V_1_4_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_5_0_ID = 1050099;
-    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_5_1_ID = 1050199;
-    public static final Version V_1_5_1 = new Version(V_1_5_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_5_1 = new Version(V_1_5_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_5_2_ID = 1050299;
-    public static final Version V_1_5_2 = new Version(V_1_5_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_5_3_ID = 1050399;
-    public static final Version V_1_5_3 = new Version(V_1_5_3_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_5_2 = new Version(V_1_5_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_6_0_ID = 1060099;
-    public static final Version V_1_6_0 = new Version(V_1_6_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_6_0 = new Version(V_1_6_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_6_1_ID = 1060199;
-    public static final Version V_1_6_1 = new Version(V_1_6_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_6_1 = new Version(V_1_6_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_6_2_ID = 1060299;
-    public static final Version V_1_6_2 = new Version(V_1_6_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_6_3_ID = 1060399;
-    public static final Version V_1_6_3 = new Version(V_1_6_3_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_6_2 = new Version(V_1_6_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_7_0_ID = 1070099;
-    public static final Version V_1_7_0 = new Version(V_1_7_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_7_0 = new Version(V_1_7_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_7_1_ID = 1070199;
-    public static final Version V_1_7_1 = new Version(V_1_7_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_7_1 = new Version(V_1_7_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_7_2_ID = 1070299;
-    public static final Version V_1_7_2 = new Version(V_1_7_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_7_2 = new Version(V_1_7_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_7_3_ID = 1070399;
-    public static final Version V_1_7_3 = new Version(V_1_7_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_7_3 = new Version(V_1_7_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_7_4_ID = 1070499;
-    public static final Version V_1_7_4 = new Version(V_1_7_4_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_7_4 = new Version(V_1_7_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_7_5_ID = 1070599;
-    public static final Version V_1_7_5 = new Version(V_1_7_5_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_6_ID = 1070699;
-    public static final Version V_1_7_6 = new Version(V_1_7_6_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final Version V_1_7_5 = new Version(V_1_7_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
 
     public static final int V_2_0_0_beta1_ID = 2000001;
-    public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
+    public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final int V_2_0_0_beta2_ID = 2000002;
-    public static final Version V_2_0_0_beta2 = new Version(V_2_0_0_beta2_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
+    public static final Version V_2_0_0_beta2 = new Version(V_2_0_0_beta2_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final int V_2_0_0_rc1_ID = 2000051;
-    public static final Version V_2_0_0_rc1 = new Version(V_2_0_0_rc1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
+    public static final Version V_2_0_0_rc1 = new Version(V_2_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final int V_2_0_0_ID = 2000099;
-    public static final Version V_2_0_0 = new Version(V_2_0_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
+    public static final Version V_2_0_0 = new Version(V_2_0_0_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final int V_2_0_1_ID = 2000199;
-    public static final Version V_2_0_1 = new Version(V_2_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
+    public static final Version V_2_0_1 = new Version(V_2_0_1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final int V_2_0_2_ID = 2000299;
-    public static final Version V_2_0_2 = new Version(V_2_0_2_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
-    public static final int V_2_0_3_ID = 2000399;
-    public static final Version V_2_0_3 = new Version(V_2_0_3_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
+    public static final Version V_2_0_2 = new Version(V_2_0_2_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final int V_2_1_0_ID = 2010099;
-    public static final Version V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1);
+    public static final Version V_2_1_0 = new Version(V_2_1_0_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
     public static final int V_2_1_1_ID = 2010199;
-    public static final Version V_2_1_1 = new Version(V_2_1_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1);
+    public static final Version V_2_1_1 = new Version(V_2_1_1_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
     public static final int V_2_1_2_ID = 2010299;
-    public static final Version V_2_1_2 = new Version(V_2_1_2_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1);
-    public static final int V_2_1_3_ID = 2010399;
-    public static final Version V_2_1_3 = new Version(V_2_1_3_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1);
+    public static final Version V_2_1_2 = new Version(V_2_1_2_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
     public static final int V_2_2_0_ID = 2020099;
-    public static final Version V_2_2_0 = new Version(V_2_2_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_4_1);
-    public static final int V_2_2_1_ID = 2020199;
-    public static final Version V_2_2_1 = new Version(V_2_2_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_1);
+    public static final Version V_2_2_0 = new Version(V_2_2_0_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
     public static final int V_2_3_0_ID = 2030099;
-    public static final Version V_2_3_0 = new Version(V_2_3_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0);
+    public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
     public static final int V_3_0_0_ID = 3000099;
-    public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0);
+    public static final Version V_3_0_0 = new Version(V_3_0_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
     public static final Version CURRENT = V_3_0_0;
 
     static {
@@ -307,20 +283,14 @@ public class Version {
                 return V_3_0_0;
             case V_2_3_0_ID:
                 return V_2_3_0;
-            case V_2_2_1_ID:
-                return V_2_2_1;
             case V_2_2_0_ID:
                 return V_2_2_0;
-            case V_2_1_3_ID:
-                return V_2_1_3;
             case V_2_1_2_ID:
                 return V_2_1_2;
             case V_2_1_1_ID:
                 return V_2_1_1;
             case V_2_1_0_ID:
                 return V_2_1_0;
-            case V_2_0_3_ID:
-                return V_2_0_3;
             case V_2_0_2_ID:
                 return V_2_0_2;
             case V_2_0_1_ID:
@@ -333,8 +303,6 @@ public class Version {
                 return V_2_0_0_beta2;
             case V_2_0_0_beta1_ID:
                 return V_2_0_0_beta1;
-            case V_1_7_6_ID:
-                return V_1_7_6;
             case V_1_7_5_ID:
                 return V_1_7_5;
             case V_1_7_4_ID:
@@ -347,24 +315,18 @@ public class Version {
                 return V_1_7_1;
             case V_1_7_0_ID:
                 return V_1_7_0;
-            case V_1_6_3_ID:
-                return V_1_6_3;
             case V_1_6_2_ID:
                 return V_1_6_2;
             case V_1_6_1_ID:
                 return V_1_6_1;
             case V_1_6_0_ID:
                 return V_1_6_0;
-            case V_1_5_3_ID:
-                return V_1_5_3;
             case V_1_5_2_ID:
                 return V_1_5_2;
             case V_1_5_1_ID:
                 return V_1_5_1;
             case V_1_5_0_ID:
                 return V_1_5_0;
-            case V_1_4_6_ID:
-                return V_1_4_6;
             case V_1_4_5_ID:
                 return V_1_4_5;
             case V_1_4_4_ID:
@@ -379,8 +341,6 @@ public class Version {
                 return V_1_4_0;
             case V_1_4_0_Beta1_ID:
                 return V_1_4_0_Beta1;
-            case V_1_3_10_ID:
-                return V_1_3_10;
             case V_1_3_9_ID:
                 return V_1_3_9;
             case V_1_3_8_ID:
@@ -401,8 +361,6 @@ public class Version {
                 return V_1_3_1;
             case V_1_3_0_ID:
                 return V_1_3_0;
-            case V_1_2_5_ID:
-                return V_1_2_5;
             case V_1_2_4_ID:
                 return V_1_2_4;
             case V_1_2_3_ID:
@@ -419,8 +377,6 @@ public class Version {
                 return V_1_1_1;
             case V_1_1_0_ID:
                 return V_1_1_0;
-            case V_1_0_4_ID:
-                return V_1_0_4;
             case V_1_0_3_ID:
                 return V_1_0_3;
             case V_1_0_2_ID:
@@ -437,8 +393,6 @@ public class Version {
                 return V_1_0_0_Beta2;
             case V_1_0_0_Beta1_ID:
                 return V_1_0_0_Beta1;
-            case V_0_90_14_ID:
-                return V_0_90_14;
             case V_0_90_13_ID:
                 return V_0_90_13;
             case V_0_90_12_ID:
@@ -473,8 +427,6 @@ public class Version {
                 return V_0_90_0_RC1;
             case V_0_90_0_Beta1_ID:
                 return V_0_90_0_Beta1;
-            case V_0_20_7_ID:
-                return V_0_20_7;
             case V_0_20_6_ID:
                 return V_0_20_6;
             case V_0_20_5_ID:
@@ -544,7 +496,7 @@ public class Version {
             case V_0_18_8_ID:
                 return V_0_18_8;
             default:
-                return new Version(id, false, org.apache.lucene.util.Version.LATEST);
+                return new Version(id, org.apache.lucene.util.Version.LATEST);
         }
     }
 
@@ -579,10 +531,6 @@ public class Version {
         if (!Strings.hasLength(version)) {
             return Version.CURRENT;
         }
-        final boolean snapshot;
-        if (snapshot = version.endsWith("-SNAPSHOT")) {
-            version = version.substring(0, version.length() - 9);
-        }
         String[] parts = version.split("\\.|\\-");
         if (parts.length < 3 || parts.length > 4) {
             throw new IllegalArgumentException("the version needs to contain major, minor, and revision, and optionally the build: " + version);
@@ -607,11 +555,7 @@ public class Version {
                 }
             }
 
-            final Version versionFromId = fromId(major + minor + revision + build);
-            if (snapshot != versionFromId.snapshot()) {
-                return new Version(versionFromId.id, snapshot, versionFromId.luceneVersion);
-            }
-            return versionFromId;
+            return fromId(major + minor + revision + build);
 
         } catch (NumberFormatException e) {
             throw new IllegalArgumentException("unable to parse version " + version, e);
@@ -623,23 +567,17 @@ public class Version {
     public final byte minor;
     public final byte revision;
     public final byte build;
-    public final Boolean snapshot;
     public final org.apache.lucene.util.Version luceneVersion;
 
-    Version(int id, boolean snapshot, org.apache.lucene.util.Version luceneVersion) {
+    Version(int id, org.apache.lucene.util.Version luceneVersion) {
         this.id = id;
         this.major = (byte) ((id / 1000000) % 100);
         this.minor = (byte) ((id / 10000) % 100);
         this.revision = (byte) ((id / 100) % 100);
         this.build = (byte) (id % 100);
-        this.snapshot = snapshot;
         this.luceneVersion = luceneVersion;
     }
 
-    public boolean snapshot() {
-        return snapshot;
-    }
-
     public boolean after(Version version) {
         return version.id < id;
     }
@@ -667,10 +605,13 @@ public class Version {
         return Version.smallest(this, fromId(major * 1000000 + 99));
     }
 
-    /**
-     * Just the version number (without -SNAPSHOT if snapshot).
-     */
-    public String number() {
+    @SuppressForbidden(reason = "System.out.*")
+    public static void main(String[] args) {
+        System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + ", JVM: " + JvmInfo.jvmInfo().version());
+    }
+
+    @Override
+    public String toString() {
         StringBuilder sb = new StringBuilder();
         sb.append(major).append('.').append(minor).append('.').append(revision);
         if (isBeta()) {
@@ -691,21 +632,6 @@ public class Version {
         return sb.toString();
     }
 
-    @SuppressForbidden(reason = "System.out.*")
-    public static void main(String[] args) {
-        System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + ", JVM: " + JvmInfo.jvmInfo().version());
-    }
-
-    @Override
-    public String toString() {
-        StringBuilder sb = new StringBuilder();
-        sb.append(number());
-        if (snapshot()) {
-            sb.append("-SNAPSHOT");
-        }
-        return sb.toString();
-    }
-
     @Override
     public boolean equals(Object o) {
         if (this == o) {
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java
index 9d52c96f969..d687d48fb0c 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AggregatorBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -170,7 +171,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
             builder.field("track_scores", trackScores);
         }
         if (highlightBuilder != null) {
-            highlightBuilder.toXContent(builder, params);
+            builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
         }
         if (aggregationBuilders != null || pipelineAggregationBuilders != null) {
             builder.field("aggregations");
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index 004b0541eee..0d95edadb17 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -33,6 +33,7 @@ import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.logging.log4j.LogConfigurator;
+import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
@@ -41,12 +42,17 @@ import org.elasticsearch.monitor.os.OsProbe;
 import org.elasticsearch.monitor.process.ProcessProbe;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.internal.InternalSettingsPreparer;
+import org.elasticsearch.transport.TransportSettings;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.Locale;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 
 import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
@@ -57,7 +63,6 @@ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 final class Bootstrap {
 
     private static volatile Bootstrap INSTANCE;
-
     private volatile Node node;
     private final CountDownLatch keepAliveLatch = new CountDownLatch(1);
     private final Thread keepAliveThread;
@@ -184,12 +189,13 @@ final class Bootstrap {
                 .put(settings)
                 .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
                 .build();
+        enforceOrLogLimits(nodeSettings);
 
         node = new Node(nodeSettings);
     }
 
     @SuppressForbidden(reason = "Exception#printStackTrace()")
-    private static void setupLogging(Settings settings, Environment environment) {
+    private static void setupLogging(Settings settings) {
         try {
             Class.forName("org.apache.log4j.Logger");
             LogConfigurator.configure(settings, true);
@@ -249,18 +255,13 @@ final class Bootstrap {
 
         Environment environment = initialSettings(foreground);
         Settings settings = environment.settings();
-        setupLogging(settings, environment);
+        setupLogging(settings);
         checkForCustomConfFile();
 
         if (environment.pidFile() != null) {
             PidFile.create(environment.pidFile(), true);
         }
 
-        if (System.getProperty("es.max-open-files", "false").equals("true")) {
-            ESLogger logger = Loggers.getLogger(Bootstrap.class);
-            logger.info("max_open_files [{}]", ProcessProbe.getInstance().getMaxFileDescriptorCount());
-        }
-
         // warn if running using the client VM
         if (JvmInfo.jvmInfo().getVmName().toLowerCase(Locale.ROOT).contains("client")) {
             ESLogger logger = Loggers.getLogger(Bootstrap.class);
@@ -362,4 +363,48 @@ final class Bootstrap {
                 + Version.CURRENT.luceneVersion + "]  but the current lucene version is [" + org.apache.lucene.util.Version.LATEST + "]");
         }
     }
+
+    static final Set<Setting> ENFORCE_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+        TransportSettings.BIND_HOST,
+        TransportSettings.HOST,
+        TransportSettings.PUBLISH_HOST,
+        NetworkService.GLOBAL_NETWORK_HOST_SETTING,
+        NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING,
+        NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING
+    )));
+
+    private static boolean enforceLimits(Settings settings) {
+        for (Setting setting : ENFORCE_SETTINGS) {
+            if (setting.exists(settings)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    static void enforceOrLogLimits(Settings settings) { // pkg private for testing
+        /* We enforce limits once any network host is configured. In that case we assume the node is running in production
+         * and all production limit checks must pass. This should be extended over time to cover settings such as:
+         *   - discovery.zen.minimum_master_nodes
+         *   - discovery.zen.ping.unicast.hosts is set if we use zen discovery
+         *   - ensure we can write in all data directories
+         *   - fail if mlockall was configured but failed
+         *   - fail if vm.max_map_count is under a certain limit (not sure if this works cross platform)
+         *   - fail if the default cluster.name is used; a node exposed on the network should use a real cluster name */
+        final boolean enforceLimits = enforceLimits(settings);
+        final ESLogger logger = Loggers.getLogger(Bootstrap.class);
+        final long maxFileDescriptorCount = ProcessProbe.getInstance().getMaxFileDescriptorCount();
+        if (maxFileDescriptorCount != -1) {
+            final int fileDescriptorCountThreshold = (1 << 16);
+            if (maxFileDescriptorCount < fileDescriptorCountThreshold) {
+                if (enforceLimits) {
+                    throw new IllegalStateException("max file descriptors [" + maxFileDescriptorCount
+                        + "] for elasticsearch process likely too low, increase it to at least [" + fileDescriptorCountThreshold +"]");
+                }
+                logger.warn(
+                    "max file descriptors [{}] for elasticsearch process likely too low, consider increasing to at least [{}]",
+                    maxFileDescriptorCount, fileDescriptorCountThreshold);
+            }
+        }
+    }
 }
diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
index 5eecafa252f..0eec5c5765e 100644
--- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
+++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
@@ -40,9 +40,11 @@ import org.elasticsearch.common.settings.Setting.Scope;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.gateway.MetaDataStateFormat;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardPath;
 import org.elasticsearch.index.store.FsDirectoryService;
 import org.elasticsearch.monitor.fs.FsInfo;
 import org.elasticsearch.monitor.fs.FsProbe;
@@ -76,7 +78,7 @@ import static java.util.Collections.unmodifiableSet;
 /**
  * A component that holds all data paths for a single node.
  */
-public class NodeEnvironment extends AbstractComponent implements Closeable {
+public final class NodeEnvironment extends AbstractComponent implements Closeable {
     public static class NodePath {
         /* ${data.paths}/nodes/{node.id} */
         public final Path path;
@@ -167,64 +169,71 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
             localNodeId = -1;
             return;
         }
-
         final NodePath[] nodePaths = new NodePath[environment.dataWithClusterFiles().length];
         final Lock[] locks = new Lock[nodePaths.length];
-        sharedDataPath = environment.sharedDataFile();
+        boolean success = false;
 
-        int localNodeId = -1;
-        IOException lastException = null;
-        int maxLocalStorageNodes = MAX_LOCAL_STORAGE_NODES_SETTING.get(settings);
-        for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) {
-            for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
-                Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(NODES_FOLDER).resolve(Integer.toString(possibleLockId));
-                Files.createDirectories(dir);
+        try {
+            sharedDataPath = environment.sharedDataFile();
+            int localNodeId = -1;
+            IOException lastException = null;
+            int maxLocalStorageNodes = MAX_LOCAL_STORAGE_NODES_SETTING.get(settings);
+            for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) {
+                for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
+                    Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(NODES_FOLDER).resolve(Integer.toString(possibleLockId));
+                    Files.createDirectories(dir);
 
-                try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) {
-                    logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath());
-                    try {
-                        locks[dirIndex] = luceneDir.obtainLock(NODE_LOCK_FILENAME);
-                        nodePaths[dirIndex] = new NodePath(dir, environment);
-                        localNodeId = possibleLockId;
-                    } catch (LockObtainFailedException ex) {
-                        logger.trace("failed to obtain node lock on {}", dir.toAbsolutePath());
+                    try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) {
+                        logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath());
+                        try {
+                            locks[dirIndex] = luceneDir.obtainLock(NODE_LOCK_FILENAME);
+                            nodePaths[dirIndex] = new NodePath(dir, environment);
+                            localNodeId = possibleLockId;
+                        } catch (LockObtainFailedException ex) {
+                            logger.trace("failed to obtain node lock on {}", dir.toAbsolutePath());
+                            // release all the ones that were obtained up until now
+                            releaseAndNullLocks(locks);
+                            break;
+                        }
+
+                    } catch (IOException e) {
+                        logger.trace("failed to obtain node lock on {}", e, dir.toAbsolutePath());
+                        lastException = new IOException("failed to obtain lock on " + dir.toAbsolutePath(), e);
                         // release all the ones that were obtained up until now
                         releaseAndNullLocks(locks);
                         break;
                     }
-
-                } catch (IOException e) {
-                    logger.trace("failed to obtain node lock on {}", e, dir.toAbsolutePath());
-                    lastException = new IOException("failed to obtain lock on " + dir.toAbsolutePath(), e);
-                    // release all the ones that were obtained up until now
-                    releaseAndNullLocks(locks);
+                }
+                if (locks[0] != null) {
+                    // we found a lock, break
                     break;
                 }
             }
-            if (locks[0] != null) {
-                // we found a lock, break
-                break;
+
+            if (locks[0] == null) {
+                throw new IllegalStateException("Failed to obtain node lock, is the following location writable?: "
+                    + Arrays.toString(environment.dataWithClusterFiles()), lastException);
+            }
+
+            this.localNodeId = localNodeId;
+            this.locks = locks;
+            this.nodePaths = nodePaths;
+
+            if (logger.isDebugEnabled()) {
+                logger.debug("using node location [{}], local_node_id [{}]", nodePaths, localNodeId);
+            }
+
+            maybeLogPathDetails();
+            maybeLogHeapDetails();
+
+            applySegmentInfosTrace(settings);
+            assertCanWrite();
+            success = true;
+        } finally {
+            if (success == false) {
+                IOUtils.closeWhileHandlingException(locks);
             }
         }
-
-        if (locks[0] == null) {
-            throw new IllegalStateException("Failed to obtain node lock, is the following location writable?: "
-                    + Arrays.toString(environment.dataWithClusterFiles()), lastException);
-        }
-
-        this.localNodeId = localNodeId;
-        this.locks = locks;
-        this.nodePaths = nodePaths;
-
-        if (logger.isDebugEnabled()) {
-            logger.debug("using node location [{}], local_node_id [{}]", nodePaths, localNodeId);
-        }
-
-        maybeLogPathDetails();
-        maybeLogHeapDetails();
-        maybeWarnFileDescriptors();
-
-        applySegmentInfosTrace(settings);
     }
 
     private static void releaseAndNullLocks(Lock[] locks) {
@@ -315,19 +324,6 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
         logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, useCompressedOops);
     }
 
-    private void maybeWarnFileDescriptors() {
-        long maxFileDescriptorCount = ProcessProbe.getInstance().getMaxFileDescriptorCount();
-        if (maxFileDescriptorCount == -1) {
-            return;
-        }
-        int fileDescriptorCountThreshold = (1 << 16);
-        if (maxFileDescriptorCount < fileDescriptorCountThreshold) {
-            logger.warn(
-                    "max file descriptors [{}] for elasticsearch process likely too low, consider increasing to at least [{}]",
-                    maxFileDescriptorCount,
-                    fileDescriptorCountThreshold);
-        }
-    }
 
     @SuppressForbidden(reason = "System.out.*")
     static void applySegmentInfosTrace(Settings settings) {
@@ -807,7 +803,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
     }
 
     @Override
-    public void close() {
+    public final void close() {
         if (closed.compareAndSet(false, true) && locks != null) {
             for (Lock lock : locks) {
                 try {
@@ -923,4 +919,45 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
 
         return shardPath.getParent().getParent().getParent();
     }
+
+    /**
+     * This is a best effort to ensure that we actually have write permissions to write in all our data directories.
+     * This prevents disasters if nodes are started under the wrong username etc.
+     */
+    private void assertCanWrite() throws IOException {
+        for (Path path : nodeDataPaths()) { // check node-paths are writable
+            tryWriteTempFile(path);
+        }
+        for (String index : this.findAllIndices()) {
+            for (Path path : this.indexPaths(index)) { // check index paths are writable
+                Path statePath = path.resolve(MetaDataStateFormat.STATE_DIR_NAME);
+                tryWriteTempFile(statePath);
+                tryWriteTempFile(path);
+            }
+            for (ShardId shardID : this.findAllShardIds(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE))) {
+                Path[] paths = this.availableShardPaths(shardID);
+                for (Path path : paths) { // check shard paths are writable
+                    Path indexDir = path.resolve(ShardPath.INDEX_FOLDER_NAME);
+                    Path statePath = path.resolve(MetaDataStateFormat.STATE_DIR_NAME);
+                    Path translogDir = path.resolve(ShardPath.TRANSLOG_FOLDER_NAME);
+                    tryWriteTempFile(indexDir);
+                    tryWriteTempFile(translogDir);
+                    tryWriteTempFile(statePath);
+                    tryWriteTempFile(path);
+                }
+            }
+        }
+    }
+
+    private static void tryWriteTempFile(Path path) throws IOException {
+        if (Files.exists(path)) {
+            Path resolve = path.resolve(".es_temp_file");
+            try {
+                Files.createFile(resolve);
+                Files.deleteIfExists(resolve);
+            } catch (IOException ex) {
+                throw new IOException("failed to write in data directory [" + path + "], write permission is required", ex);
+            }
+        }
+    }
 }
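
The new `assertCanWrite` check boils down to creating and deleting a small probe file in every node, index, and shard directory. Below is a self-contained sketch of that write probe outside of Elasticsearch; the class name and directory argument are example values, not part of the patch.

[source,java]
--------------------------------------------------
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Standalone sketch of the write-probe idea used by tryWriteTempFile above.
public class WriteProbe {
    public static void main(String[] args) throws IOException {
        Path dataDir = Paths.get(args.length > 0 ? args[0] : ".");
        Path probe = dataDir.resolve(".es_temp_file");
        try {
            Files.createFile(probe);      // fails if the process lacks write permission
            Files.deleteIfExists(probe);  // clean up the probe again
            System.out.println("writable: " + dataDir.toAbsolutePath());
        } catch (IOException ex) {
            throw new IOException("failed to write in data directory [" + dataDir
                + "], write permission is required", ex);
        }
    }
}
--------------------------------------------------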
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
index 343d3a407d0..5d0d77fb6de 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
@@ -154,9 +154,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
                     throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
                 }
             }
-            builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
-            builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
-            builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
             parseTextField(builder, fieldName, node, parserContext);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
@@ -174,6 +171,17 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
                         throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
                     }
                     builder.positionIncrementGap(newPositionIncrementGap);
+                    // if the analyzers were not set explicitly, resolve them to the actual defaults now
+                    // so that we can inject the position increment gap
+                    if (builder.fieldType().indexAnalyzer() == null) {
+                        builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
+                    }
+                    if (builder.fieldType().searchAnalyzer() == null) {
+                        builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
+                    }
+                    if (builder.fieldType().searchQuoteAnalyzer() == null) {
+                        builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
+                    }
                     iterator.remove();
                 } else if (propName.equals("ignore_above")) {
                     builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java
index 940fa89f03a..3c5ca1ce444 100644
--- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java
@@ -26,6 +26,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.similarities.Similarity;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -261,7 +262,8 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
         if (maxChildren == 0) {
             maxChildren = Integer.MAX_VALUE;
         }
-        return new LateParsingQuery(parentDocMapper.typeFilter(), innerQuery, minChildren(), maxChildren, parentType, scoreMode, parentChildIndexFieldData);
+        return new LateParsingQuery(parentDocMapper.typeFilter(), innerQuery, minChildren(), maxChildren,
+                                    parentType, scoreMode, parentChildIndexFieldData, context.getSearchSimilarity());
     }
 
     final static class LateParsingQuery extends Query {
@@ -273,8 +275,11 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
         private final String parentType;
         private final ScoreMode scoreMode;
         private final ParentChildIndexFieldData parentChildIndexFieldData;
+        private final Similarity similarity;
 
-        LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) {
+        LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren,
+                         String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData,
+                         Similarity similarity) {
             this.toQuery = toQuery;
             this.innerQuery = innerQuery;
             this.minChildren = minChildren;
@@ -282,6 +287,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
             this.parentType = parentType;
             this.scoreMode = scoreMode;
             this.parentChildIndexFieldData = parentChildIndexFieldData;
+            this.similarity = similarity;
         }
 
         @Override
@@ -294,6 +300,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
                 String joinField = ParentFieldMapper.joinField(parentType);
                 IndexSearcher indexSearcher = new IndexSearcher(reader);
                 indexSearcher.setQueryCache(null);
+                indexSearcher.setSimilarity(similarity);
                 IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal((DirectoryReader) reader);
                 MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
                 return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
@@ -348,6 +355,10 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
         public Query getInnerQuery() {
             return innerQuery;
         }
+
+        public Similarity getSimilarity() {
+            return similarity;
+        }
     }
 
     @Override
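
The has_child fix threads the shard's search similarity into the `IndexSearcher` that the join query builds at rewrite time; a freshly constructed searcher would otherwise score with Lucene's default similarity. A minimal sketch of the pattern follows; the helper class name is illustrative only.

[source,java]
--------------------------------------------------
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.similarities.Similarity;

// Illustrative helper mirroring how LateParsingQuery now sets up its searcher:
// the configured similarity is applied explicitly instead of relying on the default.
final class SimilarityAwareSearcher {
    static IndexSearcher create(DirectoryReader reader, Similarity searchSimilarity) {
        IndexSearcher indexSearcher = new IndexSearcher(reader);
        indexSearcher.setQueryCache(null);             // no query caching for this throwaway searcher
        indexSearcher.setSimilarity(searchSimilarity); // the line this change effectively adds
        return indexSearcher;
    }
}
--------------------------------------------------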
diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java
index 9a3637de3f9..281bc061170 100644
--- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java
@@ -194,7 +194,14 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
         // wrap the query with type query
         innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
         Query childrenFilter = Queries.not(parentTypeQuery);
-        return new HasChildQueryBuilder.LateParsingQuery(childrenFilter, innerQuery, HasChildQueryBuilder.DEFAULT_MIN_CHILDREN, HasChildQueryBuilder.DEFAULT_MAX_CHILDREN, type, score ? ScoreMode.Max : ScoreMode.None, parentChildIndexFieldData);
+        return new HasChildQueryBuilder.LateParsingQuery(childrenFilter,
+                                                         innerQuery,
+                                                         HasChildQueryBuilder.DEFAULT_MIN_CHILDREN,
+                                                         HasChildQueryBuilder.DEFAULT_MAX_CHILDREN,
+                                                         type,
+                                                         score ? ScoreMode.Max : ScoreMode.None,
+                                                         parentChildIndexFieldData,
+                                                         context.getSearchSimilarity());
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java
index f044ae7b023..104d9295299 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java
@@ -248,7 +248,7 @@ public class RestNodesAction extends AbstractCatAction {
                 table.addCell("-");
             }
 
-            table.addCell(node.getVersion().number());
+            table.addCell(node.getVersion().toString());
             table.addCell(info == null ? null : info.getBuild().shortHash());
             table.addCell(jvmInfo == null ? null : jvmInfo.version());
             table.addCell(fsInfo == null ? null : fsInfo.getTotal().getAvailable());
diff --git a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java
index 5504878bbef..bf3f0a3e5df 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java
@@ -81,10 +81,10 @@ public class RestMainAction extends BaseRestHandler {
         builder.field("name", Node.NODE_NAME_SETTING.get(settings));
         builder.field("cluster_name", clusterName.value());
         builder.startObject("version")
-                .field("number", version.number())
+                .field("number", version.toString())
                 .field("build_hash", Build.CURRENT.shortHash())
                 .field("build_date", Build.CURRENT.date())
-                .field("build_snapshot", version.snapshot)
+                .field("build_snapshot", Build.CURRENT.isSnapshot())
                 .field("lucene_version", version.luceneVersion.toString())
                 .endObject();
         builder.field("tagline", "You Know, for Search");
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java
index a15a61eaae2..9f5f3e443ef 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java
@@ -31,8 +31,8 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.AggregationInitializationException;
 import org.elasticsearch.search.aggregations.AggregatorBuilder;
-import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
@@ -520,7 +520,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
             builder.field(SearchSourceBuilder.TRACK_SCORES_FIELD.getPreferredName(), true);
         }
         if (highlightBuilder != null) {
-            this.highlightBuilder.toXContent(builder, params);
+            builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
         }
         builder.endObject();
         return builder;
diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
index 8827c3cf43f..e1a9e9fe67b 100644
--- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
@@ -1038,7 +1038,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
             }
 
         if (highlightBuilder != null) {
-            this.highlightBuilder.toXContent(builder, params);
+            builder.field(HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
         }
 
         if (innerHitsBuilder != null) {
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java
index 325541844ca..1a3b1b46cfc 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java
@@ -58,8 +58,6 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
 
     public static final HighlightBuilder PROTOTYPE = new HighlightBuilder();
 
-    public static final String HIGHLIGHT_ELEMENT_NAME = "highlight";
-
     /** default for whether to highlight fields based on the source even if stored separately */
     public static final boolean DEFAULT_FORCE_SOURCE = false;
     /** default for whether a field should be highlighted only if a query matches that field */
@@ -226,7 +224,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(HIGHLIGHT_ELEMENT_NAME);
+        builder.startObject();
         innerXContent(builder);
         builder.endObject();
         return builder;
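
With this change `HighlightBuilder.toXContent` emits an anonymous object and the surrounding builder supplies the field name, which is why `SearchSourceBuilder` and `TopHitsAggregatorBuilder` now call `builder.field(HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder)`. A rough usage sketch of that composition is shown below; the class name and field choice are illustrative.

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.highlight.HighlightBuilder;

// Illustrative sketch: the caller names the "highlight" field and the builder
// only renders its own body as an anonymous object.
public class HighlightXContentExample {
    public static void main(String[] args) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.field("highlight", new HighlightBuilder().field("content"));
        builder.endObject();
        System.out.println(builder.string()); // prints {"highlight":{...}} with the highlight body nested inside
    }
}
--------------------------------------------------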
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
index 1077554aa23..608b33db0fe 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
 //// Very special jar permissions:
 //// These are dangerous permissions that we don't want to grant to everything.
 
-grant codeBase "${codebase.lucene-core-5.5.0-snapshot-850c6c2.jar}" {
+grant codeBase "${codebase.lucene-core-5.5.0.jar}" {
   // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
   permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
index 5f393afbe62..856cd50e2a9 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
 };
 
-grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-850c6c2.jar}" {
+grant codeBase "${codebase.lucene-test-framework-5.5.0.jar}" {
   // needed by RamUsageTester
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
 };
diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java
index 52508f8dc83..4669f5bc718 100644
--- a/core/src/test/java/org/elasticsearch/VersionTests.java
+++ b/core/src/test/java/org/elasticsearch/VersionTests.java
@@ -41,15 +41,6 @@ import static org.hamcrest.Matchers.sameInstance;
 
 public class VersionTests extends ESTestCase {
 
-    public void testMavenVersion() {
-        // maven sets this property to ensure that the latest version
-        // we use here is the version that is actually set to the project.version
-        // in maven
-        String property = System.getProperty("tests.version", null);
-        assumeTrue("tests.version is set", property != null);
-        assertEquals(property, Version.CURRENT.toString());
-    }
-
     public void testVersionComparison() throws Exception {
         assertThat(V_0_20_0.before(V_0_90_0), is(true));
         assertThat(V_0_20_0.before(V_0_20_0), is(false));
@@ -93,12 +84,7 @@ public class VersionTests extends ESTestCase {
         final int iters = scaledRandomIntBetween(100, 1000);
         for (int i = 0; i < iters; i++) {
             Version version = randomVersion(random());
-            if (version.snapshot()) { // number doesn't include SNAPSHOT but the parser checks for that
-                assertEquals(Version.fromString(version.number()), version);
-            } else {
-                assertThat(Version.fromString(version.number()), sameInstance(version));
-            }
-            assertFalse(Version.fromString(version.number()).snapshot());
+            assertThat(Version.fromString(version.toString()), sameInstance(version));
         }
     }
 
@@ -155,9 +141,9 @@ public class VersionTests extends ESTestCase {
 
     public void testToString() {
         // with 2.0.beta we lowercase
-        assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.number());
-        assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.number());
-        assertEquals("1.4.0", Version.V_1_4_0.number());
+        assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString());
+        assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.toString());
+        assertEquals("1.4.0", Version.V_1_4_0.toString());
     }
 
     public void testIsBeta() {
@@ -170,12 +156,11 @@ public class VersionTests extends ESTestCase {
         final int iters = scaledRandomIntBetween(100, 1000);
         for (int i = 0; i < iters; i++) {
             Version version = randomVersion(random());
-            if (version.snapshot() == false && random().nextBoolean()) {
-                version = new Version(version.id, true, version.luceneVersion);
+            if (random().nextBoolean()) {
+                version = new Version(version.id, version.luceneVersion);
             }
             Version parsedVersion = Version.fromString(version.toString());
             assertEquals(version, parsedVersion);
-            assertEquals(version.snapshot(), parsedVersion.snapshot());
         }
     }
 
@@ -207,7 +192,7 @@ public class VersionTests extends ESTestCase {
                 assertEquals("Version id " + field.getName() + " does not point to " + constantName, v, Version.fromId(versionId));
                 assertEquals("Version " + constantName + " does not have correct id", versionId, v.id);
                 if (v.major >= 2) {
-                    String number = v.number();
+                    String number = v.toString();
                     if (v.isBeta()) {
                         number = number.replace("-beta", "_beta");
                     } else if (v.isRC()) {
@@ -215,7 +200,7 @@ public class VersionTests extends ESTestCase {
                     }
                     assertEquals("V_" + number.replace('.', '_'), constantName);
                 } else {
-                    assertEquals("V_" + v.number().replace('.', '_'), constantName);
+                    assertEquals("V_" + v.toString().replace('.', '_'), constantName);
                 }
 
                 // only the latest version for a branch should be a snapshot (ie unreleased)
@@ -225,7 +210,7 @@ public class VersionTests extends ESTestCase {
                     maxBranchVersions.put(branchName, v);
                 } else if (v.after(maxBranchVersion)) {
 
-                    assertFalse("Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists", maxBranchVersion.snapshot());
+                    assertFalse("Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists", VersionUtils.isSnapshot(maxBranchVersion));
                     maxBranchVersions.put(branchName, v);
                 }
             }
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java
index c032d3ddee8..0ed6c8da6c1 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java
@@ -19,7 +19,10 @@
 
 package org.elasticsearch.bootstrap;
 
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.monitor.os.OsProbe;
+import org.elasticsearch.monitor.process.ProcessProbe;
 import org.elasticsearch.test.ESTestCase;
 
 public class BootstrapSettingsTests extends ESTestCase {
@@ -31,4 +34,21 @@ public class BootstrapSettingsTests extends ESTestCase {
         assertTrue(BootstrapSettings.CTRLHANDLER_SETTING.get(Settings.EMPTY));
     }
 
+    public void testEnforceMaxFileDescriptorLimits() {
+        // nothing should happen since we are in out-of-the-box (OOB) mode, i.e. no network host is configured
+        Bootstrap.enforceOrLogLimits(Settings.EMPTY);
+
+        Settings build = Settings.builder().put(randomFrom(Bootstrap.ENFORCE_SETTINGS.toArray(new Setting[0])).getKey(),
+            "127.0.0.1").build();
+        long maxFileDescriptorCount = ProcessProbe.getInstance().getMaxFileDescriptorCount();
+        try {
+            Bootstrap.enforceOrLogLimits(build);
+            if (maxFileDescriptorCount != -1 && maxFileDescriptorCount < (1 << 16)) {
+                fail("must have enforced limits: " + maxFileDescriptorCount);
+            }
+        } catch (IllegalStateException ex) {
+            assertTrue(ex.getMessage(), ex.getMessage().startsWith("max file descriptors"));
+        }
+    }
+
 }
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
index b5b1f955ae0..c39ba1fddc9 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
@@ -259,7 +259,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
     public void testAllVersionsTested() throws Exception {
         SortedSet<String> expectedVersions = new TreeSet<>();
         for (Version v : VersionUtils.allVersions()) {
-            if (v.snapshot()) continue;  // snapshots are unreleased, so there is no backcompat yet
+            if (VersionUtils.isSnapshot(v)) continue;  // snapshots are unreleased, so there is no backcompat yet
             if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version
             if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself
             expectedVersions.add("index-" + v.toString() + ".zip");
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
index eabb954c2c6..ec73edd493f 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
@@ -37,6 +37,7 @@ import org.elasticsearch.snapshots.SnapshotInfo;
 import org.elasticsearch.snapshots.SnapshotRestoreException;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
+import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
 import java.lang.reflect.Modifier;
@@ -98,7 +99,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {
         for (java.lang.reflect.Field field : Version.class.getFields()) {
             if (Modifier.isStatic(field.getModifiers()) && field.getType() == Version.class) {
                 Version v = (Version) field.get(Version.class);
-                if (v.snapshot()) continue;
+                if (VersionUtils.isSnapshot(v)) continue;
                 if (v.onOrBefore(Version.V_2_0_0_beta1)) continue;
                 if (v.equals(Version.CURRENT)) continue;
                 expectedVersions.add(v.toString());
diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
index d4b46016125..a7d127a60c8 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
@@ -48,6 +48,7 @@ import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.snapshots.SnapshotState;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalTestCluster;
+import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.transport.TransportException;
 import org.elasticsearch.transport.TransportRequest;
@@ -172,6 +173,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 
     }
 
+    @TestLogging("gateway:TRACE")
     public void testIndexWithFewDocuments() throws Exception {
         final Path dataPath = createTempDir();
         Settings nodeSettings = nodeSettings(dataPath);
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
index b706a4ce5d8..51c9d48218c 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
@@ -28,17 +28,22 @@ import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.similarities.DFISimilarity;
+import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
+import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.support.QueryInnerHits;
+import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.Script.ScriptParseException;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
@@ -48,6 +53,7 @@ import org.elasticsearch.test.TestSearchContext;
 import org.junit.BeforeClass;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
 
 import static org.hamcrest.CoreMatchers.equalTo;
@@ -58,8 +64,11 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
     protected static final String PARENT_TYPE = "parent";
     protected static final String CHILD_TYPE = "child";
 
+    private static String similarity;
+
     @BeforeClass
     public static void before() throws Exception {
+        similarity = randomFrom("classic", "BM25");
         MapperService mapperService = queryShardContext().getMapperService();
         mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
@@ -72,6 +81,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
         mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
                 "_parent", "type=" + PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
+                "custom_string", "type=text,similarity=" + similarity,
                 INT_FIELD_NAME, "type=integer",
                 DOUBLE_FIELD_NAME, "type=double",
                 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -300,4 +310,12 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
             }
         }
     }
+
+    public void testNonDefaultSimilarity() throws Exception {
+        QueryShardContext shardContext = createShardContext();
+        HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(CHILD_TYPE, new TermQueryBuilder("custom_string", "value"));
+        HasChildQueryBuilder.LateParsingQuery query = (HasChildQueryBuilder.LateParsingQuery) hasChildQueryBuilder.toQuery(shardContext);
+        Similarity expected = SimilarityService.BUILT_IN.get(similarity).apply(similarity, Settings.EMPTY).get();
+        assertThat(((PerFieldSimilarityWrapper) query.getSimilarity()).get("custom_string"), instanceOf(expected.getClass()));
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
index a9aa5e42146..d74a148a2f9 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
@@ -51,6 +51,7 @@ import org.hamcrest.Matchers;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java
index 6d165f2d37c..b8f775639f9 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java
@@ -142,9 +142,7 @@ public class HighlightBuilderTests extends ESTestCase {
             if (randomBoolean()) {
                 builder.prettyPrint();
             }
-            builder.startObject();
-            highlightBuilder.innerXContent(builder);
-            builder.endObject();
+            highlightBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
 
             XContentParser parser = XContentHelper.createParser(builder.bytes());
             context.reset(parser);
diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 24263216da9..00000000000
--- a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-94f03500c4b0256199b4dfcecf20be5b71c29177
\ No newline at end of file
diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1
new file mode 100644
index 00000000000..dcdeb2cb477
--- /dev/null
+++ b/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1
@@ -0,0 +1 @@
+1e0e8243a4410be20c34683034fafa7bb52e55cc
\ No newline at end of file
diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index c9df04f4d6b..00000000000
--- a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-44365f83efda343500793c43a16903f2aa74ddbd
\ No newline at end of file
diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1
new file mode 100644
index 00000000000..dd5c846363a
--- /dev/null
+++ b/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1
@@ -0,0 +1 @@
+68480974b2f54f519763632a7c1c5d51cbff3805
\ No newline at end of file
diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 103b8e1258c..00000000000
--- a/distribution/licenses/lucene-core-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7aca3e6bfe610df9cdc1b8fd671eac071016c228
\ No newline at end of file
diff --git a/distribution/licenses/lucene-core-5.5.0.jar.sha1 b/distribution/licenses/lucene-core-5.5.0.jar.sha1
new file mode 100644
index 00000000000..70bd0b63bba
--- /dev/null
+++ b/distribution/licenses/lucene-core-5.5.0.jar.sha1
@@ -0,0 +1 @@
+a74fd869bb5ad7fe6b4cd29df9543a34aea81164
\ No newline at end of file
diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 861f05f5c5d..00000000000
--- a/distribution/licenses/lucene-grouping-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8c588d4d4c8fc6894dd6725dcf69ffa690c260f7
\ No newline at end of file
diff --git a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0.jar.sha1
new file mode 100644
index 00000000000..f905a2081b6
--- /dev/null
+++ b/distribution/licenses/lucene-grouping-5.5.0.jar.sha1
@@ -0,0 +1 @@
+437cacec0cfa349b1dee049a7c0e32df3b8ecc07
\ No newline at end of file
diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index b64c63539c7..00000000000
--- a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3ccad9ccffe94decc7c8c2a97fee3574c54b804c
\ No newline at end of file
diff --git a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1
new file mode 100644
index 00000000000..6ea3c5a0c13
--- /dev/null
+++ b/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1
@@ -0,0 +1 @@
+ecdd913cb7c61a5435591f0a7268b01ab3fc782a
\ No newline at end of file
diff --git a/distribution/licenses/lucene-join-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-join-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 85c0b7df5e7..00000000000
--- a/distribution/licenses/lucene-join-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b7eba4721b52f0490e71d8fdbc92112be538592b
\ No newline at end of file
diff --git a/distribution/licenses/lucene-join-5.5.0.jar.sha1 b/distribution/licenses/lucene-join-5.5.0.jar.sha1
new file mode 100644
index 00000000000..3cc19b170ed
--- /dev/null
+++ b/distribution/licenses/lucene-join-5.5.0.jar.sha1
@@ -0,0 +1 @@
+af4f55e36e3a7d1f4e9ed9efdccf7e22b767d6e8
\ No newline at end of file
diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 492e7193486..00000000000
--- a/distribution/licenses/lucene-memory-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6dde326efe42926c57dc49153536c689b9951203
\ No newline at end of file
diff --git a/distribution/licenses/lucene-memory-5.5.0.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0.jar.sha1
new file mode 100644
index 00000000000..1f4ebc783ee
--- /dev/null
+++ b/distribution/licenses/lucene-memory-5.5.0.jar.sha1
@@ -0,0 +1 @@
+09a327fe9f20fc7e3912ed213bdd5cb4b6d2a65a
\ No newline at end of file
diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index ea0e372cd6e..00000000000
--- a/distribution/licenses/lucene-misc-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3b8008f6b4195009960516fb1978912c0e068df2
\ No newline at end of file
diff --git a/distribution/licenses/lucene-misc-5.5.0.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0.jar.sha1
new file mode 100644
index 00000000000..76131ae81c5
--- /dev/null
+++ b/distribution/licenses/lucene-misc-5.5.0.jar.sha1
@@ -0,0 +1 @@
+504d855a1a38190622fdf990b2298c067e7d60ca
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 5c1d70e5800..00000000000
--- a/distribution/licenses/lucene-queries-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-00c681bca8129811901d2eff850e8b7855385448
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queries-5.5.0.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0.jar.sha1
new file mode 100644
index 00000000000..5790b2e4776
--- /dev/null
+++ b/distribution/licenses/lucene-queries-5.5.0.jar.sha1
@@ -0,0 +1 @@
+60ca161c1dd5f127907423b6f039b846fb713de0
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index c554fcdc765..00000000000
--- a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f8856c8286fde66ffa3d4745306f3849b4be808b
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1
new file mode 100644
index 00000000000..8e4a1e66138
--- /dev/null
+++ b/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1
@@ -0,0 +1 @@
+0fddc49725b562fd48dff0cff004336ad2a090a4
\ No newline at end of file
diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index b986aa67de5..00000000000
--- a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dd5e43774a033b65c66c5e877104ffaf6a17c0b8
\ No newline at end of file
diff --git a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1
new file mode 100644
index 00000000000..20c2a1c9527
--- /dev/null
+++ b/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1
@@ -0,0 +1 @@
+b7da8e187acd6e4d7781ba41fac8b9082dd27409
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 44d3e9f616d..00000000000
--- a/distribution/licenses/lucene-spatial-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-29fcb449512c0095e77ad2c96eca03b36e59745f
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0.jar.sha1
new file mode 100644
index 00000000000..dd645be87e3
--- /dev/null
+++ b/distribution/licenses/lucene-spatial-5.5.0.jar.sha1
@@ -0,0 +1 @@
+c14965bf67179bee93cc8efc58d09a75d230c891
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 6ec23a5e5cf..00000000000
--- a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ea8d939136c58dbc388939ddc50bf9f6315528a4
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1
new file mode 100644
index 00000000000..c0b9d4ba838
--- /dev/null
+++ b/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1
@@ -0,0 +1 @@
+3e5ab4ea3e2052166100482f7a56b75bfa4ab0ad
\ No newline at end of file
diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-850c6c2.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index f725c019f7d..00000000000
--- a/distribution/licenses/lucene-suggest-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b6dfab425bb5a0cbaf6adeb9ebec770cdce00046
\ No newline at end of file
diff --git a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0.jar.sha1
new file mode 100644
index 00000000000..adce0756ecf
--- /dev/null
+++ b/distribution/licenses/lucene-suggest-5.5.0.jar.sha1
@@ -0,0 +1 @@
+51f9d52332f556976a5099817e35d37c69a24597
\ No newline at end of file
diff --git a/docs/plugins/discovery-gce.asciidoc b/docs/plugins/discovery-gce.asciidoc
index b455fc689db..38dad90d1fc 100644
--- a/docs/plugins/discovery-gce.asciidoc
+++ b/docs/plugins/discovery-gce.asciidoc
@@ -48,6 +48,16 @@ discovery:
 
 The following gce settings (prefixed with `cloud.gce`) are supported:
 
+ `project_id`::
+
+     Your Google project id (mandatory).
+
+ `zone`::
+
+     Restricts instance discovery to the given zone (mandatory). It should be one of the
+     https://developers.google.com/compute/docs/zones#available[GCE supported zones].
+     See also <<discovery-gce-usage-zones>>.
+
  `retry`::
 
      If set to `true`, client will use
@@ -56,8 +66,14 @@ The following gce settings (prefixed with `cloud.gce`) are supported:
 
  `max_wait`::
 
-     The maximum elapsed time in milliseconds after the client instantiating retry. If the time elapsed goes past the
-     `max_wait`, client stops to retry. Defaults to 15 minutes (900000 milliseconds).
+     The maximum total elapsed time to retry for. Once the elapsed time exceeds `max_wait`, the client stops
+     retrying. A negative value means that it will wait indefinitely. Defaults to `0s` (retry
+     indefinitely).
+
+ `refresh_interval`::
+
+     How long the list of hosts is cached to prevent further requests to the GCE API. `0s` disables caching.
+     A negative value will cause infinite caching. Defaults to `0s`.
 
 
 [IMPORTANT]
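
Put together, the `cloud.gce` settings above are plain key/value settings. The sketch below shows one hypothetical way to assemble them programmatically; every value is a placeholder, and only `project_id` and `zone` are mandatory.

[source,java]
--------------------------------------------------
import org.elasticsearch.common.settings.Settings;

// Hypothetical example values; project_id and zone are the two mandatory settings.
final class GceDiscoverySettingsExample {
    static Settings exampleSettings() {
        return Settings.builder()
            .put("cloud.gce.project_id", "my-project")   // mandatory
            .put("cloud.gce.zone", "europe-west1-a")     // mandatory, one of the GCE supported zones
            .put("cloud.gce.retry", true)                // retry transient API failures
            .put("cloud.gce.max_wait", "-1")             // negative value: wait indefinitely
            .put("cloud.gce.refresh_interval", "60s")    // cache the host list for one minute
            .build();
    }
}
--------------------------------------------------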
diff --git a/docs/reference/index-modules/allocation/total_shards.asciidoc b/docs/reference/index-modules/allocation/total_shards.asciidoc
index 691ab8d937d..9d8a7fdbd26 100644
--- a/docs/reference/index-modules/allocation/total_shards.asciidoc
+++ b/docs/reference/index-modules/allocation/total_shards.asciidoc
@@ -23,7 +23,7 @@ You can also limit the amount of shards a node can have regardless of the index:
 
 [WARNING]
 =======================================
-Thess setting impose a hard limit which can result in some shards not being
+These settings impose a hard limit which can result in some shards not being
 allocated.
 
 Use with caution.
diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc
index 0d589adb64a..60360c147b5 100644
--- a/docs/reference/indices/shadow-replicas.asciidoc
+++ b/docs/reference/indices/shadow-replicas.asciidoc
@@ -51,7 +51,7 @@ curl -XPUT 'localhost:9200/my_index' -d '
         "number_of_replicas" : 4,
         "data_path": "/opt/data/my_index",
         "shadow_replicas": true
-    } 
+    }
 }'
 --------------------------------------------------
 
@@ -63,6 +63,9 @@ ensure that the Elasticsearch process has the correct permissions to read from
 and write to the directory used in the `index.data_path` setting.
 ========================
 
+The `data_path` does not have to contain the index name; in this case
+"my_index" was used, but it could just as easily have been "/opt/data/".
+
 An index that has been created with the `index.shadow_replicas` setting set to
 "true" will not replicate document operations to any of the replica shards,
 instead, it will only continually refresh. Once segments are available on the
diff --git a/docs/reference/migration/migrate_2_0/settings.asciidoc b/docs/reference/migration/migrate_2_0/settings.asciidoc
index 8695b7ba7b2..3e52542f804 100644
--- a/docs/reference/migration/migrate_2_0/settings.asciidoc
+++ b/docs/reference/migration/migrate_2_0/settings.asciidoc
@@ -37,8 +37,8 @@ achieve the same result:
 
 [source,yaml]
 ---------------
-script.inline: on
-script.indexed: on
+script.inline: true
+script.indexed: true
 ---------------
 
 ==== Units required for time and byte-sized settings
diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc
index eeb19ed8f39..d0de8d1c50b 100644
--- a/docs/reference/migration/migrate_3_0.asciidoc
+++ b/docs/reference/migration/migrate_3_0.asciidoc
@@ -303,6 +303,14 @@ The 'default' similarity has been renamed to 'classic'.
 `indices.memory.min_shard_index_buffer_size` and `indices.memory.max_shard_index_buffer_size` are removed since Elasticsearch now allows any one shard to any
 amount of heap as long as the total indexing buffer heap used across all shards is below the node's `indices.memory.index_buffer_size` (default: 10% of the JVM heap)
 
+==== Removed es.max-open-files
+
+The `es.max-open-files` system property, which could be set to `true` to make
+Elasticsearch print the maximum number of open files allowed for the
+Elasticsearch process, has been removed. The same information can be
+obtained from the <<cluster-nodes-info>> API, and a warning is logged
+on startup if the limit is too low.
+
 [[breaking_30_mapping_changes]]
 === Mapping changes
 
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc
index 5713a42d8d1..c10659f89d4 100644
--- a/docs/reference/modules/snapshots.asciidoc
+++ b/docs/reference/modules/snapshots.asciidoc
@@ -186,7 +186,7 @@ The verification process can also be executed manually by running the following
 
 [source,js]
 -----------------------------------
-POST /_snapshot/my_backup/_verify
+POST /_snapshot/s3_repository/_verify
 -----------------------------------
 // AUTOSENSE
 
diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc
index e4c79fc6582..3d12745bac2 100644
--- a/docs/reference/setup/configuration.asciidoc
+++ b/docs/reference/setup/configuration.asciidoc
@@ -35,11 +35,7 @@ Make sure to increase the number of open files descriptors on the
 machine (or for the user running elasticsearch). Setting it to 32k or
 even 64k is recommended.
 
-In order to test how many open files the process can open, start it with
-`-Des.max-open-files` set to `true`. This will print the number of open
-files the process can open on startup.
-
-Alternatively, you can retrieve the `max_file_descriptors` for each node
+You can retrieve the `max_file_descriptors` for each node
 using the <<cluster-nodes-info>> API, with:
 
 [source,js]
diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc
index 3929bc7c8a8..ede2ea97c87 100644
--- a/docs/resiliency/index.asciidoc
+++ b/docs/resiliency/index.asciidoc
@@ -55,29 +55,6 @@ If you encounter an issue, https://github.com/elasticsearch/elasticsearch/issues
 
 We are committed to tracking down and fixing all the issues that are posted.
 
-[float]
-=== Use two phase commit for Cluster State publishing (STATUS: ONGOING, v3.0.0)
-
-A master node in Elasticsearch continuously https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery-zen.html#fault-detection[monitors the cluster nodes]
-and removes any node from the cluster that doesn't respond to its pings in a timely
-fashion. If the master is left with fewer nodes than the `discovery.zen.minimum_master_nodes`
-settings, it will step down and a new master election will start.
-
-When a network partition causes a master node to lose many followers, there is a short window
-in time until the node loss is detected and the master steps down. During that window, the
-master may erroneously accept and acknowledge cluster state changes. To avoid this, we introduce
-a new phase to cluster state publishing where the proposed cluster state is sent to all nodes
-but is not yet committed. Only once enough nodes (`discovery.zen.minimum_master_nodes`) actively acknowledge
-the change, it is committed and commit messages are sent to the nodes. See {GIT}13062[#13062].
-
-[float]
-=== Make index creation more user friendly (STATUS: ONGOING)
-
-Today, Elasticsearch returns as soon as a create-index request has been processed,
-but before the shards are allocated.  Users should wait for a `green` cluster health
-before continuing, but we can make this easier for users by waiting for a quorum
-of shards to be allocated before returning.  See {GIT}9126[#9126]
-
 [float]
 === Better request retry mechanism when nodes are disconnected (STATUS: ONGOING)
 
@@ -113,15 +90,37 @@ space.  The following issues have been identified:
 * Set a hard limit on `from`/`size` parameters {GIT}9311[#9311]. (STATUS: DONE, v2.1.0)
 * Prevent combinatorial explosion in aggregations from causing OOM {GIT}8081[#8081]. (STATUS: ONGOING)
 * Add the byte size of each hit to the request circuit breaker {GIT}9310[#9310]. (STATUS: ONGOING)
+* Limit the size of individual requests and also add a circuit breaker for the total memory used by in-flight request objects {GIT}16011[#16011]. (STATUS: ONGOING)
+
+Other safeguards are tracked in the meta-issue {GIT}11511[#11511].
 
 [float]
 === Loss of documents during network partition (STATUS: ONGOING)
 
 If a network partition separates a node from the master, there is some window of time before the node detects it. The length of the window is dependent on the type of the partition. This window is extremely small if a socket is broken. More adversarial partitions, for example those that silently drop requests without breaking the socket, can take longer (up to 3x30s using current defaults).
 
-If the node hosts a primary shard at the moment of partition, and ends up being isolated from the cluster (which could have resulted in {GIT}2488[split-brain] before), some documents that are being indexed into the primary may be lost if they fail to reach one of the allocated replicas (due to the partition) and that replica is later promoted to primary by the master. {GIT}7572[#7572]
+If the node hosts a primary shard at the moment of partition, and ends up being isolated from the cluster (which could have resulted in {GIT}2488[split-brain] before), some documents that are being indexed into the primary may be lost if they fail to reach one of the allocated replicas (due to the partition) and that replica is later promoted to primary by the master ({GIT}7572[#7572]).
+To prevent this situation, the primary needs to wait for the master to acknowledge replica shard failures before acknowledging the write to the client. {GIT}14252[#14252]
 
-A test to replicate this condition was added in {GIT}7493[#7493].
+[float]
+=== Safe primary relocations (STATUS: ONGOING)
+
+When primary relocation completes, a cluster state is propagated that deactivates the old primary and marks the new primary as active. As
+cluster state changes are not applied synchronously on all nodes, there can be a time interval where the relocation target has processed the
+cluster state and believes itself to be the active primary while the relocation source has not yet processed the cluster state update and
+still believes itself to be the active primary. This means that an index request that gets routed to the new primary does not get replicated
+to the old primary (as it has been deactivated from the point of view of the new primary). If a subsequent read request gets routed to the
+old primary, it cannot see the indexed document. {GIT}15900[#15900]
+
+In the reverse situation where a cluster state update that completes primary relocation is first applied on the relocation source and then
+on the relocation target, each of the nodes believes the other to be the active primary. This leads to the issue of indexing requests
+chasing the primary being quickly sent back and forth between the nodes, potentially making them both go OOM. {GIT}12573[#12573]
+
+[float]
+=== Relocating shards omitted by reporting infrastructure (STATUS: ONGOING)
+
+Indices stats and indices segments requests reach out to all nodes that have shards of that index. Shards that have relocated away from a
+node while the stats request is in flight cause that part of the request to fail and are simply ignored in the overall stats result. {GIT}13719[#13719]
 
 [float]
 === Jepsen Test Failures (STATUS: ONGOING)
@@ -134,12 +133,42 @@ We have increased our test coverage to include scenarios tested by Jepsen. We ma
 This status page is a start, but we can do a better job of explicitly documenting the processes at work in Elasticsearch, and what happens in the case of each type of failure. The plan is to have a test case that validates each behavior under simulated conditions. Every test will document the expected results, the associated test code and an explicit PASS or FAIL status for each simulated case.
 
 [float]
-=== Do not allow stale shards to automatically be promoted to primary (STATUS: ONGOING)
+=== Do not allow stale shards to automatically be promoted to primary (STATUS: ONGOING, v3.0.0)
 
-In some scenarios, after the loss of all valid copies, a stale replica shard can be assigned as a primary. This can lead to
-a loss of acknowledged writes if the valid copies are not lost but are rather temporarily isolated. Work is underway
-({GIT}14671[#14671]) to prevent the automatic promotion of a stale primary and only allow such promotion to occur when
-a system operator manually intervenes.
+In some scenarios, after the loss of all valid copies, a stale replica shard can be automatically assigned as a primary, preferring old data
+to no data at all ({GIT}14671[#14671]). This can lead to a loss of acknowledged writes if the valid copies are not lost but are rather
+temporarily unavailable. Allocation IDs ({GIT}14739[#14739]) solve this issue by tracking non-stale shard copies in the cluster and using
+this tracking information to allocate primary shards. When all shard copies are lost or only stale ones are available, Elasticsearch will
+wait for one of the good shard copies to reappear. In the case where all good copies are lost, a manual override command can be used to
+allocate a stale shard copy.
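+
+As an illustration only, such a manual override could eventually take the shape of a cluster reroute command along these lines; the command
+name, parameters, and the index/shard/node values below are placeholders and may differ from what is finally released:
+
+[source,js]
+--------------------------------------------------
+POST /_cluster/reroute
+{
+  "commands" : [
+    {
+      "allocate_stale_primary" : {
+        "index" : "my_index",
+        "shard" : 0,
+        "node" : "node-1",
+        "accept_data_loss" : true
+      }
+    }
+  ]
+}
+--------------------------------------------------
+// AUTOSENSE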
+
+[float]
+=== Make index creation resilient to index closing and full cluster crashes (STATUS: ONGOING, v3.0.0)
+
+Recovering an index requires a quorum of shard copies (with an exception for the case of 2 copies) to be available to allocate a primary. This means that
+a primary cannot be assigned if the cluster dies before enough shards have been allocated ({GIT}9126[#9126]). The same happens if an index
+is closed before enough shard copies were started, making it impossible to reopen the index ({GIT}15281[#15281]).
+Allocation IDs ({GIT}14739[#14739]) solve this issue by tracking allocated shard copies in the cluster. This makes it possible to safely
+recover an index in the presence of a single shard copy. Allocation IDs can also distinguish the situation where an index has been created
+but none of the shards have been started. If such an index was inadvertently closed before at least one shard could be started, a fresh
+shard will be allocated upon reopening the index.
+
+== Unreleased
+
+[float]
+=== Use two phase commit for Cluster State publishing (STATUS: UNRELEASED, v3.0.0)
+
+A master node in Elasticsearch continuously https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery-zen.html#fault-detection[monitors the cluster nodes]
+and removes any node from the cluster that doesn't respond to its pings in a timely
+fashion. If the master is left with fewer nodes than the `discovery.zen.minimum_master_nodes`
+setting, it will step down and a new master election will start.
+
+When a network partition causes a master node to lose many followers, there is a short window
+in time until the node loss is detected and the master steps down. During that window, the
+master may erroneously accept and acknowledge cluster state changes. To avoid this, we introduce
+a new phase to cluster state publishing where the proposed cluster state is sent to all nodes
+but is not yet committed. Only once enough nodes (`discovery.zen.minimum_master_nodes`) actively acknowledge
+the change is it committed, and commit messages are then sent to the nodes. See {GIT}13062[#13062].
 
 == Completed
 
diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-850c6c2.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index c0305515558..00000000000
--- a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4017aff15660b508221e482c19ac6323b601229e
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1
new file mode 100644
index 00000000000..15c992bf460
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1
@@ -0,0 +1 @@
+4766406a2933ac9df62c49d6619caabb9943aba2
\ No newline at end of file
diff --git a/plugins/lang-painless/ant.xml b/modules/lang-painless/ant.xml
similarity index 100%
rename from plugins/lang-painless/ant.xml
rename to modules/lang-painless/ant.xml
diff --git a/plugins/lang-painless/build.gradle b/modules/lang-painless/build.gradle
similarity index 100%
rename from plugins/lang-painless/build.gradle
rename to modules/lang-painless/build.gradle
diff --git a/plugins/lang-painless/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/modules/lang-painless/licenses/antlr4-runtime-4.5.1-1.jar.sha1
similarity index 100%
rename from plugins/lang-painless/licenses/antlr4-runtime-4.5.1-1.jar.sha1
rename to modules/lang-painless/licenses/antlr4-runtime-4.5.1-1.jar.sha1
diff --git a/plugins/lang-painless/licenses/antlr4-runtime-LICENSE.txt b/modules/lang-painless/licenses/antlr4-runtime-LICENSE.txt
similarity index 100%
rename from plugins/lang-painless/licenses/antlr4-runtime-LICENSE.txt
rename to modules/lang-painless/licenses/antlr4-runtime-LICENSE.txt
diff --git a/plugins/lang-painless/licenses/antlr4-runtime-NOTICE.txt b/modules/lang-painless/licenses/antlr4-runtime-NOTICE.txt
similarity index 100%
rename from plugins/lang-painless/licenses/antlr4-runtime-NOTICE.txt
rename to modules/lang-painless/licenses/antlr4-runtime-NOTICE.txt
diff --git a/plugins/lang-painless/licenses/asm-5.0.4.jar.sha1 b/modules/lang-painless/licenses/asm-5.0.4.jar.sha1
similarity index 100%
rename from plugins/lang-painless/licenses/asm-5.0.4.jar.sha1
rename to modules/lang-painless/licenses/asm-5.0.4.jar.sha1
diff --git a/plugins/lang-painless/licenses/asm-LICENSE.txt b/modules/lang-painless/licenses/asm-LICENSE.txt
similarity index 100%
rename from plugins/lang-painless/licenses/asm-LICENSE.txt
rename to modules/lang-painless/licenses/asm-LICENSE.txt
diff --git a/plugins/lang-painless/licenses/asm-NOTICE.txt b/modules/lang-painless/licenses/asm-NOTICE.txt
similarity index 100%
rename from plugins/lang-painless/licenses/asm-NOTICE.txt
rename to modules/lang-painless/licenses/asm-NOTICE.txt
diff --git a/plugins/lang-painless/licenses/asm-commons-5.0.4.jar.sha1 b/modules/lang-painless/licenses/asm-commons-5.0.4.jar.sha1
similarity index 100%
rename from plugins/lang-painless/licenses/asm-commons-5.0.4.jar.sha1
rename to modules/lang-painless/licenses/asm-commons-5.0.4.jar.sha1
diff --git a/plugins/lang-painless/licenses/asm-tree-5.0.4.jar.sha1 b/modules/lang-painless/licenses/asm-tree-5.0.4.jar.sha1
similarity index 100%
rename from plugins/lang-painless/licenses/asm-tree-5.0.4.jar.sha1
rename to modules/lang-painless/licenses/asm-tree-5.0.4.jar.sha1
diff --git a/plugins/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
similarity index 100%
rename from plugins/lang-painless/src/main/antlr/PainlessLexer.g4
rename to modules/lang-painless/src/main/antlr/PainlessLexer.g4
diff --git a/plugins/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4
similarity index 100%
rename from plugins/lang-painless/src/main/antlr/PainlessParser.g4
rename to modules/lang-painless/src/main/antlr/PainlessParser.g4
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
similarity index 99%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
index 94b372a1335..9266b118fcb 100644
--- a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
@@ -956,7 +956,8 @@ class Definition {
 
         addConstructor("ArrayList<Object>", "new", new Type[] {}, null);
 
-        addMethod("List<String>", "set", null, false, objectType, new Type[] {intType, objectType}, stringType, new Type[] {intType, stringType});
+        addMethod("List<String>", "set", null, false, objectType, new Type[] {intType, objectType}, stringType,
+            new Type[] {intType, stringType});
         addMethod("List<String>", "get", null, false, objectType, new Type[] {intType}, stringType, null);
         addMethod("List<String>", "remove", null, false, objectType, new Type[] {intType}, stringType, null);
 
@@ -992,7 +993,8 @@ class Definition {
 
         addConstructor("HashMap<Object,Object>", "new", new Type[] {}, null);
 
-        addMethod("Map<String,def>", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {stringType, defType});
+        addMethod("Map<String,def>", "put", null, false, objectType, new Type[] {objectType, objectType}, defType,
+            new Type[] {stringType, defType});
         addMethod("Map<String,def>", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType});
         addMethod("Map<String,def>", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType});
         addMethod("Map<String,def>", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
@@ -1004,7 +1006,8 @@ class Definition {
 
         addConstructor("HashMap<String,def>", "new", new Type[] {}, null);
 
-        addMethod("Map<String,Object>", "put", null, false, objectType, new Type[] {objectType, objectType}, null, new Type[] {stringType, objectType});
+        addMethod("Map<String,Object>", "put", null, false, objectType, new Type[] {objectType, objectType}, null,
+            new Type[] {stringType, objectType});
         addMethod("Map<String,Object>", "get", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType});
         addMethod("Map<String,Object>", "remove", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType});
         addMethod("Map<String,Object>", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
similarity index 92%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
index b64ed71f8de..a2e72e52bbb 100644
--- a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
@@ -36,6 +36,7 @@ public final class PainlessPlugin extends Plugin {
     }
 
     public void onModule(final ScriptModule module) {
-        module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PainlessScriptEngineService.class, PainlessScriptEngineService.TYPES));
+        module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(
+            PainlessScriptEngineService.class, PainlessScriptEngineService.TYPES));
     }
 }
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java
diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java
similarity index 100%
rename from plugins/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java
diff --git a/plugins/lang-painless/src/main/plugin-metadata/plugin-security.policy b/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy
similarity index 100%
rename from plugins/lang-painless/src/main/plugin-metadata/plugin-security.policy
rename to modules/lang-painless/src/main/plugin-metadata/plugin-security.policy
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java
similarity index 94%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java
index a0eaf31c93b..859825f129a 100644
--- a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java
@@ -59,13 +59,15 @@ public class ConditionalTests extends ScriptTestCase {
 
     public void testNullArguments() {
         assertEquals(null, exec("boolean b = false, c = true; Object x; Map y; return b && c ? x : y;"));
-        assertEquals(HashMap.class, exec("boolean b = false, c = true; Object x; Map y = new HashMap(); return b && c ? x : y;").getClass());
+        assertEquals(HashMap.class,
+                exec("boolean b = false, c = true; Object x; Map y = new HashMap(); return b && c ? x : y;").getClass());
     }
 
     public void testPromotion() {
         assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? 2 : 4.0F);"));
         assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new Long(2) : new Float(4.0F));"));
-        assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? new HashMap() : new ArrayList()) == (y ? new Long(2) : new Float(4.0F));"));
+        assertEquals(false, exec("boolean x = false; boolean y = true; " +
+                "return (x ? new HashMap() : new ArrayList()) == (y ? new Long(2) : new Float(4.0F));"));
         assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new HashMap() : new ArrayList());"));
     }
 
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java
similarity index 96%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java
index f31a022d038..04d0b1a64a1 100644
--- a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java
@@ -80,7 +80,8 @@ public class FieldTests extends ScriptTestCase {
         assertEquals("s5t42", exec("def fc = new FieldClass() return fc.t += 2 + fc.j + \"t\" + 4 + (3 - 1)"));
         assertEquals(2.0f, exec("def fc = new FieldClass(); def l = new Double(3) Byte b = new Byte((byte)2) return fc.test(l, b)"));
         assertEquals(4, exec("def fc = new FieldClass() fc.i = 4 return fc.i"));
-        assertEquals(5, exec("FieldClass fc0 = new FieldClass() FieldClass fc1 = new FieldClass() fc0.i = 7 - fc0.i fc1.i = fc0.i return fc1.i"));
+        assertEquals(5,
+                exec("FieldClass fc0 = new FieldClass() FieldClass fc1 = new FieldClass() fc0.i = 7 - fc0.i fc1.i = fc0.i return fc1.i"));
         assertEquals(8, exec("def fc0 = new FieldClass() def fc1 = new FieldClass() fc0.i += fc1.i fc0.i += fc0.i return fc0.i"));
     }
 
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java
similarity index 98%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java
index 91a595680c5..ccfd2232e88 100644
--- a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java
@@ -85,7 +85,8 @@ public class FloatOverflowEnabledTests extends ScriptTestCase {
 
     public void testSubtraction() throws Exception {
         assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;"));
-        assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;"));
+        assertEquals(Double.NEGATIVE_INFINITY,
+                exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;"));
     }
 
     public void testSubtractionConst() throws Exception {
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java
similarity index 97%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java
index f74e193c3fb..41b3f857c0a 100644
--- a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java
@@ -143,7 +143,8 @@ public class IntegerOverflowEnabledTests extends ScriptTestCase {
 
     public void testAddition() throws Exception {
         assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;"));
-        assertEquals(9223372036854775807L + 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"));
+        assertEquals(9223372036854775807L + 9223372036854775807L,
+                exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"));
     }
 
     public void testAdditionConst() throws Exception {
@@ -163,7 +164,8 @@ public class IntegerOverflowEnabledTests extends ScriptTestCase {
 
     public void testMultiplication() throws Exception {
         assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;"));
-        assertEquals(9223372036854775807L * 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"));
+        assertEquals(9223372036854775807L * 9223372036854775807L,
+                exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"));
     }
 
     public void testMultiplicationConst() throws Exception {
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java
similarity index 96%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java
index b5c3f8cc245..768de29e948 100644
--- a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java
@@ -81,7 +81,8 @@ public class ScriptEngineTests extends ScriptTestCase {
         Map<String, Object> ctx = new HashMap<>();
         vars.put("ctx", ctx);
 
-        Object compiledScript = scriptEngine.compile("return ((Map<String, Object>)input.get(\"ctx\")).get(\"value\");", Collections.emptyMap());
+        Object compiledScript = scriptEngine.compile(
+                "return ((Map<String, Object>)input.get(\"ctx\")).get(\"value\");", Collections.emptyMap());
         ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE,
                 "testChangingVarsCrossExecution1", "painless", compiledScript), vars);
 
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java
diff --git a/plugins/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java
similarity index 100%
rename from plugins/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java
rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java
diff --git a/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml
similarity index 76%
rename from plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml
rename to modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml
index 7bdba7d0e95..1c81782f33a 100644
--- a/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml
+++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml
@@ -10,4 +10,4 @@
     - do:
         nodes.info: {}
 
-    - match:  { nodes.$master.plugins.0.name: lang-painless }
+    - match:  { nodes.$master.modules.0.name: lang-painless }
diff --git a/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml
similarity index 100%
rename from plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml
rename to modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml
diff --git a/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml
similarity index 99%
rename from plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml
rename to modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml
index 375b8c4986c..da28a1f4201 100644
--- a/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml
+++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml
@@ -94,4 +94,3 @@
     - match: { hits.hits.0.fields.sNum1.0: 1.0 }
     - match: { hits.hits.1.fields.sNum1.0: 2.0 }
     - match: { hits.hits.2.fields.sNum1.0: 3.0 }
-
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-850c6c2.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 39c8b9e5bb9..00000000000
--- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c0d6b8f891a803dc0ce92da01e868a6ef31f0f09
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1
new file mode 100644
index 00000000000..18440dcdc04
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1
@@ -0,0 +1 @@
+69a6e72d322b6643f1b419e6c9cc46623a2404e9
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-850c6c2.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index a1c1b8ff80e..00000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8a8bcbbdc2d44ae64885e1e353b2cb66e1f906f5
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1
new file mode 100644
index 00000000000..832db46564e
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1
@@ -0,0 +1 @@
+e9d68dd5d9fae3349b81de5952d0ee8115c696a4
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-850c6c2.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 36b1fb24495..00000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9f176b3bdd40c6ccfcce53e9f4eae5273a71958f
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1
new file mode 100644
index 00000000000..3436526863d
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1
@@ -0,0 +1 @@
+c4735c43440ebcb20f2b6f49f508fedc12f5366c
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-850c6c2.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index f58e5538717..00000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f2b1d0e000be8bfad3e3c88ba9d19f5b31edf69e
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1
new file mode 100644
index 00000000000..95b85f7edbd
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1
@@ -0,0 +1 @@
+a31a4d1476d45738a460374d9801dc5ed9b49c1a
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-850c6c2.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-850c6c2.jar.sha1
deleted file mode 100644
index 4b4ed2950fa..00000000000
--- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-850c6c2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-619040b891af8d2427a9f324148bb2e491685511
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1
new file mode 100644
index 00000000000..d5a28231e65
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1
@@ -0,0 +1 @@
+1a7505d011aca54c004d0fc86a490d5f054bb903
\ No newline at end of file
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java
index a1e5424a37e..a352bc02418 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java
@@ -21,21 +21,55 @@ package org.elasticsearch.cloud.gce;
 
 import com.google.api.services.compute.model.Instance;
 import org.elasticsearch.common.component.LifecycleComponent;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
 
+import java.util.Arrays;
 import java.io.IOException;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 
 public interface GceComputeService extends LifecycleComponent<GceComputeService> {
-    final class Fields {
-        public static final String PROJECT = "cloud.gce.project_id";
-        public static final String ZONE = "cloud.gce.zone";
-        public static final String REFRESH = "cloud.gce.refresh_interval";
-        public static final String TAGS = "discovery.gce.tags";
-        public static final String VERSION = "Elasticsearch/GceCloud/1.0";
 
-        public static final String RETRY = "cloud.gce.retry";
-        public static final String MAXWAIT = "cloud.gce.max_wait";
-    }
+    /**
+     * GCE API Version: Elasticsearch/GceCloud/1.0
+     */
+    String VERSION = "Elasticsearch/GceCloud/1.0";
+
+    // cloud.gce settings
+
+    /**
+     * cloud.gce.project_id: Google project id
+     */
+    Setting<String> PROJECT_SETTING = Setting.simpleString("cloud.gce.project_id", false, Setting.Scope.CLUSTER);
+
+    /**
+     * cloud.gce.zone: Google Compute Engine zones
+     */
+    Setting<List<String>> ZONE_SETTING =
+        Setting.listSetting("cloud.gce.zone", Collections.emptyList(), s -> s, false, Setting.Scope.CLUSTER);
+
+    /**
+     * cloud.gce.refresh_interval: How long the list of hosts is cached to prevent further requests to the GCE API. 0 disables caching.
+     * A negative value will cause infinite caching. Defaults to 0s.
+     */
+    Setting<TimeValue> REFRESH_SETTING =
+        Setting.timeSetting("cloud.gce.refresh_interval", TimeValue.timeValueSeconds(0), false, Setting.Scope.CLUSTER);
+
+    /**
+     * cloud.gce.retry: Should we retry calling the GCE API in case of error? Defaults to true.
+     */
+    Setting<Boolean> RETRY_SETTING = Setting.boolSetting("cloud.gce.retry", true, false, Setting.Scope.CLUSTER);
+
+    /**
+     * cloud.gce.max_wait: How long exponential backoff should retry before definitively failing.
+     * It's the total time elapsed since the initial call was made.
+     * A negative value will retry indefinitely. Defaults to `-1s` (retry indefinitely).
+     */
+    Setting<TimeValue> MAX_WAIT_SETTING =
+        Setting.timeSetting("cloud.gce.max_wait", TimeValue.timeValueSeconds(-1), false, Setting.Scope.CLUSTER);
 
     /**
      * Return a collection of running instances within the same GCE project
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java
index 76172172bb8..cfddaf2548f 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java
@@ -48,7 +48,6 @@ import java.security.PrivilegedAction;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
@@ -157,9 +156,8 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute
     @Inject
     public GceComputeServiceImpl(Settings settings, NetworkService networkService) {
         super(settings);
-        this.project = settings.get(Fields.PROJECT);
-        String[] zoneList = settings.getAsArray(Fields.ZONE);
-        this.zones = Arrays.asList(zoneList);
+        this.project = PROJECT_SETTING.get(settings);
+        this.zones = ZONE_SETTING.get(settings);
         networkService.addCustomNameResolver(new GceNameResolver(settings, this));
     }
 
@@ -207,15 +205,13 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute
                 refreshInterval = TimeValue.timeValueSeconds(credential.getExpiresInSeconds() - 1);
             }
 
-            boolean ifRetry = settings.getAsBoolean(Fields.RETRY, true);
-            Compute.Builder builder = new Compute.Builder(getGceHttpTransport(), gceJsonFactory, null)
-                    .setApplicationName(Fields.VERSION);
+            Compute.Builder builder = new Compute.Builder(getGceHttpTransport(), gceJsonFactory, null).setApplicationName(VERSION);
 
-            if (ifRetry) {
-                int maxWait = settings.getAsInt(Fields.MAXWAIT, -1);
+            if (RETRY_SETTING.get(settings)) {
+                TimeValue maxWait = MAX_WAIT_SETTING.get(settings);
                 RetryHttpInitializerWrapper retryHttpInitializerWrapper;
 
-                if (maxWait > 0) {
+                if (maxWait.getMillis() > 0) {
                     retryHttpInitializerWrapper = new RetryHttpInitializerWrapper(credential, maxWait);
                 } else {
                     retryHttpInitializerWrapper = new RetryHttpInitializerWrapper(credential);
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java
index fe87b9244d4..84e59d56bea 100755
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceDiscovery.java
@@ -23,6 +23,7 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.discovery.DiscoverySettings;
 import org.elasticsearch.discovery.zen.ZenDiscovery;
@@ -31,6 +32,9 @@ import org.elasticsearch.discovery.zen.ping.ZenPingService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.Collections;
+import java.util.List;
+
 /**
  *
  */
@@ -38,6 +42,12 @@ public class GceDiscovery extends ZenDiscovery {
 
     public static final String GCE = "gce";
 
+    /**
+     * discovery.gce.tags: The gce discovery can filter machines to include in the cluster based on tags.
+     */
+    public static final Setting<List<String>> TAGS_SETTING =
+        Setting.listSetting("discovery.gce.tags", Collections.emptyList(), s -> s, false, Setting.Scope.CLUSTER);
+
     @Inject
     public GceDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService,
                         ClusterService clusterService, ClusterSettings clusterSettings, ZenPingService pingService,
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
index 8ea93825bd1..f36d05df0f5 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
@@ -39,12 +39,9 @@ import org.elasticsearch.transport.TransportService;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 
-import static org.elasticsearch.cloud.gce.GceComputeService.Fields;
-
 /**
  *
  */
@@ -60,8 +57,8 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
 
     private final Version version;
     private final String project;
-    private final String[] zones;
-    private final String[] tags;
+    private final List<String> zones;
+    private final List<String> tags;
 
     private final TimeValue refreshInterval;
     private long lastRefresh;
@@ -78,24 +75,29 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
         this.networkService = networkService;
         this.version = version;
 
-        this.refreshInterval = settings.getAsTime(Fields.REFRESH, TimeValue.timeValueSeconds(0));
-        this.project = settings.get(Fields.PROJECT);
-        this.zones = settings.getAsArray(Fields.ZONE);
+        this.refreshInterval = GceComputeService.REFRESH_SETTING.get(settings);
+        this.project = GceComputeService.PROJECT_SETTING.get(settings);
+        this.zones = GceComputeService.ZONE_SETTING.get(settings);
 
-        this.tags = settings.getAsArray(Fields.TAGS);
+        this.tags = GceDiscovery.TAGS_SETTING.get(settings);
         if (logger.isDebugEnabled()) {
-            logger.debug("using tags {}", Arrays.asList(this.tags));
+            logger.debug("using tags {}", this.tags);
         }
     }
 
     /**
      * We build the list of Nodes from GCE Management API
-     * Information can be cached using `plugins.refresh_interval` property if needed.
-     * Setting `plugins.refresh_interval` to `-1` will cause infinite caching.
-     * Setting `plugins.refresh_interval` to `0` will disable caching (default).
+     * Information can be cached using the `cloud.gce.refresh_interval` property if needed.
      */
     @Override
     public List<DiscoveryNode> buildDynamicNodes() {
+        // We check that needed properties have been set
+        if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) {
+            throw new IllegalArgumentException("one or more gce discovery settings are missing. " +
+                "Check elasticsearch.yml file. Should have [" + GceComputeService.PROJECT_SETTING.getKey() +
+                "] and [" + GceComputeService.ZONE_SETTING.getKey() + "].");
+        }
+
         if (refreshInterval.millis() != 0) {
             if (cachedDiscoNodes != null &&
                     (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) {
@@ -142,7 +144,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
 
                 // see if we need to filter by tag
                 boolean filterByTag = false;
-                if (tags.length > 0) {
+                if (tags.isEmpty() == false) {
                     logger.trace("start filtering instance {} with tags {}.", name, tags);
                     if (instance.getTags() == null || instance.getTags().isEmpty()
                             || instance.getTags().getItems() == null || instance.getTags().getItems().isEmpty()) {
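
The tag matching itself falls outside this hunk; below is a plausible sketch of the list-based check, assuming the Google Compute `Instance` model (`getTags().getItems()` returning a `List<String>`, as used above) and assuming the provider keeps an instance only when it carries every configured tag:

import java.util.List;

import com.google.api.services.compute.model.Instance;

class TagFilterSketch {
    // Sketch only: the "must carry every configured tag" rule is an assumption, not quoted from the provider.
    static boolean matchesTags(Instance instance, List<String> requiredTags) {
        if (requiredTags.isEmpty()) {
            return true; // no tag filtering requested
        }
        if (instance.getTags() == null || instance.getTags().getItems() == null) {
            return false; // the instance exposes no tags at all
        }
        return instance.getTags().getItems().containsAll(requiredTags);
    }
}
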
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
index 22d759fc2dd..b21d397d78a 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
@@ -32,6 +32,7 @@ import com.google.api.client.util.Sleeper;
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.unit.TimeValue;
 
 import java.io.IOException;
 import java.security.AccessController;
@@ -40,7 +41,7 @@ import java.util.Objects;
 
 public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
 
-    private int maxWait;
+    private TimeValue maxWait;
 
     private static final ESLogger logger =
             ESLoggerFactory.getLogger(RetryHttpInitializerWrapper.class.getName());
@@ -55,16 +56,16 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
     private final Sleeper sleeper;
 
     public RetryHttpInitializerWrapper(Credential wrappedCredential) {
-        this(wrappedCredential, Sleeper.DEFAULT, ExponentialBackOff.DEFAULT_MAX_ELAPSED_TIME_MILLIS);
+        this(wrappedCredential, Sleeper.DEFAULT, TimeValue.timeValueMillis(ExponentialBackOff.DEFAULT_MAX_ELAPSED_TIME_MILLIS));
     }
 
-    public RetryHttpInitializerWrapper(Credential wrappedCredential, int maxWait) {
+    public RetryHttpInitializerWrapper(Credential wrappedCredential, TimeValue maxWait) {
         this(wrappedCredential, Sleeper.DEFAULT, maxWait);
     }
 
     // Use only for testing.
     RetryHttpInitializerWrapper(
-            Credential wrappedCredential, Sleeper sleeper, int maxWait) {
+            Credential wrappedCredential, Sleeper sleeper, TimeValue maxWait) {
         this.wrappedCredential = Objects.requireNonNull(wrappedCredential);
         this.sleeper = sleeper;
         this.maxWait = maxWait;
@@ -77,12 +78,7 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
         if (sm != null) {
             sm.checkPermission(new SpecialPermission());
         }
-        return AccessController.doPrivileged(new PrivilegedAction<MockGoogleCredential.Builder>() {
-            @Override
-            public MockGoogleCredential.Builder run() {
-                return new MockGoogleCredential.Builder();
-            }
-        });
+        return AccessController.doPrivileged((PrivilegedAction<MockGoogleCredential.Builder>) () -> new MockGoogleCredential.Builder());
     }
 
     @Override
@@ -90,7 +86,7 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
         final HttpUnsuccessfulResponseHandler backoffHandler =
                 new HttpBackOffUnsuccessfulResponseHandler(
                         new ExponentialBackOff.Builder()
-                                .setMaxElapsedTimeMillis(maxWait)
+                                .setMaxElapsedTimeMillis(((int) maxWait.getMillis()))
                                 .build())
                         .setSleeper(sleeper);
 
@@ -122,7 +118,7 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
         httpRequest.setIOExceptionHandler(
                 new HttpBackOffIOExceptionHandler(
                         new ExponentialBackOff.Builder()
-                                .setMaxElapsedTimeMillis(maxWait)
+                                .setMaxElapsedTimeMillis(((int) maxWait.getMillis()))
                                 .build())
                         .setSleeper(sleeper)
         );
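
In short, the wrapper now carries the wait as a `TimeValue` and only unwraps it to plain millis at the Google client boundary. A minimal sketch of that boundary, using only types already imported in this file:

import com.google.api.client.util.ExponentialBackOff;
import org.elasticsearch.common.unit.TimeValue;

class BackoffSketch {
    static ExponentialBackOff backoffFor(TimeValue maxWait) {
        // ExponentialBackOff still takes raw millis, hence the (int) maxWait.getMillis() cast above.
        return new ExponentialBackOff.Builder()
                .setMaxElapsedTimeMillis((int) maxWait.getMillis())
                .build();
    }

    static void demo() {
        // Equivalent to passing the old int-based constructor argument of 5000.
        ExponentialBackOff backoff = backoffFor(TimeValue.timeValueSeconds(5));
    }
}
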
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
index 97e637abcc2..c404906b0cb 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
@@ -25,12 +25,12 @@ import com.google.api.client.util.ClassInfo;
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.cloud.gce.GceComputeService;
 import org.elasticsearch.cloud.gce.GceModule;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.component.LifecycleComponent;
 import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.discovery.DiscoveryModule;
 import org.elasticsearch.discovery.gce.GceDiscovery;
 import org.elasticsearch.discovery.gce.GceUnicastHostsProvider;
@@ -38,9 +38,8 @@ import org.elasticsearch.plugins.Plugin;
 
 import java.security.AccessController;
 import java.security.PrivilegedAction;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
+import java.util.Collections;
 
 public class GceDiscoveryPlugin extends Plugin {
     static {
@@ -84,70 +83,30 @@ public class GceDiscoveryPlugin extends Plugin {
 
     @Override
     public Collection<Module> nodeModules() {
-        List<Module> modules = new ArrayList<>();
-        if (isDiscoveryAlive(settings, logger)) {
-            modules.add(new GceModule());
-        }
-        return modules;
+        return Collections.singletonList(new GceModule());
     }
 
     @Override
     @SuppressWarnings("rawtypes") // Supertype uses raw type
     public Collection<Class<? extends LifecycleComponent>> nodeServices() {
-        Collection<Class<? extends LifecycleComponent>> services = new ArrayList<>();
-        if (isDiscoveryAlive(settings, logger)) {
-            services.add(GceModule.getComputeServiceImpl());
-        }
-        return services;
+        return Collections.singletonList(GceModule.getComputeServiceImpl());
     }
 
     public void onModule(DiscoveryModule discoveryModule) {
-        if (isDiscoveryAlive(settings, logger)) {
-            discoveryModule.addDiscoveryType("gce", GceDiscovery.class);
+        discoveryModule.addDiscoveryType("gce", GceDiscovery.class);
+        // If discovery.type is set to gce, we add the GCE unicast hosts provider
+        if (GceDiscovery.GCE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) {
             discoveryModule.addUnicastHostProvider(GceUnicastHostsProvider.class);
         }
     }
 
-    /**
-     * Check if discovery is meant to start
-     *
-     * @return true if we can start gce discovery features
-     */
-    public static boolean isDiscoveryAlive(Settings settings, ESLogger logger) {
-        // User set discovery.type: gce
-        if (GceDiscovery.GCE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)) == false) {
-            logger.debug("discovery.type not set to {}", GceDiscovery.GCE);
-            return false;
-        }
-
-        if (checkProperty(GceComputeService.Fields.PROJECT, settings.get(GceComputeService.Fields.PROJECT), logger) == false ||
-                checkProperty(GceComputeService.Fields.ZONE, settings.getAsArray(GceComputeService.Fields.ZONE), logger) == false) {
-            logger.debug("one or more gce discovery settings are missing. " +
-                            "Check elasticsearch.yml file. Should have [{}] and [{}].",
-                    GceComputeService.Fields.PROJECT,
-                    GceComputeService.Fields.ZONE);
-            return false;
-        }
-
-        logger.trace("all required properties for gce discovery are set!");
-
-        return true;
+    public void onModule(SettingsModule settingsModule) {
+        // Register GCE settings
+        settingsModule.registerSetting(GceComputeService.PROJECT_SETTING);
+        settingsModule.registerSetting(GceComputeService.ZONE_SETTING);
+        settingsModule.registerSetting(GceDiscovery.TAGS_SETTING);
+        settingsModule.registerSetting(GceComputeService.REFRESH_SETTING);
+        settingsModule.registerSetting(GceComputeService.RETRY_SETTING);
+        settingsModule.registerSetting(GceComputeService.MAX_WAIT_SETTING);
     }
-
-    private static boolean checkProperty(String name, String value, ESLogger logger) {
-        if (!Strings.hasText(value)) {
-            logger.warn("{} is not set.", name);
-            return false;
-        }
-        return true;
-    }
-
-    private static boolean checkProperty(String name, String[] values, ESLogger logger) {
-        if (values == null || values.length == 0) {
-            logger.warn("{} is not set.", name);
-            return false;
-        }
-        return true;
-    }
-
 }
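
Registering the settings here is what makes the `cloud.gce.*` and `discovery.gce.*` keys known to the node's settings infrastructure; each one is then read through its typed getter instead of raw string parsing. A small sketch, reusing only getters that appear elsewhere in this patch (the values are illustrative):

import java.util.List;

import org.elasticsearch.cloud.gce.GceComputeService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.gce.GceDiscovery;

class GceSettingsSketch {
    static void demo() {
        Settings settings = Settings.builder()
            .put(GceComputeService.PROJECT_SETTING.getKey(), "my-project")       // illustrative value
            .putArray(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
            .putArray(GceDiscovery.TAGS_SETTING.getKey(), "elasticsearch")
            .build();

        String project = GceComputeService.PROJECT_SETTING.get(settings);    // "my-project"
        List<String> zones = GceComputeService.ZONE_SETTING.get(settings);   // ["europe-west1-b"]
        List<String> tags = GceDiscovery.TAGS_SETTING.get(settings);         // ["elasticsearch"]
        TimeValue refresh = GceComputeService.REFRESH_SETTING.get(settings); // the setting's default when unset
    }
}
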
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverySettingsTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverySettingsTests.java
deleted file mode 100644
index 334c685aa36..00000000000
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverySettingsTests.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.discovery.gce;
-
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin;
-import org.elasticsearch.test.ESTestCase;
-
-import static org.hamcrest.Matchers.is;
-
-public class GceDiscoverySettingsTests extends ESTestCase {
-    public void testDiscoveryReady() {
-        Settings settings = Settings.builder()
-                .put("discovery.type", "gce")
-                .put("cloud.gce.project_id", "gce_id")
-                .putArray("cloud.gce.zone", "gce_zones_1", "gce_zones_2")
-                .build();
-
-        boolean discoveryReady = GceDiscoveryPlugin.isDiscoveryAlive(settings, logger);
-        assertThat(discoveryReady, is(true));
-    }
-
-    public void testDiscoveryNotReady() {
-        Settings settings = Settings.EMPTY;
-        boolean discoveryReady = GceDiscoveryPlugin.isDiscoveryAlive(settings, logger);
-        assertThat(discoveryReady, is(false));
-
-        settings = Settings.builder()
-                .put("discovery.type", "gce")
-                .build();
-
-        discoveryReady = GceDiscoveryPlugin.isDiscoveryAlive(settings, logger);
-        assertThat(discoveryReady, is(false));
-
-        settings = Settings.builder()
-                .put("discovery.type", "gce")
-                .put("cloud.gce.project_id", "gce_id")
-                .build();
-
-        discoveryReady = GceDiscoveryPlugin.isDiscoveryAlive(settings, logger);
-        assertThat(discoveryReady, is(false));
-
-
-        settings = Settings.builder()
-                .put("discovery.type", "gce")
-                .putArray("cloud.gce.zone", "gce_zones_1", "gce_zones_2")
-                .build();
-
-        discoveryReady = GceDiscoveryPlugin.isDiscoveryAlive(settings, logger);
-        assertThat(discoveryReady, is(false));
-
-        settings = Settings.builder()
-                .put("cloud.gce.project_id", "gce_id")
-                .putArray("cloud.gce.zone", "gce_zones_1", "gce_zones_2")
-                .build();
-
-        discoveryReady = GceDiscoveryPlugin.isDiscoveryAlive(settings, logger);
-        assertThat(discoveryReady, is(false));
-    }
-}
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
index cb7cd8460bf..d493e71d762 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
@@ -35,6 +35,7 @@ import org.junit.BeforeClass;
 import java.util.List;
 import java.util.Locale;
 
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 
@@ -51,8 +52,8 @@ import static org.hamcrest.Matchers.is;
  * For example, if you create a test `myNewAwesomeTest` with following settings:
  *
  * Settings nodeSettings = Settings.builder()
- *  .put(GceComputeService.Fields.PROJECT, projectName)
- *  .put(GceComputeService.Fields.ZONE, "europe-west1-b")
+ *  .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+ *  .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
  *  .build();
  *
  *  You need to create a file under `src/test/resources/org/elasticsearch/discovery/gce/` named:
@@ -118,8 +119,8 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testNodesWithDifferentTagsAndNoTagSet() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .put(GceComputeService.Fields.ZONE, "europe-west1-b")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -128,9 +129,9 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testNodesWithDifferentTagsAndOneTagSet() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .put(GceComputeService.Fields.ZONE, "europe-west1-b")
-                .putArray(GceComputeService.Fields.TAGS, "elasticsearch")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
+                .putArray(GceDiscovery.TAGS_SETTING.getKey(), "elasticsearch")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -140,9 +141,9 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testNodesWithDifferentTagsAndTwoTagSet() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .put(GceComputeService.Fields.ZONE, "europe-west1-b")
-                .putArray(GceComputeService.Fields.TAGS, "elasticsearch", "dev")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
+                .putArray(GceDiscovery.TAGS_SETTING.getKey(), "elasticsearch", "dev")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -152,8 +153,8 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testNodesWithSameTagsAndNoTagSet() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .put(GceComputeService.Fields.ZONE, "europe-west1-b")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -162,9 +163,9 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testNodesWithSameTagsAndOneTagSet() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .put(GceComputeService.Fields.ZONE, "europe-west1-b")
-                .putArray(GceComputeService.Fields.TAGS, "elasticsearch")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
+                .putArray(GceDiscovery.TAGS_SETTING.getKey(), "elasticsearch")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -173,9 +174,9 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testNodesWithSameTagsAndTwoTagsSet() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .put(GceComputeService.Fields.ZONE, "europe-west1-b")
-                .putArray(GceComputeService.Fields.TAGS, "elasticsearch", "dev")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b")
+                .putArray(GceDiscovery.TAGS_SETTING.getKey(), "elasticsearch", "dev")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -184,8 +185,8 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testMultipleZonesAndTwoNodesInSameZone() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .putArray(GceComputeService.Fields.ZONE, "us-central1-a", "europe-west1-b")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -194,8 +195,8 @@ public class GceDiscoveryTests extends ESTestCase {
 
     public void testMultipleZonesAndTwoNodesInDifferentZones() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .putArray(GceComputeService.Fields.ZONE, "us-central1-a", "europe-west1-b")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -207,11 +208,48 @@ public class GceDiscoveryTests extends ESTestCase {
      */
     public void testZeroNode43() {
         Settings nodeSettings = Settings.builder()
-                .put(GceComputeService.Fields.PROJECT, projectName)
-                .putArray(GceComputeService.Fields.ZONE, "us-central1-a", "us-central1-b")
+                .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+                .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
                 .build();
         mock = new GceComputeServiceMock(nodeSettings, networkService);
         List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
         assertThat(discoveryNodes, hasSize(0));
     }
+
+    public void testIllegalSettingsMissingAllRequired() {
+        Settings nodeSettings = Settings.EMPTY;
+        mock = new GceComputeServiceMock(nodeSettings, networkService);
+        try {
+            buildDynamicNodes(mock, nodeSettings);
+            fail("We expect an IllegalArgumentException for incomplete settings");
+        } catch (IllegalArgumentException expected) {
+            assertThat(expected.getMessage(), containsString("one or more gce discovery settings are missing."));
+        }
+    }
+
+    public void testIllegalSettingsMissingProject() {
+        Settings nodeSettings = Settings.builder()
+            .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
+            .build();
+        mock = new GceComputeServiceMock(nodeSettings, networkService);
+        try {
+            buildDynamicNodes(mock, nodeSettings);
+            fail("We expect an IllegalArgumentException for incomplete settings");
+        } catch (IllegalArgumentException expected) {
+            assertThat(expected.getMessage(), containsString("one or more gce discovery settings are missing."));
+        }
+    }
+
+    public void testIllegalSettingsMissingZone() {
+        Settings nodeSettings = Settings.builder()
+            .put(GceComputeService.PROJECT_SETTING.getKey(), projectName)
+            .build();
+        mock = new GceComputeServiceMock(nodeSettings, networkService);
+        try {
+            buildDynamicNodes(mock, nodeSettings);
+            fail("We expect an IllegalArgumentException for incomplete settings");
+        } catch (IllegalArgumentException expected) {
+            assertThat(expected.getMessage(), containsString("one or more gce discovery settings are missing."));
+        }
+    }
 }
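
The three negative tests above use the try/fail/catch idiom; `expectThrows`, which this same patch uses in NodeEnvironmentEvilTests below, could express the same assertion more compactly. A sketch of one of them rewritten that way, assuming the test class's existing `mock`, `networkService` and `buildDynamicNodes` members:

public void testIllegalSettingsMissingAllRequired() {
    mock = new GceComputeServiceMock(Settings.EMPTY, networkService);
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> buildDynamicNodes(mock, Settings.EMPTY));
    assertThat(e.getMessage(), containsString("one or more gce discovery settings are missing."));
}
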
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java
index 9e48bc7d3df..aa6e91fabdf 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java
@@ -34,6 +34,7 @@ import com.google.api.client.testing.http.MockLowLevelHttpRequest;
 import com.google.api.client.testing.http.MockLowLevelHttpResponse;
 import com.google.api.client.testing.util.MockSleeper;
 import com.google.api.services.compute.Compute;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -100,7 +101,8 @@ public class RetryHttpInitializerWrapperTests extends ESTestCase {
                 .build();
         MockSleeper mockSleeper = new MockSleeper();
 
-        RetryHttpInitializerWrapper retryHttpInitializerWrapper = new RetryHttpInitializerWrapper(credential, mockSleeper, 5000);
+        RetryHttpInitializerWrapper retryHttpInitializerWrapper = new RetryHttpInitializerWrapper(credential, mockSleeper,
+            TimeValue.timeValueSeconds(5));
 
         Compute client = new Compute.Builder(fakeTransport, new JacksonFactory(), null)
                 .setHttpRequestInitializer(retryHttpInitializerWrapper)
@@ -115,7 +117,7 @@ public class RetryHttpInitializerWrapperTests extends ESTestCase {
     }
 
     public void testRetryWaitTooLong() throws Exception {
-        int maxWaitTime = 10;
+        TimeValue maxWaitTime = TimeValue.timeValueMillis(10);
         int maxRetryTimes = 50;
 
         FailThenSuccessBackoffTransport fakeTransport =
@@ -127,7 +129,7 @@ public class RetryHttpInitializerWrapperTests extends ESTestCase {
         MockSleeper oneTimeSleeper = new MockSleeper() {
             @Override
             public void sleep(long millis) throws InterruptedException {
-                Thread.sleep(maxWaitTime);
+                Thread.sleep(maxWaitTime.getMillis());
                 super.sleep(0); // important number, use this to get count
             }
         };
@@ -157,7 +159,8 @@ public class RetryHttpInitializerWrapperTests extends ESTestCase {
         MockGoogleCredential credential = RetryHttpInitializerWrapper.newMockCredentialBuilder()
                 .build();
         MockSleeper mockSleeper = new MockSleeper();
-        RetryHttpInitializerWrapper retryHttpInitializerWrapper = new RetryHttpInitializerWrapper(credential, mockSleeper, 500);
+        RetryHttpInitializerWrapper retryHttpInitializerWrapper = new RetryHttpInitializerWrapper(credential, mockSleeper,
+            TimeValue.timeValueMillis(500));
 
         Compute client = new Compute.Builder(fakeTransport, new JacksonFactory(), null)
                 .setHttpRequestInitializer(retryHttpInitializerWrapper)
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
new file mode 100644
index 00000000000..6c5fd797be4
--- /dev/null
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.env;
+
+import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.PosixPermissionsResetter;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.attribute.PosixFileAttributeView;
+import java.nio.file.attribute.PosixFilePermission;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+
+public class NodeEnvironmentEvilTests extends ESTestCase {
+
+    private static boolean isPosix;
+
+    @BeforeClass
+    public static void checkPosix() throws IOException {
+        isPosix = Files.getFileAttributeView(createTempFile(), PosixFileAttributeView.class) != null;
+    }
+
+    public void testMissingWritePermission() throws IOException {
+        assumeTrue("posix filesystem", isPosix);
+        final String[] tempPaths = tmpPaths();
+        Path path = PathUtils.get(randomFrom(tempPaths));
+        try (PosixPermissionsResetter attr = new PosixPermissionsResetter(path)) {
+            attr.setPermissions(new HashSet<>(Arrays.asList(PosixFilePermission.OTHERS_READ, PosixFilePermission.GROUP_READ,
+                PosixFilePermission.OWNER_READ)));
+            Settings build = Settings.builder()
+                    .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
+                    .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+            IOException ioException = expectThrows(IOException.class, () -> {
+                new NodeEnvironment(build, new Environment(build));
+            });
+            assertTrue(ioException.getMessage(), ioException.getMessage().startsWith(path.toString()));
+        }
+    }
+
+    public void testMissingWritePermissionOnIndex() throws IOException {
+        assumeTrue("posix filesystem", isPosix);
+        final String[] tempPaths = tmpPaths();
+        Path path = PathUtils.get(randomFrom(tempPaths));
+        Path fooIndex = path.resolve("elasticsearch").resolve("nodes").resolve("0").resolve(NodeEnvironment.INDICES_FOLDER)
+            .resolve("foo");
+        Files.createDirectories(fooIndex);
+        try (PosixPermissionsResetter attr = new PosixPermissionsResetter(fooIndex)) {
+            attr.setPermissions(new HashSet<>(Arrays.asList(PosixFilePermission.OTHERS_READ, PosixFilePermission.GROUP_READ,
+                PosixFilePermission.OWNER_READ)));
+            Settings build = Settings.builder()
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
+                .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+            IOException ioException = expectThrows(IOException.class, () -> {
+                new NodeEnvironment(build, new Environment(build));
+            });
+            assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to write in data directory"));
+        }
+    }
+
+    public void testMissingWritePermissionOnShard() throws IOException {
+        assumeTrue("posix filesystem", isPosix);
+        final String[] tempPaths = tmpPaths();
+        Path path = PathUtils.get(randomFrom(tempPaths));
+        Path fooIndex = path.resolve("elasticsearch").resolve("nodes").resolve("0").resolve(NodeEnvironment.INDICES_FOLDER)
+            .resolve("foo");
+        Path fooShard = fooIndex.resolve("0");
+        Path fooShardIndex = fooShard.resolve("index");
+        Path fooShardTranslog = fooShard.resolve("translog");
+        Path fooShardState = fooShard.resolve("_state");
+        Path pick = randomFrom(fooShard, fooShardIndex, fooShardTranslog, fooShardState);
+        Files.createDirectories(pick);
+        try (PosixPermissionsResetter attr = new PosixPermissionsResetter(pick)) {
+            attr.setPermissions(new HashSet<>(Arrays.asList(PosixFilePermission.OTHERS_READ, PosixFilePermission.GROUP_READ,
+                PosixFilePermission.OWNER_READ)));
+            Settings build = Settings.builder()
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
+                .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+            IOException ioException = expectThrows(IOException.class, () -> {
+                new NodeEnvironment(build, new Environment(build));
+            });
+            assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to write in data directory"));
+        }
+    }
+}
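
The three tests differ only in which directory loses its write bits and which message prefix is expected; if more cases get added, the shared body could be factored into a helper along these lines (a sketch only, reusing the same imports and test utilities as the class above):

private void assertNodeEnvFailsWithoutWritePermission(String[] dataPaths, Path unwritable,
                                                      String expectedMessagePrefix) throws IOException {
    try (PosixPermissionsResetter attr = new PosixPermissionsResetter(unwritable)) {
        // drop all write bits, keep the read bits; close() restores the originals
        attr.setPermissions(new HashSet<>(Arrays.asList(PosixFilePermission.OTHERS_READ,
            PosixFilePermission.GROUP_READ, PosixFilePermission.OWNER_READ)));
        Settings build = Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
            .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
        IOException ioException = expectThrows(IOException.class, () -> new NodeEnvironment(build, new Environment(build)));
        assertTrue(ioException.getMessage(), ioException.getMessage().startsWith(expectedMessagePrefix));
    }
}
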
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
index 0c37d7bb0ee..66dfa67ccbd 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
@@ -51,6 +51,7 @@ import org.elasticsearch.common.cli.UserError;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.PosixPermissionsResetter;
 import org.junit.BeforeClass;
 
 @LuceneTestCase.SuppressFileSystems("*")
@@ -63,24 +64,6 @@ public class InstallPluginCommandTests extends ESTestCase {
         isPosix = Files.getFileAttributeView(createTempFile(), PosixFileAttributeView.class) != null;
     }
 
-    /** Stores the posix attributes for a path and resets them on close. */
-    static class PosixPermissionsResetter implements AutoCloseable {
-        private final PosixFileAttributeView attributeView;
-        final Set<PosixFilePermission> permissions;
-        public PosixPermissionsResetter(Path path) throws IOException {
-            attributeView = Files.getFileAttributeView(path, PosixFileAttributeView.class);
-            assertNotNull(attributeView);
-            permissions = attributeView.readAttributes().permissions();
-        }
-        @Override
-        public void close() throws IOException {
-            attributeView.setPermissions(permissions);
-        }
-        public void setPermissions(Set<PosixFilePermission> newPermissions) throws IOException {
-            attributeView.setPermissions(newPermissions);
-        }
-    }
-
     /** Creates a test environment with bin, config and plugins directories. */
     static Environment createEnv() throws IOException {
         Path home = createTempDir();
@@ -103,7 +86,7 @@ public class InstallPluginCommandTests extends ESTestCase {
             }
         }
     }
-    
+
     static String writeZip(Path structure, String prefix) throws IOException {
         Path zip = createTempDir().resolve(structure.getFileName() + ".zip");
         try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
@@ -382,7 +365,7 @@ public class InstallPluginCommandTests extends ESTestCase {
         Files.createFile(binDir.resolve("somescript"));
         String pluginZip = createPlugin("fake", pluginDir);
         try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.binFile())) {
-            Set<PosixFilePermission> perms = new HashSet<>(binAttrs.permissions);
+            Set<PosixFilePermission> perms = binAttrs.getCopyPermissions();
             // make sure at least one execute perm is missing, so we know we forced it during installation
             perms.remove(PosixFilePermission.GROUP_EXECUTE);
             binAttrs.setPermissions(perms);
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml
index 1e7911e519a..d942ef43b30 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/30_mutate.yaml
@@ -80,17 +80,18 @@
         type: test
         id: 1
         pipeline: "my_pipeline"
-        body: {
-          field_to_rename: "value",
-          field_to_remove: "old_value",
-          field_to_lowercase: "LOWERCASE",
-          field_to_uppercase: "uppercase",
-          field_to_trim: "   trimmed   ",
-          field_to_split: "127-0-0-1",
-          field_to_join: ["127","0","0","1"],
-          field_to_convert: ["127","0","0","1"],
-          field_to_gsub: "127-0-0-1"
-        }
+        body: >
+          {
+            field_to_rename: "value",
+            field_to_remove: "old_value",
+            field_to_lowercase: "LOWERCASE",
+            field_to_uppercase: "uppercase",
+            field_to_trim: "   trimmed   ",
+            field_to_split: "127-0-0-1",
+            field_to_join: ["127","0","0","1"],
+            field_to_convert: ["127","0","0","1"],
+            field_to_gsub: "127-0-0-1"
+          }
 
   - do:
       get:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_foreach.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_foreach.yaml
index 09ef359a8c9..a26681152df 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_foreach.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/80_foreach.yaml
@@ -29,9 +29,10 @@
         type: test
         id: 1
         pipeline: "my_pipeline"
-        body: {
-          values: ["foo", "bar", "baz"]
-        }
+        body: >
+          {
+            values: ["foo", "bar", "baz"]
+          }
 
   - do:
       get:
diff --git a/settings.gradle b/settings.gradle
index 228b95ff511..e013d3df452 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -15,6 +15,7 @@ List projects = [
   'modules:lang-expression',
   'modules:lang-groovy',
   'modules:lang-mustache',
+  'modules:lang-painless',
   'plugins:analysis-icu',
   'plugins:analysis-kuromoji',
   'plugins:analysis-phonetic',
@@ -27,7 +28,6 @@ List projects = [
   'plugins:ingest-geoip',
   'plugins:ingest-attachment',
   'plugins:lang-javascript',
-  'plugins:lang-painless',
   'plugins:lang-python',
   'plugins:mapper-attachments',
   'plugins:mapper-murmur3',
diff --git a/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java b/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java
new file mode 100644
index 00000000000..a644205bad9
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/test/PosixPermissionsResetter.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test;
+
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.attribute.PosixFileAttributeView;
+import java.nio.file.attribute.PosixFilePermission;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Stores the posix attributes for a path and resets them on close. */
+public class PosixPermissionsResetter implements AutoCloseable {
+    private final PosixFileAttributeView attributeView;
+    private final Set<PosixFilePermission> permissions;
+    public PosixPermissionsResetter(Path path) throws IOException {
+        attributeView = Files.getFileAttributeView(path, PosixFileAttributeView.class);
+        Assert.assertNotNull(attributeView);
+        permissions = attributeView.readAttributes().permissions();
+    }
+    @Override
+    public void close() throws IOException {
+        attributeView.setPermissions(permissions);
+    }
+    public void setPermissions(Set<PosixFilePermission> newPermissions) throws IOException {
+        attributeView.setPermissions(newPermissions);
+    }
+
+    public Set<PosixFilePermission> getCopyPermissions() {
+        return new HashSet<>(permissions);
+    }
+}
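
A minimal usage sketch: the class is meant for try-with-resources, and `getCopyPermissions` hands out a defensive copy so callers (as in InstallPluginCommandTests above) can tweak the set without corrupting the snapshot that `close()` restores. The path below is illustrative only:

import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Set;

import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.test.PosixPermissionsResetter;

class PosixResetterSketch {
    static void demo() throws Exception {
        Path dir = PathUtils.get("build/some-test-dir"); // illustrative path
        try (PosixPermissionsResetter attr = new PosixPermissionsResetter(dir)) {
            Set<PosixFilePermission> perms = attr.getCopyPermissions(); // a copy, safe to mutate
            perms.remove(PosixFilePermission.OWNER_WRITE);
            attr.setPermissions(perms);
            // ... exercise code that should fail without write access ...
        } // original permissions restored here
    }
}
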
diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java
index 93eef969b43..00c35196a2d 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java
@@ -76,7 +76,7 @@ public class VersionUtils {
         assert version.before(Version.CURRENT);
         return version;
     }
-    
+
     /** Returns the oldest {@link Version} */
     public static Version getFirstVersion() {
         return SORTED_VERSIONS.get(0);
@@ -109,4 +109,11 @@ public class VersionUtils {
             return SORTED_VERSIONS.get(minVersionIndex + random.nextInt(range));
         }
     }
+
+    /** Returns true if the given version is expected to be a snapshot build (currently {@link Version#CURRENT} and 2.3.0). */
+    public static boolean isSnapshot(Version version) {
+        return Version.CURRENT.equals(version) || Version.V_2_3_0.equals(version);
+    }
 }
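
And a hedged usage sketch for the new helper, e.g. inside a test that needs a released (non-snapshot) version; `randomVersion` and `assumeFalse` are assumed from the surrounding test framework rather than shown in this patch:

Version version = VersionUtils.randomVersion(random());
// VersionUtils.isSnapshot is true only for Version.CURRENT and Version.V_2_3_0 at this point
assumeFalse("test requires a released version", VersionUtils.isSnapshot(version));
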