Remove old and unsupported version constants

All versions <= 2.0 are no longer supported. This commit removes all
uses of these version constants.
Simon Willnauer 2016-03-07 11:02:16 +01:00
parent 93adddc61b
commit f071f327db
45 changed files with 122 additions and 1241 deletions

View File

@@ -39,209 +39,6 @@ public class Version {
// AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
// the (internal) format of the id is there so we can easily do after/before checks on the id
// NOTE: ancient indexes created before 5.0 use this constant for e.g. analysis chain emulation (imperfect)
// it's named lucene 3 but also covers lucene 4 or anything else we no longer support.
public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_5_0_0;
public static final int V_0_18_0_ID = /*00*/180099;
public static final Version V_0_18_0 = new Version(V_0_18_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_1_ID = /*00*/180199;
public static final Version V_0_18_1 = new Version(V_0_18_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_2_ID = /*00*/180299;
public static final Version V_0_18_2 = new Version(V_0_18_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_3_ID = /*00*/180399;
public static final Version V_0_18_3 = new Version(V_0_18_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_4_ID = /*00*/180499;
public static final Version V_0_18_4 = new Version(V_0_18_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_5_ID = /*00*/180599;
public static final Version V_0_18_5 = new Version(V_0_18_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_6_ID = /*00*/180699;
public static final Version V_0_18_6 = new Version(V_0_18_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_7_ID = /*00*/180799;
public static final Version V_0_18_7 = new Version(V_0_18_7_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_8_ID = /*00*/180899;
public static final Version V_0_18_8 = new Version(V_0_18_8_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_RC1_ID = /*00*/190051;
public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_RC2_ID = /*00*/190052;
public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_RC3_ID = /*00*/190053;
public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_ID = /*00*/190099;
public static final Version V_0_19_0 = new Version(V_0_19_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_1_ID = /*00*/190199;
public static final Version V_0_19_1 = new Version(V_0_19_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_2_ID = /*00*/190299;
public static final Version V_0_19_2 = new Version(V_0_19_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_3_ID = /*00*/190399;
public static final Version V_0_19_3 = new Version(V_0_19_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_4_ID = /*00*/190499;
public static final Version V_0_19_4 = new Version(V_0_19_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_5_ID = /*00*/190599;
public static final Version V_0_19_5 = new Version(V_0_19_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_6_ID = /*00*/190699;
public static final Version V_0_19_6 = new Version(V_0_19_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_7_ID = /*00*/190799;
public static final Version V_0_19_7 = new Version(V_0_19_7_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_8_ID = /*00*/190899;
public static final Version V_0_19_8 = new Version(V_0_19_8_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_9_ID = /*00*/190999;
public static final Version V_0_19_9 = new Version(V_0_19_9_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_10_ID = /*00*/191099;
public static final Version V_0_19_10 = new Version(V_0_19_10_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_11_ID = /*00*/191199;
public static final Version V_0_19_11 = new Version(V_0_19_11_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_12_ID = /*00*/191299;
public static final Version V_0_19_12 = new Version(V_0_19_12_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_13_ID = /*00*/191399;
public static final Version V_0_19_13 = new Version(V_0_19_13_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_0_RC1_ID = /*00*/200051;
public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_0_ID = /*00*/200099;
public static final Version V_0_20_0 = new Version(V_0_20_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_1_ID = /*00*/200199;
public static final Version V_0_20_1 = new Version(V_0_20_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_2_ID = /*00*/200299;
public static final Version V_0_20_2 = new Version(V_0_20_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_3_ID = /*00*/200399;
public static final Version V_0_20_3 = new Version(V_0_20_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_4_ID = /*00*/200499;
public static final Version V_0_20_4 = new Version(V_0_20_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_5_ID = /*00*/200599;
public static final Version V_0_20_5 = new Version(V_0_20_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_6_ID = /*00*/200699;
public static final Version V_0_20_6 = new Version(V_0_20_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_0_Beta1_ID = /*00*/900001;
public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_0_RC1_ID = /*00*/900051;
public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_0_RC2_ID = /*00*/900052;
public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_0_ID = /*00*/900099;
public static final Version V_0_90_0 = new Version(V_0_90_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_1_ID = /*00*/900199;
public static final Version V_0_90_1 = new Version(V_0_90_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_2_ID = /*00*/900299;
public static final Version V_0_90_2 = new Version(V_0_90_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_3_ID = /*00*/900399;
public static final Version V_0_90_3 = new Version(V_0_90_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_4_ID = /*00*/900499;
public static final Version V_0_90_4 = new Version(V_0_90_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_5_ID = /*00*/900599;
public static final Version V_0_90_5 = new Version(V_0_90_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_6_ID = /*00*/900699;
public static final Version V_0_90_6 = new Version(V_0_90_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_7_ID = /*00*/900799;
public static final Version V_0_90_7 = new Version(V_0_90_7_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_8_ID = /*00*/900899;
public static final Version V_0_90_8 = new Version(V_0_90_8_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_9_ID = /*00*/900999;
public static final Version V_0_90_9 = new Version(V_0_90_9_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_10_ID = /*00*/901099;
public static final Version V_0_90_10 = new Version(V_0_90_10_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_11_ID = /*00*/901199;
public static final Version V_0_90_11 = new Version(V_0_90_11_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_12_ID = /*00*/901299;
public static final Version V_0_90_12 = new Version(V_0_90_12_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_13_ID = /*00*/901399;
public static final Version V_0_90_13 = new Version(V_0_90_13_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_0_Beta1_ID = 1000001;
public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_0_Beta2_ID = 1000002;
public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_0_RC1_ID = 1000051;
public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_0_RC2_ID = 1000052;
public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_0_ID = 1000099;
public static final Version V_1_0_0 = new Version(V_1_0_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_1_ID = 1000199;
public static final Version V_1_0_1 = new Version(V_1_0_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_2_ID = 1000299;
public static final Version V_1_0_2 = new Version(V_1_0_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_0_3_ID = 1000399;
public static final Version V_1_0_3 = new Version(V_1_0_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_1_0_ID = 1010099;
public static final Version V_1_1_0 = new Version(V_1_1_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_1_1_ID = 1010199;
public static final Version V_1_1_1 = new Version(V_1_1_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_1_2_ID = 1010299;
public static final Version V_1_1_2 = new Version(V_1_1_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_2_0_ID = 1020099;
public static final Version V_1_2_0 = new Version(V_1_2_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_2_1_ID = 1020199;
public static final Version V_1_2_1 = new Version(V_1_2_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_2_2_ID = 1020299;
public static final Version V_1_2_2 = new Version(V_1_2_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_2_3_ID = 1020399;
public static final Version V_1_2_3 = new Version(V_1_2_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_2_4_ID = 1020499;
public static final Version V_1_2_4 = new Version(V_1_2_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_0_ID = 1030099;
public static final Version V_1_3_0 = new Version(V_1_3_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_1_ID = 1030199;
public static final Version V_1_3_1 = new Version(V_1_3_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_2_ID = 1030299;
public static final Version V_1_3_2 = new Version(V_1_3_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_3_ID = 1030399;
public static final Version V_1_3_3 = new Version(V_1_3_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_4_ID = 1030499;
public static final Version V_1_3_4 = new Version(V_1_3_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_5_ID = 1030599;
public static final Version V_1_3_5 = new Version(V_1_3_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_6_ID = 1030699;
public static final Version V_1_3_6 = new Version(V_1_3_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_7_ID = 1030799;
public static final Version V_1_3_7 = new Version(V_1_3_7_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_8_ID = 1030899;
public static final Version V_1_3_8 = new Version(V_1_3_8_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_3_9_ID = 1030999;
public static final Version V_1_3_9 = new Version(V_1_3_9_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_0_Beta1_ID = 1040001;
public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_0_ID = 1040099;
public static final Version V_1_4_0 = new Version(V_1_4_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_1_ID = 1040199;
public static final Version V_1_4_1 = new Version(V_1_4_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_2_ID = 1040299;
public static final Version V_1_4_2 = new Version(V_1_4_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_3_ID = 1040399;
public static final Version V_1_4_3 = new Version(V_1_4_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_4_ID = 1040499;
public static final Version V_1_4_4 = new Version(V_1_4_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_4_5_ID = 1040599;
public static final Version V_1_4_5 = new Version(V_1_4_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_5_0_ID = 1050099;
public static final Version V_1_5_0 = new Version(V_1_5_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_5_1_ID = 1050199;
public static final Version V_1_5_1 = new Version(V_1_5_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_5_2_ID = 1050299;
public static final Version V_1_5_2 = new Version(V_1_5_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_6_0_ID = 1060099;
public static final Version V_1_6_0 = new Version(V_1_6_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_6_1_ID = 1060199;
public static final Version V_1_6_1 = new Version(V_1_6_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_6_2_ID = 1060299;
public static final Version V_1_6_2 = new Version(V_1_6_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_7_0_ID = 1070099;
public static final Version V_1_7_0 = new Version(V_1_7_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_7_1_ID = 1070199;
public static final Version V_1_7_1 = new Version(V_1_7_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_7_2_ID = 1070299;
public static final Version V_1_7_2 = new Version(V_1_7_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_7_3_ID = 1070399;
public static final Version V_1_7_3 = new Version(V_1_7_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_7_4_ID = 1070499;
public static final Version V_1_7_4 = new Version(V_1_7_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_1_7_5_ID = 1070599;
public static final Version V_1_7_5 = new Version(V_1_7_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_2_0_0_beta1_ID = 2000001;
public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
@@ -304,198 +101,6 @@ public class Version {
return V_2_0_0_beta2;
case V_2_0_0_beta1_ID:
return V_2_0_0_beta1;
case V_1_7_5_ID:
return V_1_7_5;
case V_1_7_4_ID:
return V_1_7_4;
case V_1_7_3_ID:
return V_1_7_3;
case V_1_7_2_ID:
return V_1_7_2;
case V_1_7_1_ID:
return V_1_7_1;
case V_1_7_0_ID:
return V_1_7_0;
case V_1_6_2_ID:
return V_1_6_2;
case V_1_6_1_ID:
return V_1_6_1;
case V_1_6_0_ID:
return V_1_6_0;
case V_1_5_2_ID:
return V_1_5_2;
case V_1_5_1_ID:
return V_1_5_1;
case V_1_5_0_ID:
return V_1_5_0;
case V_1_4_5_ID:
return V_1_4_5;
case V_1_4_4_ID:
return V_1_4_4;
case V_1_4_3_ID:
return V_1_4_3;
case V_1_4_2_ID:
return V_1_4_2;
case V_1_4_1_ID:
return V_1_4_1;
case V_1_4_0_ID:
return V_1_4_0;
case V_1_4_0_Beta1_ID:
return V_1_4_0_Beta1;
case V_1_3_9_ID:
return V_1_3_9;
case V_1_3_8_ID:
return V_1_3_8;
case V_1_3_7_ID:
return V_1_3_7;
case V_1_3_6_ID:
return V_1_3_6;
case V_1_3_5_ID:
return V_1_3_5;
case V_1_3_4_ID:
return V_1_3_4;
case V_1_3_3_ID:
return V_1_3_3;
case V_1_3_2_ID:
return V_1_3_2;
case V_1_3_1_ID:
return V_1_3_1;
case V_1_3_0_ID:
return V_1_3_0;
case V_1_2_4_ID:
return V_1_2_4;
case V_1_2_3_ID:
return V_1_2_3;
case V_1_2_2_ID:
return V_1_2_2;
case V_1_2_1_ID:
return V_1_2_1;
case V_1_2_0_ID:
return V_1_2_0;
case V_1_1_2_ID:
return V_1_1_2;
case V_1_1_1_ID:
return V_1_1_1;
case V_1_1_0_ID:
return V_1_1_0;
case V_1_0_3_ID:
return V_1_0_3;
case V_1_0_2_ID:
return V_1_0_2;
case V_1_0_1_ID:
return V_1_0_1;
case V_1_0_0_ID:
return V_1_0_0;
case V_1_0_0_RC2_ID:
return V_1_0_0_RC2;
case V_1_0_0_RC1_ID:
return V_1_0_0_RC1;
case V_1_0_0_Beta2_ID:
return V_1_0_0_Beta2;
case V_1_0_0_Beta1_ID:
return V_1_0_0_Beta1;
case V_0_90_13_ID:
return V_0_90_13;
case V_0_90_12_ID:
return V_0_90_12;
case V_0_90_11_ID:
return V_0_90_11;
case V_0_90_10_ID:
return V_0_90_10;
case V_0_90_9_ID:
return V_0_90_9;
case V_0_90_8_ID:
return V_0_90_8;
case V_0_90_7_ID:
return V_0_90_7;
case V_0_90_6_ID:
return V_0_90_6;
case V_0_90_5_ID:
return V_0_90_5;
case V_0_90_4_ID:
return V_0_90_4;
case V_0_90_3_ID:
return V_0_90_3;
case V_0_90_2_ID:
return V_0_90_2;
case V_0_90_1_ID:
return V_0_90_1;
case V_0_90_0_ID:
return V_0_90_0;
case V_0_90_0_RC2_ID:
return V_0_90_0_RC2;
case V_0_90_0_RC1_ID:
return V_0_90_0_RC1;
case V_0_90_0_Beta1_ID:
return V_0_90_0_Beta1;
case V_0_20_6_ID:
return V_0_20_6;
case V_0_20_5_ID:
return V_0_20_5;
case V_0_20_4_ID:
return V_0_20_4;
case V_0_20_3_ID:
return V_0_20_3;
case V_0_20_2_ID:
return V_0_20_2;
case V_0_20_1_ID:
return V_0_20_1;
case V_0_20_0_ID:
return V_0_20_0;
case V_0_20_0_RC1_ID:
return V_0_20_0_RC1;
case V_0_19_0_RC1_ID:
return V_0_19_0_RC1;
case V_0_19_0_RC2_ID:
return V_0_19_0_RC2;
case V_0_19_0_RC3_ID:
return V_0_19_0_RC3;
case V_0_19_0_ID:
return V_0_19_0;
case V_0_19_1_ID:
return V_0_19_1;
case V_0_19_2_ID:
return V_0_19_2;
case V_0_19_3_ID:
return V_0_19_3;
case V_0_19_4_ID:
return V_0_19_4;
case V_0_19_5_ID:
return V_0_19_5;
case V_0_19_6_ID:
return V_0_19_6;
case V_0_19_7_ID:
return V_0_19_7;
case V_0_19_8_ID:
return V_0_19_8;
case V_0_19_9_ID:
return V_0_19_9;
case V_0_19_10_ID:
return V_0_19_10;
case V_0_19_11_ID:
return V_0_19_11;
case V_0_19_12_ID:
return V_0_19_12;
case V_0_19_13_ID:
return V_0_19_13;
case V_0_18_0_ID:
return V_0_18_0;
case V_0_18_1_ID:
return V_0_18_1;
case V_0_18_2_ID:
return V_0_18_2;
case V_0_18_3_ID:
return V_0_18_3;
case V_0_18_4_ID:
return V_0_18_4;
case V_0_18_5_ID:
return V_0_18_5;
case V_0_18_6_ID:
return V_0_18_6;
case V_0_18_7_ID:
return V_0_18_7;
case V_0_18_8_ID:
return V_0_18_8;
default:
return new Version(id, org.apache.lucene.util.Version.LATEST);
}
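
A note on the id layout used by the switch above: the comment at the top of this file encodes each version as two digits of major, minor and revision plus a two-digit build, where builds 01-49 are betas, 50-98 are RCs, and 99 is a GA release. A hypothetical decoder (not part of this commit; describe() is illustrative only) shows why before/after checks can run directly on the integer id:

static String describe(int id) {
    int build = id % 100;             // 01-49 beta, 50-98 RC, 99 GA release
    int revision = (id / 100) % 100;
    int minor = (id / 10000) % 100;
    int major = id / 1000000;
    return major + "." + minor + "." + revision + " (build " + build + ")";
}
// e.g. V_1_7_5_ID = 1070599 -> "1.7.5 (build 99)" and V_2_0_0_beta1_ID = 2000001 -> "2.0.0 (build 1)";
// ordering needs no decoding at all, since V_1_7_5_ID < V_2_0_0_beta1_ID holds numerically.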

View File

@@ -197,9 +197,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
numberOfPendingTasks = in.readInt();
timedOut = in.readBoolean();
numberOfInFlightFetch = in.readInt();
if (in.getVersion().onOrAfter(Version.V_1_7_0)) {
delayedUnassignedShards = in.readInt();
}
delayedUnassignedShards = in.readInt();
taskMaxWaitingTime = TimeValue.readTimeValue(in);
}
@@ -212,9 +210,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
out.writeInt(numberOfPendingTasks);
out.writeBoolean(timedOut);
out.writeInt(numberOfInFlightFetch);
if (out.getVersion().onOrAfter(Version.V_1_7_0)) {
out.writeInt(delayedUnassignedShards);
}
out.writeInt(delayedUnassignedShards);
taskMaxWaitingTime.writeTo(out);
}
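
The onOrAfter(Version.V_1_7_0) guards deleted here are the usual wire-compatibility pattern: a field is only written or read when the stream's negotiated version knows about it. A minimal sketch of the pattern, using the field from this file (the surrounding method shape is illustrative):

public void writeTo(StreamOutput out) throws IOException {
    if (out.getVersion().onOrAfter(Version.V_1_7_0)) {
        // the remote node understands this field, so put it on the wire
        out.writeInt(delayedUnassignedShards);
    }
    // older remotes simply never see the field
}

Because this commit raises the oldest supported version above the 1.7.0 gate, the condition is always true, so the body is inlined and the guard dropped; the same simplification is applied throughout the files below.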

View File

@@ -40,13 +40,7 @@ public class PatternAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analy
public PatternAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
Version esVersion = indexSettings.getIndexVersionCreated();
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_RC1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
} else {
defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
}
final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET;
boolean lowercase = settings.getAsBoolean("lowercase", true);
CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);

View File

@@ -33,18 +33,10 @@ import org.elasticsearch.index.IndexSettings;
public class StandardAnalyzerProvider extends AbstractIndexAnalyzerProvider<StandardAnalyzer> {
private final StandardAnalyzer standardAnalyzer;
private final Version esVersion;
public StandardAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
this.esVersion = indexSettings.getIndexVersionCreated();
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_Beta1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
} else {
defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
}
final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET;
CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);
int maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
standardAnalyzer = new StandardAnalyzer(stopWords);

View File

@@ -32,17 +32,10 @@ import org.elasticsearch.index.IndexSettings;
public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider<StandardHtmlStripAnalyzer> {
private final StandardHtmlStripAnalyzer analyzer;
private final Version esVersion;
public StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
this.esVersion = indexSettings.getIndexVersionCreated();
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_RC1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
} else {
defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
}
final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET;
CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);
analyzer = new StandardHtmlStripAnalyzer(stopWords);
analyzer.setVersion(version);

View File

@@ -122,11 +122,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
// English stemmers
} else if ("english".equalsIgnoreCase(language)) {
if (indexVersion.onOrAfter(Version.V_1_3_0)) {
return new PorterStemFilter(tokenStream);
} else {
return new SnowballFilter(tokenStream, new EnglishStemmer());
}
return new PorterStemFilter(tokenStream);
} else if ("light_english".equalsIgnoreCase(language) || "lightEnglish".equalsIgnoreCase(language)
|| "kstem".equalsIgnoreCase(language)) {
return new KStemFilter(tokenStream);
@@ -135,11 +131,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
} else if ("porter".equalsIgnoreCase(language)) {
return new PorterStemFilter(tokenStream);
} else if ("porter2".equalsIgnoreCase(language)) {
if (indexVersion.onOrAfter(Version.V_1_3_0)) {
return new SnowballFilter(tokenStream, new EnglishStemmer());
} else {
return new SnowballFilter(tokenStream, new PorterStemmer());
}
return new SnowballFilter(tokenStream, new EnglishStemmer());
} else if ("minimal_english".equalsIgnoreCase(language) || "minimalEnglish".equalsIgnoreCase(language)) {
return new EnglishMinimalStemFilter(tokenStream);
} else if ("possessive_english".equalsIgnoreCase(language) || "possessiveEnglish".equalsIgnoreCase(language)) {

View File

@@ -458,7 +458,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
if (globalMetaDataFormat.exists(snapshotsBlobContainer, snapshotId.getSnapshot())) {
snapshotVersion = Version.CURRENT;
} else if (globalMetaDataLegacyFormat.exists(snapshotsBlobContainer, snapshotId.getSnapshot())) {
snapshotVersion = Version.V_1_0_0;
throw new SnapshotException(snapshotId, "snapshot is too old");
} else {
throw new SnapshotMissingException(snapshotId);
}

View File

@@ -193,11 +193,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
@Override
protected void doReadFrom(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
this.docCountError = in.readLong();
} else {
this.docCountError = -1;
}
this.docCountError = in.readLong();
this.order = InternalOrder.Streams.readOrder(in);
this.formatter = ValueFormatterStreams.readOptional(in);
this.requiredSize = readSize(in);
@@ -218,9 +214,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
out.writeLong(docCountError);
}
out.writeLong(docCountError);
InternalOrder.Streams.writeOrder(order, out);
ValueFormatterStreams.writeOptional(formatter, out);
writeSize(requiredSize, out);

View File

@@ -158,19 +158,13 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
@Override
public void readOtherStatsFrom(StreamInput in) throws IOException {
sumOfSqrs = in.readDouble();
if (in.getVersion().onOrAfter(Version.V_1_4_3)) {
sigma = in.readDouble();
} else {
sigma = 2.0;
}
sigma = in.readDouble();
}
@Override
protected void writeOtherStatsTo(StreamOutput out) throws IOException {
out.writeDouble(sumOfSqrs);
if (out.getVersion().onOrAfter(Version.V_1_4_3)) {
out.writeDouble(sigma);
}
out.writeDouble(sigma);
}

View File

@@ -31,8 +31,8 @@ import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.Version.V_0_20_0;
import static org.elasticsearch.Version.V_0_90_0;
import static org.elasticsearch.Version.V_2_2_0;
import static org.elasticsearch.Version.V_5_0_0;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsString;
@@ -42,21 +42,21 @@ import static org.hamcrest.Matchers.sameInstance;
public class VersionTests extends ESTestCase {
public void testVersionComparison() throws Exception {
assertThat(V_0_20_0.before(V_0_90_0), is(true));
assertThat(V_0_20_0.before(V_0_20_0), is(false));
assertThat(V_0_90_0.before(V_0_20_0), is(false));
assertThat(V_2_2_0.before(V_5_0_0), is(true));
assertThat(V_2_2_0.before(V_2_2_0), is(false));
assertThat(V_5_0_0.before(V_2_2_0), is(false));
assertThat(V_0_20_0.onOrBefore(V_0_90_0), is(true));
assertThat(V_0_20_0.onOrBefore(V_0_20_0), is(true));
assertThat(V_0_90_0.onOrBefore(V_0_20_0), is(false));
assertThat(V_2_2_0.onOrBefore(V_5_0_0), is(true));
assertThat(V_2_2_0.onOrBefore(V_2_2_0), is(true));
assertThat(V_5_0_0.onOrBefore(V_2_2_0), is(false));
assertThat(V_0_20_0.after(V_0_90_0), is(false));
assertThat(V_0_20_0.after(V_0_20_0), is(false));
assertThat(V_0_90_0.after(V_0_20_0), is(true));
assertThat(V_2_2_0.after(V_5_0_0), is(false));
assertThat(V_2_2_0.after(V_2_2_0), is(false));
assertThat(V_5_0_0.after(V_2_2_0), is(true));
assertThat(V_0_20_0.onOrAfter(V_0_90_0), is(false));
assertThat(V_0_20_0.onOrAfter(V_0_20_0), is(true));
assertThat(V_0_90_0.onOrAfter(V_0_20_0), is(true));
assertThat(V_2_2_0.onOrAfter(V_5_0_0), is(false));
assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true));
assertThat(V_5_0_0.onOrAfter(V_2_2_0), is(true));
}
public void testVersionConstantPresent() {
@@ -127,29 +127,27 @@ public class VersionTests extends ESTestCase {
public void testIndexCreatedVersion() {
// an actual index has a IndexMetaData.SETTING_INDEX_UUID
final Version version = randomFrom(Version.V_0_18_0, Version.V_0_90_13, Version.V_1_3_0);
final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0);
assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
}
public void testMinCompatVersion() {
assertThat(Version.V_2_0_0_beta1.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0_beta1));
assertThat(Version.V_1_3_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
assertThat(Version.V_1_2_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
assertThat(Version.V_1_2_3.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
assertThat(Version.V_1_0_0_RC2.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0_RC2));
assertThat(Version.V_2_1_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
assertThat(Version.V_2_2_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
assertThat(Version.V_2_3_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
assertThat(Version.V_5_0_0.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0));
}
public void testToString() {
// with 2.0.beta we lowercase
assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString());
assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.toString());
assertEquals("1.4.0", Version.V_1_4_0.toString());
assertEquals("5.0.0", Version.V_5_0_0.toString());
assertEquals("2.3.0", Version.V_2_3_0.toString());
}
public void testIsBeta() {
assertTrue(Version.V_2_0_0_beta1.isBeta());
assertTrue(Version.V_1_4_0_Beta1.isBeta());
assertFalse(Version.V_1_4_0.isBeta());
}
public void testParseVersion() {

View File

@@ -55,13 +55,7 @@ public class ClusterStateRequestTests extends ESTestCase {
assertThat(deserializedCSRequest.nodes(), equalTo(clusterStateRequest.nodes()));
assertThat(deserializedCSRequest.blocks(), equalTo(clusterStateRequest.blocks()));
assertThat(deserializedCSRequest.indices(), equalTo(clusterStateRequest.indices()));
if (testVersion.onOrAfter(Version.V_1_5_0)) {
assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
} else {
// versions before V_1_5_0 use IndicesOptions.lenientExpandOpen()
assertOptionsMatch(deserializedCSRequest.indicesOptions(), IndicesOptions.lenientExpandOpen());
}
assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
}
}
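
This test round-trips a ClusterStateRequest through a versioned stream and asserts the fields survive. A rough sketch of such a round-trip, assuming this era's BytesStreamOutput/StreamInput helpers (the exact stream plumbing may differ):

BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(testVersion);                      // serialize as if sent to testVersion
clusterStateRequest.writeTo(output);

StreamInput input = StreamInput.wrap(output.bytes());
input.setVersion(testVersion);                       // read back with the same wire version
ClusterStateRequest deserializedCSRequest = new ClusterStateRequest();
deserializedCSRequest.readFrom(input);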

View File

@@ -98,13 +98,7 @@ public class UpgradeIT extends ESBackcompatTestCase {
}
indexRandom(true, docs);
ensureGreen(indexName);
if (globalCompatibilityVersion().before(Version.V_1_4_0_Beta1)) {
// before 1.4 and the wait_if_ongoing flag, flushes could fail randomly, so we
// need to continue to try flushing until all shards succeed
assertTrue(awaitBusy(() -> flush(indexName).getFailedShards() == 0));
} else {
assertEquals(0, flush(indexName).getFailedShards());
}
assertEquals(0, flush(indexName).getFailedShards());
// index more docs that won't be flushed
numDocs = scaledRandomIntBetween(100, 1000);

View File

@@ -104,17 +104,8 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
}
private String randomAnalyzer() {
while(true) {
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values());
if (preBuiltAnalyzers == PreBuiltAnalyzers.SORANI && compatibilityVersion().before(Version.V_1_3_0)) {
continue; // SORANI was added in 1.3.0
}
if (preBuiltAnalyzers == PreBuiltAnalyzers.LITHUANIAN && compatibilityVersion().before(Version.V_2_1_0)) {
continue; // LITHUANIAN was added in 2.1.0
}
return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT);
}
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values());
return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT);
}
private static final class InputOutput {
@@ -127,7 +118,5 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
this.input = input;
this.field = field;
}
}
}

View File

@@ -188,10 +188,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i));
}
indexRandom(true, docs);
if (compatibilityVersion().before(Version.V_1_3_0)) {
// issue another refresh through a new node to side step issue #6545
assertNoFailures(backwardsCluster().internalCluster().dataNodeClient().admin().indices().prepareRefresh().setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
}
numDocs *= 2;
}

View File

@@ -446,7 +446,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
// #10067: create-bwc-index.py deleted any doc with long_sort:[10-20]
void assertDeleteByQueryWorked(String indexName, Version version) throws Exception {
if (version.onOrBefore(Version.V_1_0_0_Beta2) || version.onOrAfter(Version.V_2_0_0_beta1)) {
if (version.onOrAfter(Version.V_2_0_0_beta1)) {
// TODO: remove this once #10262 is fixed
return;
}

View File

@@ -194,14 +194,11 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {
assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1));
assertThat(template.mappings().size(), equalTo(1));
assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}"));
if (Version.fromString(version).onOrAfter(Version.V_1_1_0)) {
// Support for aliases in templates was added in v1.1.0
assertThat(template.aliases().size(), equalTo(3));
assertThat(template.aliases().get("alias1"), notNullValue());
assertThat(template.aliases().get("alias2").filter().string(), containsString(version));
assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy"));
assertThat(template.aliases().get("{index}-alias"), notNullValue());
}
assertThat(template.aliases().size(), equalTo(3));
assertThat(template.aliases().get("alias1"), notNullValue());
assertThat(template.aliases().get("alias2").filter().string(), containsString(version));
assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy"));
assertThat(template.aliases().get("{index}-alias"), notNullValue());
logger.info("--> cleanup");
cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()]));

View File

@@ -83,7 +83,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_CREATION_DATE, 1)
.put(IndexMetaData.SETTING_INDEX_UUID, "BOOM")
.put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID)
.put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1)
.put(indexSettings)
.build();
IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build();

View File

@@ -275,10 +275,10 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
Settings nodeSettings = Settings.settingsBuilder()
.put("discovery.type", "zen") // <-- To override the local setting if set externally
.build();
String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1);
String nodeName = internalCluster().startNode(nodeSettings, Version.V_5_0_0);
ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName);
ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName);
DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_1_6_0);
DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_2_0_0);
final AtomicReference<IllegalStateException> holder = new AtomicReference<>();
zenDiscovery.handleJoinRequest(node, clusterService.state(), new MembershipAction.JoinCallback() {
@Override
@@ -292,16 +292,16 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
});
assertThat(holder.get(), notNullValue());
assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [1.6.0] that is lower than the minimum compatible version [" + Version.V_2_0_0_beta1.minimumCompatibilityVersion() + "]"));
assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [2.0.0] that is lower than the minimum compatible version [" + Version.V_5_0_0.minimumCompatibilityVersion() + "]"));
}
public void testJoinElectedMaster_incompatibleMinVersion() {
ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_2_0_0_beta1);
ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_5_0_0);
DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0_beta1);
DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_5_0_0);
assertThat(electMasterService.electMaster(Collections.singletonList(node)), sameInstance(node));
node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_1_6_0);
assertThat("Can't join master because version 1.6.0 is lower than the minimum compatable version 2.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());
node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0);
assertThat("Can't join master because version 2.0.0 is lower than the minimum compatable version 5.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());
}
public void testDiscoveryStats() throws IOException {

View File

@@ -65,11 +65,6 @@ import static org.hamcrest.Matchers.startsWith;
public class GetActionIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(InternalSettingsPlugin.class); // uses index.version.created
}
public void testSimpleGet() {
assertAcked(prepareCreate("test")
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
@@ -324,128 +319,6 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
}
public void testThatGetFromTranslogShouldWorkWithExcludeBackcompat() throws Exception {
String index = "test";
String type = "type1";
String mapping = jsonBuilder()
.startObject()
.startObject(type)
.startObject("_source")
.array("excludes", "excluded")
.endObject()
.endObject()
.endObject()
.string();
assertAcked(prepareCreate(index)
.addMapping(type, mapping)
.setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));
client().prepareIndex(index, type, "1")
.setSource(jsonBuilder().startObject().field("field", "1", "2").field("excluded", "should not be seen").endObject())
.get();
GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get();
client().admin().indices().prepareFlush(index).get();
GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get();
assertThat(responseBeforeFlush.isExists(), is(true));
assertThat(responseAfterFlush.isExists(), is(true));
assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("field"));
assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
}
public void testThatGetFromTranslogShouldWorkWithIncludeBackcompat() throws Exception {
String index = "test";
String type = "type1";
String mapping = jsonBuilder()
.startObject()
.startObject(type)
.startObject("_source")
.array("includes", "included")
.endObject()
.endObject()
.endObject()
.string();
assertAcked(prepareCreate(index)
.addMapping(type, mapping)
.setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));
client().prepareIndex(index, type, "1")
.setSource(jsonBuilder().startObject().field("field", "1", "2").field("included", "should be seen").endObject())
.get();
GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get();
flush();
GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get();
assertThat(responseBeforeFlush.isExists(), is(true));
assertThat(responseAfterFlush.isExists(), is(true));
assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
}
@SuppressWarnings("unchecked")
public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFieldsBackcompat() throws Exception {
String index = "test";
String type = "type1";
String mapping = jsonBuilder()
.startObject()
.startObject(type)
.startObject("_source")
.array("includes", "included")
.array("excludes", "excluded")
.endObject()
.endObject()
.endObject()
.string();
assertAcked(prepareCreate(index)
.addMapping(type, mapping)
.setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));
client().prepareIndex(index, type, "1")
.setSource(jsonBuilder().startObject()
.field("field", "1", "2")
.startObject("included").field("field", "should be seen").field("field2", "extra field to remove").endObject()
.startObject("excluded").field("field", "should not be seen").field("field2", "should not be seen").endObject()
.endObject())
.get();
GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
assertThat(responseBeforeFlush.isExists(), is(true));
assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
// now tests that extra source filtering works as expected
GetResponse responseBeforeFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
.setFetchSource(new String[]{"field", "*.field"}, new String[]{"*.field2"}).get();
assertThat(responseBeforeFlushWithExtraFilters.isExists(), is(true));
assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("excluded")));
assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("field")));
assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), hasKey("included"));
assertThat((Map<String, Object>) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), hasKey("field"));
assertThat((Map<String, Object>) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), not(hasKey("field2")));
flush();
GetResponse responseAfterFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
GetResponse responseAfterFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
.setFetchSource("*.field", "*.field2").get();
assertThat(responseAfterFlush.isExists(), is(true));
assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
assertThat(responseAfterFlushWithExtraFilters.isExists(), is(true));
assertThat(responseBeforeFlushWithExtraFilters.getSourceAsString(), is(responseAfterFlushWithExtraFilters.getSourceAsString()));
}
public void testGetWithVersion() {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
@@ -1006,8 +879,7 @@ public class GetActionIT extends ESIntegTestCase {
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\",\n" +
" \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
@@ -1058,8 +930,7 @@ public class GetActionIT extends ESIntegTestCase {
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\",\n" +
" \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +

View File

@@ -108,7 +108,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
Settings settings2 = settingsBuilder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings2);
AnalysisService analysisService2 = getAnalysisService(newRegistry, settings2);
@@ -121,8 +121,8 @@ public class AnalysisModuleTests extends ModuleTestCase {
// analysis service has the expected version
assertThat(analysisService2.analyzer("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion());
assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion());
assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion());
assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion());
assertThat(analysisService2.analyzer("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class)));
assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion());
@@ -270,45 +270,6 @@ public class AnalysisModuleTests extends ModuleTestCase {
}
}
public void testBackwardCompatible() throws IOException {
Settings settings = settingsBuilder()
.put("index.analysis.analyzer.custom1.tokenizer", "standard")
.put("index.analysis.analyzer.custom1.position_offset_gap", "128")
.put("index.analysis.analyzer.custom2.tokenizer", "standard")
.put("index.analysis.analyzer.custom2.position_increment_gap", "256")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
Version.V_1_7_1))
.build();
AnalysisService analysisService = getAnalysisService(settings);
Analyzer custom1 = analysisService.analyzer("custom1").analyzer();
assertThat(custom1, instanceOf(CustomAnalyzer.class));
assertThat(custom1.getPositionIncrementGap("custom1"), equalTo(128));
Analyzer custom2 = analysisService.analyzer("custom2").analyzer();
assertThat(custom2, instanceOf(CustomAnalyzer.class));
assertThat(custom2.getPositionIncrementGap("custom2"), equalTo(256));
}
public void testWithBothSettings() throws IOException {
Settings settings = settingsBuilder()
.put("index.analysis.analyzer.custom.tokenizer", "standard")
.put("index.analysis.analyzer.custom.position_offset_gap", "128")
.put("index.analysis.analyzer.custom.position_increment_gap", "256")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
Version.V_1_7_1))
.build();
try {
getAnalysisService(settings);
fail("Analyzer has both position_offset_gap and position_increment_gap should fail");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Custom Analyzer [custom] defined both [position_offset_gap] and [position_increment_gap]" +
", use only [position_increment_gap]"));
}
}
public void testDeprecatedPositionOffsetGap() throws IOException {
Settings settings = settingsBuilder()
.put("index.analysis.analyzer.custom.tokenizer", "standard")

View File

@@ -1,69 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase {
private void assertNoStopwordsAfter(org.elasticsearch.Version noStopwordVersion, String type) throws IOException {
final int iters = scaledRandomIntBetween(10, 100);
org.elasticsearch.Version version = org.elasticsearch.Version.CURRENT;
for (int i = 0; i < iters; i++) {
Settings.Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop");
if (version.onOrAfter(noStopwordVersion)) {
if (random().nextBoolean()) {
builder.put(SETTING_VERSION_CREATED, version);
}
} else {
builder.put(SETTING_VERSION_CREATED, version);
}
builder.put("index.analysis.analyzer.foo.type", type);
builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
NamedAnalyzer analyzer = analysisService.analyzer("foo");
assertNotNull(analyzer);
if (version.onOrAfter(noStopwordVersion)) {
assertAnalyzesTo(analyzer, "this is bogus", new String[]{"this", "is", "bogus"});
} else {
assertAnalyzesTo(analyzer, "this is bogus", new String[]{"bogus"});
}
version = randomVersion();
}
}
public void testPatternAnalyzer() throws IOException {
assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "pattern");
}
public void testStandardHTMLStripAnalyzer() throws IOException {
assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "standard_html_strip");
}
public void testStandardAnalyzer() throws IOException {
assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_Beta1, "standard");
}
}

View File

@@ -1,43 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
/**
*
*/
public class PreBuiltAnalyzerProviderFactoryTests extends ESTestCase {
public void testVersioningInFactoryProvider() throws Exception {
PreBuiltAnalyzerProviderFactory factory = new PreBuiltAnalyzerProviderFactory("default", AnalyzerScope.INDEX, PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
AnalyzerProvider former090AnalyzerProvider = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
AnalyzerProvider currentAnalyzerProviderReference = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
// would love to access the version inside of the lucene analyzer, but that is not possible...
assertThat(currentAnalyzerProviderReference, is(not(former090AnalyzerProvider)));
}
}

View File

@@ -59,20 +59,14 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() {
assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT),
is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_0_18_0)));
is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_2_0_0)));
}
public void testThatInstancesAreCachedAndReused() {
assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT)));
assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0),
is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0)));
}
public void testThatInstancesWithSameLuceneVersionAreReused() {
// both are lucene 4.4 and should return the same instance
assertThat(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_4),
is(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_5)));
assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0)));
}
public void testThatAnalyzersAreUsedInMapping() throws IOException {

View File

@@ -1,45 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.analysis.PreBuiltCharFilters;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.is;
/**
*
*/
public class PreBuiltCharFilterFactoryFactoryTests extends ESTestCase {
public void testThatDifferentVersionsCanBeLoaded() throws IOException {
PreBuiltCharFilterFactoryFactory factory = new PreBuiltCharFilterFactoryFactory(PreBuiltCharFilters.HTML_STRIP.getCharFilterFactory(Version.CURRENT));
CharFilterFactory former090TokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
CharFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
CharFilterFactory currentTokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
assertThat(currentTokenizerFactory, is(former090TokenizerFactory));
assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy));
}
}

View File

@@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.analysis.PreBuiltTokenFilters;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
/**
*
*/
public class PreBuiltTokenFilterFactoryFactoryTests extends ESTestCase {
public void testThatCachingWorksForCachingStrategyOne() throws IOException {
PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.WORD_DELIMITER.getTokenFilterFactory(Version.CURRENT));
TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build());
TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build());
TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
assertThat(currentTokenizerFactory, is(former090TokenizerFactory));
assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy));
}
public void testThatDifferentVersionsCanBeLoaded() throws IOException {
PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.STOP.getTokenFilterFactory(Version.CURRENT));
TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build());
TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build());
TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory)));
assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy));
}
}


@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.analysis.PreBuiltTokenizers;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
/**
*
*/
public class PreBuiltTokenizerFactoryFactoryTests extends ESTestCase {
public void testThatDifferentVersionsCanBeLoaded() throws IOException {
PreBuiltTokenizerFactoryFactory factory = new PreBuiltTokenizerFactoryFactory(PreBuiltTokenizers.STANDARD.getTokenizerFactory(Version.CURRENT));
// different es versions, same lucene version, thus cached
TokenizerFactory former090TokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build());
TokenizerFactory former090TokenizerFactoryCopy = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build());
TokenizerFactory currentTokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory)));
assertThat(currentTokenizerFactory, is(not(former090TokenizerFactoryCopy)));
assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy));
}
}
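
The three deleted test classes above all covered the same caching contract, summed up by the `// different es versions, same lucene version, thus cached` comment: pre-built analysis factories are cached per Lucene version, so two Elasticsearch versions that share a Lucene version resolve to the identical factory instance, while different Lucene versions yield distinct instances. A minimal sketch of that idea, assuming Elasticsearch's public `Version.luceneVersion` field; `PerLuceneVersionCache` is a hypothetical name for illustration, not the removed factory classes:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Hypothetical sketch: one cached instance per Lucene version, so lookups for
// es versions 0.90.1 and 0.90.2 (which share a Lucene version) returned the
// same object, while a different Lucene version produced a distinct instance.
final class PerLuceneVersionCache<T> {
    private final Map<org.apache.lucene.util.Version, T> cache = new ConcurrentHashMap<>();

    T get(org.elasticsearch.Version esVersion, Function<org.apache.lucene.util.Version, T> create) {
        return cache.computeIfAbsent(esVersion.luceneVersion, create);
    }
}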


@ -40,10 +40,9 @@ import static org.hamcrest.Matchers.instanceOf;
*
*/
public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testEnglishBackwardsCompatibility() throws IOException {
public void testEnglishFilterFactory() throws IOException {
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
Version v = VersionUtils.randomVersion(random());
Settings settings = Settings.settingsBuilder()
.put("index.analysis.filter.my_english.type", "stemmer")
@ -61,19 +60,13 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
tokenizer.setReader(new StringReader("foo bar"));
TokenStream create = tokenFilter.create(tokenizer);
NamedAnalyzer analyzer = analysisService.analyzer("my_english");
if (v.onOrAfter(Version.V_1_3_0)) {
assertThat(create, instanceOf(PorterStemFilter.class));
assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"});
} else {
assertThat(create, instanceOf(SnowballFilter.class));
assertAnalyzesTo(analyzer, "consolingly", new String[]{"consol"});
}
assertThat(create, instanceOf(PorterStemFilter.class));
assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"});
}
}
public void testPorter2BackwardsCompatibility() throws IOException {
public void testPorter2FilterFactory() throws IOException {
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
@ -95,12 +88,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
TokenStream create = tokenFilter.create(tokenizer);
NamedAnalyzer analyzer = analysisService.analyzer("my_porter2");
assertThat(create, instanceOf(SnowballFilter.class));
if (v.onOrAfter(Version.V_1_3_0)) {
assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"});
} else {
assertAnalyzesTo(analyzer, "possibly", new String[]{"possibli"});
}
assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"});
}
}
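
With the pre-1.3 branch removed, a `stemmer` filter configured for `english` always resolves to Lucene's `PorterStemFilter`, and `porter2` always behaves like the current Snowball English stemmer, hence the unconditional `consolingli` and `possibl` expectations above. A self-contained Lucene sketch of the first assertion (assumes a Lucene 5.x classpath; written for this note, not part of the test suite):

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.en.PorterStemFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class PorterStemDemo {
    public static void main(String[] args) throws Exception {
        Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("consolingly"));
        // Porter (1980) stemming, the only english behavior left after this commit
        try (TokenStream stream = new PorterStemFilter(tokenizer)) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                System.out.println(term.toString()); // prints "consolingli"
            }
            stream.end();
        }
    }
}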


@ -433,19 +433,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
}
mapping = jsonBuilder().startObject().startObject("type")
.startObject("_all")
.startObject("fielddata")
.field("format", "doc_values")
.endObject().endObject().endObject().endObject().string();
Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
try {
createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
fail();
} catch (MapperParsingException e) {
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
}
}
public void testAutoBoost() throws Exception {


@ -56,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValues() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexService indexService = createIndex("test", settings);
MapperRegistry mapperRegistry = new MapperRegistry(
@ -101,7 +101,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValuesWithMultifield() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexService indexService = createIndex("test", settings);
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
@ -159,7 +159,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexService indexService = createIndex("test", settings);
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();


@ -66,7 +66,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -96,7 +96,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.field("geohash", true).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -116,7 +116,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -136,7 +136,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -156,7 +156,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -172,7 +172,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNormalizeLatLonValuesDefault() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
// default to normalize
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
@ -222,7 +222,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testValidateLatLonValues() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
if (version.before(Version.V_2_2_0)) {
@ -285,7 +285,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNoValidateLatLonValues() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
if (version.before(Version.V_2_2_0)) {
@ -332,7 +332,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -359,7 +359,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -395,7 +395,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -419,7 +419,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -445,7 +445,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -481,7 +481,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -506,7 +506,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point")
.field("lat_lon", true).endObject().endObject().endObject().endArray().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -530,7 +530,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -556,7 +556,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -699,7 +699,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
// create index and add a test point (dr5regy6rc6z)
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
.addMapping("pin", mapping);
@ -724,7 +724,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().string();
// create index and add a test point (dr5regy6rc6z)
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
.addMapping("pin", mapping);
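
Each hunk in this file is the same one-line change: the floor of the randomly chosen `index.version.created` moves from `V_1_0_0` to `V_2_0_0`, in line with dropping all pre-2.0 constants. Condensed, the recurring scaffolding looks like this (all names are taken from the diff above; `mapping` stands for whatever JSON the individual test builds):

// 2.0.0 is now the lowest index-created version a test may draw.
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, version)
        .build();
// Parse the test mapping against an index pinned to that version, so the
// geo_point mapper sees the same created-version checks a real index would.
DocumentMapper defaultMapper = createIndex("test", settings)
        .mapperService()
        .documentMapperParser()
        .parse("type", new CompressedXContent(mapping));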


@ -57,7 +57,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -81,7 +81,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -105,7 +105,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true)
.endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@ -126,7 +126,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true)
.field("geohash_precision", 10).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
@ -140,7 +140,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
@ -154,7 +154,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));


@ -414,27 +414,11 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
assertThat(request.timestamp(), is("1433239200000"));
}
public void testThatIndicesBefore2xMustSupportUnixTimestampsInAnyDateFormat() throws Exception {
public void testThatIndicesAfter2_0DontSupportUnixTimestampsInAnyDateFormat() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject()
.endObject().endObject().string();
BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes();
//
// test with older versions
Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build();
DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
// both index request are successfully processed
IndexRequest oldIndexDateIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1970-01-01");
oldIndexDateIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index");
IndexRequest oldIndexTimestampIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1234567890");
oldIndexTimestampIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index");
//
// test with 2.x
DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData();


@ -256,7 +256,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
}
public void testTimestampParsing() throws IOException {
IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build());
IndexService indexService = createIndex("test");
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
boolean enabled = randomBoolean();
indexMapping.startObject()


@ -249,8 +249,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
QueryShardContext shardContext = createShardContext();
// the remaining tests require either a mapping that we register with types in base test setup
// no strict field resolution (version before V_1_4_0_Beta1)
if (getCurrentTypes().length > 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) {
if (getCurrentTypes().length > 0) {
Query luceneQuery = queryBuilder.toQuery(shardContext);
assertThat(luceneQuery, instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) luceneQuery;


@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
@ESBackcompatTestCase.CompatibilityVersion(version = Version.V_1_2_0_ID) // we throw an exception if we create an index with _field_names that is 1.3
public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {


@ -176,12 +176,12 @@ public class PluginInfoTests extends ESTestCase {
"description", "fake desc",
"name", "my_plugin",
"version", "1.0",
"elasticsearch.version", Version.V_1_7_0.toString());
"elasticsearch.version", Version.V_2_0_0.toString());
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected old elasticsearch version exception");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Was designed for version [1.7.0]"));
assertTrue(e.getMessage().contains("Was designed for version [2.0.0]"));
}
}


@ -65,13 +65,6 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
return Collections.singleton(AssertingLocalTransport.TestPlugin.class);
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build();
}
@Before
public void beforeEachTest() throws IOException {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();


@ -67,7 +67,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
return pluginList(InternalSettingsPlugin.class); // uses index.version.created
}
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
XContentBuilder source = jsonBuilder().startObject().field("city", name);


@ -63,7 +63,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
return pluginList(InternalSettingsPlugin.class); // uses index.version.created
}
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null;


@ -20,15 +20,12 @@
package org.elasticsearch.search.innerhits;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.plugins.Plugin;
@ -73,10 +70,6 @@ import static org.hamcrest.Matchers.nullValue;
/**
*/
public class InnerHitsIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(MockScriptEngine.TestPlugin.class, InternalSettingsPlugin.class);
}
public void testSimpleNested() throws Exception {
assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties")
@ -753,160 +746,6 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
}
public void testNestedInnerHitsWithStoredFieldsAndNoSourceBackcompat() throws Exception {
assertAcked(prepareCreate("articles")
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.addMapping("article", jsonBuilder().startObject()
.startObject("_source").field("enabled", false).endObject()
.startObject("properties")
.startObject("comments")
.field("type", "nested")
.startObject("properties")
.startObject("message").field("type", "text").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
.field("title", "quick brown fox")
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().field("comments.message"))))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick"));
}
public void testNestedInnerHitsWithHighlightOnStoredFieldBackcompat() throws Exception {
assertAcked(prepareCreate("articles")
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.addMapping("article", jsonBuilder().startObject()
.startObject("_source").field("enabled", false).endObject()
.startObject("properties")
.startObject("comments")
.field("type", "nested")
.startObject("properties")
.startObject("message").field("type", "text").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
.field("title", "quick brown fox")
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);
InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
builder.highlighter(new HighlightBuilder().field("comments.message"));
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("<em>fox</em> eat quick"));
}
public void testNestedInnerHitsWithExcludeSourceBackcompat() throws Exception {
assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.addMapping("article", jsonBuilder().startObject()
.startObject("_source").field("excludes", new String[]{"comments"}).endObject()
.startObject("properties")
.startObject("comments")
.field("type", "nested")
.startObject("properties")
.startObject("message").field("type", "text").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
.field("title", "quick brown fox")
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);
InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
builder.field("comments.message");
builder.setFetchSource(true);
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick"));
}
public void testNestedInnerHitsHiglightWithExcludeSourceBackcompat() throws Exception {
assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.addMapping("article", jsonBuilder().startObject()
.startObject("_source").field("excludes", new String[]{"comments"}).endObject()
.startObject("properties")
.startObject("comments")
.field("type", "nested")
.startObject("properties")
.startObject("message").field("type", "text").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
.field("title", "quick brown fox")
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);
InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
builder.highlighter(new HighlightBuilder().field("comments.message"));
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("<em>fox</em> eat quick"));
}
public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
assertAcked(prepareCreate("articles")
.addMapping("article", jsonBuilder().startObject()


@ -74,7 +74,7 @@ public class RestTestParserTests extends ESTestCase {
"\"Get type mapping - pre 1.0\":\n" +
"\n" +
" - skip:\n" +
" version: \"0.90.9 - \"\n" +
" version: \"2.0.0 - \"\n" +
" reason: \"for newer versions the index name is always returned\"\n" +
"\n" +
" - do:\n" +
@ -121,7 +121,7 @@ public class RestTestParserTests extends ESTestCase {
assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 1.0"));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), equalTo("for newer versions the index name is always returned"));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_9));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class));


@ -57,7 +57,7 @@ public class SetupSectionParserTests extends AbstractParserTestCase {
public void testParseSetupAndSkipSectionNoSkip() throws Exception {
parser = YamlXContent.yamlXContent.createParser(
" - skip:\n" +
" version: \"0.90.0 - 0.90.7\"\n" +
" version: \"2.0.0 - 2.3.0\"\n" +
" reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" +
" - do:\n" +
" index1:\n" +
@ -79,8 +79,8 @@ public class SetupSectionParserTests extends AbstractParserTestCase {
assertThat(setupSection, notNullValue());
assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false));
assertThat(setupSection.getSkipSection(), notNullValue());
assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_0));
assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_0_90_7));
assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0));
assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
assertThat(setupSection.getDoSections().size(), equalTo(2));
assertThat(setupSection.getDoSections().get(0).getApiCallSection().getApi(), equalTo("index1"));


@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.nullValue;
public class SkipSectionParserTests extends AbstractParserTestCase {
public void testParseSkipSectionVersionNoFeature() throws Exception {
parser = YamlXContent.yamlXContent.createParser(
"version: \" - 0.90.2\"\n" +
"version: \" - 2.1.0\"\n" +
"reason: Delete ignores the parent param"
);
@ -44,7 +44,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase {
assertThat(skipSection, notNullValue());
assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion()));
assertThat(skipSection.getUpperVersion(), equalTo(Version.V_0_90_2));
assertThat(skipSection.getUpperVersion(), equalTo(Version.V_2_1_0));
assertThat(skipSection.getFeatures().size(), equalTo(0));
assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param"));
}
@ -144,4 +144,4 @@ public class SkipSectionParserTests extends AbstractParserTestCase {
assertThat(e.getMessage(), is("version or features is mandatory within skip section"));
}
}
}
}
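
The parser tests above pin down the range semantics of the skip section's `version` string: either side of `"A - B"` may be empty, with an empty lower bound resolving to the first known version and an empty upper bound to `Version.CURRENT`. A standalone illustration of that resolution rule (an assumed helper written for this note, not the project's `SkipSectionParser`):

import org.elasticsearch.Version;

// Assumed helper: resolves "2.0.0 - ", " - 2.1.0", or "2.0.0 - 2.3.0" into the
// inclusive bounds the assertions above expect. Input is assumed to contain
// exactly one dash; the real parser also handles feature-based skips, as the
// surrounding tests show.
final class VersionRange {
    final Version lower;
    final Version upper;

    VersionRange(String range, Version first, Version current) {
        String[] sides = range.split("-", -1); // keep empty sides
        this.lower = sides[0].trim().isEmpty() ? first : Version.fromString(sides[0].trim());
        this.upper = sides[1].trim().isEmpty() ? current : Version.fromString(sides[1].trim());
    }
}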


@ -70,7 +70,7 @@ public class TestSectionParserTests extends AbstractParserTestCase {
String yaml =
"\"First test section\": \n" +
" - skip:\n" +
" version: \"0.90.0 - 0.90.7\"\n" +
" version: \"2.0.0 - 2.2.0\"\n" +
" reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" +
" - do :\n" +
" catch: missing\n" +
@ -87,8 +87,8 @@ public class TestSectionParserTests extends AbstractParserTestCase {
assertThat(testSection, notNullValue());
assertThat(testSection.getName(), equalTo("First test section"));
assertThat(testSection.getSkipSection(), notNullValue());
assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_0));
assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_0_90_7));
assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_2_0));
assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
assertThat(testSection.getExecutableSections().size(), equalTo(2));
DoSection doSection = (DoSection)testSection.getExecutableSections().get(0);


@ -32,7 +32,7 @@ public class VersionUtilsTests extends ESTestCase {
assertTrue(allVersions.get(i).before(allVersions.get(j)));
}
}
public void testRandomVersionBetween() {
// full range
Version got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT);
@ -46,34 +46,34 @@ public class VersionUtilsTests extends ESTestCase {
assertTrue(got.onOrBefore(Version.CURRENT));
// sub range
got = VersionUtils.randomVersionBetween(random(), Version.V_0_90_12, Version.V_1_4_5);
assertTrue(got.onOrAfter(Version.V_0_90_12));
assertTrue(got.onOrBefore(Version.V_1_4_5));
got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0);
assertTrue(got.onOrAfter(Version.V_2_0_0));
assertTrue(got.onOrBefore(Version.V_5_0_0));
// unbounded lower
got = VersionUtils.randomVersionBetween(random(), null, Version.V_1_4_5);
got = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0);
assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
assertTrue(got.onOrBefore(Version.V_1_4_5));
assertTrue(got.onOrBefore(Version.V_5_0_0));
got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0));
assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0)));
// unbounded upper
got = VersionUtils.randomVersionBetween(random(), Version.V_0_90_12, null);
assertTrue(got.onOrAfter(Version.V_0_90_12));
got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, null);
assertTrue(got.onOrAfter(Version.V_2_0_0));
assertTrue(got.onOrBefore(Version.CURRENT));
got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null);
assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion()));
assertTrue(got.onOrBefore(Version.CURRENT));
// range of one
got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getFirstVersion());
assertEquals(got, VersionUtils.getFirstVersion());
got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
assertEquals(got, Version.CURRENT);
got = VersionUtils.randomVersionBetween(random(), Version.V_1_2_4, Version.V_1_2_4);
assertEquals(got, Version.V_1_2_4);
got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_5_0_0);
assertEquals(got, Version.V_5_0_0);
// implicit range of one
got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());
assertEquals(got, VersionUtils.getFirstVersion());